diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..e397e8ed --- /dev/null +++ b/.flake8 @@ -0,0 +1,12 @@ +[flake8] +max-line-length = 88 +max-complexity = 16 +# B = bugbear +# B9 = bugbear opinionated (incl line length) +select = C,E,F,W,B,B9 +# E203: whitespace before ':' (black behaviour) +# E501: flake8 line length (covered by bugbear B950) +# W503: line break before binary operator (black behaviour) +ignore = E203,E501,W503 +per-file-ignores= + __init__.py:F401 diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 00000000..78018898 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,36 @@ +name: pre-commit + +on: + pull_request: + branches: + - "*" + push: + branches: + - "master" + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 + with: + python-version: "3.8" + - name: Get python version + run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV + - uses: actions/cache@v1 + with: + path: ~/.cache/pre-commit + key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }} + - name: Install pre-commit + run: pip install pre-commit + - name: Run pre-commit + run: pre-commit run --all-files --show-diff-on-failure --color=always + - name: Check that all files generated by pre-commit are in git + run: | + newfiles="$(git ls-files --others --exclude-from=.gitignore)" + if [ "$newfiles" != "" ] ; then + echo "Please check-in the following files:" + echo "$newfiles" + exit 1 + fi diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000..0ec187ef --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,13 @@ +[settings] +; see https://github.com/psf/black +multi_line_output=3 +include_trailing_comma=True +force_grid_wrap=0 +combine_as_imports=True +use_parentheses=True +line_length=88 +known_odoo=odoo +known_odoo_addons=odoo.addons +sections=FUTURE,STDLIB,THIRDPARTY,ODOO,ODOO_ADDONS,FIRSTPARTY,LOCALFOLDER +default_section=THIRDPARTY +ensure_newline_before_comments = True diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..b7feb4ec --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,106 @@ +exclude: | + (?x) + # NOT INSTALLABLE ADDONS + # END NOT INSTALLABLE ADDONS + # Files and folders generated by bots, to avoid loops + ^setup/|/static/description/index\.html$| + # We don't want to mess with tool-generated files + .svg$|/tests/([^/]+/)?cassettes/|^.copier-answers.yml$|^.github/| + # Maybe reactivate this when all README files include prettier ignore tags? 
+  ^README\.md$|
+  # Library files can have extraneous formatting (even minimized)
+  /static/(src/)?lib/|
+  # Repos using Sphinx to generate docs don't need prettying
+  ^docs/.*$|
+  # You don't usually want a bot to modify your legal texts
+  (LICENSE.*|COPYING.*)
+default_language_version:
+  python: python3
+  node: "16.17.0"
+repos:
+  - repo: local
+    hooks:
+      # These files are most likely copier diff rejection junk; if found,
+      # review them manually, fix the problem (if needed) and remove them
+      - id: forbidden-files
+        name: forbidden files
+        entry: found forbidden files; remove them
+        language: fail
+        files: "\\.rej$"
+      - id: en-po-files
+        name: en.po files cannot exist
+        entry: found an en.po file
+        language: fail
+        files: '[a-zA-Z0-9_]*/i18n/en\.po$'
+  - repo: https://github.com/myint/autoflake
+    rev: v1.6.1
+    hooks:
+      - id: autoflake
+        args:
+          - --expand-star-imports
+          - --ignore-init-module-imports
+          - --in-place
+          - --remove-all-unused-imports
+          - --remove-duplicate-keys
+          - --remove-unused-variables
+  - repo: https://github.com/psf/black
+    rev: 22.8.0
+    hooks:
+      - id: black
+  - repo: https://github.com/pre-commit/mirrors-prettier
+    rev: v2.7.1
+    hooks:
+      - id: prettier
+        name: prettier (with plugin-xml)
+        additional_dependencies:
+          - "prettier@2.7.1"
+          - "@prettier/plugin-xml@2.2.0"
+        args:
+          - --plugin=@prettier/plugin-xml
+        files: \.(css|htm|html|js|json|jsx|less|md|scss|toml|ts|xml|yaml|yml)$
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
+    hooks:
+      - id: trailing-whitespace
+        # exclude autogenerated files
+        exclude: /README\.rst$|\.pot?$
+      - id: end-of-file-fixer
+        # exclude autogenerated files
+        exclude: /README\.rst$|\.pot?$
+      - id: debug-statements
+      - id: check-case-conflict
+      - id: check-docstring-first
+      - id: check-executables-have-shebangs
+      - id: check-merge-conflict
+        # exclude files where underlines are not distinguishable from merge conflicts
+        exclude: /README\.rst$|^docs/.*\.rst$
+      - id: check-symlinks
+      - id: check-xml
+      - id: mixed-line-ending
+        args: ["--fix=lf"]
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.12.0
+    hooks:
+      - id: isort
+        name: isort except __init__.py
+        args:
+          - --settings=.
+        exclude: /__init__\.py$
+  - repo: https://github.com/PyCQA/flake8
+    rev: 3.9.2
+    hooks:
+      - id: flake8
+        name: flake8
+        additional_dependencies: ["flake8-bugbear==21.9.2"]
+  - repo: https://github.com/OCA/pylint-odoo
+    rev: 7.0.2
+    hooks:
+      - id: pylint_odoo
+        name: pylint with optional checks
+        args:
+          - --rcfile=.pylintrc
+          - --exit-zero
+        verbose: true
+      - id: pylint_odoo
+        args:
+          - --rcfile=.pylintrc-mandatory
diff --git a/.prettierrc.yml b/.prettierrc.yml
new file mode 100644
index 00000000..5b6d4b36
--- /dev/null
+++ b/.prettierrc.yml
@@ -0,0 +1,8 @@
+# Defaults for all prettier-supported languages.
+# Prettier will complete this with settings from the .editorconfig file.
+bracketSpacing: false
+printWidth: 88
+proseWrap: always
+semi: true
+trailingComma: "es5"
+xmlWhitespaceSensitivity: "strict"
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 00000000..3c7ee76e
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,107 @@
+
+
+[MASTER]
+load-plugins=pylint_odoo
+score=n
+
+[MESSAGES CONTROL]
+disable=all
+
+# This .pylintrc contains optional AND mandatory checks and is meant to be
+# loaded in an IDE to have it check everything, in the hope this will make
+# optional checks more visible to contributors who otherwise never look at a
+# green travis to see optional checks that failed.
+# .pylintrc-mandatory containing only mandatory checks is used in the pre-commit
+# config as a blocking check.
+
+enable=anomalous-backslash-in-string,
+    api-one-deprecated,
+    api-one-multi-together,
+    assignment-from-none,
+    attribute-deprecated,
+    class-camelcase,
+    dangerous-default-value,
+    dangerous-view-replace-wo-priority,
+    development-status-allowed,
+    duplicate-id-csv,
+    duplicate-key,
+    duplicate-xml-fields,
+    duplicate-xml-record-id,
+    eval-referenced,
+    eval-used,
+    incoherent-interpreter-exec-perm,
+    method-compute,
+    method-inverse,
+    method-required-super,
+    method-search,
+    openerp-exception-warning,
+    pointless-statement,
+    pointless-string-statement,
+    print-used,
+    redundant-keyword-arg,
+    redundant-modulename-xml,
+    reimported,
+    relative-import,
+    return-in-init,
+    rst-syntax-error,
+    sql-injection,
+    too-few-format-args,
+    translation-field,
+    translation-required,
+    unreachable,
+    use-vim-comment,
+    wrong-tabs-instead-of-spaces,
+    xml-syntax-error,
+    attribute-string-redundant,
+    character-not-valid-in-resource-link,
+    consider-merging-classes-inherited,
+    context-overridden,
+    create-user-wo-reset-password,
+    dangerous-filter-wo-user,
+    dangerous-qweb-replace-wo-priority,
+    deprecated-data-xml-node,
+    deprecated-openerp-xml-node,
+    duplicate-po-message-definition,
+    except-pass,
+    file-not-used,
+    invalid-commit,
+    manifest-maintainers-list,
+    missing-newline-extrafiles,
+    missing-return,
+    odoo-addons-relative-import,
+    old-api7-method-defined,
+    po-msgstr-variables,
+    po-syntax-error,
+    renamed-field-parameter,
+    resource-not-exist,
+    str-format-used,
+    test-folder-imported,
+    translation-contains-variable,
+    translation-positional-used,
+    unnecessary-utf8-coding-comment,
+    website-manifest-key-not-valid-uri,
+    xml-attribute-translatable,
+    xml-deprecated-qweb-directive,
+    xml-deprecated-tree-attribute,
+    external-request-timeout,
+    # messages that do not cause the lint step to fail
+    consider-merging-classes-inherited,
+    create-user-wo-reset-password,
+    dangerous-filter-wo-user,
+    deprecated-module,
+    file-not-used,
+    invalid-commit,
+    missing-manifest-dependency,
+    missing-newline-extrafiles,
+    no-utf8-coding-comment,
+    odoo-addons-relative-import,
+    old-api7-method-defined,
+    redefined-builtin,
+    too-complex,
+    unnecessary-utf8-coding-comment
+
+
+[REPORTS]
+msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
+output-format=colorized
+reports=no
diff --git a/.pylintrc-mandatory b/.pylintrc-mandatory
new file mode 100644
index 00000000..bf4f1961
--- /dev/null
+++ b/.pylintrc-mandatory
@@ -0,0 +1,96 @@
+
+[MASTER]
+load-plugins=pylint_odoo
+score=n
+
+[ODOOLINT]
+readme_template_url="https://github.com/OCA/maintainer-tools/blob/master/template/module/README.rst"
+manifest_required_authors=Odoo Community Association (OCA)
+manifest_required_keys=license
+manifest_deprecated_keys=description,active
+license_allowed=AGPL-3,GPL-2,GPL-2 or any later version,GPL-3,GPL-3 or any later version,LGPL-3
+
+[MESSAGES CONTROL]
+disable=all
+
+enable=anomalous-backslash-in-string,
+    api-one-deprecated,
+    api-one-multi-together,
+    assignment-from-none,
+    attribute-deprecated,
+    class-camelcase,
+    dangerous-default-value,
+    dangerous-view-replace-wo-priority,
+    development-status-allowed,
+    duplicate-id-csv,
+    duplicate-key,
+    duplicate-xml-fields,
+    duplicate-xml-record-id,
+    eval-referenced,
+    eval-used,
+    incoherent-interpreter-exec-perm,
+    license-allowed,
+    manifest-author-string,
+    manifest-deprecated-key,
+    manifest-required-author,
+    manifest-required-key,
+    
method-compute, + method-inverse, + method-required-super, + method-search, + openerp-exception-warning, + pointless-statement, + pointless-string-statement, + print-used, + redundant-keyword-arg, + redundant-modulename-xml, + reimported, + relative-import, + return-in-init, + rst-syntax-error, + sql-injection, + too-few-format-args, + translation-field, + translation-required, + unreachable, + use-vim-comment, + wrong-tabs-instead-of-spaces, + xml-syntax-error, + attribute-string-redundant, + character-not-valid-in-resource-link, + consider-merging-classes-inherited, + context-overridden, + create-user-wo-reset-password, + dangerous-filter-wo-user, + dangerous-qweb-replace-wo-priority, + deprecated-data-xml-node, + deprecated-openerp-xml-node, + duplicate-po-message-definition, + except-pass, + file-not-used, + invalid-commit, + manifest-maintainers-list, + missing-newline-extrafiles, + missing-readme, + missing-return, + odoo-addons-relative-import, + old-api7-method-defined, + po-msgstr-variables, + po-syntax-error, + renamed-field-parameter, + resource-not-exist, + str-format-used, + test-folder-imported, + translation-contains-variable, + translation-positional-used, + unnecessary-utf8-coding-comment, + website-manifest-key-not-valid-uri, + xml-attribute-translatable, + xml-deprecated-qweb-directive, + xml-deprecated-tree-attribute, + external-request-timeout + +[REPORTS] +msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg} +output-format=colorized +reports=no diff --git a/.travis.yml b/.travis.yml index f1890a4b..e536100c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,12 +3,12 @@ language: python sudo: false python: - - "3.6" # But switching to 2.7 in the script + - "3.6" # But switching to 2.7 in the script addons: apt: packages: - - python3-lxml # because pip installation is slow + - python3-lxml # because pip installation is slow - python3-simplejson - python3-serial - python3-yaml @@ -27,39 +27,51 @@ script: - python setup.py install # Run functional tests from 6.1 to 11.0 - set -e - - git clone https://github.com/oca/openupgrade --depth 1 --no-single-branch -b 6.1 ~/openupgrade + - git clone https://github.com/oca/openupgrade --depth 1 --no-single-branch -b 6.1 + ~/openupgrade - cd ~/openupgrade - pip install psycopg2-binary - - git reset --hard origin/8.0 # earliest version to include requirements - - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r /dev/stdin + - git reset --hard origin/8.0 # earliest version to include requirements + - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r + /dev/stdin - createdb testdb - git reset --hard origin/6.1 - - ./openerp-server -d testdb -i openupgradelib_tests --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - ./openerp-server -d testdb -i openupgradelib_tests --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init # Run the migration of the test addon in 6.1 - - echo "UPDATE ir_module_module SET latest_version = '6.0.1.0.0' WHERE name = 'openupgradelib_tests'" | psql testdb - - ./openerp-server -d testdb -u openupgradelib_tests --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - echo "UPDATE ir_module_module SET latest_version = '6.0.1.0.0' WHERE name = + 'openupgradelib_tests'" | psql testdb + - ./openerp-server -d testdb -u openupgradelib_tests --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init - echo "UPDATE 
ir_module_module SET demo = FALSE" | psql testdb # 7.0 -> 8.0 - git reset --hard origin/7.0 - - ./openerp-server -d testdb -u all --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - ./openerp-server -d testdb -u all --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init # 7.0 -> 8.0 - git reset --hard origin/8.0 - - ./openerp-server -d testdb -u all --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - ./openerp-server -d testdb -u all --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init # 8.0 -> 9.0 - git reset --hard origin/9.0 - - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r /dev/stdin - - ./openerp-server -d testdb -u all --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r + /dev/stdin + - ./openerp-server -d testdb -u all --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init # 9.0 -> 10.0 - git reset --hard origin/10.0 - - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r /dev/stdin - - ./odoo-bin -d testdb -u all --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r + /dev/stdin + - ./odoo-bin -d testdb -u all --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init # 10.0 -> 11.0 (switch to Python 3.6) - source ~/virtualenv/python3.6/bin/activate - pushd /home/travis/build/OCA/openupgradelib && python setup.py install && popd - pip install psycopg2-binary - git reset --hard origin/11.0 - - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r /dev/stdin - - flake8 ~/build/OCA/openupgradelib - - ./odoo-bin -d testdb -u all --addons-path addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init + - egrep -v "(openupgradelib)|(psycopg2)" requirements.txt | pip install -q -r + /dev/stdin + - ./odoo-bin -d testdb -u all --addons-path + addons,/home/travis/build/OCA/openupgradelib/tests/addons --stop-after-init # Build docs - pip install -q sphinx; sh ~/openupgrade/scripts/build_openupgrade_docs diff --git a/openupgradelib/__init__.py b/openupgradelib/__init__.py old mode 100755 new mode 100644 index 3e3214c8..18783063 --- a/openupgradelib/__init__.py +++ b/openupgradelib/__init__.py @@ -1,8 +1,7 @@ -# -*- coding: utf-8 -*- import sys -__author__ = 'Odoo Community Association (OCA)' -__email__ = 'support@odoo-community.org' +__author__ = "Odoo Community Association (OCA)" +__email__ = "support@odoo-community.org" __doc__ = """A library with support functions to be called from Odoo \ migration scripts.""" __license__ = "AGPL-3" @@ -13,12 +12,12 @@ from importlib.metadata import version, PackageNotFoundError else: from importlib_metadata import version, PackageNotFoundError -except ImportError: +except ImportError: # pylint: disable=W7938 # this happens when setup.py imports openupgradelib pass else: try: __version__ = version("openupgradelib") - except PackageNotFoundError: + except PackageNotFoundError: # pylint: disable=W7938 # package is not installed pass diff --git a/openupgradelib/openupgrade.py b/openupgradelib/openupgrade.py index bf8c1601..5b64ced8 100644 --- a/openupgradelib/openupgrade.py +++ b/openupgradelib/openupgrade.py @@ 
-1,25 +1,28 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # pylint: disable=C8202 # Copyright 2011-2020 Therp BV . # Copyright 2016-2020 Tecnativa - Pedro M. Baeza. # Copyright Odoo Community Association (OCA) # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html). -import sys -import os import inspect -import uuid import logging as _logging_module +import os +import sys +import uuid from datetime import datetime from functools import wraps + try: from StringIO import StringIO except ImportError: from io import StringIO + from contextlib import contextmanager + try: - from psycopg2 import errorcodes, ProgrammingError, IntegrityError + from psycopg2 import IntegrityError, ProgrammingError, errorcodes except ImportError: - from psycopg2cffi import errorcodes, ProgrammingError, IntegrityError + from psycopg2cffi import IntegrityError, ProgrammingError, errorcodes try: from contextlib import ExitStack except ImportError: @@ -40,9 +43,11 @@ def __exit__(self, exc_type, exc_value, traceback): while self._cms: self._cms.pop().__exit__(exc_type, exc_value, traceback) + +from lxml import etree from psycopg2 import sql from psycopg2.extensions import AsIs -from lxml import etree + from . import openupgrade_tools core = None @@ -53,7 +58,7 @@ def __exit__(self, exc_type, exc_value, traceback): except ImportError: # < 10.0 import openerp as core from openerp.modules import registry -if hasattr(core, 'release'): +if hasattr(core, "release"): release = core.release else: import release @@ -67,8 +72,8 @@ def __exit__(self, exc_type, exc_value, traceback): UserError = False ValidationError = False -if not hasattr(release, 'version_info'): - version_info = tuple(map(int, release.version.split('.'))) +if not hasattr(release, "version_info"): + version_info = tuple(map(int, release.version.split("."))) else: version_info = release.version_info @@ -76,7 +81,7 @@ def __exit__(self, exc_type, exc_value, traceback): tools = core.tools SUPERUSER_ID = core.SUPERUSER_ID - if hasattr(core, 'osv') and hasattr(core.osv, 'fields'): + if hasattr(core, "osv") and hasattr(core.osv, "fields"): except_orm = core.osv.orm.except_orm many2many = core.osv.fields.many2many one2many = core.osv.fields.one2many @@ -97,16 +102,17 @@ def __exit__(self, exc_type, exc_value, traceback): else: # version < 6.1 import tools + SUPERUSER_ID = 1 - from tools.yaml_import import yaml_import - from osv.osv import except_osv as except_orm from osv.fields import many2many, one2many + from osv.osv import except_osv as except_orm + from tools.yaml_import import yaml_import def do_raise(error): if UserError: raise UserError(error) - raise except_orm('Error', error) + raise except_orm("Error", error) # pylint: disable=C8107 if sys.version_info[0] == 3: @@ -130,60 +136,60 @@ def do_raise(error): # so to log at loglevel debug we need to set it # manually here. 
As a consequence, DEBUG messages from # this file are always logged -logger = _logging_module.getLogger('OpenUpgrade') +logger = _logging_module.getLogger("OpenUpgrade") logger.setLevel(_logging_module.DEBUG) __all__ = [ - 'migrate', - 'logging', - 'load_data', - 'add_fields', - 'copy_columns', - 'copy_fields_multilang', - 'remove_tables_fks', - 'rename_columns', - 'rename_fields', - 'rename_tables', - 'rename_models', - 'merge_models', - 'rename_xmlids', - 'add_xmlid', - 'chunked', - 'drop_columns', - 'delete_model_workflow', - 'update_field_multilang', - 'update_workflow_workitems', - 'warn_possible_dataloss', - 'set_defaults', - 'logged_query', - 'column_exists', - 'table_exists', - 'update_module_moved_fields', - 'update_module_moved_models', - 'update_module_names', - 'add_ir_model_fields', - 'get_legacy_name', - 'm2o_to_x2m', - 'float_to_integer', - 'message', - 'check_values_selection_field', - 'move_field_m2o', - 'convert_field_to_html', - 'map_values', - 'deactivate_workflow_transitions', - 'reactivate_workflow_transitions', - 'date_to_datetime_tz', - 'lift_constraints', - 'rename_property', - 'delete_record_translations', - 'disable_invalid_filters', - 'safe_unlink', - 'delete_records_safely_by_xml_id', - 'delete_sql_constraint_safely', - 'set_xml_ids_noupdate_value', - 'convert_to_company_dependent', - 'cow_templates_mark_if_equal_to_upstream', - 'cow_templates_replicate_upstream', + "migrate", + "logging", + "load_data", + "add_fields", + "copy_columns", + "copy_fields_multilang", + "remove_tables_fks", + "rename_columns", + "rename_fields", + "rename_tables", + "rename_models", + "merge_models", + "rename_xmlids", + "add_xmlid", + "chunked", + "drop_columns", + "delete_model_workflow", + "update_field_multilang", + "update_workflow_workitems", + "warn_possible_dataloss", + "set_defaults", + "logged_query", + "column_exists", + "table_exists", + "update_module_moved_fields", + "update_module_moved_models", + "update_module_names", + "add_ir_model_fields", + "get_legacy_name", + "m2o_to_x2m", + "float_to_integer", + "message", + "check_values_selection_field", + "move_field_m2o", + "convert_field_to_html", + "map_values", + "deactivate_workflow_transitions", + "reactivate_workflow_transitions", + "date_to_datetime_tz", + "lift_constraints", + "rename_property", + "delete_record_translations", + "disable_invalid_filters", + "safe_unlink", + "delete_records_safely_by_xml_id", + "delete_sql_constraint_safely", + "set_xml_ids_noupdate_value", + "convert_to_company_dependent", + "cow_templates_mark_if_equal_to_upstream", + "cow_templates_replicate_upstream", ] @@ -217,13 +223,14 @@ def allow_pgcodes(cr, *codes): """ try: with cr.savepoint(): - with core.tools.mute_logger('odoo.sql_db'): + with core.tools.mute_logger("odoo.sql_db"): yield except (ProgrammingError, IntegrityError) as error: msg = "Code: {code}. Class: {class_}. Error: {error}.".format( code=error.pgcode, class_=errorcodes.lookup(error.pgcode[:2]), - error=errorcodes.lookup(error.pgcode)) + error=errorcodes.lookup(error.pgcode), + ) if error.pgcode in codes or error.pgcode[:2] in codes: logger.info(msg) else: @@ -241,19 +248,25 @@ def check_values_selection_field(cr, table_name, field_name, allowed_values): .. 
versionadded:: 8.0 """ res = True - cr.execute("SELECT %s, count(*) FROM %s GROUP BY %s;" % - (field_name, table_name, field_name)) + cr.execute( # pylint: disable=E8103 + "SELECT %s, count(*) FROM %s GROUP BY %s;" + % (field_name, table_name, field_name) + ) for row in cr.fetchall(): if row[0] not in allowed_values: logger.error( "Invalid value '%s' in the table '%s' " "for the field '%s'. (%s rows).", - row[0], table_name, field_name, row[1]) + row[0], + table_name, + field_name, + row[1], + ) res = False return res -def load_data(cr, module_name, filename, idref=None, mode='init'): +def load_data(cr, module_name, filename, idref=None, mode="init"): """ Load an xml, csv or yml data file from your post script. The usual case for this is the @@ -289,35 +302,40 @@ def load_data(cr, module_name, filename, idref=None, mode='init'): if idref is None: idref = {} - logger.info('%s: loading %s' % (module_name, filename)) + logger.info("%s: loading %s" % (module_name, filename)) _, ext = os.path.splitext(filename) pathname = os.path.join(module_name, filename) try: fp = tools.file_open(pathname) except OSError: - if tools.config.get('upgrade_path'): - for path in tools.config['upgrade_path'].split(','): + if tools.config.get("upgrade_path"): + for path in tools.config["upgrade_path"].split(","): pathname = os.path.join(path, module_name, filename) try: fp = open(pathname) break - except OSError: + except OSError: # pylint: disable=W7938 pass else: raise try: - if ext == '.csv': + if ext == ".csv": noupdate = True tools.convert_csv_import( - cr, module_name, pathname, fp.read(), idref, mode, noupdate) - elif ext == '.yml': + cr, module_name, pathname, fp.read(), idref, mode, noupdate + ) + elif ext == ".yml": yaml_import(cr, module_name, fp, None, idref=idref, mode=mode) - elif mode == 'init_no_create': + elif mode == "init_no_create": for fp2 in _get_existing_records(cr, fp, module_name): tools.convert_xml_import( - cr, module_name, fp2, idref, mode='init', + cr, + module_name, + fp2, + idref, + mode="init", ) else: tools.convert_xml_import(cr, module_name, fp, idref, mode=mode) @@ -329,31 +347,34 @@ def _get_existing_records(cr, fp, module_name): """yield file like objects per 'leaf' node in the xml file that exists. This is for not trying to create a record with partial data in case the record was removed in the database.""" + def yield_element(node, path=None): - if node.tag not in ['openerp', 'odoo', 'data']: - if node.tag == 'record': - xmlid = node.attrib['id'] - if '.' not in xmlid: + if node.tag not in ["openerp", "odoo", "data"]: + if node.tag == "record": + xmlid = node.attrib["id"] + if "." 
not in xmlid: module = module_name else: - module, xmlid = xmlid.split('.', 1) + module, xmlid = xmlid.split(".", 1) cr.execute( - 'select id from ir_model_data where module=%s and name=%s', - (module, xmlid) + "select id from ir_model_data where module=%s and name=%s", + (module, xmlid), ) if not cr.rowcount: return - result = StringIO(etree.tostring(path, encoding='unicode')) + result = StringIO(etree.tostring(path, encoding="unicode")) result.name = None yield result else: for child in node: for value in yield_element( - child, - etree.SubElement(path, node.tag, node.attrib) - if path else etree.Element(node.tag, node.attrib) + child, + etree.SubElement(path, node.tag, node.attrib) + if path + else etree.Element(node.tag, node.attrib), ): yield value + return yield_element(etree.parse(fp).getroot()) @@ -381,29 +402,43 @@ def copy_columns(cr, column_spec): if new is None: new = get_legacy_name(old) if field_type is None: - cr.execute(""" + cr.execute( + """ SELECT data_type FROM information_schema.columns WHERE table_name=%s AND column_name = %s; - """, (table_name, old)) + """, + (table_name, old), + ) field_type = cr.fetchone()[0] - logged_query(cr, """ + logged_query( + cr, + """ ALTER TABLE %(table_name)s ADD COLUMN %(new)s %(field_type)s; UPDATE %(table_name)s SET %(new)s=%(old)s; - """ % { - 'table_name': table_name, - 'old': old, - 'field_type': field_type, - 'new': new, - }) - - -def copy_fields_multilang(cr, destination_model, destination_table, - destination_columns, relation_column, - source_model=None, source_table=None, - source_columns=None, translations_only=False): + """ + % { + "table_name": table_name, + "old": old, + "field_type": field_type, + "new": new, + }, + ) + + +def copy_fields_multilang( + cr, + destination_model, + destination_table, + destination_columns, + relation_column, + source_model=None, + source_table=None, + source_columns=None, + translations_only=False, +): """Copy field contents including translations. 
:param str destination_model: @@ -453,25 +488,28 @@ def copy_fields_multilang(cr, destination_model, destination_table, assert len(source_columns) == cols_len > 0 # Basic copy if not translations_only: - query = sql.SQL(""" + query = sql.SQL( + """ UPDATE {dst_t} AS dt SET {set_part} FROM {src_t} AS st WHERE dt.{rel_col} = st.id - """).format( + """ + ).format( dst_t=sql.Identifier(destination_table), set_part=sql.SQL(", ").join( sql.SQL("{} = st.{}").format( - sql.Identifier(dest_col), - sql.Identifier(src_col)) - for (dest_col, src_col) - in zip(destination_columns, source_columns)), + sql.Identifier(dest_col), sql.Identifier(src_col) + ) + for (dest_col, src_col) in zip(destination_columns, source_columns) + ), src_t=sql.Identifier(source_table), rel_col=sql.Identifier(relation_column), ) logged_query(cr, query) # Translations copy - query = sql.SQL(""" + query = sql.SQL( + """ INSERT INTO ir_translation ( lang, module, @@ -502,7 +540,8 @@ def copy_fields_multilang(cr, destination_model, destination_table, it.name LIKE %(src_m)s || ',' || %(src_c)s || ',%%' OR (%(src_m)s = 'ir.ui.view' AND it.type = 'view') ON CONFLICT DO NOTHING - """) + """ + ) for dest_col, src_col in zip(destination_columns, source_columns): logged_query( cr, @@ -570,16 +609,21 @@ def rename_columns(cr, column_spec): for (old, new) in column_spec[table]: if new is None: new = get_legacy_name(old) - logger.info("table %s, column %s: renaming to %s", - table, old, new) - cr.execute( - 'ALTER TABLE "%s" RENAME "%s" TO "%s"' % (table, old, new,)) + logger.info("table %s, column %s: renaming to %s", table, old, new) + cr.execute( # pylint: disable=E8103 + 'ALTER TABLE "%s" RENAME "%s" TO "%s"' + % ( + table, + old, + new, + ) + ) old_index_name = "%s_%s_index" % (table, old) new_index_name = "%s_%s_index" % (table, new) if len(new_index_name) <= 63: - cr.execute( - 'ALTER INDEX IF EXISTS "%s" RENAME TO "%s"' % - (old_index_name, new_index_name) + cr.execute( # pylint: disable=E8103 + 'ALTER INDEX IF EXISTS "%s" RENAME TO "%s"' + % (old_index_name, new_index_name) ) @@ -615,104 +659,123 @@ def rename_fields(env, field_spec, no_deep=False): if column_exists(cr, table, old_field): rename_columns(cr, {table: [(old_field, new_field)]}) # Rename corresponding field entry - cr.execute(""" + cr.execute( + """ UPDATE ir_model_fields SET name = %s WHERE name = %s AND model = %s - """, (new_field, old_field, model), + """, + (new_field, old_field, model), ) # Rename translations if version_info[0] < 16: - cr.execute(""" + cr.execute( + """ UPDATE ir_translation SET name = %s WHERE name = %s AND type = 'model' - """, ( + """, + ( "%s,%s" % (model, new_field), "%s,%s" % (model, old_field), ), ) # Rename possible attachments (if field is Binary with attachment=True) if column_exists(cr, "ir_attachment", "res_field"): - cr.execute(""" + cr.execute( + """ UPDATE ir_attachment SET res_field = %s WHERE res_model = %s AND res_field = %s - """, (new_field, model, old_field) + """, + (new_field, model, old_field), ) # Rename appearances on export profiles # TODO: Rename when the field is part of a submodel (ex. m2one.field) - cr.execute(""" + cr.execute( + """ UPDATE ir_exports_line iel SET name = %s FROM ir_exports ie WHERE iel.name = %s AND ie.id = iel.export_id AND ie.resource = %s - """, (new_field, old_field, model), + """, + (new_field, old_field, model), ) # Rename appearances on filters # Example of replaced domain: [['field', '=', self], ...] # TODO: Rename when the field is part of a submodel (ex. 
m2one.field) - cr.execute(""" + cr.execute( + """ UPDATE ir_filters SET domain = regexp_replace( domain, %(old_pattern)s, %(new_pattern)s, 'g' ) WHERE model_id = %%s AND domain ~ %(old_pattern)s - """ % { - 'old_pattern': r"""$$('|")%s('|")$$""" % old_field, - 'new_pattern': r"$$\1%s\2$$" % new_field, - }, (model, ), + """ + % { + "old_pattern": r"""$$('|")%s('|")$$""" % old_field, + "new_pattern": r"$$\1%s\2$$" % new_field, + }, + (model,), ) # Examples of replaced contexts: # {'group_by': ['field', 'other_field'], 'other_key':value} # {'group_by': ['date_field:month']} # {'other_key': value, 'group_by': ['other_field', 'field']} # {'group_by': ['other_field'],'col_group_by': ['field']} - cr.execute(r""" + cr.execute( + r""" UPDATE ir_filters SET context = regexp_replace( context, %(old_pattern)s, %(new_pattern)s, 'g' ) WHERE model_id = %%s AND context ~ %(old_pattern)s - """ % { - 'old_pattern': ( + """ + % { + "old_pattern": ( r"""$$('group_by'|'col_group_by'|'graph_groupbys' |'pivot_measures'|'pivot_row_groupby' |'pivot_column_groupby' ):([\s*][^\]]*)""" r"'%s(:day|:week|:month|:year){0,1}'(.*?\])$$" - ) % old_field, - 'new_pattern': r"$$\1:\2'%s\3'\4$$" % new_field, - }, (model, ), + ) + % old_field, + "new_pattern": r"$$\1:\2'%s\3'\4$$" % new_field, + }, + (model,), ) # Examples of replaced contexts: # {'graph_measure': 'field' - cr.execute(r""" + cr.execute( + r""" UPDATE ir_filters SET context = regexp_replace( context, %(old_pattern)s, %(new_pattern)s, 'g' ) WHERE model_id = %%s AND context ~ %(old_pattern)s - """ % { - 'old_pattern': ( - r"$$'graph_measure':([\s*])'%s" - r"(:day|:week|:month|:year){0,1}'$$" - ) % old_field, - 'new_pattern': r"$$'graph_measure':\1'%s\2'$$" % new_field, - }, (model, ), + """ + % { + "old_pattern": ( + r"$$'graph_measure':([\s*])'%s" r"(:day|:week|:month|:year){0,1}'$$" + ) + % old_field, + "new_pattern": r"$$'graph_measure':\1'%s\2'$$" % new_field, + }, + (model,), ) # TODO: Rename when the field in ir_ui_view_custom - if table_exists(env.cr, 'mail_alias'): + if table_exists(env.cr, "mail_alias"): # Rename appearances on mail alias - cr.execute(""" + cr.execute( + """ UPDATE mail_alias ma SET alias_defaults = replace(alias_defaults, %(old_pattern)s, %(new_pattern)s) @@ -720,10 +783,12 @@ def rename_fields(env, field_spec, no_deep=False): WHERE ma.alias_model_id = im.id AND im.model = %%s AND ma.alias_defaults ~ %(old_pattern)s - """ % { - 'old_pattern': "$$'%s'$$" % old_field, - 'new_pattern': "$$'%s'$$" % new_field, - }, (model, ), + """ + % { + "old_pattern": "$$'%s'$$" % old_field, + "new_pattern": "$$'%s'$$" % new_field, + }, + (model,), ) @@ -742,15 +807,19 @@ def rename_tables(cr, table_spec): for old, new in list(table_spec): if new is None: new = get_legacy_name(old) - if (table_exists(cr, old + '_id_seq') and - old + '_id_seq' not in to_rename): - table_spec.append((old + '_id_seq', new + '_id_seq')) + if table_exists(cr, old + "_id_seq") and old + "_id_seq" not in to_rename: + table_spec.append((old + "_id_seq", new + "_id_seq")) for (old, new) in table_spec: if new is None: new = get_legacy_name(old) - logger.info("table %s: renaming to %s", - old, new) - cr.execute('ALTER TABLE "%s" RENAME TO "%s"' % (old, new,)) + logger.info("table %s: renaming to %s", old, new) + cr.execute( # pylint: disable=E8103 + 'ALTER TABLE "%s" RENAME TO "%s"' + % ( + old, + new, + ) + ) # Rename indexes old_table_prefix_pattern = r"%s\_%%" % old.replace("_", r"\_") cr.execute( @@ -763,11 +832,12 @@ def rename_tables(cr, table_spec): """, (new, 
old_table_prefix_pattern), ) - for old_index, in cr.fetchall(): + for (old_index,) in cr.fetchall(): new_index = old_index.replace(old, new, 1) cr.execute( sql.SQL("ALTER INDEX {} RENAME TO {}").format( - sql.Identifier(old_index), sql.Identifier(new_index), + sql.Identifier(old_index), + sql.Identifier(new_index), ) ) # Rename constraints @@ -780,7 +850,7 @@ def rename_tables(cr, table_spec): """, (new, old_table_prefix_pattern), ) - for old_constraint, in cr.fetchall(): + for (old_constraint,) in cr.fetchall(): new_constraint = old_constraint.replace(old, new, 1) cr.execute( sql.SQL("ALTER TABLE {} RENAME CONSTRAINT {} TO {}").format( @@ -804,29 +874,37 @@ def rename_models(cr, model_spec): you need to call `rename_tables` method. """ for (old, new) in model_spec: - logger.info("model %s: renaming to %s", - old, new) - _old = old.replace('.', '_') - _new = new.replace('.', '_') + logger.info("model %s: renaming to %s", old, new) + _old = old.replace(".", "_") + _new = new.replace(".", "_") logged_query( cr, - 'UPDATE ir_model SET model = %s ' - 'WHERE model = %s', (new, old,), + "UPDATE ir_model SET model = %s " "WHERE model = %s", + ( + new, + old, + ), ) logged_query( cr, - 'UPDATE ir_model_data SET model = %s ' - 'WHERE model = %s', (new, old,), + "UPDATE ir_model_data SET model = %s " "WHERE model = %s", + ( + new, + old, + ), ) logged_query( cr, - "UPDATE ir_model_data SET name=%s " - "WHERE name=%s AND model = 'ir.model'", - ('model_' + _new, 'model_' + _old,), + "UPDATE ir_model_data SET name=%s " "WHERE name=%s AND model = 'ir.model'", + ( + "model_" + _new, + "model_" + _old, + ), ) underscore = "_" if version_info[0] < 12 else "__" logged_query( - cr, """UPDATE ir_model_data imd + cr, + """UPDATE ir_model_data imd SET name = 'field_' || '%s' || '%s' || imf.name FROM ir_model_fields imf WHERE imd.model = 'ir.model.fields' @@ -836,13 +914,19 @@ def rename_models(cr, model_spec): ) logged_query( cr, - 'UPDATE ir_attachment SET res_model = %s ' - 'WHERE res_model = %s', (new, old,), + "UPDATE ir_attachment SET res_model = %s " "WHERE res_model = %s", + ( + new, + old, + ), ) logged_query( cr, - 'UPDATE ir_model_fields SET model = %s ' - 'WHERE model = %s', (new, old,), + "UPDATE ir_model_fields SET model = %s " "WHERE model = %s", + ( + new, + old, + ), ) if version_info[0] < 16: logged_query( @@ -850,18 +934,23 @@ def rename_models(cr, model_spec): "UPDATE ir_translation SET " "name=%s || substr(name, strpos(name, ',')) " "WHERE name LIKE %s", - (new, old + ',%'), + (new, old + ",%"), ) logged_query( cr, - "UPDATE ir_filters SET model_id = %s " - "WHERE model_id = %s", (new, old,), + "UPDATE ir_filters SET model_id = %s " "WHERE model_id = %s", + ( + new, + old, + ), ) logged_query( - cr, """ + cr, + """ UPDATE ir_property SET res_id = replace(res_id, %(old_string)s, %(new_string)s) - WHERE res_id like %(old_pattern)s""", { + WHERE res_id like %(old_pattern)s""", + { "old_pattern": "%s,%%" % old, "old_string": "%s," % old, "new_string": "%s," % new, @@ -871,35 +960,43 @@ def rename_models(cr, model_spec): logged_query( cr, "SELECT id FROM ir_model_fields " - "WHERE relation = %s AND ttype = 'many2one'", (old, ), + "WHERE relation = %s AND ttype = 'many2one'", + (old,), ) field_ids = [x[0] for x in cr.fetchall()] logged_query( cr, - 'UPDATE ir_model_fields SET relation = %s ' - 'WHERE relation = %s', (new, old,), + "UPDATE ir_model_fields SET relation = %s " "WHERE relation = %s", + ( + new, + old, + ), ) if field_ids: logged_query( - cr, """ + cr, + """ UPDATE ir_property SET 
value_reference = replace( value_reference, %(old_string)s, %(new_string)s) - WHERE value_reference like %(old_pattern)s""", { + WHERE value_reference like %(old_pattern)s""", + { "old_pattern": "%s,%%" % old, "old_string": "%s," % old, "new_string": "%s," % new, }, ) # Handle models that reference to this model using reference fields - cr.execute(""" + cr.execute( + """ SELECT model, name FROM ir_model_fields WHERE ttype='reference' - """) + """ + ) rows = cr.fetchall() for row in rows: - table = row[0].replace('.', '_') + table = row[0].replace(".", "_") if not table_exists(cr, table): continue column = row[1] @@ -912,68 +1009,74 @@ def rename_models(cr, model_spec): WHERE {column} LIKE %(old_like)s """ sql_query = sql.SQL(query).format( - table=sql.Identifier(table), - column=sql.Identifier(column) + table=sql.Identifier(table), column=sql.Identifier(column) ) logged_query( cr, sql_query, { - "old": old + ',', - "old_like": old + ',%%', - "new": new + ',', + "old": old + ",", + "old_like": old + ",%%", + "new": new + ",", }, skip_no_result=True, ) # Update export profiles references logged_query( - cr, "UPDATE ir_exports SET resource = %s WHERE resource = %s", + cr, + "UPDATE ir_exports SET resource = %s WHERE resource = %s", (new, old), ) - if column_exists(cr, 'ir_act_server', 'model_name'): + if column_exists(cr, "ir_act_server", "model_name"): # model_name is a related field that in v11 becomes stored logged_query( cr, - 'UPDATE ir_act_server SET model_name = %s ' - 'WHERE model_name = %s', (new, old,), + "UPDATE ir_act_server SET model_name = %s " "WHERE model_name = %s", + ( + new, + old, + ), ) - if is_module_installed(cr, 'email_template'): - if table_exists(cr, 'email_template') and column_exists( - cr, 'email_template', 'model'): + if is_module_installed(cr, "email_template"): + if table_exists(cr, "email_template") and column_exists( + cr, "email_template", "model" + ): logged_query( cr, - 'UPDATE email_template SET model=%s ' - 'where model=%s', (new, old), + "UPDATE email_template SET model=%s " "where model=%s", + (new, old), ) - if is_module_installed(cr, 'mail'): + if is_module_installed(cr, "mail"): # fortunately, the data model didn't change up to now logged_query( cr, - 'UPDATE mail_message SET model=%s where model=%s', (new, old), + "UPDATE mail_message SET model=%s where model=%s", + (new, old), ) - if table_exists(cr, 'mail_message_subtype'): + if table_exists(cr, "mail_message_subtype"): logged_query( cr, - 'UPDATE mail_message_subtype SET res_model=%s ' - 'where res_model=%s', (new, old), + "UPDATE mail_message_subtype SET res_model=%s " + "where res_model=%s", + (new, old), ) - if table_exists(cr, 'mail_template'): + if table_exists(cr, "mail_template"): logged_query( cr, - 'UPDATE mail_template SET model=%s ' - 'where model=%s', (new, old), + "UPDATE mail_template SET model=%s " "where model=%s", + (new, old), ) - if table_exists(cr, 'mail_followers'): + if table_exists(cr, "mail_followers"): logged_query( cr, - 'UPDATE mail_followers SET res_model=%s ' - 'where res_model=%s', (new, old), + "UPDATE mail_followers SET res_model=%s " "where res_model=%s", + (new, old), ) - if table_exists(cr, 'mail_activity'): + if table_exists(cr, "mail_activity"): logged_query( cr, - 'UPDATE mail_activity SET res_model=%s ' - 'where res_model=%s', (new, old), + "UPDATE mail_activity SET res_model=%s " "where res_model=%s", + (new, old), ) # TODO: signal where the model occurs in references to ir_model @@ -991,28 +1094,27 @@ def merge_models(cr, old_model, new_model, ref_field): 
tables. You should have to do that previously in the migration scripts. """ logger.info("model %s: merging to %s", old_model, new_model) - model_table = new_model.replace('.', '_') + model_table = new_model.replace(".", "_") renames = [ - ('ir_attachment', 'res_model', 'res_id', ''), - ('ir_model_data', 'model', 'res_id', ''), - ('ir_filters', 'model_id', '', ''), - ('ir_exports', 'resource', '', ''), + ("ir_attachment", "res_model", "res_id", ""), + ("ir_model_data", "model", "res_id", ""), + ("ir_filters", "model_id", "", ""), + ("ir_exports", "resource", "", ""), ] - if is_module_installed(cr, 'mail'): + if is_module_installed(cr, "mail"): renames += [ - ('mail_message', 'model', 'res_id', ''), - ('mail_message_subtype', 'res_model', '', ''), - ('mail_activity', 'res_model', 'res_id', 'res_model_id'), - ('mail_template', 'model', '', 'model_id'), - ('mail_alias', '', '', 'alias_model_id'), - ('mail_alias', '', 'alias_parent_thread_id', - 'alias_parent_model_id'), + ("mail_message", "model", "res_id", ""), + ("mail_message_subtype", "res_model", "", ""), + ("mail_activity", "res_model", "res_id", "res_model_id"), + ("mail_template", "model", "", "model_id"), + ("mail_alias", "", "", "alias_model_id"), + ("mail_alias", "", "alias_parent_thread_id", "alias_parent_model_id"), # mail_followers: special case handled below ] if version_info[0] < 15: - renames.append(('mail_activity_type', '', '', 'res_model_id')) + renames.append(("mail_activity_type", "", "", "res_model_id")) else: - renames.append(('mail_activity_type', 'res_model', '', '')) + renames.append(("mail_activity_type", "res_model", "", "")) for (table, model_name_column, res_id_column, model_id_column) in renames: if not table_exists(cr, table): continue @@ -1029,38 +1131,59 @@ def merge_models(cr, old_model, new_model, ref_field): SELECT id FROM ir_model WHERE model = %(model)s""" - cr.execute(sql.SQL(pre_query), { - "model": new_model, - }) + cr.execute( + sql.SQL(pre_query), + { + "model": new_model, + }, + ) new_model_id = cr.fetchone()[0] query_3 = """, {model_id_column} = %(new_model_id)s""" if not model_name_column: - cr.execute(sql.SQL(pre_query), { - "model": old_model, - }) + cr.execute( + sql.SQL(pre_query), + { + "model": old_model, + }, + ) old_model_id = cr.fetchone()[0] query_1a = "" query_1b = """t.{model_id_column} = %(old_model_id)s""" query_3 = """{model_id_column} = %(new_model_id)s""" - query = """ + query = ( + """ UPDATE {table} t - SET """ + query_1a + query_3 + query_2a + """ - WHERE """ + query_1b + query_2b - logged_query(cr, sql.SQL(query).format( - model_table=sql.Identifier(model_table), - table=sql.Identifier(table), - model_name_column=sql.Identifier(model_name_column), - res_id_column=sql.Identifier(res_id_column), - model_id_column=sql.Identifier(model_id_column), - ref_field=sql.Identifier(ref_field) - ), { - "old_model": old_model, - "old_model_id": old_model_id, - "new_model": new_model, - "new_model_id": new_model_id, - }) - if table_exists(cr, 'mail_followers'): - logged_query(cr, sql.SQL(""" + SET """ + + query_1a + + query_3 + + query_2a + + """ + WHERE """ + + query_1b + + query_2b + ) + logged_query( + cr, + sql.SQL(query).format( + model_table=sql.Identifier(model_table), + table=sql.Identifier(table), + model_name_column=sql.Identifier(model_name_column), + res_id_column=sql.Identifier(res_id_column), + model_id_column=sql.Identifier(model_id_column), + ref_field=sql.Identifier(ref_field), + ), + { + "old_model": old_model, + "old_model_id": old_model_id, + "new_model": new_model, + 
"new_model_id": new_model_id, + }, + ) + if table_exists(cr, "mail_followers"): + logged_query( + cr, + sql.SQL( + """ UPDATE mail_followers mf SET res_model = %(new_model)s, res_id = mt.id FROM {model_table} mt @@ -1072,13 +1195,16 @@ def merge_models(cr, old_model, new_model, ref_field): AND mf2.res_model = %(new_model)s AND mf2.partner_id = mf1.partner_id) WHERE mf.id = mf1.id AND mf2.id IS NULL - """).format( + """ + ).format( model_table=sql.Identifier(model_table), - ref_field=sql.Identifier(ref_field) - ), { + ref_field=sql.Identifier(ref_field), + ), + { "old_model": old_model, "new_model": new_model, - }) + }, + ) def rename_xmlids(cr, xmlids_spec, allow_merge=False): @@ -1098,48 +1224,58 @@ def rename_xmlids(cr, xmlids_spec, allow_merge=False): the applied merge method is by SQL which is incomplete and can lead to inconsistencies in the database. """ - get_data_query = ( - """SELECT res_id, model FROM ir_model_data - WHERE module=%s AND name=%s""") + get_data_query = """SELECT res_id, model FROM ir_model_data + WHERE module=%s AND name=%s""" for (old, new) in xmlids_spec: - if '.' not in old or '.' not in new: + if "." not in old or "." not in new: logger.error( - 'Cannot rename XMLID %s to %s: need the module ' - 'reference to be specified in the IDs' % (old, new)) + "Cannot rename XMLID %s to %s: need the module " + "reference to be specified in the IDs" % (old, new) + ) continue - cr.execute(get_data_query, tuple(old.split('.'))) + cr.execute(get_data_query, tuple(old.split("."))) old_row = cr.fetchone() if not old_row: - logger.info('XMLID %s not found when renaming to %s', old, new) + logger.info("XMLID %s not found when renaming to %s", old, new) continue if allow_merge: - cr.execute(get_data_query, tuple(new.split('.'))) + cr.execute(get_data_query, tuple(new.split("."))) new_row = cr.fetchone() if new_row: logger.info( - 'XMLID %s already exists when renaming from %s: Merging.', - new, old) + "XMLID %s already exists when renaming from %s: Merging.", new, old + ) if new_row[1] != old_row[1]: do_raise( "Cannot merge XMLIDs %s, %s because they don't belong " - "to the same model (%s, %s)" % ( - old, new, old_row[1], new_row[1])) - table = old_row[1].replace('.', '_') + "to the same model (%s, %s)" + % (old, new, old_row[1], new_row[1]) + ) + table = old_row[1].replace(".", "_") if not table_exists(cr, table): do_raise( "Cannot merge XMLIDs %s, %s because the table I " "guessed (%s) based on the model name (%s) does not " - "exist." % (old, new, table, old_row[1])) + "exist." 
% (old, new, table, old_row[1]) + ) # Cannot import merge_records until after Odoo initialization from .openupgrade_merge_records import merge_records + env = api.Environment(cr, SUPERUSER_ID, {}) merge_records( - env, old_row[1], [old_row[0]], new_row[0], - method="sql", model_table=table) + env, + old_row[1], + [old_row[0]], + new_row[0], + method="sql", + model_table=table, + ) continue - query = ("UPDATE ir_model_data SET module = %s, name = %s " - "WHERE module = %s and name = %s") - logged_query(cr, query, tuple(new.split('.') + old.split('.'))) + query = ( + "UPDATE ir_model_data SET module = %s, name = %s " + "WHERE module = %s and name = %s" + ) + logged_query(cr, query, tuple(new.split(".") + old.split("."))) def add_xmlid(cr, module, xmlid, model, res_id, noupdate=False): @@ -1155,9 +1291,9 @@ def add_xmlid(cr, module, xmlid, model, res_id, noupdate=False): """ # Check if the XMLID doesn't already exists cr.execute( - "SELECT id FROM ir_model_data WHERE module=%s AND name=%s " - "AND model=%s", - (module, xmlid, model)) + "SELECT id FROM ir_model_data WHERE module=%s AND name=%s " "AND model=%s", + (module, xmlid, model), + ) already_exists = cr.fetchone() if already_exists: return False @@ -1212,14 +1348,13 @@ def drop_columns(cr, column_spec): :param column_spec: a list of (table, column) tuples """ for (table, column) in column_spec: - logger.info("table %s: drop column %s", - table, column) + logger.info("table %s: drop column %s", table, column) if column_exists(cr, table, column): - cr.execute('ALTER TABLE "%s" DROP COLUMN "%s"' % - (table, column)) + cr.execute( # pylint: disable=E8103 + 'ALTER TABLE "%s" DROP COLUMN "%s"' % (table, column) + ) else: - logger.warning("table %s: column %s did not exist", - table, column) + logger.warning("table %s: column %s did not exist", table, column) def update_workflow_workitems(cr, pool, ref_spec_actions): @@ -1238,31 +1373,36 @@ def update_workflow_workitems(cr, pool, ref_spec_actions): .. 
versionadded:: 7.0 """ - workflow_workitems = pool['workflow.workitem'] - ir_model_data_model = pool['ir.model.data'] + workflow_workitems = pool["workflow.workitem"] + ir_model_data_model = pool["ir.model.data"] for (target_external_id, fallback_external_id) in ref_spec_actions: target_activity = ir_model_data_model.get_object( - cr, SUPERUSER_ID, + cr, + SUPERUSER_ID, target_external_id.split(".")[0], target_external_id.split(".")[1], ) fallback_activity = ir_model_data_model.get_object( - cr, SUPERUSER_ID, + cr, + SUPERUSER_ID, fallback_external_id.split(".")[0], fallback_external_id.split(".")[1], ) ids = workflow_workitems.search( - cr, SUPERUSER_ID, [('act_id', '=', target_activity.id)] + cr, SUPERUSER_ID, [("act_id", "=", target_activity.id)] ) if ids: logger.info( "Moving %d items in the removed workflow action (%s) to a " "fallback action (%s): %s", - len(ids), target_activity.name, fallback_activity.name, ids + len(ids), + target_activity.name, + fallback_activity.name, + ids, ) workflow_workitems.write( - cr, SUPERUSER_ID, ids, {'act_id': fallback_activity.id} + cr, SUPERUSER_ID, ids, {"act_id": fallback_activity.id} ) @@ -1303,10 +1443,12 @@ def _index_loop(): for index, table, col in _index_loop(): logged_query( cr, - sql.SQL(""" + sql.SQL( + """ CREATE INDEX IF NOT EXISTS {} ON {} USING BTREE({}) - """).format(index, table, col) + """ + ).format(index, table, col), ) # Delete workflows logged_query( @@ -1316,13 +1458,13 @@ def _index_loop(): " FROM wkf_activity, wkf " " WHERE wkf_id = wkf.id AND " " wkf.osv = %s" - ")", (model,)) - logged_query( - cr, - "DELETE FROM wkf WHERE osv = %s", (model,)) + ")", + (model,), + ) + logged_query(cr, "DELETE FROM wkf WHERE osv = %s", (model,)) # Remove temporary indexes if asked to do so if drop_indexes: - for index, table, col in _index_loop(): + for index, _table, _col in _index_loop(): logged_query(cr, sql.SQL("DROP INDEX {}").format(index)) @@ -1343,18 +1485,21 @@ def warn_possible_dataloss(cr, pool, old_module, fields): .. versionadded:: 7.0 """ - module_obj = pool.get('ir.module.module') + module_obj = pool.get("ir.module.module") for field in fields: module_ids = module_obj.search( - cr, SUPERUSER_ID, [ - ('name', '=', field['new_module']), - ('state', 'in', ['installed', 'to upgrade', 'to install']) - ]) + cr, + SUPERUSER_ID, + [ + ("name", "=", field["new_module"]), + ("state", "in", ["installed", "to upgrade", "to install"]), + ], + ) if not module_ids: - cr.execute( + cr.execute( # pylint: disable=E8103 "SELECT count(*) FROM (SELECT %s from %s group by %s) " - "as tmp" % ( - field['field'], field['table'], field['field'])) + "as tmp" % (field["field"], field["table"], field["field"]) + ) row = cr.fetchone() if row[0] == 1: # not a problem, that field wasn't used. @@ -1363,17 +1508,25 @@ def warn_possible_dataloss(cr, pool, old_module, fields): "Field '%s' from module '%s' was moved to module " "'%s' which is not installed: " "No dataloss detected, only loss of functionality" - % (field['field'], old_module, field['new_module'])) + % (field["field"], old_module, field["new_module"]) + ) else: # there is data loss after the migration. 
message( - cr, old_module, None, None, + cr, + old_module, + None, + None, "Field '%s' was moved to module " "'%s' which is not installed: " "There were %s distinct values in this field.", - field['field'], field['new_module'], row[0]) + field["field"], + field["new_module"], + row[0], + ) +# flake8: noqa: C901 def set_defaults(cr, pool, default_spec, force=False, use_orm=False): """ Set default value. Useful for fields that are newly required. Uses orm, so @@ -1397,7 +1550,11 @@ def set_defaults(cr, pool, default_spec, force=False, use_orm=False): def write_value(ids, field, value): logger.debug( "model %s, field %s: setting default value of resources %s to %s", - model, field, ids, unicode(value)) + model, + field, + ids, + unicode(value), + ) if use_orm: if version_info[0] <= 7: for res_id in ids: @@ -1410,7 +1567,9 @@ def write_value(ids, field, value): obj.write(cr, SUPERUSER_ID, ids, {field: value}) else: query, params = "UPDATE %s SET %s = %%s WHERE id IN %%s" % ( - obj._table, field), (value, tuple(ids)) + obj._table, + field, + ), (value, tuple(ids)) # handle fields inherited from somewhere else if version_info[0] >= 10: columns = obj._fields @@ -1428,14 +1587,17 @@ def write_value(ids, field, value): nids = [] for sub_ids in cr.split_for_in_conditions(ids): cr.execute( - 'SELECT DISTINCT %s FROM %s WHERE id IN %%s' % ( - col, obj._table), (sub_ids,)) + "SELECT DISTINCT %s FROM %s WHERE id IN %%s" + % (col, obj._table), + (sub_ids,), + ) nids.extend(x for x, in cr.fetchall()) - query, params = "UPDATE %s SET %s = %%s WHERE id IN %%s" %\ - (pool[model_name]._table, field), (value, tuple(nids)) + query, params = "UPDATE %s SET %s = %%s WHERE id IN %%s" % ( + pool[model_name]._table, + field, + ), (value, tuple(nids)) if not query: - do_raise("Can't set default for %s on %s!" % ( - field, obj._name)) + do_raise("Can't set default for %s on %s!" 
% (field, obj._name)) # cope with really big tables for sub_ids in cr.split_for_in_conditions(params[1]): cr.execute(query, (params[0], sub_ids)) @@ -1444,11 +1606,10 @@ def write_value(ids, field, value): try: obj = pool[model] except KeyError: - do_raise( - "Migration: error setting default, no such model: %s" % model) + do_raise("Migration: error setting default, no such model: %s" % model) for field, value in default_spec[model]: - domain = not force and [(field, '=', False)] or [] + domain = not force and [(field, "=", False)] or [] if api and isinstance(pool, api.Environment): ids = obj.search(domain).ids else: @@ -1460,8 +1621,7 @@ def write_value(ids, field, value): if api and isinstance(pool, api.Environment): value = obj.default_get([field]).get(field) else: - value = obj.default_get( - cr, SUPERUSER_ID, [field]).get(field) + value = obj.default_get(cr, SUPERUSER_ID, [field]).get(field) if value: write_value(ids, field, value) else: @@ -1473,23 +1633,27 @@ def write_value(ids, field, value): write_value(ids, field, obj._defaults[field]) else: cr.execute( - "SELECT id, COALESCE(create_uid, 1) FROM %s " % - obj._table + "WHERE id in %s", (tuple(ids),)) + "SELECT id, COALESCE(create_uid, 1) FROM %s " + % obj._table + + "WHERE id in %s", + (tuple(ids),), + ) # Execute the function once per user_id user_id_map = {} for row in cr.fetchall(): - user_id_map.setdefault(row[1], []).append( - row[0]) + user_id_map.setdefault(row[1], []).append(row[0]) for user_id in user_id_map: write_value( - user_id_map[user_id], field, - obj._defaults[field]( - obj, cr, user_id, None)) + user_id_map[user_id], + field, + obj._defaults[field](obj, cr, user_id, None), + ) else: error = ( "OpenUpgrade: error setting default, field %s " - "with None default value not in %s' _defaults" % ( - field, model)) + "with None default value not in %s' _defaults" + % (field, model) + ) logger.error(error) # this exc. 
seems to get lost in a higher up try block except_orm("OpenUpgrade", error) @@ -1521,8 +1685,10 @@ def logged_query(cr, query, args=None, skip_no_result=False): raise else: if not skip_no_result or cr.rowcount: - log_msg = ('%(rowcount)d rows affected after ' - '%(duration)s running %(full_query)s') + log_msg = ( + "%(rowcount)d rows affected after " + "%(duration)s running %(full_query)s" + ) finally: duration = datetime.now() - start if log_msg: @@ -1530,11 +1696,15 @@ def logged_query(cr, query, args=None, skip_no_result=False): full_query = tools.ustr(cr._obj.query) except AttributeError: full_query = tools.ustr(cr.mogrify(query, args)) - logger.log(log_level, log_msg, { - "full_query": full_query, - "rowcount": cr.rowcount, - "duration": duration, - }) + logger.log( + log_level, + log_msg, + { + "full_query": full_query, + "rowcount": cr.rowcount, + "duration": duration, + }, + ) return cr.rowcount @@ -1565,29 +1735,32 @@ def update_module_names(cr, namespec, merge_modules=False): else: query = "UPDATE ir_module_module SET name = %s WHERE name = %s" logged_query(cr, query, (new_name, old_name)) - query = ("UPDATE ir_model_data SET name = %s " - "WHERE name = %s AND module = 'base' AND " - "model='ir.module.module' ") - logged_query(cr, query, - ("module_%s" % new_name, "module_%s" % old_name)) + query = ( + "UPDATE ir_model_data SET name = %s " + "WHERE name = %s AND module = 'base' AND " + "model='ir.module.module' " + ) + logged_query(cr, query, ("module_%s" % new_name, "module_%s" % old_name)) # The subselect allows to avoid duplicated XML-IDs - query = ("UPDATE ir_model_data SET module = %s " - "WHERE module = %s AND name NOT IN " - "(SELECT name FROM ir_model_data WHERE module = %s)") + query = ( + "UPDATE ir_model_data SET module = %s " + "WHERE module = %s AND name NOT IN " + "(SELECT name FROM ir_model_data WHERE module = %s)" + ) logged_query(cr, query, (new_name, old_name, new_name)) # Rename the remaining occurrences for let Odoo's update process # to auto-remove related resources - query = ("UPDATE ir_model_data " - "SET name = name || '_openupgrade_' || id, " - "module = %s, noupdate = FALSE " - "WHERE module = %s") + query = ( + "UPDATE ir_model_data " + "SET name = name || '_openupgrade_' || id, " + "module = %s, noupdate = FALSE " + "WHERE module = %s" + ) logged_query(cr, query, (new_name, old_name)) - query = ("UPDATE ir_module_module_dependency SET name = %s " - "WHERE name = %s") + query = "UPDATE ir_module_module_dependency SET name = %s " "WHERE name = %s" logged_query(cr, query, (new_name, old_name)) if version_info[0] > 7 and version_info[0] < 16: - query = ("UPDATE ir_translation SET module = %s " - "WHERE module = %s") + query = "UPDATE ir_translation SET module = %s " "WHERE module = %s" logged_query(cr, query, (new_name, old_name)) if merge_modules: # Conserve old_name's state if new_name is uninstalled @@ -1605,7 +1778,7 @@ def update_module_names(cr, namespec, merge_modules=False): cr, "DELETE FROM ir_model_data WHERE module = 'base' " "AND model='ir.module.module' AND name = %s", - ('module_%s' % old_name,), + ("module_%s" % old_name,), ) @@ -1621,8 +1794,7 @@ def add_ir_model_fields(cr, columnspec): :param columnspec: tuple of (column name, column type) """ for column in columnspec: - query = 'ALTER TABLE ir_model_fields ADD COLUMN %s %s' % ( - column) + query = "ALTER TABLE ir_model_fields ADD COLUMN %s %s" % (column) logged_query(cr, query, []) @@ -1635,8 +1807,12 @@ def get_legacy_name(original_name): :param original_name: the original name of the 
column :param version: current version as passed to migrate() """ - return 'openupgrade_legacy_' + '_'.join( - map(str, version_info[0:2])) + '_' + original_name + return ( + "openupgrade_legacy_" + + "_".join(map(str, version_info[0:2])) + + "_" + + original_name + ) def m2o_to_x2m(cr, model, table, field, source_field): @@ -1657,10 +1833,14 @@ def m2o_to_x2m(cr, model, table, field, source_field): .. versionadded:: 8.0 """ - columns = getattr(model, '_columns', False) or getattr(model, '_fields') + try: + columns = model._fields + except AttributeError: + columns = model._columns if not columns.get(field): - do_raise("m2o_to_x2m: field %s doesn't exist in model %s" % ( - field, model._name)) + do_raise( + "m2o_to_x2m: field %s doesn't exist in model %s" % (field, model._name) + ) m2m_types = [] if many2many: m2m_types.append(many2many) @@ -1673,9 +1853,9 @@ def m2o_to_x2m(cr, model, table, field, source_field): o2m_types.append(One2many) if isinstance(columns[field], tuple(m2m_types)): column = columns[field] - if hasattr(many2many, '_sql_names'): # >= 6.1 and < 10.0 + if hasattr(many2many, "_sql_names"): # >= 6.1 and < 10.0 rel, id1, id2 = many2many._sql_names(column, model) - elif hasattr(column, 'relation'): # >= 10.0 + elif hasattr(column, "relation"): # >= 10.0 rel, id1, id2 = column.relation, column.column1, column.column2 else: # <= 6.0 rel, id1, id2 = column._rel, column._id1, column._id2 @@ -1686,8 +1866,9 @@ def m2o_to_x2m(cr, model, table, field, source_field): SELECT id, %s FROM %s WHERE %s is not null - """ % - (rel, id1, id2, source_field, table, source_field)) + """ + % (rel, id1, id2, source_field, table, source_field), + ) elif isinstance(columns[field], tuple(o2m_types)): if isinstance(columns[field], One2many): # >= 8.0 target_table = model.env[columns[field].comodel_name]._table @@ -1702,14 +1883,19 @@ def m2o_to_x2m(cr, model, table, field, source_field): SET %(target_field)s=source.id FROM %(source_table)s AS source WHERE source.%(source_field)s=target.id - """ % {'target_table': target_table, - 'target_field': target_field, - 'source_field': source_field, - 'source_table': table}) + """ + % { + "target_table": target_table, + "target_field": target_field, + "source_field": source_field, + "source_table": table, + }, + ) else: do_raise( "m2o_to_x2m: field %s of model %s is not a " - "many2many/one2many one" % (field, model._name)) + "many2many/one2many one" % (field, model._name) + ) # Backwards compatibility @@ -1746,15 +1932,17 @@ def float_to_integer(cr, table, field): cr, "ALTER TABLE %(table)s " "ALTER COLUMN %(field)s " - "TYPE integer" % { - 'table': table, - 'field': field, - }) + "TYPE integer" + % { + "table": table, + "field": field, + }, + ) def map_values( - cr, source_column, target_column, mapping, - model=None, table=None, write='sql'): + cr, source_column, target_column, mapping, model=None, table=None, write="sql" +): """ Map old values to new values within the same model or table. Old values presumably come from a legacy column. @@ -1780,10 +1968,10 @@ def map_values( .. 
versionadded:: 8.0 """ - if write not in ('sql', 'orm'): + if write not in ("sql", "orm"): logger.exception( - "map_values is called with unknown value for write param: %s", - write) + "map_values is called with unknown value for write param: %s", write + ) if not table: if not model: logger.exception("map_values is called with no table and no model") @@ -1792,46 +1980,54 @@ def map_values( logger.exception( "map_values is called with the same value for source and old" " columns : %s", - source_column) + source_column, + ) for old, new in mapping: new = "'%s'" % new if old is True: - old = 'NOT NULL' - op = 'IS' + old = "NOT NULL" + op = "IS" elif old is False: - old = 'NULL' - op = 'IS' + old = "NULL" + op = "IS" else: old = "'%s'" % old - op = '=' + op = "=" values = { - 'table': table, - 'source': source_column, - 'target': target_column, - 'old': old, - 'new': new, - 'op': op, + "table": table, + "source": source_column, + "target": target_column, + "old": old, + "new": new, + "op": op, } - if write == 'sql': - query = """UPDATE %(table)s + if write == "sql": + query = ( + """UPDATE %(table)s SET %(target)s = %(new)s - WHERE %(source)s %(op)s %(old)s""" % values + WHERE %(source)s %(op)s %(old)s""" + % values + ) else: - query = """SELECT id FROM %(table)s - WHERE %(source)s %(op)s %(old)s""" % values + query = ( + """SELECT id FROM %(table)s + WHERE %(source)s %(op)s %(old)s""" + % values + ) logged_query(cr, query, values) - if write == 'orm': + if write == "orm": model.write( - cr, SUPERUSER_ID, + cr, + SUPERUSER_ID, [row[0] for row in cr.fetchall()], - {target_column: new}) + {target_column: new}, + ) -def message(cr, module, table, column, - message, *args, **kwargs): +def message(cr, module, table, column, message, *args, **kwargs): """ Log handler for non-critical notifications about the upgrade. To be extended with logging to a table for reporting purposes. @@ -1844,15 +2040,15 @@ def message(cr, module, table, column, .. 
versionadded:: 7.0 """ argslist = list(args or []) - prefix = ': ' + prefix = ": " if column: argslist.insert(0, column) - prefix = ', column %s' + prefix + prefix = ", column %s" + prefix if table: argslist.insert(0, table) - prefix = ', table %s' + prefix + prefix = ", table %s" + prefix argslist.insert(0, module) - prefix = 'Module %s' + prefix + prefix = "Module %s" + prefix logger.warning(prefix + message, *argslist, **kwargs) @@ -1874,30 +2070,34 @@ def deactivate_workflow_transitions(cr, model, transitions=None): """ transition_ids = [] if transitions: - data_obj = registry.get(cr.dbname)['ir.model.data'] + data_obj = registry.get(cr.dbname)["ir.model.data"] for module, name in transitions: try: transition_ids.append( - data_obj.get_object_reference( - cr, SUPERUSER_ID, module, name)[1]) + data_obj.get_object_reference(cr, SUPERUSER_ID, module, name)[1] + ) except ValueError: continue else: cr.execute( - '''select distinct t.id + """select distinct t.id from wkf w join wkf_activity a on a.wkf_id=w.id join wkf_transition t on t.act_from=a.id or t.act_to=a.id - where w.osv=%s''', (model,)) + where w.osv=%s""", + (model,), + ) transition_ids = [i for i, in cr.fetchall()] cr.execute( - 'select id, condition from wkf_transition where id in %s', - (tuple(transition_ids),)) + "select id, condition from wkf_transition where id in %s", + (tuple(transition_ids),), + ) transition_conditions = dict(cr.fetchall()) cr.execute( "update wkf_transition set condition = 'False' WHERE id in %s", - (tuple(transition_ids),)) + (tuple(transition_ids),), + ) return transition_conditions @@ -1915,8 +2115,9 @@ def reactivate_workflow_transitions(cr, transition_conditions): """ for transition_id, condition in transition_conditions.iteritems(): cr.execute( - 'update wkf_transition set condition = %s where id = %s', - (condition, transition_id)) + "update wkf_transition set condition = %s where id = %s", + (condition, transition_id), + ) # Global var to count call quantity to an openupgrade function @@ -1955,8 +2156,8 @@ def migrate(cr, version): migrate_partner(cr, partner) """ - def wrap(func): + def wrap(func): @wraps(func) def wrapped_function(*args, **kwargs): to_log = True @@ -1964,7 +2165,7 @@ def wrapped_function(*args, **kwargs): # Count calls if step: # Compute unique name - unique_name = '%s.%s' % (func.__module__, func.__name__) + unique_name = "%s.%s" % (func.__module__, func.__name__) if unique_name not in openupgrade_call_logging: openupgrade_call_logging[unique_name] = 0 openupgrade_call_logging[unique_name] += 1 @@ -1986,6 +2187,7 @@ def wrapped_function(*args, **kwargs): return func(*args, **kwargs) return wrapped_function + return wrap @@ -2014,32 +2216,30 @@ def migrate(no_version=False, use_env=None, uid=None, context=None): Retrieve debug context data from the frame above for logging purposes. 
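Example, as a minimal sketch (the column rename is illustrative only, not part of any real migration):

.. code-block:: python

   from openupgradelib import openupgrade

   @openupgrade.migrate(use_env=True)
   def migrate(env, version):
       openupgrade.rename_columns(
           env.cr, {"res_partner": [("ref", "legacy_ref")]})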
""" - def wrap(func): + def wrap(func): @wraps(func) def wrapped_function(cr, version): - stage = 'unknown' - module = 'unknown' - filename = 'unknown' + stage = "unknown" + module = "unknown" + filename = "unknown" with ExitStack() as contextmanagers: contextmanagers.enter_context(savepoint(cr)) use_env2 = use_env is None and version_info[0] >= 10 or use_env if use_env2: - assert version_info[0] >= 8, 'you need at least v8' + assert version_info[0] >= 8, "you need at least v8" if version_info[0] <= 14: contextmanagers.enter_context(api.Environment.manage()) try: frame = inspect.getargvalues(inspect.stack()[1][0]) - stage = frame.locals['stage'] - module = frame.locals['pkg'].name + stage = frame.locals["stage"] + module = frame.locals["pkg"].name # Python3: fetch pyfile from locals, not fp - filename = frame.locals.get( - 'pyfile') or frame.locals['fp'].name + filename = frame.locals.get("pyfile") or frame.locals["fp"].name except Exception as exc: logger.error( - "'migrate' decorator: failed to inspect " - "the frame above: %s", - exc + "'migrate' decorator: failed to inspect " "the frame above: %s", + exc, ) if not version and not no_version: return @@ -2047,32 +2247,44 @@ def wrapped_function(cr, version): "%s: %s-migration script called with version %s", module, stage, - version + version, ) try: # The actual function is called here func( - api.Environment( - cr, uid or SUPERUSER_ID, context or {}) - if use_env2 else cr, version) + api.Environment(cr, uid or SUPERUSER_ID, context or {}) + if use_env2 + else cr, + version, + ) except Exception as exc: - error_message = \ - repr(exc) if sys.version_info[0] == 2 else str(exc) + error_message = repr(exc) if sys.version_info[0] == 2 else str(exc) logger.error( "%s: error in migration script %s: %s", - module, filename, error_message) + module, + filename, + error_message, + ) logger.exception(exc) raise return wrapped_function + return wrap def move_field_m2o( - cr, pool, - registry_old_model, field_old_model, m2o_field_old_model, - registry_new_model, field_new_model, - quick_request=True, compute_func=None, binary_field=False): + cr, + pool, + registry_old_model, + field_old_model, + m2o_field_old_model, + registry_new_model, + field_new_model, + quick_request=True, + compute_func=None, + binary_field=False, +): """ Use that function in the following case: A field moves from a model A to the model B with : A -> m2o -> B. @@ -2099,7 +2311,8 @@ def move_field_m2o( .. 
versionadded:: 8.0 """ - def default_func(cr, pool, id, vals): + + def default_func(cr, pool, rec_id, vals): """This function return the value the most present in vals.""" quantity = {}.fromkeys(set(vals), 0) for val in vals: @@ -2110,20 +2323,21 @@ def default_func(cr, pool, id, vals): res = val return res - logger.info("Moving data from '%s'.'%s' to '%s'.'%s'" % ( - registry_old_model, field_old_model, - registry_new_model, field_new_model)) + logger.info( + "Moving data from '%s'.'%s' to '%s'.'%s'" + % (registry_old_model, field_old_model, registry_new_model, field_new_model) + ) table_old_model = pool[registry_old_model]._table table_new_model = pool[registry_new_model]._table # Manage regular case (all the value are identical) - cr.execute( + cr.execute( # pylint: disable=E8103 " SELECT %s" " FROM %s" " GROUP BY %s" - " HAVING count(*) = 1;" % ( - m2o_field_old_model, table_old_model, m2o_field_old_model - )) + " HAVING count(*) = 1;" + % (m2o_field_old_model, table_old_model, m2o_field_old_model) + ) ok_ids = [x[0] for x in cr.fetchall()] if quick_request: query = ( @@ -2133,45 +2347,59 @@ def default_func(cr, pool, id, vals): " FROM %s as old_table" " WHERE old_table.%s=new_table.id" " LIMIT 1) " - " WHERE id in %%s" % ( - table_new_model, field_new_model, field_old_model, - table_old_model, m2o_field_old_model)) + " WHERE id in %%s" + % ( + table_new_model, + field_new_model, + field_old_model, + table_old_model, + m2o_field_old_model, + ) + ) logged_query(cr, query, [tuple(ok_ids)]) else: query = ( " SELECT %s, %s" " FROM %s " " WHERE %s in %%s" - " GROUP BY %s, %s" % ( - m2o_field_old_model, field_old_model, table_old_model, - m2o_field_old_model, m2o_field_old_model, field_old_model)) + " GROUP BY %s, %s" + % ( + m2o_field_old_model, + field_old_model, + table_old_model, + m2o_field_old_model, + m2o_field_old_model, + field_old_model, + ) + ) cr.execute(query, [tuple(ok_ids)]) for res in cr.fetchall(): if res[1] and binary_field: pool[registry_new_model].write( - cr, SUPERUSER_ID, res[0], - {field_new_model: res[1][:]}) + cr, SUPERUSER_ID, res[0], {field_new_model: res[1][:]} + ) else: pool[registry_new_model].write( - cr, SUPERUSER_ID, res[0], - {field_new_model: res[1]}) + cr, SUPERUSER_ID, res[0], {field_new_model: res[1]} + ) # Manage non-determinist case (some values are different) func = compute_func if compute_func else default_func - cr.execute( + cr.execute( # pylint: disable=E8103 " SELECT %s " " FROM %s " - " GROUP BY %s having count(*) != 1;" % ( - m2o_field_old_model, table_old_model, m2o_field_old_model - )) + " GROUP BY %s having count(*) != 1;" + % (m2o_field_old_model, table_old_model, m2o_field_old_model) + ) ko_ids = [x[0] for x in cr.fetchall()] for ko_id in ko_ids: query = ( " SELECT %s" " FROM %s" - " WHERE %s = %s;" % ( - field_old_model, table_old_model, m2o_field_old_model, ko_id)) - cr.execute(query) + " WHERE %s = %s;" + % (field_old_model, table_old_model, m2o_field_old_model, ko_id) + ) + cr.execute(query) # pylint: disable=E8103 if binary_field: vals = [str(x[0][:]) if x[0] else False for x in cr.fetchall()] else: @@ -2181,36 +2409,37 @@ def default_func(cr, pool, id, vals): query = ( " UPDATE %s" " SET %s=%%s" - " WHERE id = %%s" % (table_new_model, field_new_model)) - logged_query( - cr, query, (value, ko_id)) + " WHERE id = %%s" % (table_new_model, field_new_model) + ) + logged_query(cr, query, (value, ko_id)) else: pool[registry_new_model].write( - cr, SUPERUSER_ID, [ko_id], - {field_new_model: value}) + cr, SUPERUSER_ID, [ko_id], 
{field_new_model: value} + ) -def convert_field_to_html( - cr, table, field_name, html_field_name, verbose=True): +def convert_field_to_html(cr, table, field_name, html_field_name, verbose=True): """ Convert field value to HTML value. .. versionadded:: 7.0 """ if version_info[0] < 7: - logger.error("You cannot use this method in an OpenUpgrade version " - "prior to 7.0.") + logger.error( + "You cannot use this method in an OpenUpgrade version " "prior to 7.0." + ) return - cr.execute( - "SELECT id, %(field)s FROM %(table)s WHERE %(field)s IS NOT NULL" % { - 'field': field_name, - 'table': table, + cr.execute( # pylint: disable=E8103 + "SELECT id, %(field)s FROM %(table)s WHERE %(field)s IS NOT NULL" + % { + "field": field_name, + "table": table, } ) for row in cr.fetchall(): query = "UPDATE %(table)s SET %(field)s = %%s WHERE id = %%s" % { - 'field': html_field_name, - 'table': table, + "field": html_field_name, + "table": table, } if verbose: logged_query(cr, query, (plaintext2html(row[1]), row[0])) @@ -2219,7 +2448,8 @@ def convert_field_to_html( def date_to_datetime_tz( - cr, table_name, user_field_name, date_field_name, datetime_field_name): + cr, table_name, user_field_name, date_field_name, datetime_field_name +): """ Take the related user's timezone into account when converting date field to datetime in a given table. This function must be call in post migration script. @@ -2232,22 +2462,27 @@ def date_to_datetime_tz( .. versionadded:: 8.0 """ - cr.execute( + cr.execute( # pylint: disable=E8103 """ SELECT distinct(rp.tz) FROM %s my_table, res_users ru, res_partner rp WHERE rp.tz IS NOT NULL AND my_table.%s=ru.id AND ru.partner_id=rp.id - """ % (table_name, user_field_name,)) - for timezone, in cr.fetchall(): + """ + % ( + table_name, + user_field_name, + ) + ) + for (timezone,) in cr.fetchall(): cr.execute("SET TIMEZONE=%s", (timezone,)) values = { - 'table_name': table_name, - 'date_field_name': date_field_name, - 'datetime_field_name': datetime_field_name, - 'timezone': timezone, - 'user_field_name': user_field_name, + "table_name": table_name, + "date_field_name": date_field_name, + "datetime_field_name": datetime_field_name, + "timezone": timezone, + "user_field_name": user_field_name, } logged_query( cr, @@ -2260,18 +2495,22 @@ def date_to_datetime_tz( AND my_table.%(user_field_name)s=ru.id AND ru.partner_id=rp.id AND rp.tz='%(timezone)s'; - """ % values) + """ + % values, + ) cr.execute("RESET TIMEZONE") def is_module_installed(cr, module): - """ Check if `module` is installed. + """Check if `module` is installed. :return: True / False """ cr.execute( "SELECT id FROM ir_module_module " - "WHERE name=%s and state IN ('installed', 'to upgrade')", (module,)) + "WHERE name=%s and state IN ('installed', 'to upgrade')", + (module,), + ) return bool(cr.fetchone()) @@ -2281,46 +2520,47 @@ def lift_constraints(cr, table, column): for many2one fields with changed target objects. 
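A typical call, as a sketch with illustrative table and column names, is lift_constraints(cr, "account_move_line", "move_id"), run just before repointing that column at a different table.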
If everything went right, the constraints will be recreated""" cr.execute( - 'select relname, array_agg(conname) from ' - '(select t1.relname, c.conname ' - 'from pg_constraint c ' - 'join pg_attribute a ' - 'on c.confrelid=a.attrelid and a.attnum=any(c.conkey) ' - 'join pg_class t on t.oid=a.attrelid ' - 'join pg_class t1 on t1.oid=c.conrelid ' - 'where t.relname=%(table)s and attname=%(column)s ' - 'union select t.relname, c.conname ' - 'from pg_constraint c ' - 'join pg_attribute a ' - 'on c.conrelid=a.attrelid and a.attnum=any(c.conkey) ' - 'join pg_class t on t.oid=a.attrelid ' - 'where relname=%(table)s and attname=%(column)s) in_out ' - 'group by relname', + "select relname, array_agg(conname) from " + "(select t1.relname, c.conname " + "from pg_constraint c " + "join pg_attribute a " + "on c.confrelid=a.attrelid and a.attnum=any(c.conkey) " + "join pg_class t on t.oid=a.attrelid " + "join pg_class t1 on t1.oid=c.conrelid " + "where t.relname=%(table)s and attname=%(column)s " + "union select t.relname, c.conname " + "from pg_constraint c " + "join pg_attribute a " + "on c.conrelid=a.attrelid and a.attnum=any(c.conkey) " + "join pg_class t on t.oid=a.attrelid " + "where relname=%(table)s and attname=%(column)s) in_out " + "group by relname", { - 'table': table, - 'column': column, - }) + "table": table, + "column": column, + }, + ) for table, constraints in cr.fetchall(): cr.execute( - 'alter table %s drop constraint %s', - (AsIs(table), AsIs(', drop constraint '.join(constraints))) + "alter table %s drop constraint %s", + (AsIs(table), AsIs(", drop constraint ".join(constraints))), ) @contextmanager def savepoint(cr): """return a context manager wrapping postgres savepoints""" - if hasattr(cr, 'savepoint'): + if hasattr(cr, "savepoint"): with cr.savepoint(): yield else: name = uuid.uuid1().hex - cr.execute('SAVEPOINT "%s"' % name) + cr.execute('SAVEPOINT "%s"' % name) # pylint: disable=E8103 try: yield - cr.execute('RELEASE SAVEPOINT "%s"' % name) + cr.execute('RELEASE SAVEPOINT "%s"' % name) # pylint: disable=E8103 except Exception: - cr.execute('ROLLBACK TO SAVEPOINT "%s"' % name) + cr.execute('ROLLBACK TO SAVEPOINT "%s"' % name) # pylint: disable=E8103 raise @@ -2332,15 +2572,17 @@ def rename_property(cr, model, old_name, new_name): "from ir_model m " "where m.id=f.model_id and m.model=%s and f.name=%s " "returning f.id", - (new_name, model, old_name)) + (new_name, model, old_name), + ) field_ids = tuple(i for i, in cr.fetchall()) cr.execute( "update ir_model_data set name=%s where model='ir.model.fields' and " "res_id in %s", - ('%s,%s' % (model, new_name), field_ids)) + ("%s,%s" % (model, new_name), field_ids), + ) cr.execute( - "update ir_property set name=%s where fields_id in %s", - (new_name, field_ids)) + "update ir_property set name=%s where fields_id in %s", (new_name, field_ids) + ) def delete_record_translations(cr, module, xml_ids): @@ -2352,19 +2594,34 @@ def delete_record_translations(cr, module, xml_ids): if not isinstance(xml_ids, (list, tuple)): do_raise("XML IDs %s must be a tuple or list!" 
% (xml_ids)) - cr.execute(""" + cr.execute( + """ SELECT model, res_id FROM ir_model_data WHERE module = %s AND name in %s - """, (module, tuple(xml_ids),)) + """, + ( + module, + tuple(xml_ids), + ), + ) for row in cr.fetchall(): - query = (""" + query = """ DELETE FROM ir_translation WHERE module = %s AND name LIKE %s AND res_id = %s; - """) - logged_query(cr, query, (module, row[0] + ',%', row[1],)) + """ + logged_query( + cr, + query, + ( + module, + row[0] + ",%", + row[1], + ), + ) +# flake8: noqa: C901 def disable_invalid_filters(env): """It analyzes all the existing active filters to check if they are still correct. If not, they are disabled for avoiding errors when clicking on @@ -2378,13 +2635,16 @@ def disable_invalid_filters(env): if target_version and not is_target_version: logger.info( "Deferring `disable_invalid_filters` until this migration reaches " - "target version %s", target_version) + "target version %s", + target_version, + ) return try: from odoo.tools.safe_eval import safe_eval except ImportError: from openerp.tools.safe_eval import safe_eval import time + try: basestring # noqa: F823 except NameError: # For Python 3 compatibility @@ -2399,44 +2659,49 @@ def format_message(f): msg += " for model '%s' has been disabled " % f.model_id return msg - filters = env['ir.filters'].search([('domain', '!=', '[]')]) + filters = env["ir.filters"].search([("domain", "!=", "[]")]) for f in filters: if f.model_id not in env: continue # Obsolete or invalid model model = env[f.model_id] - columns = ( - getattr(model, '_columns', False) or getattr(model, '_fields') - ) - - globaldict = {'uid': env.uid} + try: + columns = model._fields + except AttributeError: + columns = model._columns + globaldict = {"uid": env.uid} if version_info[0] < 14: - globaldict.update({'time': time}) + globaldict.update({"time": time}) if version_info[0] >= 13: try: - from odoo.tools.safe_eval import datetime as safe_eval_datetime - from odoo.tools.safe_eval import dateutil + from odoo.tools.safe_eval import ( + datetime as safe_eval_datetime, + dateutil, + ) except ImportError: import datetime as safe_eval_datetime + import dateutil - globaldict.update({ - 'datetime': safe_eval_datetime, - 'context_today': safe_eval_datetime.datetime.now, - 'relativedelta': dateutil.relativedelta.relativedelta, - }) + globaldict.update( + { + "datetime": safe_eval_datetime, + "context_today": safe_eval_datetime.datetime.now, + "relativedelta": dateutil.relativedelta.relativedelta, + } + ) # DOMAIN try: with savepoint(env.cr): # Strange artifact found in a filter - domain = f.domain.replace('%%', '%') + domain = f.domain.replace("%%", "%") model.search( safe_eval(domain, globaldict), limit=1, ) except Exception as e: logger.warning( - format_message(f) + - "as it contains an invalid domain %s. Detail: %s", - f.domain, e + format_message(f) + "as it contains an invalid domain %s. Detail: %s", + f.domain, + e, ) f.active = False continue @@ -2446,13 +2711,13 @@ def format_message(f): assert isinstance(context, dict) except Exception as e: logger.warning( - format_message(f) + - "as it contains an invalid context %s. Detail: %s", - f.context, e + format_message(f) + "as it contains an invalid context %s. 
Detail: %s", + f.context, + e, ) f.active = False continue - keys = ['group_by', 'col_group_by'] + keys = ["group_by", "col_group_by"] for key in keys: if not context.get(key): continue @@ -2462,11 +2727,10 @@ def format_message(f): if isinstance(g, basestring): g = [g] for field_expr in g: - field = field_expr.split(':')[0] # Remove date specifiers + field = field_expr.split(":")[0] # Remove date specifiers if not columns.get(field): logger.warning( - format_message(f) + - "as it contains an invalid %s." % key + format_message(f) + "as it contains an invalid %s." % key ) f.active = False break @@ -2506,23 +2770,23 @@ def add_fields(env, field_spec): in the column for existing records. """ sql_type_mapping = { - 'binary': 'bytea', # If there's attachment, no SQL. Force it manually - 'boolean': 'bool', - 'char': 'varchar', # Force it manually if there's size limit - 'date': 'date', - 'datetime': 'timestamp', - 'float': 'numeric', # Force manually to double precision if no digits - 'html': 'text', - 'integer': 'int4', - 'many2many': False, # No need to create SQL column - 'many2one': 'int4', - 'many2one_reference': 'int4', - 'monetary': 'numeric', - 'one2many': False, # No need to create SQL column - 'reference': 'varchar', - 'selection': 'varchar', # Can be sometimes integer. Force it manually - 'text': 'text', - 'serialized': 'text', + "binary": "bytea", # If there's attachment, no SQL. Force it manually + "boolean": "bool", + "char": "varchar", # Force it manually if there's size limit + "date": "date", + "datetime": "timestamp", + "float": "numeric", # Force manually to double precision if no digits + "html": "text", + "integer": "int4", + "many2many": False, # No need to create SQL column + "many2one": "int4", + "many2one_reference": "int4", + "monetary": "numeric", + "one2many": False, # No need to create SQL column + "reference": "varchar", + "selection": "varchar", # Can be sometimes integer. Force it manually + "text": "text", + "serialized": "text", } for vals in field_spec: field_name = vals[0] @@ -2549,24 +2813,25 @@ def add_fields(env, field_spec): args.append(init_value) logged_query(env.cr, query, args) if init_value: - logged_query(env.cr, sql.SQL( - "ALTER TABLE {} ALTER COLUMN {} DROP DEFAULT").format( + logged_query( + env.cr, + sql.SQL("ALTER TABLE {} ALTER COLUMN {} DROP DEFAULT").format( sql.Identifier(table_name), sql.Identifier(field_name), - ) + ), ) # Add ir.model.fields entry env.cr.execute( - "SELECT id FROM ir_model WHERE model = %s", (model_name, ), + "SELECT id FROM ir_model WHERE model = %s", + (model_name,), ) row = env.cr.fetchone() if not row: continue model_id = row[0] env.cr.execute( - "SELECT id FROM ir_model_fields " - "WHERE model_id = %s AND name = %s", - (model_id, field_name) + "SELECT id FROM ir_model_fields " "WHERE model_id = %s AND name = %s", + (model_id, field_name), ) row = env.cr.fetchone() field_id = row and row[0] or False @@ -2574,8 +2839,10 @@ def add_fields(env, field_spec): logger.warning( "add_fields: There's already an entry for %s in %s. This may " "mean that there's some misconfiguration, or simply that " - "another module added the same field previously." % ( - field_name, model_name, + "another module added the same field previously." 
+ % ( + field_name, + model_name, ) ) else: @@ -2615,19 +2882,21 @@ def add_fields(env, field_spec): # Add ir.model.data entry if not module or version_info[0] >= 12: continue - name1 = 'field_%s_%s' % (model_name.replace('.', '_'), field_name) + name1 = "field_%s_%s" % (model_name.replace(".", "_"), field_name) try: with env.cr.savepoint(): logged_query( - env.cr, """ + env.cr, + """ INSERT INTO ir_model_data ( name, date_init, date_update, module, model, res_id ) VALUES ( %s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s - )""", (name1, module, 'ir.model.fields', field_id), + )""", + (name1, module, "ir.model.fields", field_id), ) - except IntegrityError: + except IntegrityError: # pylint: disable=W7938 # Do not fail if already present pass @@ -2649,15 +2918,14 @@ def update_field_multilang(records, field, method): installed_langs = [(records.env.lang or "en_US", "English")] if records._fields[field].translate: installed_langs = records.env["res.lang"].get_installed() - for lang_code, lang_name in installed_langs: + for lang_code, _lang_name in installed_langs: for record in records.with_context(lang=lang_code): new_value = method(record[field], lang_code, record) if record[field] != new_value: record[field] = new_value -def update_module_moved_fields( - cr, model, moved_fields, old_module, new_module): +def update_module_moved_fields(cr, model, moved_fields, old_module, new_module): """Update module for field definition in general tables that have been moved from one module to another. @@ -2676,17 +2944,21 @@ def update_module_moved_fields( do_raise("moved_fields %s must be a tuple or list!" % moved_fields) logger.info( "Moving fields %s in model %s from module '%s' to module '%s'", - ', '.join(moved_fields), model, old_module, new_module, + ", ".join(moved_fields), + model, + old_module, + new_module, ) vals = { - 'new_module': new_module, - 'old_module': old_module, - 'model': model, - 'fields': tuple(moved_fields), + "new_module": new_module, + "old_module": old_module, + "model": model, + "fields": tuple(moved_fields), } # update xml-id entries logged_query( - cr, """ + cr, + """ UPDATE ir_model_data imd SET module = %(new_module)s FROM ir_model_fields imf @@ -2699,14 +2971,16 @@ def update_module_moved_fields( imd.id NOT IN ( SELECT id FROM ir_model_data WHERE module = %(new_module)s ) - """, vals, + """, + vals, ) # update ir_translation - it covers both <=v8 through type='field' and # >=v9 through type='model' + name if version_info[0] > 15: return logged_query( - cr, """ + cr, + """ UPDATE ir_translation it SET module = %(new_module)s FROM ir_model_fields imf @@ -2720,7 +2994,8 @@ def update_module_moved_fields( ) OR ( it.type = 'field' )) - """, vals, + """, + vals, ) @@ -2732,15 +3007,13 @@ def update_module_moved_models(cr, model, old_module, new_module): :param old_module: Previous module of the models :param new_module: New module of the models """ - table = model.replace('.', '_') + table = model.replace(".", "_") logger.info( - "Moving model %s from module '%s' to module '%s'", - model, old_module, new_module + "Moving model %s from module '%s' to module '%s'", model, old_module, new_module ) logged_query( cr, - 'UPDATE ir_model_data SET module=%s ' - 'WHERE model = %s AND module = %s', + "UPDATE ir_model_data SET module=%s " "WHERE model = %s AND module = %s", (new_module, model, old_module), ) logged_query( @@ -2770,7 +3043,8 @@ def update_module_moved_models(cr, model, old_module, new_module): ) underscore = "_" if version_info[0] < 12 else "__" 
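# Odoo >= 12 generates field xml-ids as field_<model>__<field>, with a double
# underscore between model and field name, while older versions used a single
# one; the separator above accounts for that.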
logged_query( - cr, """UPDATE ir_model_data imd + cr, + """UPDATE ir_model_data imd SET module = %s FROM ir_model_fields imf WHERE imd.model = 'ir.model.fields' @@ -2782,9 +3056,8 @@ def update_module_moved_models(cr, model, old_module, new_module): return logged_query( cr, - "UPDATE ir_translation SET module=%s " - "WHERE name LIKE %s AND module = %s", - (new_module, model + ',%', old_module), + "UPDATE ir_translation SET module=%s " "WHERE name LIKE %s AND module = %s", + (new_module, model + ",%", old_module), ) @@ -2808,8 +3081,7 @@ def safe_unlink(records, do_raise=False): except Exception as e: if do_raise: raise - logger.info("Error deleting %s#%s: %s", - record._name, record.id, repr(e)) + logger.info("Error deleting %s#%s: %s", record._name, record.id, repr(e)) def delete_records_safely_by_xml_id(env, xml_ids, delete_childs=False): @@ -2832,7 +3104,7 @@ def delete_records_safely_by_xml_id(env, xml_ids, delete_childs=False): if version_info[0] > 8: errors = errors + (UserError,) for xml_id in xml_ids: - logger.debug('Deleting record for XML-ID %s', xml_id) + logger.debug("Deleting record for XML-ID %s", xml_id) try: # This can raise an environment KeyError if the model is not loaded record = env.ref(xml_id, raise_if_not_found=False) @@ -2840,13 +3112,14 @@ def delete_records_safely_by_xml_id(env, xml_ids, delete_childs=False): continue if delete_childs: child_and_parent_records = env["ir.ui.view"].search( - [("inherit_id", "child_of", record.id)], order="id desc") + [("inherit_id", "child_of", record.id)], order="id desc" + ) safe_unlink(child_and_parent_records, do_raise=True) else: safe_unlink(record, do_raise=True) except errors as e: - logger.info('Error deleting XML-ID %s: %s', xml_id, repr(e)) - module, name = xml_id.split('.') + logger.info("Error deleting XML-ID %s: %s", xml_id, repr(e)) + module, name = xml_id.split(".") imd = env["ir.model.data"].search( [("module", "=", module), ("name", "=", name)] ) @@ -2875,21 +3148,21 @@ def delete_sql_constraint_safely(env, module, table, name): def chunked(records, single=True): - """ Memory and performance friendly method to iterate over a potentially + """Memory and performance friendly method to iterate over a potentially large number of records. Yields either a whole chunk or a single record - at the time. Don't nest calls to this method. """ + at the time. Don't nest calls to this method.""" if version_info[0] > 10: invalidate = records.env.cache.invalidate elif version_info[0] > 7: invalidate = records.env.invalidate_all else: - raise Exception('Not supported Odoo version for this method.') + raise Exception("Not supported Odoo version for this method.") size = core.models.PREFETCH_MAX model = records._name ids = records.with_context(prefetch_fields=False).ids for i in range(0, len(ids), size): invalidate() - chunk = records.env[model].browse(ids[i:i + size]) + chunk = records.env[model].browse(ids[i : i + size]) if single: for record in chunk: yield record @@ -2907,11 +3180,19 @@ def set_xml_ids_noupdate_value(env, module, xml_ids, value): if not isinstance(xml_ids, (list, tuple)): do_raise("XML IDs %s must be a tuple or list!" 
% xml_ids) - logged_query(env.cr, """ + logged_query( + env.cr, + """ UPDATE ir_model_data SET noupdate = %s WHERE module = %s AND name in %s - """, (value, module, tuple(xml_ids),)) + """, + ( + value, + module, + tuple(xml_ids), + ), + ) def convert_to_company_dependent( @@ -2921,7 +3202,7 @@ def convert_to_company_dependent( destination_field_name, model_table_name=None, ): - """ For each row in a given table, the value of a given field is + """For each row in a given table, the value of a given field is set in another 'company dependant' field of the same table. Useful in cases when from one version to another one, some field in a model becomes a 'company dependent' field. @@ -2939,51 +3220,58 @@ def convert_to_company_dependent( the table name is taken from the model (so the model must be registered previously). """ - logger.debug("Converting {} in {} to company_dependent field {}.".format( - origin_field_name, model_name, destination_field_name)) + logger.debug( + "Converting {} in {} to company_dependent field {}.".format( + origin_field_name, model_name, destination_field_name + ) + ) if origin_field_name == destination_field_name: - do_raise("A field can't be converted to property without changing " - "its name.") + do_raise("A field can't be converted to property without changing " "its name.") cr = env.cr mapping_type2field = { - 'char': 'value_text', - 'float': 'value_float', - 'boolean': 'value_integer', - 'integer': 'value_integer', - 'text': 'value_text', - 'binary': 'value_binary', - 'many2one': 'value_reference', - 'date': 'value_datetime', - 'datetime': 'value_datetime', - 'selection': 'value_text', + "char": "value_text", + "float": "value_float", + "boolean": "value_integer", + "integer": "value_integer", + "text": "value_text", + "binary": "value_binary", + "many2one": "value_reference", + "date": "value_datetime", + "datetime": "value_datetime", + "selection": "value_text", } # Determine field id, field type and the model name of the relation # in case of many2one. 
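# ir_property keeps each type in a dedicated value_* column: many2one values
# are stored in value_reference as "model.name,<id>" and booleans as 0/1 in
# value_integer, hence the special field_select handling for those two types
# below.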
- cr.execute("SELECT id, relation, ttype FROM ir_model_fields " - "WHERE name=%s AND model=%s", - (destination_field_name, model_name)) + cr.execute( + "SELECT id, relation, ttype FROM ir_model_fields " "WHERE name=%s AND model=%s", + (destination_field_name, model_name), + ) destination_field_id, relation, d_field_type = cr.fetchone() value_field_name = mapping_type2field.get(d_field_type) field_select = sql.Identifier(origin_field_name) args = { - 'model_name': model_name, - 'fields_id': destination_field_id, - 'name': destination_field_name, - 'type': d_field_type, + "model_name": model_name, + "fields_id": destination_field_id, + "name": destination_field_name, + "type": d_field_type, } - if d_field_type == 'many2one': + if d_field_type == "many2one": field_select = sql.SQL("%(relation)s || ',' || {}::TEXT").format( - sql.Identifier(origin_field_name)) - args['relation'] = relation - elif d_field_type == 'boolean': + sql.Identifier(origin_field_name) + ) + args["relation"] = relation + elif d_field_type == "boolean": field_select = sql.SQL("CASE WHEN {} = true THEN 1 ELSE 0 END").format( - sql.Identifier(origin_field_name)) + sql.Identifier(origin_field_name) + ) cr.execute("SELECT id FROM res_company") company_ids = [x[0] for x in cr.fetchall()] for company_id in company_ids: - args['company_id'] = company_id + args["company_id"] = company_id logged_query( - cr, sql.SQL(""" + cr, + sql.SQL( + """ INSERT INTO ir_property ( fields_id, company_id, res_id, name, type, {value_field_name} ) @@ -2992,14 +3280,14 @@ def convert_to_company_dependent( %(model_name)s || ',' || id::TEXT, %(name)s, %(type)s, {field_select} FROM {table_name} WHERE {origin_field_name} IS NOT NULL; - """).format( + """ + ).format( value_field_name=sql.Identifier(value_field_name), field_select=field_select, origin_field_name=sql.Identifier(origin_field_name), - table_name=sql.Identifier( - model_table_name or env[model_name]._table - ) - ), args, + table_name=sql.Identifier(model_table_name or env[model_name]._table), + ), + args, ) @@ -3020,14 +3308,18 @@ def cow_templates_mark_if_equal_to_upstream(cr, mark_colname=None): if not column_exists(cr, "ir_ui_view", mark_colname): logged_query( cr, - sql.SQL("ALTER TABLE ir_ui_view ADD COLUMN {} BOOLEAN") - .format(mark_identifier)) + sql.SQL("ALTER TABLE ir_ui_view ADD COLUMN {} BOOLEAN").format( + mark_identifier + ), + ) # Map all qweb views - cr.execute(""" + cr.execute( + """ SELECT id, arch_db, key, website_id FROM ir_ui_view WHERE type = 'qweb' AND key IS NOT NULL - """) + """ + ) views_map = {} for id_, arch_db, key, website_id in cr.fetchall(): views_map[(key, website_id)] = (id_, arch_db) @@ -3052,8 +3344,9 @@ def cow_templates_mark_if_equal_to_upstream(cr, mark_colname=None): # Mark equal views logged_query( cr, - sql.SQL("UPDATE ir_ui_view SET {} = TRUE WHERE id = ANY(%s)") - .format(mark_identifier), + sql.SQL("UPDATE ir_ui_view SET {} = TRUE WHERE id = ANY(%s)").format( + mark_identifier + ), (equal,), ) @@ -3074,7 +3367,8 @@ def cow_templates_replicate_upstream(cr, mark_colname=None): mark_identifier = sql.Identifier(mark_colname) logged_query( cr, - sql.SQL(""" + sql.SQL( + """ UPDATE ir_ui_view AS specific SET arch_db = generic.arch_db FROM ir_ui_view AS generic @@ -3085,6 +3379,6 @@ def cow_templates_replicate_upstream(cr, mark_colname=None): specific.key = generic.key AND specific.type = 'qweb' AND generic.type = 'qweb' - """) - .format(mark_identifier), + """ + ).format(mark_identifier), ) diff --git a/openupgradelib/openupgrade_120.py 
b/openupgradelib/openupgrade_120.py index 0daaf33c..23a4baba 100644 --- a/openupgradelib/openupgrade_120.py +++ b/openupgradelib/openupgrade_120.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # pylint: disable=C8202 # Copyright 2019 Tecnativa - Jairo Llopis # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). @@ -15,7 +16,9 @@ Don't expect perfection. But patches are welcome. """ +import logging from itertools import product + from psycopg2.extensions import AsIs from .openupgrade import update_field_multilang @@ -23,9 +26,8 @@ convert_html_fragment, convert_html_replacement_class_shortcut as _r, ) -import logging -logger = logging.getLogger('OpenUpgrade') +logger = logging.getLogger("OpenUpgrade") logger.setLevel(logging.DEBUG) _COLS = range(1, 13) @@ -52,40 +54,43 @@ # These replacements are from standard Bootstrap 3 to 4 _BS4_REPLACEMENTS = ( # Convert columns and modifiers among tiers - *(_r("col%s-%s" % (t3, col), "col%s-%s" % (t4, col)) - for (t3, t4), col in product(_TIERS, _COLS)), - *(_r("col%s-offset-%s" % (t3, col), "offset%s-%s" % (t4, col)) - for (t3, t4), col in product(_TIERS, _COLS)), - *(_r("col%s-pull-%s" % (t3, col), "order-first") - for (t3, t4), col in product(_TIERS, _COLS)), - *(_r("col%s-push-%s" % (t3, col), "order-last") - for (t3, t4), col in product(_TIERS, _COLS)), - + *( + _r("col%s-%s" % (t3, col), "col%s-%s" % (t4, col)) + for (t3, t4), col in product(_TIERS, _COLS) + ), + *( + _r("col%s-offset-%s" % (t3, col), "offset%s-%s" % (t4, col)) + for (t3, t4), col in product(_TIERS, _COLS) + ), + *( + _r("col%s-pull-%s" % (t3, col), "order-first") + for (t3, t4), col in product(_TIERS, _COLS) + ), + *( + _r("col%s-push-%s" % (t3, col), "order-last") + for (t3, t4), col in product(_TIERS, _COLS) + ), # Typography _r(selector="blockquote", class_add="blockquote"), _r(selector="blockquote > small", class_add="blockquote-footer"), _r("blockquote-reverse", "blockquote text-right"), _r(selector=".list-inline > li", class_add="list-linline-item"), - # .page-header dropped. See https://stackoverflow.com/a/49708022/1468388 _r("page-header", "pb-2 mt-4 mb-2 border-bottom"), - #
<dl>
& co. See https://stackoverflow.com/a/56020841/1468388 _r("dl-horizontal", "row"), - _r(selector=".dl-horizontal > dt", - class_add="col-sm-3 text-sm-right"), + _r(selector=".dl-horizontal > dt", class_add="col-sm-3 text-sm-right"), _r(selector=".dl-horizontal > dd", class_add="col-sm-9"), - # Images _r("img-circle", "rounded-circle"), _r("img-responsive", "img-fluid d-block"), _r("img-rounded", "rounded"), - # Tables _r("table-condensed", "table-sm"), - *(_r("%s" % c3, "table-%s" % c4, selector=".table .%s" % c3) - for (c3, c4) in _CONTEXTS), - + *( + _r("%s" % c3, "table-%s" % c4, selector=".table .%s" % c3) + for (c3, c4) in _CONTEXTS + ), # Forms _r("control-label", "col-form-label"), _r("form-group-lg", "form-control-lg"), @@ -93,52 +98,68 @@ _r("input-lg", "form-control-lg"), _r("input-sm", "form-control-sm"), _r("help-block", "form-text"), - _r(selector="div.checkbox, div.radio", - class_rm="checkbox radio", class_add="form-check"), + _r( + selector="div.checkbox, div.radio", + class_rm="checkbox radio", + class_add="form-check", + ), _r(selector="div.form-check label", class_add="form-check-label"), _r(selector="div.form-check input", class_add="form-check-input"), - _r(selector="label.checkbox-inline", - class_rm="checkbox-inline", - class_add="form-check-label form-check-inline"), - _r(selector=".checkbox-inline input", - class_rm="radio checkbox", class_add="form-check-input"), + _r( + selector="label.checkbox-inline", + class_rm="checkbox-inline", + class_add="form-check-label form-check-inline", + ), + _r( + selector=".checkbox-inline input", + class_rm="radio checkbox", + class_add="form-check-input", + ), _r(selector=".form-horizontal .form-group", class_add="row"), - _r(selector=".form-horizontal .form-group .control-label", - class_rm="control-label", class_add="col-form-label text-right"), + _r( + selector=".form-horizontal .form-group .control-label", + class_rm="control-label", + class_add="col-form-label text-right", + ), _r(selector=".form-horizontal", class_rm="form-horizontal"), _r("form-control-static", "form-control-plaintext"), - # Input groups - _r(selector=".form-control + .input-group-addon", - class_rm="input-group-addon", class_add="input-group-text", - wrap=""), - _r(selector=".form-control + .input-group-btn", - class_rm="input-group-btn", class_add="input-group-append"), - _r("input-group-addon", "input-group-text", - wrap=""), + _r( + selector=".form-control + .input-group-addon", + class_rm="input-group-addon", + class_add="input-group-text", + wrap="", + ), + _r( + selector=".form-control + .input-group-btn", + class_rm="input-group-btn", + class_add="input-group-append", + ), + _r( + "input-group-addon", + "input-group-text", + wrap="", + ), _r("input-group-btn", "input-group-prepend"), - # Buttons _r("btn-default", "btn-secondary"), _r("btn-xs", "btn-sm"), _r("btn-group-xs", "btn-group-sm"), - _r("btn-group-justified", "w-100", - wrap='
<div class="btn-group d-flex" role="group"/>'), + _r( + "btn-group-justified", + "w-100", + wrap='<div class="btn-group d-flex" role="group"/>
', + ), _r(selector=".btn-group + .btn-group", class_add="ml-1"), - # Dropdowns _r("divider", "dropdown-divider", selector=".dropdown-menu > .divider"), _r(selector=".dropdown-menu > li > a", class_add="dropdown-item"), - # List groups - _r("list-group-item", "list-group-item-action", - selector="a.list-group-item"), - + _r("list-group-item", "list-group-item-action", selector="a.list-group-item"), # Navs _r(selector=".nav > li", class_add="nav-item"), _r(selector=".nav > li > a", class_add="nav-link"), _r("nav-stacked", "flex-column"), - # Navbar _r(selector="navbar", class_add="navbar-expand-sm"), _r("navbar-default", "navbar-light"), @@ -147,19 +168,15 @@ _r("navbar-fixed-top", "fixed-top"), _r("navbar-btn", "nav-item"), _r("navbar-right", "ml-auto"), - # Pagination _r(selector=".pagination > li", class_add="page-item"), _r(selector=".pagination > li > a", class_add="page-link"), - # Breadcrumbs _r(selector=".breadcrumb > li", class_add="breadcrumb-item"), - # Labels and badges _r("label", "badge"), _r("badge-default", "badge-secondary"), *(_r("label-%s" % c3, "badge-%s" % c4) for (c3, c4) in _CONTEXTS), - # Convert panels, thumbnails and wells to cards _r("panel", "card"), _r("panel-body", "card-body"), @@ -171,11 +188,9 @@ *(_r("panel-%s" % c3, "bg-%s" % c4) for (c3, c4) in _CONTEXTS), _r("well", "card card-body"), _r("thumbnail", "card card-body"), - # Progress *(_r("progress-bar-%s" % c3, "bg-%s" % c4) for (c3, c4) in _CONTEXTS), _r("active", "progress-bar-animated", selector=".progress-bar.active"), - # Carousel _r("carousel-control left", "carousel-control-prev"), _r("carousel-control right", "carousel-control-next"), @@ -185,27 +200,29 @@ _r("next", "carousel-item-next", selector=".carousel .next"), _r("prev", "carousel-item-prev", selector=".carousel .prev"), _r("right", "carousel-item-right", selector=".carousel .right"), - # Utilities _r("center-block", "d-block mx-auto"), _r("hidden", "d-none"), _r("hidden-xs", "d-none d-md-block"), - _r(selector=".hidden-sm", - class_rm="hidden-sm d-md-block", - class_add="d-md-none d-lg-block"), - _r(selector=".hidden-md", - class_rm="hidden-md d-lg-block", - class_add="d-lg-none d-xl-block"), - _r(selector=".hidden-lg", - class_rm="hidden-lg d-xl-block", - class_add="d-xl-none"), + _r( + selector=".hidden-sm", + class_rm="hidden-sm d-md-block", + class_add="d-md-none d-lg-block", + ), + _r( + selector=".hidden-md", + class_rm="hidden-md d-lg-block", + class_add="d-lg-none d-xl-block", + ), + _r(selector=".hidden-lg", class_rm="hidden-lg d-xl-block", class_add="d-xl-none"), _r("hidden-lg", "d-xl-none"), *(_r("hidden%s" % t3, "d-none%s" % t4) for (t3, t4) in _TIERS), _r("hidden-print", "d-print-none"), - *(_r("visible%s-%s" % (t3, vis), "d-") - for (t3, t4), vis in product(_TIERS, _BS3_VISIBLES)), - *(_r("visible-print-%s" % vis, "d-print-%s" % vis) - for vis in _BS3_VISIBLES), + *( + _r("visible%s-%s" % (t3, vis), "d-") + for (t3, t4), vis in product(_TIERS, _BS3_VISIBLES) + ), + *(_r("visible-print-%s" % vis, "d-print-%s" % vis) for vis in _BS3_VISIBLES), _r("pull-left", "float-left"), _r("pull-right", "float-right"), ) @@ -213,112 +230,125 @@ # These replacements are specific for Odoo v11 to v12 _ODOO12_REPLACEMENTS = ( # Grays renamed; handpicked closest gray equivalent matches - *(_r("bg-gray%s" % v11, "bg-%d00" % v12) - for v11, v12 in ( - ("-darker", 9), ("-dark", 8), ("", 7), - ("-light", 6), ("-lighter", 1))), - + *( + _r("bg-gray%s" % v11, "bg-%d00" % v12) + for v11, v12 in ( + ("-darker", 9), + ("-dark", 8), + ("", 7), + 
("-light", 6), + ("-lighter", 1), + ) + ), # Odoo v12 editor adds/removes tags, not tags; keep UX _r(selector="strong", tag="b"), - # 25% opacity black background had white text in v11, but black in v12 _r(selector=".bg-black-25", class_add="text-white"), - # Image floating snippet disappears _r(selector=".o_image_floating.o_margin_s.float-left", class_add="mr8"), _r(selector=".o_image_floating.o_margin_s.float-right", class_add="ml8"), - _r(selector=".o_image_floating.o_margin_m.float-left", - style_add={"margin-right": "12px"}), - _r(selector=".o_image_floating.o_margin_m.float-right", - style_add={"margin-left": "12px"}), + _r( + selector=".o_image_floating.o_margin_m.float-left", + style_add={"margin-right": "12px"}, + ), + _r( + selector=".o_image_floating.o_margin_m.float-right", + style_add={"margin-left": "12px"}, + ), _r(selector=".o_image_floating.o_margin_l.float-left", class_add="mr16"), _r(selector=".o_image_floating.o_margin_l.float-right", class_add="ml16"), _r(selector=".o_image_floating.o_margin_xl.float-left", class_add="mr32"), _r(selector=".o_image_floating.o_margin_xl.float-right", class_add="ml32"), _r(selector=".o_image_floating.o_margin_s", class_add="mb4"), _r(selector=".o_image_floating.o_margin_m", class_add="mb8"), - _r(selector=".o_image_floating.o_margin_l", - style_add={"margin-bottom": "12px"}), + _r(selector=".o_image_floating.o_margin_l", style_add={"margin-bottom": "12px"}), _r(selector=".o_image_floating.o_margin_xl", class_add="mb24"), _r(selector=".o_image_floating .o_footer", class_rm="o_footer"), _r(class_rm="s_image_floating"), - _r("o_image_floating o_margin_s o_margin_m o_margin_l o_margin_xl", - "col-5 p-0", selector=".o_image_floating"), - + _r( + "o_image_floating o_margin_s o_margin_m o_margin_l o_margin_xl", + "col-5 p-0", + selector=".o_image_floating", + ), # Big message (v11) or Banner (v12) snippet - _r(selector=".jumbotron h1, .jumbotron .h1", - style_add={"font-size": "63px"}), + _r(selector=".jumbotron h1, .jumbotron .h1", style_add={"font-size": "63px"}), _r(selector=".jumbotron p", class_add="lead"), - # Big picture snippet _r(selector=".s_big_picture h2", class_add="mt24"), - # Slider (v11) or Carousel (v12) snippet - _r(selector=".carousel", - class_rm="s_banner oe_custom_bg", - class_add="s_carousel s_carousel_default", - style_rm={"height"}, - style_add=lambda styles, **kw: { - "min-height": styles.get("height", "400px")}), - _r(selector=".carousel-control-prev .fa-chevron-left", - class_rm="fa fa-chevron-left", - class_add="carousel-control-prev-icon", - tag="span"), - _r(selector=".carousel-control-next .fa-chevron-right", - class_rm="fa fa-chevron-right", - class_add="carousel-control-next-icon", - tag="span"), - + _r( + selector=".carousel", + class_rm="s_banner oe_custom_bg", + class_add="s_carousel s_carousel_default", + style_rm={"height"}, + style_add=lambda styles, **kw: {"min-height": styles.get("height", "400px")}, + ), + _r( + selector=".carousel-control-prev .fa-chevron-left", + class_rm="fa fa-chevron-left", + class_add="carousel-control-prev-icon", + tag="span", + ), + _r( + selector=".carousel-control-next .fa-chevron-right", + class_rm="fa fa-chevron-right", + class_add="carousel-control-next-icon", + tag="span", + ), # Text snippet loses its built-in headers _r(selector=".s_text_block h2", class_add="mt24"), - # Cover snippet - _r(selector=".s_text_block_image_fw .container > .row > div", - style_add={"padding": "30px"}), - + _r( + selector=".s_text_block_image_fw .container > .row > div", + 
style_add={"padding": "30px"}, + ), # Image gallery snippet - _r(selector=".s_image_gallery .o_indicators_left", - class_rm="fa fa-chevron-left", - class_add="text-center pt-2", - style_rm={"overflow", "padding", "border"}), - _r(selector=".s_image_gallery .o_indicators_left > br", - wrap=''), - _r(selector=".s_image_gallery .o_indicators_right", - class_rm="fa fa-chevron-right", - class_add="text-center pt-2", - style_rm={"overflow", "padding", "border"}), - _r(selector=".s_image_gallery .o_indicators_right > br", - class_add="fa fa-chevron-right", tag="i"), - + _r( + selector=".s_image_gallery .o_indicators_left", + class_rm="fa fa-chevron-left", + class_add="text-center pt-2", + style_rm={"overflow", "padding", "border"}, + ), + _r( + selector=".s_image_gallery .o_indicators_left > br", + wrap='', + ), + _r( + selector=".s_image_gallery .o_indicators_right", + class_rm="fa fa-chevron-right", + class_add="text-center pt-2", + style_rm={"overflow", "padding", "border"}, + ), + _r( + selector=".s_image_gallery .o_indicators_right > br", + class_add="fa fa-chevron-right", + tag="i", + ), # Comparisons snippet - _r(selector=".s_comparisons > .container > .row > div", - class_add="s_col_no_bgcolor", - attr_add={"data-name": "Box"}), - _r(selector=".s_comparisons .card .list-group", - class_add="list-group-flush"), - + _r( + selector=".s_comparisons > .container > .row > div", + class_add="s_col_no_bgcolor", + attr_add={"data-name": "Box"}, + ), + _r(selector=".s_comparisons .card .list-group", class_add="list-group-flush"), # Company team snippet _r(selector=".s_company_team h1", class_add="mt24"), - # Call to action snippet _r(selector=".s_button .lead:first-child", class_rm="lead", tag="h3"), - # Parallax sliders _r(selector=".s_parallax", style_add={"min-height": "200px"}), - _r(selector=".s_parallax_slider .blockquote", - style_add={"border-left": "5px solid #eeeeee"}), - + _r( + selector=".s_parallax_slider .blockquote", + style_add={"border-left": "5px solid #eeeeee"}, + ), # Accordion snippet _r(selector=".s_faq_collapse .card-header h4", class_add="mb0"), _r(selector=".s_faq_collapse .panel", class_add="mt6"), - # Well snippet _r(selector=".s_well.card", class_add="bg-100"), - # Panel snippet _r("s_panel", "s_card"), - _r(selector=".s_card.bg-secondary", - class_rm="bg-secondary", class_add="bg-white"), + _r(selector=".s_card.bg-secondary", class_rm="bg-secondary", class_add="bg-white"), ) ALL_REPLACEMENTS = _BS4_REPLACEMENTS + _ODOO12_REPLACEMENTS @@ -340,17 +370,18 @@ def convert_string_bootstrap_3to4(html_string, pretty_print=True): return html_string try: return convert_html_fragment( - html_string, ALL_REPLACEMENTS, pretty_print, + html_string, + ALL_REPLACEMENTS, + pretty_print, ) except Exception: - logger.error( - 'Error converting string BS3 to BS4:\n%s' % html_string - ) + logger.error("Error converting string BS3 to BS4:\n%s" % html_string) raise -def convert_field_bootstrap_3to4(env, model_name, field_name, domain=None, - method='orm'): +def convert_field_bootstrap_3to4( + env, model_name, field_name, domain=None, method="orm" +): """This converts all the values for the given model and field, being able to restrict to a domain of affected records. 
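As a usage sketch (model, field and method are illustrative choices, not a prescription), a 12.0 post-migration script could convert all website view architectures with:

    from openupgradelib import openupgrade_120

    def migrate(env, version):
        openupgrade_120.convert_field_bootstrap_3to4(
            env, "ir.ui.view", "arch_db", method="orm")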
@@ -364,7 +395,10 @@ def convert_field_bootstrap_3to4(env, model_name, field_name, domain=None, assert method in {"orm", "sql"} if method == "orm": return _convert_field_bootstrap_3to4_orm( - env, model_name, field_name, domain, + env, + model_name, + field_name, + domain, ) records = env[model_name].search(domain or []) return _convert_field_bootstrap_3to4_sql( @@ -374,8 +408,7 @@ def convert_field_bootstrap_3to4(env, model_name, field_name, domain=None, ) -def _convert_field_bootstrap_3to4_orm(env, model_name, field_name, - domain=None): +def _convert_field_bootstrap_3to4_orm(env, model_name, field_name, domain=None): """Convert a field from Bootstrap 3 to 4, using Odoo ORM. :param odoo.api.Environment env: Environment to use. @@ -383,9 +416,7 @@ def _convert_field_bootstrap_3to4_orm(env, model_name, field_name, :param str field_name: Field to convert in that model. :param domain list: Domain to restrict conversion. """ - domain = domain or [ - (field_name, "!=", False), (field_name, "!=", "
") - ] + domain = domain or [(field_name, "!=", False), (field_name, "!=", "


")] records = env[model_name].search(domain) update_field_multilang( records, @@ -422,5 +453,8 @@ def _convert_field_bootstrap_3to4_sql(cr, table, field, ids=None): if old_content != new_content: cr.execute( "UPDATE %s SET %s = %s WHERE id = %s", - AsIs(table), AsIs(field), new_content, id_, + AsIs(table), + AsIs(field), + new_content, + id_, ) diff --git a/openupgradelib/openupgrade_130.py b/openupgradelib/openupgrade_130.py index 7bff387f..cf641388 100644 --- a/openupgradelib/openupgrade_130.py +++ b/openupgradelib/openupgrade_130.py @@ -3,18 +3,23 @@ """Tools specific for migrating from Odoo 12.0 to 13.0.""" import logging + from . import openupgrade -_logger = logging.getLogger('OpenUpgrade') +_logger = logging.getLogger("OpenUpgrade") def convert_old_style_tax_tag_to_new( - env, report_line, old_tag_id, new_debit_tag_id, new_credit_tag_id): - _logger.debug("Converting %s to +/- %s..." % (report_line.name, - report_line.tag_name)) + env, report_line, old_tag_id, new_debit_tag_id, new_credit_tag_id +): + _logger.debug( + "Converting %s to +/- %s..." % (report_line.name, report_line.tag_name) + ) # First, update the tax repartition lines' tags - affected = openupgrade.logged_query(env.cr, """ + affected = openupgrade.logged_query( + env.cr, + """ UPDATE account_account_tag_account_tax_repartition_line_rel r SET account_account_tag_id = %s FROM account_tax_repartition_line atrl @@ -23,13 +28,17 @@ def convert_old_style_tax_tag_to_new( atrl.invoice_tax_id IS NOT NULL AND atrl.refund_tax_id IS NULL AND r.account_account_tag_id = %s - """ % (new_debit_tag_id, old_tag_id)) + """ + % (new_debit_tag_id, old_tag_id), + ) if affected > 0: - _logger.info('Converted tag "%s" to "+%s" on repartition line.' % ( - report_line.name, - report_line.tag_name - )) - affected = openupgrade.logged_query(env.cr, """ + _logger.info( + 'Converted tag "%s" to "+%s" on repartition line.' + % (report_line.name, report_line.tag_name) + ) + affected = openupgrade.logged_query( + env.cr, + """ UPDATE account_tax_repartition_financial_tags r SET account_account_tag_id = %s FROM account_tax_repartition_line atrl @@ -38,11 +47,17 @@ def convert_old_style_tax_tag_to_new( atrl.invoice_tax_id IS NOT NULL AND atrl.refund_tax_id IS NULL AND r.account_account_tag_id = %s - """ % (new_debit_tag_id, old_tag_id)) + """ + % (new_debit_tag_id, old_tag_id), + ) if affected > 0: - _logger.info('Converted tag "%s" to "+%s" on repartition line ' - 'template.' % (report_line.name, report_line.tag_name)) - affected = openupgrade.logged_query(env.cr, """ + _logger.info( + 'Converted tag "%s" to "+%s" on repartition line ' + "template." % (report_line.name, report_line.tag_name) + ) + affected = openupgrade.logged_query( + env.cr, + """ UPDATE account_account_tag_account_tax_repartition_line_rel r SET account_account_tag_id = %s FROM account_tax_repartition_line atrl @@ -51,13 +66,17 @@ def convert_old_style_tax_tag_to_new( atrl.invoice_tax_id IS NULL AND atrl.refund_tax_id IS NOT NULL AND r.account_account_tag_id = %s - """ % (new_credit_tag_id, old_tag_id)) + """ + % (new_credit_tag_id, old_tag_id), + ) if affected > 0: - _logger.info('Converted tag "%s" to "-%s" on repartition line.' % ( - report_line.name, - report_line.tag_name - )) - affected = openupgrade.logged_query(env.cr, """ + _logger.info( + 'Converted tag "%s" to "-%s" on repartition line.' 
+ % (report_line.name, report_line.tag_name) + ) + affected = openupgrade.logged_query( + env.cr, + """ UPDATE account_tax_repartition_financial_tags r SET account_account_tag_id = %s FROM account_tax_repartition_line atrl @@ -66,13 +85,19 @@ def convert_old_style_tax_tag_to_new( atrl.invoice_tax_id IS NULL AND atrl.refund_tax_id IS NOT NULL AND r.account_account_tag_id = %s - """ % (new_credit_tag_id, old_tag_id)) + """ + % (new_credit_tag_id, old_tag_id), + ) if affected > 0: - _logger.info('Converted tag "%s" to "-%s" on repartition line ' - 'template.' % (report_line.name, report_line.tag_name)) + _logger.info( + 'Converted tag "%s" to "-%s" on repartition line ' + "template." % (report_line.name, report_line.tag_name) + ) # Then, update the move line tags - openupgrade.logged_query(env.cr, """ + openupgrade.logged_query( + env.cr, + """ UPDATE account_account_tag_account_move_line_rel r SET account_account_tag_id = %s FROM account_move_line aml @@ -80,8 +105,12 @@ def convert_old_style_tax_tag_to_new( r.account_move_line_id = aml.id AND aml.debit >= 0 AND r.account_account_tag_id = %s - """, (new_debit_tag_id, old_tag_id)) - openupgrade.logged_query(env.cr, """ + """, + (new_debit_tag_id, old_tag_id), + ) + openupgrade.logged_query( + env.cr, + """ UPDATE account_account_tag_account_move_line_rel r SET account_account_tag_id = %s FROM account_move_line aml @@ -89,21 +118,30 @@ def convert_old_style_tax_tag_to_new( r.account_move_line_id = aml.id AND aml.credit > 0 AND r.account_account_tag_id = %s - """, (new_credit_tag_id, old_tag_id)) + """, + (new_credit_tag_id, old_tag_id), + ) # The old tag should be deleted or deactivated, because the l10n VAT # report would otherwise still use them. Besides, they are not # necessary anymore anyway. - openupgrade.logged_query(env.cr, """ + openupgrade.logged_query( + env.cr, + """ UPDATE account_account_tag SET active = FALSE WHERE id = %s - """, (old_tag_id,)) + """, + (old_tag_id,), + ) def unlink_invalid_tax_tags_from_move_lines( - env, module, base_tag_xmlids, tax_tag_xmlids): - openupgrade.logged_query(env.cr, """ + env, module, base_tag_xmlids, tax_tag_xmlids +): + openupgrade.logged_query( + env.cr, + """ DELETE FROM account_account_tag_account_move_line_rel r WHERE account_account_tag_id IN ( @@ -111,14 +149,20 @@ def unlink_invalid_tax_tags_from_move_lines( WHERE model = 'account.account.tag' AND module = %s AND - name IN ('""" + "','".join(tax_tag_xmlids) + """') + name IN ('""" + + "','".join(tax_tag_xmlids) + + """') ) AND account_move_line_id IN ( SELECT id FROM account_move_line WHERE tax_base_amount = 0 ) - """, [module]) - openupgrade.logged_query(env.cr, """ + """, + [module], + ) + openupgrade.logged_query( + env.cr, + """ DELETE FROM account_account_tag_account_move_line_rel r WHERE account_account_tag_id IN ( @@ -126,24 +170,31 @@ def unlink_invalid_tax_tags_from_move_lines( WHERE model = 'account.account.tag' AND module = %s AND - name IN ('""" + "','".join(base_tag_xmlids) + """') + name IN ('""" + + "','".join(base_tag_xmlids) + + """') ) AND account_move_line_id IN ( SELECT id FROM account_move_line WHERE tax_base_amount <> 0 ) - """, [module]) + """, + [module], + ) def unlink_invalid_tax_tags_from_repartition_lines( - env, module, base_tag_xmlids, tax_tag_xmlids): - """ The migration script of the account module assigns all tags of - the account.tax's tag_ids field to the tag_ids field of the new - account.tax.repartition.line. 
However, because each repartition - line only needs a 'base' - or 'tax' tag, we clean up the other - tags. + env, module, base_tag_xmlids, tax_tag_xmlids +): + """The migration script of the account module assigns all tags of + the account.tax's tag_ids field to the tag_ids field of the new + account.tax.repartition.line. However, because each repartition + line only needs a 'base' - or 'tax' tag, we clean up the other + tags. """ - openupgrade.logged_query(env.cr, """ + openupgrade.logged_query( + env.cr, + """ DELETE FROM account_account_tag_account_tax_repartition_line_rel r WHERE account_tax_repartition_line_id IN ( @@ -156,10 +207,16 @@ def unlink_invalid_tax_tags_from_repartition_lines( WHERE model = 'account.account.tag' AND module = %s AND - name IN ('""" + "','".join(tax_tag_xmlids) + """') + name IN ('""" + + "','".join(tax_tag_xmlids) + + """') ) - """, [module]) - openupgrade.logged_query(env.cr, """ + """, + [module], + ) + openupgrade.logged_query( + env.cr, + """ DELETE FROM account_account_tag_account_tax_repartition_line_rel r WHERE account_tax_repartition_line_id IN ( @@ -172,6 +229,10 @@ def unlink_invalid_tax_tags_from_repartition_lines( WHERE model = 'account.account.tag' AND module = %s AND - name IN ('""" + "','".join(base_tag_xmlids) + """') + name IN ('""" + + "','".join(base_tag_xmlids) + + """') ) - """, [module]) + """, + [module], + ) diff --git a/openupgradelib/openupgrade_160.py b/openupgradelib/openupgrade_160.py index b1d29866..c64ede5e 100644 --- a/openupgradelib/openupgrade_160.py +++ b/openupgradelib/openupgrade_160.py @@ -5,9 +5,11 @@ the >=16.0 migration. """ import itertools -from .openupgrade import logged_query, table_exists + from odoo.tools.translate import _get_translation_upgrade_queries +from .openupgrade import logged_query, table_exists + def migrate_translations_to_jsonb(env, fields_spec): """ diff --git a/openupgradelib/openupgrade_70.py b/openupgradelib/openupgrade_70.py index a0325593..f10bc378 100644 --- a/openupgradelib/openupgrade_70.py +++ b/openupgradelib/openupgrade_70.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # pylint: disable=C8202 ############################################################################## # # OpenERP, Open Source Management Solution @@ -27,7 +27,8 @@ def set_partner_id_from_partner_address_id( - cr, pool, model_name, partner_field, address_field, table=None): + cr, pool, model_name, partner_field, address_field, table=None +): """ Set the new partner_id on any table with migrated contact ids @@ -48,18 +49,25 @@ def set_partner_id_from_partner_address_id( SET %(partner_field)s=address.openupgrade_7_migrated_to_partner_id FROM res_partner_address address WHERE %(table)s.%(address_field)s=address.id - """ % {'table': table, - 'partner_field': partner_field, - 'address_field': address_field}) + """ + % { + "table": table, + "partner_field": partner_field, + "address_field": address_field, + }, + ) def get_partner_id_from_user_id(cr, user_id): """ - Get the new partner_id from user_id. - :param user_id : user previously used. + Get the new partner_id from user_id. + :param user_id : user previously used. 
""" - cr.execute(""" + cr.execute( + """ SELECT partner_id FROM res_users - WHERE id=%s""", (user_id,)) + WHERE id=%s""", + (user_id,), + ) return cr.fetchone()[0] diff --git a/openupgradelib/openupgrade_80.py b/openupgradelib/openupgrade_80.py index b3dc8159..90ef9ca1 100644 --- a/openupgradelib/openupgrade_80.py +++ b/openupgradelib/openupgrade_80.py @@ -1,4 +1,4 @@ -# -*- encoding: utf-8 -*- +# -*- coding: utf-8 -*- # pylint: disable=C8202 ############################################################################## # # OpenERP, Open Source Management Solution @@ -52,8 +52,9 @@ def get_last_post_for_model(cr, uid, ids, model_pool): for obj in model_pool.browse(cr, uid, ids): message_ids = obj.message_ids if message_ids: - res[obj.id] = sorted( - message_ids, key=lambda x: x.date, reverse=True)[0].date + res[obj.id] = sorted(message_ids, key=lambda x: x.date, reverse=True)[ + 0 + ].date else: res[obj.id] = False return res @@ -76,7 +77,8 @@ def set_message_last_post(cr, uid, pool, models): models = [models] for model in models: model_pool = pool[model] - query = sql.SQL(""" + query = sql.SQL( + """ UPDATE {table} main SET message_last_post = mm.last_date FROM (SELECT res_id, MAX(date) AS last_date @@ -84,13 +86,19 @@ def set_message_last_post(cr, uid, pool, models): WHERE model = %s AND date IS NOT NULL GROUP BY res_id) AS mm WHERE main.id = mm.res_id - """).format(table=sql.Identifier(model_pool._table)) + """ + ).format(table=sql.Identifier(model_pool._table)) logged_query(cr, query, (model,)) def update_aliases( - cr, registry, model_name, set_parent_thread_id, - alias_defaults=None, defaults_id_key=False): + cr, + registry, + model_name, + set_parent_thread_id, + alias_defaults=None, + defaults_id_key=False, +): """ Update a model's aliases according to how they are configured in the model's create() method. @@ -104,19 +112,20 @@ def update_aliases( :param defaults_id_key: When defined, add this key to each alias' \ defaults dictionary with the resource id as its value. """ - model_id = registry['ir.model'].search( - cr, SUPERUSER_ID, [('model', '=', model_name)])[0] - vals = {'alias_parent_model_id': model_id} + model_id = registry["ir.model"].search( + cr, SUPERUSER_ID, [("model", "=", model_name)] + )[0] + vals = {"alias_parent_model_id": model_id} if defaults_id_key and alias_defaults is None: alias_defaults = {} res_ids = registry[model_name].search( - cr, SUPERUSER_ID, [], context={'active_test': False}) - for res in registry[model_name].browse( - cr, SUPERUSER_ID, res_ids): + cr, SUPERUSER_ID, [], context={"active_test": False} + ) + for res in registry[model_name].browse(cr, SUPERUSER_ID, res_ids): if set_parent_thread_id: - vals['alias_parent_thread_id'] = res.id + vals["alias_parent_thread_id"] = res.id if defaults_id_key: alias_defaults[defaults_id_key] = res.id if alias_defaults is not None: - vals['alias_defaults'] = str(alias_defaults) + vals["alias_defaults"] = str(alias_defaults) res.alias_id.write(vals) diff --git a/openupgradelib/openupgrade_90.py b/openupgradelib/openupgrade_90.py index 18fdab4f..8737a415 100644 --- a/openupgradelib/openupgrade_90.py +++ b/openupgradelib/openupgrade_90.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # pylint: disable=C8202 # Copyright 2017 Tecnativa - Pedro M. Baeza # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). 
@@ -39,8 +39,8 @@ def convert_binary_field_to_attachment(env, field_spec): the second element is None, then the column name is taken calling `get_legacy_name` method, which is the typical technique. """ - logger = logging.getLogger('OpenUpgrade') - attachment_model = env['ir.attachment'] + logger = logging.getLogger("OpenUpgrade") + attachment_model = env["ir.attachment"] for model_name in field_spec: model = env[model_name] for field, column in field_spec[model_name]: @@ -56,30 +56,38 @@ def convert_binary_field_to_attachment(env, field_spec): """SELECT id, {0} FROM {1} WHERE {0} IS NOT NULL AND id > {2} ORDER BY id LIMIT 500; - """.format(column, model._table, last_id) + """.format( + column, model._table, last_id + ) ) rows = env.cr.fetchall() if not rows: break logger.info( " converting {0} items starting after {1}..." - "".format(len(rows), last_id)) + "".format(len(rows), last_id) + ) for row in rows: last_id = row[0] data = bytes(row[1]) - if data and data != 'None': - attachment_model.create({ - 'name': field, - 'res_model': model_name, - 'res_field': field, - 'res_id': last_id, - 'type': 'binary', - 'datas': data, - }) + if data and data != "None": + attachment_model.create( + { + "name": field, + "res_model": model_name, + "res_field": field, + "res_id": last_id, + "type": "binary", + "datas": data, + } + ) # Remove source column for cleaning the room - env.cr.execute("ALTER TABLE {} DROP COLUMN {}".format( - model._table, column, - )) + env.cr.execute( + "ALTER TABLE {} DROP COLUMN {}".format( + model._table, + column, + ) + ) def replace_account_types(env, type_spec, unlink=True): @@ -89,27 +97,28 @@ def replace_account_types(env, type_spec, unlink=True): xmlid of new account.account.type) :param unlink: attempt to unlink the old account type """ - logger = logging.getLogger('OpenUpgrade') + logger = logging.getLogger("OpenUpgrade") for old_type, new_type in type_spec: try: type8 = env.ref(old_type) except ValueError: - if getattr(threading.currentThread(), 'testing', False): + if getattr(threading.currentThread(), "testing", False): raise continue type9 = env.ref(new_type) - for table in ('account_account', - 'account_account_template', - 'account_move_line'): + for table in ( + "account_account", + "account_account_template", + "account_move_line", + ): env.cr.execute( "UPDATE %s SET user_type_id = %s WHERE user_type_id = %s", - (AsIs(table), type9.id, type8.id)) + (AsIs(table), type9.id, type8.id), + ) if unlink: with env.cr.savepoint(): try: type8.unlink() except Exception as e: - logger.info( - 'Could not remove account type %s: %s', - old_type, e) + logger.info("Could not remove account type %s: %s", old_type, e) diff --git a/openupgradelib/openupgrade_merge_records.py b/openupgradelib/openupgrade_merge_records.py index a8614e88..7ec16918 100644 --- a/openupgradelib/openupgrade_merge_records.py +++ b/openupgradelib/openupgrade_merge_records.py @@ -1,23 +1,31 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # pylint: disable=C8202 # Copyright 2018 Tecnativa - Pedro M. Baeza # Copyright 2018 Opener B.V. - Stefan Rijnhart # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). 
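A hedged sketch of calling convert_binary_field_to_attachment (reformatted above) from a 9.0 migration script; "res.partner" and "image" are illustrative. Passing None as the column makes the helper derive the legacy column name via get_legacy_name(), as its docstring states:

from openupgradelib import openupgrade
from openupgradelib.openupgrade_90 import convert_binary_field_to_attachment


@openupgrade.migrate()
def migrate(env, version):
    # Moves each non-NULL binary value into ir.attachment in batches of
    # 500 rows, then drops the legacy column from the source table.
    convert_binary_field_to_attachment(env, {"res.partner": [("image", None)]})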
-import logging import functools -from psycopg2 import sql -from psycopg2 import ProgrammingError, IntegrityError +import logging + +from psycopg2 import IntegrityError, ProgrammingError, sql from psycopg2.errorcodes import UNDEFINED_COLUMN, UNIQUE_VIOLATION from psycopg2.extensions import AsIs + from .openupgrade import logged_query, version_info from .openupgrade_tools import column_exists, table_exists -logger = logging.getLogger('OpenUpgrade') +logger = logging.getLogger("OpenUpgrade") logger.setLevel(logging.DEBUG) -def _change_foreign_key_refs(env, model_name, record_ids, target_record_id, - exclude_columns, model_table, extra_where=None): +def _change_foreign_key_refs( + env, + model_name, + record_ids, + target_record_id, + exclude_columns, + model_table, + extra_where=None, +): # As found on https://stackoverflow.com/questions/1152260 # /postgres-sql-to-list-table-foreign-keys # Adapted for specific Odoo structures like many2many tables @@ -32,87 +40,102 @@ def _change_foreign_key_refs(env, model_name, record_ids, target_record_id, AND ccu.table_schema = tc.table_schema WHERE tc.constraint_type = 'FOREIGN KEY' AND ccu.table_name = %s and ccu.column_name = 'id' - """, (model_table,)) + """, + (model_table,), + ) for table, column in env.cr.fetchall(): if (table, column) in exclude_columns: continue # Try one big swoop first - env.cr.execute('SAVEPOINT sp1') # can't use env.cr.savepoint() in base + env.cr.execute("SAVEPOINT sp1") # can't use env.cr.savepoint() in base try: query = sql.SQL( """UPDATE {table} SET {column} = %(target_record_id)s WHERE {column} in %(record_ids)s""" ).format( - table=sql.Identifier(table), column=sql.Identifier(column), + table=sql.Identifier(table), + column=sql.Identifier(column), ) if extra_where: query += sql.SQL(extra_where) logged_query( - env.cr, query, { - 'record_ids': tuple(record_ids), - 'target_record_id': target_record_id, - }, skip_no_result=True, + env.cr, + query, + { + "record_ids": tuple(record_ids), + "target_record_id": target_record_id, + }, + skip_no_result=True, ) except (ProgrammingError, IntegrityError) as error: - env.cr.execute('ROLLBACK TO SAVEPOINT sp1') + env.cr.execute("ROLLBACK TO SAVEPOINT sp1") if error.pgcode == UNDEFINED_COLUMN and extra_where: # extra_where is introducing a bad column. Ignore this table. 
continue elif error.pgcode != UNIQUE_VIOLATION: raise # Fallback on setting each row separately - m2m_table = not column_exists(env.cr, table, 'id') - target_column = column if m2m_table else 'id' - env.cr.execute("""SELECT %(target_column)s FROM %(table)s - WHERE "%(column)s" in %(record_ids)s""", { - 'target_column': AsIs(target_column), - 'table': AsIs(table), - 'column': AsIs(column), - 'record_ids': tuple(record_ids), + m2m_table = not column_exists(env.cr, table, "id") + target_column = column if m2m_table else "id" + env.cr.execute( + """SELECT %(target_column)s FROM %(table)s + WHERE "%(column)s" in %(record_ids)s""", + { + "target_column": AsIs(target_column), + "table": AsIs(table), + "column": AsIs(column), + "record_ids": tuple(record_ids), }, ) for row in list(set([x[0] for x in env.cr.fetchall()])): - env.cr.execute('SAVEPOINT sp2') + env.cr.execute("SAVEPOINT sp2") try: logged_query( - env.cr, """UPDATE %(table)s + env.cr, + """UPDATE %(table)s SET "%(column)s" = %(target_record_id)s - WHERE %(target_column)s = %(record_id)s""", { - 'target_column': AsIs(target_column), - 'table': AsIs(table), - 'column': AsIs(column), - 'record_id': row, - 'target_record_id': target_record_id, + WHERE %(target_column)s = %(record_id)s""", + { + "target_column": AsIs(target_column), + "table": AsIs(table), + "column": AsIs(column), + "record_id": row, + "target_record_id": target_record_id, }, ) except (ProgrammingError, IntegrityError) as error: - env.cr.execute('ROLLBACK TO SAVEPOINT sp2') + env.cr.execute("ROLLBACK TO SAVEPOINT sp2") if error.pgcode != UNIQUE_VIOLATION: raise else: - env.cr.execute('RELEASE SAVEPOINT sp2') + env.cr.execute("RELEASE SAVEPOINT sp2") if m2m_table: # delete remaining values that could not be merged logged_query( - env.cr, """DELETE FROM %(table)s + env.cr, + """DELETE FROM %(table)s WHERE "%(column)s" in %(record_ids)s""", { - 'table': AsIs(table), - 'column': AsIs(column), - 'record_ids': tuple(record_ids), - }, skip_no_result=True, + "table": AsIs(table), + "column": AsIs(column), + "record_ids": tuple(record_ids), + }, + skip_no_result=True, ) else: - env.cr.execute('RELEASE SAVEPOINT sp1') + env.cr.execute("RELEASE SAVEPOINT sp1") -def _change_many2one_refs_orm(env, model_name, record_ids, target_record_id, - exclude_columns): - fields = env['ir.model.fields'].search([ - ('ttype', '=', 'many2one'), - ('relation', '=', model_name), - ]) +def _change_many2one_refs_orm( + env, model_name, record_ids, target_record_id, exclude_columns +): + fields = env["ir.model.fields"].search( + [ + ("ttype", "=", "many2one"), + ("relation", "=", model_name), + ] + ) for field in fields: try: model = env[field.model].with_context(active_test=False) @@ -120,59 +143,76 @@ def _change_many2one_refs_orm(env, model_name, record_ids, target_record_id, continue field_name = field.name if ( - not model._auto or not model._fields.get(field_name) or - not field.store or (model._table, field_name) in exclude_columns + not model._auto + or not model._fields.get(field_name) + or not field.store + or (model._table, field_name) in exclude_columns ): continue # Discard SQL views + invalid fields + non-stored fields - records = model.search([(field_name, 'in', record_ids)]) + records = model.search([(field_name, "in", record_ids)]) if records: records.write({field_name: target_record_id}) logger.debug( "Changed %s record(s) in many2one field '%s' of model '%s'", - len(records), field_name, field.model, + len(records), + field_name, + field.model, ) -def _change_many2many_refs_orm(env, 
model_name, record_ids, target_record_id, - exclude_columns): - fields = env['ir.model.fields'].search([ - ('ttype', '=', 'many2many'), - ('relation', '=', model_name), - ]) +def _change_many2many_refs_orm( + env, model_name, record_ids, target_record_id, exclude_columns +): + fields = env["ir.model.fields"].search( + [ + ("ttype", "=", "many2many"), + ("relation", "=", model_name), + ] + ) for field in fields: try: model = env[field.model].with_context(active_test=False) except KeyError: continue field_name = field.name - if (not model._auto or not model._fields.get(field_name) or - not field.store or - (model._table, field_name) in exclude_columns): + if ( + not model._auto + or not model._fields.get(field_name) + or not field.store + or (model._table, field_name) in exclude_columns + ): continue # Discard SQL views + invalid fields + non-stored fields - records = model.search([(field_name, 'in', record_ids)]) + records = model.search([(field_name, "in", record_ids)]) if records: - records.write({ - field_name: ( - [(3, x) for x in record_ids] + [(4, target_record_id)] - ), - }) + records.write( + { + field_name: ( + [(3, x) for x in record_ids] + [(4, target_record_id)] + ), + } + ) logger.debug( "Changed %s record(s) in many2many field '%s' of model '%s'", - len(records), field_name, field.model, + len(records), + field_name, + field.model, ) -def _change_reference_refs_sql(env, model_name, record_ids, target_record_id, - exclude_columns): +def _change_reference_refs_sql( + env, model_name, record_ids, target_record_id, exclude_columns +): cr = env.cr - cr.execute(""" + cr.execute( + """ SELECT model, name FROM ir_model_fields WHERE ttype='reference' - """) + """ + ) rows = cr.fetchall() - if ('ir.property', 'value_reference') not in rows: - rows.append(('ir.property', 'value_reference')) + if ("ir.property", "value_reference") not in rows: + rows.append(("ir.property", "value_reference")) for row in rows: try: model = env[row[0]] @@ -184,87 +224,113 @@ def _change_reference_refs_sql(env, model_name, record_ids, target_record_id, if not table_exists(cr, table): continue column = row[1] - if not column_exists(cr, table, column) or ( - (table, column) in exclude_columns): + if not column_exists(cr, table, column) or ((table, column) in exclude_columns): continue - where = ' OR '.join( + where = " OR ".join( ["%s = '%s,%s'" % (column, model_name, x) for x in record_ids] ) logged_query( - cr, """ + cr, + """ UPDATE %s SET %s = %s WHERE %s - """, ( - AsIs(table), AsIs(column), - '%s,%s' % (model_name, target_record_id), AsIs(where) - ), skip_no_result=True, + """, + ( + AsIs(table), + AsIs(column), + "%s,%s" % (model_name, target_record_id), + AsIs(where), + ), + skip_no_result=True, ) -def _change_reference_refs_orm(env, model_name, record_ids, target_record_id, - exclude_columns): - fields = env['ir.model.fields'].search([('ttype', '=', 'reference')]) +def _change_reference_refs_orm( + env, model_name, record_ids, target_record_id, exclude_columns +): + fields = env["ir.model.fields"].search([("ttype", "=", "reference")]) if version_info[0] >= 12: - fields |= env.ref('base.field_ir_property__value_reference') + fields |= env.ref("base.field_ir_property__value_reference") else: - fields |= env.ref('base.field_ir_property_value_reference') + fields |= env.ref("base.field_ir_property_value_reference") for field in fields: try: model = env[field.model].with_context(active_test=False) except KeyError: continue field_name = field.name - if (not model._auto or not model._fields.get(field_name) 
or - not field.store or - (model._table, field_name) in exclude_columns): + if ( + not model._auto + or not model._fields.get(field_name) + or not field.store + or (model._table, field_name) in exclude_columns + ): continue # Discard SQL views + invalid fields + non-stored fields - expr = ['%s,%s' % (model_name, x) for x in record_ids] - records = model.search([(field_name, 'in', expr)]) + expr = ["%s,%s" % (model_name, x) for x in record_ids] + records = model.search([(field_name, "in", expr)]) if records: - records.write({ - field_name: '%s,%s' % (model_name, target_record_id), - }) + records.write( + { + field_name: "%s,%s" % (model_name, target_record_id), + } + ) logger.debug( "Changed %s record(s) in reference field '%s' of model '%s'", - len(records), field_name, field.model, + len(records), + field_name, + field.model, ) -def _change_translations_orm(env, model_name, record_ids, target_record_id, - exclude_columns): - if ('ir_translation', 'res_id') in exclude_columns: +def _change_translations_orm( + env, model_name, record_ids, target_record_id, exclude_columns +): + if ("ir_translation", "res_id") in exclude_columns: return - translation_obj = env['ir.translation'] - groups = translation_obj.read_group([ - ('type', '=', 'model'), - ('res_id', 'in', record_ids), - ('name', 'like', '%s,%%' % model_name), - ], ['name', 'lang'], ['name', 'lang'], lazy=False) + translation_obj = env["ir.translation"] + groups = translation_obj.read_group( + [ + ("type", "=", "model"), + ("res_id", "in", record_ids), + ("name", "like", "%s,%%" % model_name), + ], + ["name", "lang"], + ["name", "lang"], + lazy=False, + ) for group in groups: - target_translation = translation_obj.search([ - ('type', '=', 'model'), - ('res_id', '=', target_record_id), - ('name', '=', group['name']), - ('lang', '=', group['lang']), - ]) - records = translation_obj.search(group['__domain']) + target_translation = translation_obj.search( + [ + ("type", "=", "model"), + ("res_id", "=", target_record_id), + ("name", "=", group["name"]), + ("lang", "=", group["lang"]), + ] + ) + records = translation_obj.search(group["__domain"]) if not target_translation and records: # There is no target translation, we pick one for being the new one records[:1].res_id = target_record_id records = records[1:] if records: records.unlink() - logger.debug("Deleted %s extra translations for %s (lang = %s).", - len(records), group['name'], group['lang']) + logger.debug( + "Deleted %s extra translations for %s (lang = %s).", + len(records), + group["name"], + group["lang"], + ) -def _change_translations_sql(env, model_name, record_ids, target_record_id, - exclude_columns): - if ('ir_translation', 'res_id') in exclude_columns: +def _change_translations_sql( + env, model_name, record_ids, target_record_id, exclude_columns +): + if ("ir_translation", "res_id") in exclude_columns: return logged_query( - env.cr, """ + env.cr, + """ UPDATE ir_translation it SET res_id = %(target_record_id)s FROM ( @@ -279,12 +345,15 @@ def _change_translations_sql(env, model_name, record_ids, target_record_id, ) AS to_update WHERE it.id = to_update.id""", { - 'target_record_id': target_record_id, - 'record_ids': tuple(record_ids), - 'model_name': model_name, - }, skip_no_result=True) + "target_record_id": target_record_id, + "record_ids": tuple(record_ids), + "model_name": model_name, + }, + skip_no_result=True, + ) logged_query( - env.cr, """ + env.cr, + """ DELETE FROM ir_translation it USING ( SELECT it.id @@ -294,18 +363,29 @@ def _change_translations_sql(env, 
model_name, record_ids, target_record_id, ) AS to_delete WHERE it.id = to_delete.id""", { - 'target_record_id': target_record_id, - 'record_ids': record_ids, - 'model_name': model_name, - }) + "target_record_id": target_record_id, + "record_ids": record_ids, + "model_name": model_name, + }, + ) +# flake8: noqa: C901 def apply_operations_by_field_type( - env, model_name, record_ids, target_record_id, field_spec, field_vals, - field_type, column, operation, method): + env, + model_name, + record_ids, + target_record_id, + field_spec, + field_vals, + field_type, + column, + operation, + method, +): vals = {} o2m_changes = 0 - if method == 'orm': + if method == "orm": model = env[model_name] all_records = model.browse((target_record_id,) + tuple(record_ids)) target_record = model.browse(target_record_id) @@ -313,88 +393,91 @@ def apply_operations_by_field_type( field = model._fields[column] else: first_value = field_vals[0] - if field_type in ('char', 'text', 'html'): + if field_type in ("char", "text", "html"): if not operation: - operation = 'other' if field_type == 'char' else 'merge' - if operation == 'first_not_null': + operation = "other" if field_type == "char" else "merge" + if operation == "first_not_null": field_vals = [x for x in field_vals if x] if field_vals: vals[column] = field_vals[0] - elif operation == 'merge': + elif operation == "merge": _list = filter(lambda x: x, field_vals) - vals[column] = ' | '.join(_list) - elif field_type in ('integer', 'float', 'monetary'): - if operation or field_type != 'integer': + vals[column] = " | ".join(_list) + elif field_type in ("integer", "float", "monetary"): + if operation or field_type != "integer": field_vals = [0 if not x else x for x in field_vals] if not operation: - operation = 'other' if field_type == 'integer' else 'sum' - if operation == 'sum': + operation = "other" if field_type == "integer" else "sum" + if operation == "sum": vals[column] = sum(field_vals) - elif operation == 'avg': + elif operation == "avg": vals[column] = sum(field_vals) / len(field_vals) - elif operation == 'max': + elif operation == "max": vals[column] = max(field_vals) - elif operation == 'min': + elif operation == "min": vals[column] = min(field_vals) - elif field_type == 'boolean': + elif field_type == "boolean": if operation: field_vals = [False if x is None else x for x in field_vals] - operation = operation or 'other' - if operation == 'and': + operation = operation or "other" + if operation == "and": vals[column] = functools.reduce(lambda x, y: x & y, field_vals) - elif operation == 'or': + elif operation == "or": vals[column] = functools.reduce(lambda x, y: x | y, field_vals) - elif field_type in ('date', 'datetime'): + elif field_type in ("date", "datetime"): if operation: field_vals = list(filter(lambda x: x, field_vals)) - operation = field_vals and operation or 'other' - if operation == 'max': + operation = field_vals and operation or "other" + if operation == "max": vals[column] = max(field_vals) - elif operation == 'min': + elif operation == "min": vals[column] = min(field_vals) - elif field_type == 'many2many' and method == 'orm': - operation = operation or 'merge' - if operation == 'merge': + elif field_type == "many2many" and method == "orm": + operation = operation or "merge" + if operation == "merge": field_vals = filter(lambda x: x is not False, field_vals) vals[column] = [(4, x.id) for x in field_vals] - elif field_type == 'one2many' and method == 'orm': - operation = operation or 'merge' - if operation == 'merge': + elif field_type 
== "one2many" and method == "orm": + operation = operation or "merge" + if operation == "merge": o2m_changes += 1 field_vals.write({field.inverse_name: target_record_id}) - elif field_type == 'binary': - operation = operation or 'merge' - if operation == 'merge': + elif field_type == "binary": + operation = operation or "merge" + if operation == "merge": field_vals = [x for x in field_vals if x] if not first_value and field_vals: vals[column] = field_vals[0] - elif field_type in ('many2one', 'reference'): - operation = operation or 'merge' - if operation == 'merge': - if method != 'orm': + elif field_type in ("many2one", "reference"): + operation = operation or "merge" + if operation == "merge": + if method != "orm": field_vals = [x for x in field_vals if x] if not first_value and field_vals: vals[column] = field_vals[0] - elif field_type == 'many2one_reference' and method == 'orm' and\ - field.model_field in model._fields: - operation = operation or 'merge' - if operation == 'merge': + elif ( + field_type == "many2one_reference" + and method == "orm" + and field.model_field in model._fields + ): + operation = operation or "merge" + if operation == "merge": if field.model_field in field_spec: del field_spec[field.model_field] list_model_field = all_records.mapped(field.model_field) - zip_list = [(x, y) for x, y - in zip(field_vals, list_model_field) if x and y] + zip_list = [(x, y) for x, y in zip(field_vals, list_model_field) if x and y] if first_value and zip_list: vals[column] = zip_list[0][0] vals[field.model_field] = zip_list[0][1] - if method == 'orm': + if method == "orm": return vals, o2m_changes else: return vals -def _adjust_merged_values_orm(env, model_name, record_ids, target_record_id, - field_spec): +def _adjust_merged_values_orm( + env, model_name, record_ids, target_record_id, field_spec +): """This method deals with the values on the records to be merged + the target record, performing operations that make sense on the meaning of the model. 
@@ -462,19 +545,30 @@ def _adjust_merged_values_orm(env, model_name, record_ids, target_record_id, vals = {} o2m_changes = 0 for field in fields: - if field_spec.get('openupgrade_other_fields', '') == 'preserve' \ - and field.name not in field_spec: + if ( + field_spec.get("openupgrade_other_fields", "") == "preserve" + and field.name not in field_spec + ): continue if not field.store or field.compute or field.related: continue # don't do anything on these cases op = field_spec.get(field.name, False) - if field.type != 'reference': + if field.type != "reference": _list = all_records.mapped(field.name) else: _list = [x[field.name] for x in all_records if x[field.name]] field_vals, field_o2m_changes = apply_operations_by_field_type( - env, model_name, record_ids, target_record_id, field_spec, - _list, field.type, field.name, op, 'orm') + env, + model_name, + record_ids, + target_record_id, + field_spec, + _list, + field.type, + field.name, + op, + "orm", + ) vals.update(field_vals) o2m_changes += field_o2m_changes if not vals: @@ -482,7 +576,7 @@ def _adjust_merged_values_orm(env, model_name, record_ids, target_record_id, # Curate values that haven't changed new_vals = {} for f in vals: - if model._fields[f].type != 'many2many': + if model._fields[f].type != "many2many": if vals[f] != getattr(target_record, f): new_vals[f] = vals[f] else: @@ -492,12 +586,15 @@ def _adjust_merged_values_orm(env, model_name, record_ids, target_record_id, target_record.write(new_vals) logger.debug( "Write %s value(s) in target record '%s' of model '%s'", - len(new_vals) + o2m_changes, target_record_id, model_name, + len(new_vals) + o2m_changes, + target_record_id, + model_name, ) -def _adjust_merged_values_sql(env, model_name, record_ids, target_record_id, - model_table, field_spec): +def _adjust_merged_values_sql( + env, model_name, record_ids, target_record_id, model_table, field_spec +): """This method deals with the values on the records to be merged + the target record, performing operations that make sense on the meaning of the model. @@ -510,38 +607,53 @@ def _adjust_merged_values_sql(env, model_name, record_ids, target_record_id, Possible operations by field types same as _adjust_merged_values_orm. 
""" - if not column_exists(env.cr, model_table, 'id'): + if not column_exists(env.cr, model_table, "id"): # TODO: handle one2many and many2many return - env.cr.execute(""" + env.cr.execute( + """ SELECT isc.column_name, isc.data_type, imf.ttype FROM information_schema.columns isc JOIN ir_model_fields imf ON ( imf.name = isc.column_name AND imf.model = %s) WHERE isc.table_name = %s - """, (model_name, model_table)) + """, + (model_name, model_table), + ) dict_column_type = env.cr.fetchall() - columns = ', '.join([x[0] for x in dict_column_type]) + columns = ", ".join([x[0] for x in dict_column_type]) env.cr.execute( """SELECT {columns} FROM {table} WHERE id IN %(record_ids)s""".format( table=model_table, columns=columns, - ), {'record_ids': (target_record_id,) + tuple(record_ids)} + ), + {"record_ids": (target_record_id,) + tuple(record_ids)}, ) lists = list(zip(*(env.cr.fetchall()))) new_vals = {} vals = {} - for i, (column, column_type, field_type) in enumerate(dict_column_type): - if field_spec.get('openupgrade_other_fields', '') == 'preserve' \ - and column not in field_spec: + for i, (column, _column_type, field_type) in enumerate(dict_column_type): + if ( + field_spec.get("openupgrade_other_fields", "") == "preserve" + and column not in field_spec + ): continue op = field_spec.get(column, False) _list = list(lists[i]) field_vals = apply_operations_by_field_type( - env, model_name, record_ids, target_record_id, field_spec, - _list, field_type, column, op, 'sql') + env, + model_name, + record_ids, + target_record_id, + field_spec, + _list, + field_type, + column, + op, + "sql", + ) vals.update(field_vals) if not vals: return @@ -551,9 +663,9 @@ def _adjust_merged_values_sql(env, model_name, record_ids, target_record_id, FROM {table} WHERE id = %(target_record_id)s """.format( - table=model_table, - columns=", ". join(list(vals.keys())) - ), {'target_record_id': target_record_id} + table=model_table, columns=", ".join(list(vals.keys())) + ), + {"target_record_id": target_record_id}, ) record_vals = env.cr.dictfetchall() for column in vals: @@ -567,18 +679,27 @@ def _adjust_merged_values_sql(env, model_name, record_ids, target_record_id, table=sql.Identifier(model_table), id=sql.Identifier("id"), set_value=sql.SQL( - ", ".join([ - "{{{field}}} = %({field})s".format(field=x) - for x in new_vals.keys() - ]) - ).format(**ident_dict) + ", ".join( + [ + "{{{field}}} = %({field})s".format(field=x) + for x in new_vals.keys() + ] + ) + ).format(**ident_dict), ) new_vals["target_record_id"] = target_record_id logged_query(env.cr, query, new_vals) -def _change_generic(env, model_name, record_ids, target_record_id, - exclude_columns, method='orm', new_model_name=None): +def _change_generic( + env, + model_name, + record_ids, + target_record_id, + exclude_columns, + method="orm", + new_model_name=None, +): """Update known generic style res_id/res_model references. :param env: ORM environment :param model_name: Name of the model that have the generic references. @@ -592,12 +713,12 @@ def _change_generic(env, model_name, record_ids, target_record_id, account.invoice > account.move). 
""" for model_to_replace, res_id_column, model_column in [ - ('calendar.event', 'res_id', 'res_model'), - ('ir.attachment', 'res_id', 'res_model'), - ('mail.activity', 'res_id', 'res_model'), - ('mail.followers', 'res_id', 'res_model'), - ('mail.message', 'res_id', 'model'), - ('rating.rating', 'res_id', 'res_model'), + ("calendar.event", "res_id", "res_model"), + ("ir.attachment", "res_id", "res_model"), + ("mail.activity", "res_id", "res_model"), + ("mail.followers", "res_id", "res_model"), + ("mail.message", "res_id", "model"), + ("rating.rating", "res_id", "res_model"), ]: try: model = env[model_to_replace].with_context(active_test=False) @@ -605,101 +726,122 @@ def _change_generic(env, model_name, record_ids, target_record_id, continue if (model._table, res_id_column) in exclude_columns: continue - if method == 'orm': - if not model._fields.get(model_column) or \ - not model._fields.get(res_id_column): + if method == "orm": + if not model._fields.get(model_column) or not model._fields.get( + res_id_column + ): continue - records = model.search([ - (model_column, '=', model_name), - (res_id_column, 'in', record_ids)]) + records = model.search( + [(model_column, "=", model_name), (res_id_column, "in", record_ids)] + ) if records: vals = {res_id_column: target_record_id} if new_model_name: vals[model_column] = new_model_name - if model_to_replace != 'mail.followers': + if model_to_replace != "mail.followers": records.write(vals) else: # We need to avoid duplicated results in this model - target_duplicated = model.search([ - (model_column, '=', model_name), - (res_id_column, '=', target_record_id), - ('partner_id', 'in', records.mapped('partner_id').ids), - ]) - dup_partners = target_duplicated.mapped('partner_id') - duplicated = records.filtered(lambda x: ( - x.partner_id in dup_partners)) + target_duplicated = model.search( + [ + (model_column, "=", model_name), + (res_id_column, "=", target_record_id), + ("partner_id", "in", records.mapped("partner_id").ids), + ] + ) + dup_partners = target_duplicated.mapped("partner_id") + duplicated = records.filtered( + lambda x: (x.partner_id in dup_partners) + ) (records - duplicated).write(vals) duplicated.unlink() logger.debug( - "Changed %s record(s) of model '%s'", - len(records), model_to_replace) + "Changed %s record(s) of model '%s'", len(records), model_to_replace + ) else: - if not column_exists(env.cr, model._table, res_id_column) or \ - not column_exists(env.cr, model._table, model_column): + if not column_exists( + env.cr, model._table, res_id_column + ) or not column_exists(env.cr, model._table, model_column): continue format_args = { - 'table': sql.Identifier(model._table), - 'res_id_column': sql.Identifier(res_id_column), - 'model_column': sql.Identifier(model_column), + "table": sql.Identifier(model._table), + "res_id_column": sql.Identifier(res_id_column), + "model_column": sql.Identifier(model_column), } query_args = { - 'model_name': model_name, - 'new_model_name': new_model_name or model_name, - 'target_record_id': target_record_id, - 'record_ids': tuple(record_ids), + "model_name": model_name, + "new_model_name": new_model_name or model_name, + "target_record_id": target_record_id, + "record_ids": tuple(record_ids), } query = sql.SQL( "UPDATE {table} SET {res_id_column} = %(target_record_id)s" ).format(**format_args) if new_model_name: - query += sql.SQL( - ", {model_column} = %(new_model_name)s" - ).format(**format_args) - query += sql.SQL( - " WHERE {model_column} = %(model_name)s " - ).format(**format_args) - if 
model_to_replace != 'mail.followers': - query += sql.SQL( - "AND {res_id_column} in %(record_ids)s" - ).format(**format_args) + query += sql.SQL(", {model_column} = %(new_model_name)s").format( + **format_args + ) + query += sql.SQL(" WHERE {model_column} = %(model_name)s ").format( + **format_args + ) + if model_to_replace != "mail.followers": + query += sql.SQL("AND {res_id_column} in %(record_ids)s").format( + **format_args + ) logged_query(env.cr, query, query_args, skip_no_result=True) else: for record_id in record_ids: query_args["record_id"] = record_id - query2 = query + sql.SQL( - """AND {res_id_column} = %(record_id)s + query2 = ( + query + + sql.SQL( + """AND {res_id_column} = %(record_id)s AND partner_id NOT IN ( SELECT partner_id FROM {table} WHERE {res_id_column} = %(target_record_id)s AND {model_column} = %(new_model_name)s )""" - ).format(**format_args) + ).format(**format_args) + ) logged_query( - env.cr, query2, query_args, skip_no_result=True, + env.cr, + query2, + query_args, + skip_no_result=True, ) # Remove remaining records non updated (that are duplicates) - logged_query(env.cr, sql.SQL( - "DELETE FROM {table} " - "WHERE {model_column} = %(model_name)s " - "AND {res_id_column} IN %(record_ids)s" - ).format(**format_args), query_args, skip_no_result=True) + logged_query( + env.cr, + sql.SQL( + "DELETE FROM {table} " + "WHERE {model_column} = %(model_name)s " + "AND {res_id_column} IN %(record_ids)s" + ).format(**format_args), + query_args, + skip_no_result=True, + ) -def _delete_records_sql(env, model_name, record_ids, target_record_id, - model_table=None): +def _delete_records_sql( + env, model_name, record_ids, target_record_id, model_table=None +): if not model_table: model_table = env[model_name]._table logged_query( - env.cr, "DELETE FROM ir_model_data WHERE model = %s AND res_id IN %s", + env.cr, + "DELETE FROM ir_model_data WHERE model = %s AND res_id IN %s", (model_name, tuple(record_ids)), ) logged_query( - env.cr, "DELETE FROM ir_attachment " - "WHERE res_model = %s AND res_id IN %s", + env.cr, + "DELETE FROM ir_attachment " "WHERE res_model = %s AND res_id IN %s", (model_name, tuple(record_ids)), ) - logged_query(env.cr, sql.SQL("DELETE FROM {} WHERE id IN %s").format( - sql.Identifier(model_table)), (tuple(record_ids), )) + logged_query( + env.cr, + sql.SQL("DELETE FROM {} WHERE id IN %s").format(sql.Identifier(model_table)), + (tuple(record_ids),), + ) def _delete_records_orm(env, model_name, record_ids, target_record_id): @@ -708,15 +850,16 @@ def _delete_records_orm(env, model_name, record_ids, target_record_id): records.unlink() logger.debug( "Deleted %s source record(s) of model '%s'", - len(record_ids), model_name, + len(record_ids), + model_name, ) -def _check_recurrence(env, model_name, record_ids, target_record_id, - model_table=None): +def _check_recurrence(env, model_name, record_ids, target_record_id, model_table=None): if not model_table: model_table = env[model_name]._table - env.cr.execute(""" + env.cr.execute( + """ SELECT tc.table_name, kcu.column_name, COALESCE(imf.column1, 'id') FROM information_schema.table_constraints AS tc JOIN information_schema.key_column_usage AS kcu @@ -733,18 +876,24 @@ def _check_recurrence(env, model_name, record_ids, target_record_id, tc.table_name = imf.relation_table)) WHERE tc.constraint_type = 'FOREIGN KEY' AND ccu.table_name = %s and ccu.column_name = 'id' - """, (model_name, model_table)) + """, + (model_name, model_table), + ) for table, column, origin in env.cr.fetchall(): query = sql.SQL( """SELECT 
{column} FROM {table} WHERE {origin} = %(target_record_id)s""" ).format( - table=sql.Identifier(table), column=sql.Identifier(column), + table=sql.Identifier(table), + column=sql.Identifier(column), origin=sql.Identifier(origin), ) - env.cr.execute(query, { - 'target_record_id': target_record_id, - }) + env.cr.execute( + query, + { + "target_record_id": target_record_id, + }, + ) new_parent_row = env.cr.fetchall() if new_parent_row and new_parent_row[0] in record_ids: # When we already have recursive hierarchy, doing a @@ -754,14 +903,27 @@ def _check_recurrence(env, model_name, record_ids, target_record_id, logger.info( "Couldn't merge %s record(s) of model %s to record_id %s" " to avoid recursion with field %s of table %s", - len(record_ids), model_name, target_record_id, origin, table) + len(record_ids), + model_name, + target_record_id, + origin, + table, + ) return True return False -def merge_records(env, model_name, record_ids, target_record_id, - field_spec=None, method='orm', delete=True, - exclude_columns=None, model_table=None): +def merge_records( + env, + model_name, + record_ids, + target_record_id, + field_spec=None, + method="orm", + delete=True, + exclude_columns=None, + model_table=None, +): """Merge several records into the target one. NOTE: This should be executed in end migration scripts for assuring that @@ -786,17 +948,18 @@ def merge_records(env, model_name, record_ids, target_record_id, """ if exclude_columns is None: exclude_columns = [] - if field_spec is None and method == 'orm': + if field_spec is None and method == "orm": field_spec = {} if isinstance(record_ids, list): record_ids = tuple(record_ids) args0 = (env, model_name, record_ids, target_record_id) - args = args0 + (exclude_columns, ) + args = args0 + (exclude_columns,) if target_record_id in record_ids: - raise Exception("You can't put the target record in the list or " - "records to be merged.") - _change_generic(*args, method=method) - if method == 'orm': + raise Exception( + "You can't put the target record in the list or " "records to be merged." 
+ ) + _change_generic(*args, method=method) # pylint: disable=E1124 + if method == "orm": # Check which records to be merged exist record_ids = env[model_name].browse(record_ids).exists().ids if not record_ids: @@ -807,7 +970,7 @@ def merge_records(env, model_name, record_ids, target_record_id, _change_many2many_refs_orm(*args) _change_reference_refs_orm(*args) _change_translations_orm(*args) - args2 = args0 + (field_spec, ) + args2 = args0 + (field_spec,) # TODO: serialized fields with env.norecompute(): _adjust_merged_values_orm(*args2) @@ -818,16 +981,20 @@ def merge_records(env, model_name, record_ids, target_record_id, # Check which records to be merged exist if not model_table: model_table = env[model_name]._table - env.cr.execute(sql.SQL("SELECT id FROM {} WHERE id IN %s").format( - sql.Identifier(model_table)), (tuple(record_ids), )) + env.cr.execute( + sql.SQL("SELECT id FROM {} WHERE id IN %s").format( + sql.Identifier(model_table) + ), + (tuple(record_ids),), + ) record_ids = [x[0] for x in env.cr.fetchall()] if not record_ids: return if _check_recurrence( - env, model_name, record_ids, target_record_id, - model_table=model_table): + env, model_name, record_ids, target_record_id, model_table=model_table + ): return - args3 = args + (model_table, ) + args3 = args + (model_table,) _change_foreign_key_refs(*args3) _change_reference_refs_sql(*args) _change_translations_sql(*args) @@ -836,5 +1003,5 @@ def merge_records(env, model_name, record_ids, target_record_id, _adjust_merged_values_sql(*args4) if delete: _delete_records_sql( - env, model_name, record_ids, target_record_id, - model_table=model_table) + env, model_name, record_ids, target_record_id, model_table=model_table + ) diff --git a/openupgradelib/openupgrade_tools.py b/openupgradelib/openupgrade_tools.py index b5dbfae5..99c9e448 100644 --- a/openupgradelib/openupgrade_tools.py +++ b/openupgradelib/openupgrade_tools.py @@ -1,4 +1,4 @@ -# -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- # pylint: disable=C8202 ############################################################################## # # OpenERP, Open Source Management Solution @@ -28,19 +28,20 @@ def table_exists(cr, table): - """ Check whether a certain table or view exists """ - cr.execute('SELECT 1 FROM pg_class WHERE relname = %s', (table,)) + """Check whether a certain table or view exists""" + cr.execute("SELECT 1 FROM pg_class WHERE relname = %s", (table,)) return cr.fetchone() def column_exists(cr, table, column): - """ Check whether a certain column exists """ + """Check whether a certain column exists""" cr.execute( - 'SELECT count(attname) FROM pg_attribute ' - 'WHERE attrelid = ' - '( SELECT oid FROM pg_class WHERE relname = %s ) ' - 'AND attname = %s', - (table, column)) + "SELECT count(attname) FROM pg_attribute " + "WHERE attrelid = " + "( SELECT oid FROM pg_class WHERE relname = %s ) " + "AND attname = %s", + (table, column), + ) return cr.fetchone()[0] == 1 @@ -93,15 +94,17 @@ def convert_html_fragment(html_string, replacements, pretty_print=True): return tostring(fragment, pretty_print=pretty_print, encoding="unicode") -def convert_xml_node(node, - attr_add=None, - attr_rm=frozenset(), - class_add="", - class_rm="", - style_add=None, - style_rm=frozenset(), - tag="", - wrap=""): +def convert_xml_node( + node, + attr_add=None, + attr_rm=frozenset(), + class_add="", + class_rm="", + style_add=None, + style_rm=frozenset(), + tag="", + wrap="", +): """Apply conversions to an XML node. 
All parameters except :param:`node` can be a callable that return the @@ -163,8 +166,10 @@ def convert_xml_node(node, # Obtain attributes, classes and styles classes = set(node.attrib.get("class", "").split()) styles = node.attrib.get("style", "").split(";") - styles = {key.strip(): val.strip() for key, val in - (style.split(":", 1) for style in styles if ":" in style)} + styles = { + key.strip(): val.strip() + for key, val in (style.split(":", 1) for style in styles if ":" in style) + } # Convert incoming callable arguments into values originals = { "attrs": dict(node.attrib.items()), @@ -217,8 +222,7 @@ def convert_xml_node(node, wrapper.append(node) -def convert_html_replacement_class_shortcut(class_rm="", class_add="", - **kwargs): +def convert_html_replacement_class_shortcut(class_rm="", class_add="", **kwargs): """Shortcut to create a class replacement spec. :param str class_rm: @@ -236,8 +240,10 @@ def convert_html_replacement_class_shortcut(class_rm="", class_add="", """ kwargs.setdefault("selector", ".%s" % ".".join(class_rm.split())) assert kwargs["selector"] != "." - kwargs.update({ - "class_rm": class_rm, - "class_add": class_add, - }) + kwargs.update( + { + "class_rm": class_rm, + "class_add": class_add, + } + ) return kwargs diff --git a/setup.py b/setup.py index fd52e32f..15ca75b8 100755 --- a/setup.py +++ b/setup.py @@ -1,36 +1,35 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- import os -import openupgradelib from setuptools import setup +import openupgradelib -with open('README.rst') as readme_file: +with open("README.rst") as readme_file: readme = readme_file.read() -with open('HISTORY.rst') as history_file: - history = history_file.read().replace('.. :changelog:', '') +with open("HISTORY.rst") as history_file: + history = history_file.read().replace(".. 
:changelog:", "") dirname = os.path.dirname(__file__) test_requirements = [ - 'coverage', - 'flake8', - 'pep8-naming', - 'mock', + "coverage", + "flake8", + "pep8-naming", + "mock", ] setup( - name='openupgradelib', + name="openupgradelib", use_scm_version=True, description=openupgradelib.__doc__, - long_description=readme + '\n\n' + history, + long_description=readme + "\n\n" + history, author=openupgradelib.__author__, author_email=openupgradelib.__email__, - url='https://github.com/OCA/openupgradelib', - packages=['openupgradelib'], + url="https://github.com/OCA/openupgradelib", + packages=["openupgradelib"], include_package_data=True, setup_requires=["setuptools_scm"], install_requires=[ @@ -41,21 +40,21 @@ python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*", license=openupgradelib.__license__, zip_safe=False, - keywords='openupgradelib', + keywords="openupgradelib", classifiers=[ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU Affero General Public License v3', - 'Natural Language :: English', + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Natural Language :: English", "Programming Language :: Python :: 2", - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", ], - test_suite='tests', - tests_require=test_requirements + test_suite="tests", + tests_require=test_requirements, ) diff --git a/tests/__init__.py b/tests/__init__.py old mode 100755 new mode 100644 index 2d1d9eb6..432f7815 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,2 +1 @@ -# -*- coding: utf-8 -*- from . import test_openupgradelib # noqa: F401 diff --git a/tests/addons/openupgradelib_tests/README.rst b/tests/addons/openupgradelib_tests/README.rst new file mode 100644 index 00000000..db6eb769 --- /dev/null +++ b/tests/addons/openupgradelib_tests/README.rst @@ -0,0 +1 @@ +Openupgradelib Test Addon diff --git a/tests/addons/openupgradelib_tests/__manifest__.py b/tests/addons/openupgradelib_tests/__manifest__.py index 99425bfb..1a056f00 100644 --- a/tests/addons/openupgradelib_tests/__manifest__.py +++ b/tests/addons/openupgradelib_tests/__manifest__.py @@ -1,11 +1,11 @@ -# coding: utf-8 # Copyright 2018 Opener B.V. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). 
{ - 'name': 'Openupgradelib Test Addon', - 'version': '1.0.0', # No version prefix on purpose - 'category': 'Migration', - 'author': 'Opener B.V., OCA (Odoo Community Association)', - 'website': 'https://github.com/oca/openupgradelib', - 'installable': True, + "name": "Openupgradelib Test Addon", + "version": "1.0.0", # No version prefix on purpose + "category": "Migration", + "author": "Opener B.V., Odoo Community Association (OCA)", + "license": "AGPL-3", + "website": "https://github.com/oca/openupgradelib", + "installable": True, } diff --git a/tests/addons/openupgradelib_tests/__openerp__.py b/tests/addons/openupgradelib_tests/__openerp__.py index 99425bfb..1a056f00 100644 --- a/tests/addons/openupgradelib_tests/__openerp__.py +++ b/tests/addons/openupgradelib_tests/__openerp__.py @@ -1,11 +1,11 @@ -# coding: utf-8 # Copyright 2018 Opener B.V. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl). { - 'name': 'Openupgradelib Test Addon', - 'version': '1.0.0', # No version prefix on purpose - 'category': 'Migration', - 'author': 'Opener B.V., OCA (Odoo Community Association)', - 'website': 'https://github.com/oca/openupgradelib', - 'installable': True, + "name": "Openupgradelib Test Addon", + "version": "1.0.0", # No version prefix on purpose + "category": "Migration", + "author": "Opener B.V., Odoo Community Association (OCA)", + "license": "AGPL-3", + "website": "https://github.com/oca/openupgradelib", + "installable": True, } diff --git a/tests/addons/openupgradelib_tests/migrations/10.0.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/10.0.1.0.0/post-migrate.py index 41bf16fe..b5d758d0 100644 --- a/tests/addons/openupgradelib_tests/migrations/10.0.1.0.0/post-migrate.py +++ b/tests/addons/openupgradelib_tests/migrations/10.0.1.0.0/post-migrate.py @@ -1,11 +1,11 @@ -# coding: utf-8 from openupgradelib import openupgrade @openupgrade.migrate() def migrate(env, version): openupgrade.set_defaults( - env.cr, env, {'res.partner': [('active', None)]}, force=True) + env.cr, env, {"res.partner": [("active", None)]}, force=True + ) openupgrade.set_defaults( - env.cr, env, {'res.partner': [('active', None)]}, force=True, - use_orm=True) + env.cr, env, {"res.partner": [("active", None)]}, force=True, use_orm=True + ) diff --git a/tests/addons/openupgradelib_tests/migrations/11.0.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/11.0.1.0.0/post-migrate.py index a6657379..b5d758d0 100644 --- a/tests/addons/openupgradelib_tests/migrations/11.0.1.0.0/post-migrate.py +++ b/tests/addons/openupgradelib_tests/migrations/11.0.1.0.0/post-migrate.py @@ -4,7 +4,8 @@ @openupgrade.migrate() def migrate(env, version): openupgrade.set_defaults( - env.cr, env, {'res.partner': [('active', None)]}, force=True) + env.cr, env, {"res.partner": [("active", None)]}, force=True + ) openupgrade.set_defaults( - env.cr, env, {'res.partner': [('active', None)]}, force=True, - use_orm=True) + env.cr, env, {"res.partner": [("active", None)]}, force=True, use_orm=True + ) diff --git a/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py index 0d87d06c..809359a3 100644 --- a/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py +++ b/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py @@ -1,5 +1,5 @@ -# coding: utf-8 from openerp import pooler + from openupgradelib import openupgrade @@ -8,9 +8,8 @@ def migrate(cr, version): pool = 
diff --git a/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py
index 0d87d06c..809359a3 100644
--- a/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py
+++ b/tests/addons/openupgradelib_tests/migrations/6.1.1.0.0/post-migrate.py
@@ -1,5 +1,5 @@
-# coding: utf-8
 from openerp import pooler
+
 from openupgradelib import openupgrade


@@ -8,9 +8,8 @@ def migrate(cr, version):
     pool = pooler.get_pool(cr.dbname)
     # Set the default for res.partner 'active' column to True instead of 1
     # which breaks the SQL method of set_defaults
-    pool['res.partner']._defaults['active'] = lambda *args: True
-    openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True)
+    pool["res.partner"]._defaults["active"] = lambda *args: True
+    openupgrade.set_defaults(cr, pool, {"res.partner": [("active", None)]}, force=True)
     openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True,
-        use_orm=True)
+        cr, pool, {"res.partner": [("active", None)]}, force=True, use_orm=True
+    )
diff --git a/tests/addons/openupgradelib_tests/migrations/7.0.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/7.0.1.0.0/post-migrate.py
index 6e49d89c..2fb0b530 100644
--- a/tests/addons/openupgradelib_tests/migrations/7.0.1.0.0/post-migrate.py
+++ b/tests/addons/openupgradelib_tests/migrations/7.0.1.0.0/post-migrate.py
@@ -1,13 +1,12 @@
-# coding: utf-8
 from openerp import pooler
+
 from openupgradelib import openupgrade


 @openupgrade.migrate()
 def migrate(cr, version):
     pool = pooler.get_pool(cr.dbname)
+    openupgrade.set_defaults(cr, pool, {"res.partner": [("active", None)]}, force=True)
     openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True)
-    openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True,
-        use_orm=True)
+        cr, pool, {"res.partner": [("active", None)]}, force=True, use_orm=True
+    )
diff --git a/tests/addons/openupgradelib_tests/migrations/8.0.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/8.0.1.0.0/post-migrate.py
index 56f36144..cba0dc20 100644
--- a/tests/addons/openupgradelib_tests/migrations/8.0.1.0.0/post-migrate.py
+++ b/tests/addons/openupgradelib_tests/migrations/8.0.1.0.0/post-migrate.py
@@ -1,19 +1,17 @@
-# coding: utf-8
-from openerp import api, pooler, SUPERUSER_ID
+from openerp import SUPERUSER_ID, api, pooler
+
 from openupgradelib import openupgrade


 @openupgrade.migrate()
 def migrate(cr, version):
     pool = pooler.get_pool(cr.dbname)
+    openupgrade.set_defaults(cr, pool, {"res.partner": [("active", None)]}, force=True)
     openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True)
-    openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True,
-        use_orm=True)
+        cr, pool, {"res.partner": [("active", None)]}, force=True, use_orm=True
+    )
     env = api.Environment(cr, SUPERUSER_ID, {})
+    openupgrade.set_defaults(cr, env, {"res.partner": [("active", None)]}, force=True)
     openupgrade.set_defaults(
-        cr, env, {'res.partner': [('active', None)]}, force=True)
-    openupgrade.set_defaults(
-        cr, env, {'res.partner': [('active', None)]}, force=True,
-        use_orm=True)
+        cr, env, {"res.partner": [("active", None)]}, force=True, use_orm=True
+    )
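The 6.1 through 8.0 scripts each exercise `set_defaults` twice: once over plain SQL (`force=True`) and once through the ORM (`use_orm=True`). The 6.1 variant even has to patch `_defaults['active']` first, because a default of `1` instead of `True` breaks the SQL path. The sketch below is a simplified illustration of why the two paths behave differently, not the library's actual code:

```python
def set_default_sql(cr, table, column, value):
    # SQL path: fast bulk update, but bypasses compute methods,
    # constraints and any Python-level defaults.
    cr.execute(
        "UPDATE {} SET {} = %s WHERE {} IS NULL".format(table, column, column),
        (value,),
    )


def set_default_orm(env, model, field, value):
    # ORM path: slower, but write() triggers recomputation, constraints
    # and inverse fields, just like a user edit would.
    records = env[model].search([(field, "=", False)])
    records.write({field: value})
```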
diff --git a/tests/addons/openupgradelib_tests/migrations/9.0.1.0.0/post-migrate.py b/tests/addons/openupgradelib_tests/migrations/9.0.1.0.0/post-migrate.py
index 56f36144..cba0dc20 100644
--- a/tests/addons/openupgradelib_tests/migrations/9.0.1.0.0/post-migrate.py
+++ b/tests/addons/openupgradelib_tests/migrations/9.0.1.0.0/post-migrate.py
@@ -1,19 +1,17 @@
-# coding: utf-8
-from openerp import api, pooler, SUPERUSER_ID
+from openerp import SUPERUSER_ID, api, pooler
+
 from openupgradelib import openupgrade


 @openupgrade.migrate()
 def migrate(cr, version):
     pool = pooler.get_pool(cr.dbname)
+    openupgrade.set_defaults(cr, pool, {"res.partner": [("active", None)]}, force=True)
     openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True)
-    openupgrade.set_defaults(
-        cr, pool, {'res.partner': [('active', None)]}, force=True,
-        use_orm=True)
+        cr, pool, {"res.partner": [("active", None)]}, force=True, use_orm=True
+    )
     env = api.Environment(cr, SUPERUSER_ID, {})
+    openupgrade.set_defaults(cr, env, {"res.partner": [("active", None)]}, force=True)
     openupgrade.set_defaults(
-        cr, env, {'res.partner': [('active', None)]}, force=True)
-    openupgrade.set_defaults(
-        cr, env, {'res.partner': [('active', None)]}, force=True,
-        use_orm=True)
+        cr, env, {"res.partner": [("active", None)]}, force=True, use_orm=True
+    )
diff --git a/tests/test_openupgradelib.py b/tests/test_openupgradelib.py
index fb8eb8de..fab275b3 100755
--- a/tests/test_openupgradelib.py
+++ b/tests/test_openupgradelib.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-

 """
 test_openupgradelib
@@ -9,6 +8,7 @@
 """
 import sys
 import unittest
+
 import mock

 # Store original __import__
@@ -16,21 +16,22 @@
 # This will be the openerp module
 openerp_mock = mock.Mock()
 openerp_mock.release = mock.Mock()
-openerp_mock.release.version_info = (8, 0, 0, 'final', 0)
+openerp_mock.release.version_info = (8, 0, 0, "final", 0)


 def import_mock(name, *args):
-    names = name.split('.')
-    if names[0] in ['openerp', 'psycopg2']:
+    names = name.split(".")
+    if names[0] in ["openerp", "psycopg2"]:
         return openerp_mock
     return orig_import(name, *args)


 if sys.version_info[0] == 3:
     import builtins  # noqa: F401
-    import_str = 'builtins.__import__'
+
+    import_str = "builtins.__import__"
 else:
-    import_str = '__builtin__.__import__'
+    import_str = "__builtin__.__import__"


 def mock_contextmanager():
@@ -41,14 +42,15 @@ def mock_contextmanager():


 with mock.patch(import_str, side_effect=import_mock):
-    from openupgradelib import openupgrade
     from openerp import api
+
+    from openupgradelib import openupgrade
+
     api.Environment.manage = mock_contextmanager


 class TestOpenupgradelib(unittest.TestCase):
-
-    def setUp(self):
+    def setUp(self):  # pylint: disable=W8106
         self.cr = mock.Mock()
         self.cr.savepoint = mock_contextmanager
@@ -61,12 +63,12 @@
         def migrate_with_env(env, version):
             self.assertTrue(isinstance(env.cr, mock.Mock))

-        migrate_with_cr(self.cr, 'irrelevant.version')
-        migrate_with_env(self.cr, 'irrelevant.version')
+        migrate_with_cr(self.cr, "irrelevant.version")
+        migrate_with_env(self.cr, "irrelevant.version")

-    def tearDown(self):
+    def tearDown(self):  # pylint: disable=W8106
         pass


-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
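A closing note on the test harness touched by the last hunk: the unit tests must import `openupgradelib.openupgrade` on machines without any Odoo installation, so `tests/test_openupgradelib.py` patches the builtin `__import__` and serves a `Mock` whenever `openerp` or `psycopg2` is requested. A minimal standalone version of the pattern (Python 3 only, using `unittest.mock` rather than the `mock` backport the tests still depend on) looks like this:

```python
import builtins
from unittest import mock

orig_import = builtins.__import__
fake_openerp = mock.Mock()


def import_mock(name, *args):
    # Serve the fake for "openerp" (and its submodules); defer to the
    # real import machinery for everything else.
    if name.split(".")[0] == "openerp":
        return fake_openerp
    return orig_import(name, *args)


with mock.patch("builtins.__import__", side_effect=import_mock):
    import openerp  # resolves to the Mock; no Odoo installation needed

    assert openerp is fake_openerp
```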