From 869ecef0c6f489b850de14dc5fa0791ccc4d4ef6 Mon Sep 17 00:00:00 2001
From: Vermeer GRANGE
Date: Fri, 13 Mar 2020 12:37:40 +0200
Subject: [PATCH 1/2] Set up new code quality tools and drop pylint

Welcome black, isort and flake8. Drop pylint, which is slow as hell.

A new git pre-commit hook will run black and isort; however, you have to
run flake8 manually, as it still reports many unfixed issues.
---
 .pylintrc        | 75 ------------------------------------------------
 Makefile         | 23 +++++++++------
 README.md        | 22 +++++---------
 requirements.in  |  7 ++++-
 requirements.txt | 15 ++++++++--
 setup.cfg        | 21 ++++++++++++++
 6 files changed, 61 insertions(+), 102 deletions(-)
 delete mode 100644 .pylintrc
 create mode 100644 setup.cfg

diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index d965f948e..000000000
--- a/.pylintrc
+++ /dev/null
@@ -1,75 +0,0 @@
-[MASTER]
-
-# Add files or directories to the blacklist. They should be base names, not
-# paths.
-ignore=migrations,south_migrations
-
-# Add files or directories matching the regex patterns to the blacklist. The
-# regex matches against base names, not paths.
-ignore-patterns=.*_pb2.py
-
-
-[MESSAGES CONTROL]
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifiers separated by comma (,) or put this
-# option multiple times (only on the command line, not in the configuration
-# file where it should appear only once).You can also use "--disable=all" to
-# disable everything first and then reenable specific checks. For example, if
-# you want to run only the similarities checker, you can use "--disable=all
-# --enable=similarities". If you want to run only the classes checker, but have
-# no Warning level messages displayed, use"--disable=all --enable=classes
-# --disable=W"
-disable=
-    attribute-defined-outside-init,
-    bad-continuation,
-    cyclic-import,
-    duplicate-code,
-    fixme,
-    file-ignored,
-    invalid-name,
-    locally-enabled,
-    locally-disabled,
-    missing-docstring,
-    no-init,
-    no-member,
-    no-self-use,
-    old-style-class,
-    protected-access,
-    redefined-variable-type,
-    superfluous-parens,
-    too-few-public-methods,
-    too-many-ancestors,
-    too-many-arguments,
-    too-many-branches,
-    too-many-instance-attributes,
-    too-many-lines,
-    too-many-locals,
-    too-many-public-methods,
-    too-many-statements,
-    unused-argument
-
-
-[REPORTS]
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details
-msg-template="{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"
-
-
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=120
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=8
-
-# Maximum number of parents for a class (see R0901).
-max-parents=10
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=10
diff --git a/Makefile b/Makefile
index ffe9bfa83..80b2876c9 100644
--- a/Makefile
+++ b/Makefile
@@ -83,14 +83,21 @@ clean-services: stop-services ## Delete containers and attached volumes
 # Code quality
 # ------------
 
-# Run pylint on the whole project.
-pylint-all:
-	pylint --rcfile=.pylintrc --reports=no --output-format=colorized $(PACKAGE_DIR) || true
-
-# Run pylint on a specific file, e.g.:
-# make pylint FILE=labonneboite/web/app.py
-pylint:
-	pylint --rcfile=.pylintrc --reports=no --output-format=colorized $(FILE) || true
+quality: black isort flake8
+
+black:
+	black --line-length 119 .
+
+isort:
+	isort --recursive .
+
+flake8:
+	flake8 .
+
+setup_git_pre_commit_hook:
+	touch .git/hooks/pre-commit
+	chmod +x .git/hooks/pre-commit
+	echo "make black && make isort" > .git/hooks/pre-commit
 
 
 # Local dev
diff --git a/README.md b/README.md
index 13b7808f2..199aed0a5 100644
--- a/README.md
+++ b/README.md
@@ -60,7 +60,7 @@ Table of contents:
 - [Elastic Search](#elasticsearch)
 - [DB content in the development environment](#db-content-in-the-development-environment)
 - [Running scripts](#running-scripts)
-- [Running Pylint](#running-pylint)
+- [Code quality](#code-quality)
 - [Debugging safely in development, staging or production](#debugging-safely-in-a-development-staging-or-production-environment)
 - [Importer](#importer)
 - [Single ROME versus multi-ROME search](#single-rome-vs-multi-rome-search)
@@ -234,25 +234,17 @@ For example `create_index`:
 
     $ python labonneboite/scripts/create_index.py
 
-## Running pylint
+## Code quality
 
-You can run [pylint](https://www.pylint.org) on the whole project:
+We use three code quality tools: black, isort and flake8.
 
-    $ make pylint-all
+Run `make quality` to run all three of them, or run `make black`, `make isort` and `make flake8` separately.
 
-Or on a specific python file:
+Black and isort will automatically apply changes for you; flake8, however, requires you to fix every issue manually.
 
-    $ make pylint FILE=labonneboite/web/app.py
-
-We recommend you use a pylint git pre-commit hook:
-
-    $ pip install git-pylint-commit-hook
-    $ vim .git/hooks/pre-commit
-    #!/bin/bash
-    # (...) previous content which was already present (e.g. nosetests)
-    # add the following line at the end of your pre-commit hook file
-    git-pylint-commit-hook
+We recommend you run `make setup_git_pre_commit_hook` to set up a git pre-commit hook which will run both black and isort automatically before every commit.
+You will still have to run flake8 manually, though.
 
 ## Debugging safely in a development, staging or production environment
 
diff --git a/requirements.in b/requirements.in
index 7ed318318..57828b272 100644
--- a/requirements.in
+++ b/requirements.in
@@ -26,9 +26,14 @@ huey
 Flask-DebugToolbar
 ipdb
 ipython
-pylint
 pip-tools
 
+# Code quality.
+pylint +flake8 +black==19.3b0 +isort + # profiling tools used in create_index.py pycallgraph pyprof2calltree diff --git a/requirements.txt b/requirements.txt index 9521a4419..7243eb400 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,19 +5,25 @@ # pip-compile --output-file requirements.txt requirements.in # alembic==0.9.10 +appdirs==1.4.3 # via black +appnope==0.1.0 # via ipython astroid==1.6.5 # via pylint +attrs==19.3.0 # via black babel==2.6.0 # via flask-babelex backcall==0.1.0 # via ipython +black==19.3b0 blinker==1.4 # via flask-debugtoolbar, raven certifi==2017.4.17 # via requests chardet==3.0.4 # via requests -click==6.7 # via flask, pip-tools +click==6.7 # via black, flask, pip-tools cssmin==0.2.0 decorator==4.3.0 # via ipython, traitlets, validators defusedxml==0.5.0 # via python3-openid, social-auth-core easyprocess==0.2.3 # via pyvirtualdisplay elasticsearch==1.9.0 +entrypoints==0.3 # via flake8 first==2.0.1 # via pip-tools +flake8==3.7.8 flask-admin==1.5.3 flask-assets==0.12 flask-babelex==0.9.3 @@ -40,7 +46,7 @@ idna==2.5 # via requests ipdb==0.11 ipython-genutils==0.2.0 # via traitlets ipython==6.4.0 -isort==4.2.15 # via pylint +isort==4.3.21 itsdangerous==0.24 # via flask, flask-debugtoolbar jedi==0.12.0 # via ipython jinja2==2.10.1 # via flask, flask-babelex @@ -51,7 +57,7 @@ locustio==0.7.5 mailjet-rest==1.3.3 mako==1.0.7 # via alembic markupsafe==1.1.1 -mccabe==0.6.1 # via pylint +mccabe==0.6.1 # via flake8, pylint msgpack-python==0.5.6 # via locustio mysqlclient==1.4.2.post1 nose==1.3.7 @@ -67,7 +73,9 @@ pip-tools==2.0.2 prompt-toolkit==1.0.15 # via ipython ptyprocess==0.6.0 # via pexpect pycallgraph==1.0.1 +pycodestyle==2.5.0 # via flake8 pycryptodomex==3.6.3 # via pyjwkest +pyflakes==2.1.1 # via flake8 pygments==2.2.0 # via ipython pyjwkest==1.4.0 # via social-auth-core pyjwt==1.5.2 # via social-auth-core @@ -99,6 +107,7 @@ social-auth-storage-sqlalchemy==1.1.0 # via social-auth-app-flask-sqlalchemy speaklater==1.3 sqlalchemy-utils==0.32.13 sqlalchemy==1.3.3 +toml==0.10.0 # via black traitlets==4.3.2 # via ipython unidecode==0.4.21 # via python-slugify urllib3==1.24.3 # via elasticsearch, requests, selenium diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 000000000..7386093f9 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,21 @@ +# - https://timothycrosley.github.io/isort/ +# - https://github.com/timothycrosley/isort#configuring-isort +# - https://github.com/timothycrosley/isort/wiki/isort-Settings + +[isort] +combine_as_imports = True +ensure_newline_before_comments = True +force_grid_wrap = 0 +include_trailing_comma = True +known_first_party = labonneboite +lines_after_imports = 2 +line_length = 119 +multi_line_output = 3 +use_parentheses = True + +# - https://www.flake8rules.com +# - http://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html#changing-the-ignore-list + +[flake8] +ignore = E203, W503 +max-line-length = 119 From d4e3d451545ad2b7e4ce6a34ce76e13aa881e826 Mon Sep 17 00:00:00 2001 From: Vermeer GRANGE Date: Fri, 13 Mar 2020 12:58:36 +0200 Subject: [PATCH 2/2] First `make quality` run --- ROME_NAF/analyses/print_array.py | 22 +- ROME_NAF/referentiels/referentiels.py | 9 +- labonneboite/alembic/env.py | 6 +- ...b893fc_add_flag_pmsmp_and_flag_afpr_poe.py | 66 +- ..._update_index_for_office_offer_matching.py | 43 +- ...locations_and_etablissements_backoffice.py | 101 +- .../090e86dfc304_add_reason_column.py | 11 +- .../0da6b26c212f_create_contact_form_table.py | 72 +- ...e448_modify_etablissements_admin_update.py | 33 +- 
...7f39f7fc_create_email_alternance_column.py | 46 +- .../200d176f96b6_harmonize_indexes.py | 39 +- .../240900fabe59_create_importer_tables.py | 194 +- .../2426e6b89deb_switch_to_utf8mb4.py | 83 +- ...1700f_add_social_network_link_in_office.py | 32 +- ...3dc5_add_alternate_index_for_offers_on_.py | 13 +- ...bae_drop_irrelevant_etablissements_raw_.py | 31 +- ...80a1c96e0_add_index_on_dpae_departement.py | 9 +- ...reate_remove_flag_alternance_in_office_.py | 14 +- ..._update_etablissements_backoffice_table.py | 103 +- ...a8_add_boost_global_or_by_for_rome_for_.py | 39 +- ...52f_create_phone_alternance_and_website.py | 48 +- .../versions/428e168fdf0c_update_users.py | 20 +- ...7a2a34d87e5_drop_column_hirings_zipcode.py | 13 +- .../4db5a80597e7_add_extra_naf_column_in_.py | 13 +- ...43d4cf1847_rename_dpae_table_to_hirings.py | 8 +- ..._add_index_for_offers_on_etablissements.py | 14 +- ...ename_remove_flag_alternance_et_create_.py | 29 +- ...6b5_drop_indexes_used_for_api_offres_v1.py | 26 +- ...5533cc7_add_index_on_hirings_siret_for_.py | 8 +- ...dd_index_on_departement_etablissements_.py | 13 +- ...d10e069fea_create_user_favorite_offices.py | 36 +- .../963bf41f07fa_add_office_admin_tables.py | 144 +- ...51564_add_index_on_etablissements_email.py | 12 +- .../a3c7706b467b_bugfix_save_tables.py | 39 +- ...063_merge_website1_and_website2_columns.py | 39 +- .../ba4492177099_switch_to_utf8mb4_bis.py | 13 +- ...bde9330b83fa_add_romes_to_remove_column.py | 16 +- ...4_add_field_new_name_to_etablissements_.py | 20 +- .../c519ecaf1fa6_deduplicate_users.py | 25 +- ...7cb31_increase_rome_lists_fields_length.py | 48 +- ...6b91ef_add_index_on_etablissements_raw_.py | 10 +- ...initial_migration_create_etablissements.py | 61 +- .../d441dca1e974_add_contact_mode_column.py | 31 +- .../d80c910949e4_add_column_dpae_duree_pec.py | 13 +- ...db2fdfb935ec_add_score_alternance_field.py | 21 +- ...convert_officeadminupdate_siret_in_text.py | 7 +- ...4e_add_certified_and_unique_id_in_save_.py | 48 +- .../e4bce598b236_user_and_social_auth.py | 144 +- ...c094998_create_etablissements_admin_add.py | 81 +- ...etablissements_admin_extra_geolocations.py | 48 +- labonneboite/common/activity.py | 34 +- labonneboite/common/autocomplete.py | 128 +- labonneboite/common/chunks.py | 3 +- labonneboite/common/contact_mode.py | 15 +- .../extract_ogr_rome_data.py | 24 +- labonneboite/common/database.py | 33 +- labonneboite/common/departements.py | 6 +- labonneboite/common/doorbell.py | 8 +- labonneboite/common/encoding.py | 12 +- labonneboite/common/env.py | 11 +- labonneboite/common/es.py | 227 +- labonneboite/common/esd.py | 85 +- labonneboite/common/fetcher.py | 4 - labonneboite/common/geocoding/__init__.py | 119 +- labonneboite/common/geocoding/datagouv.py | 28 +- labonneboite/common/hiring_type_util.py | 9 +- labonneboite/common/hotjar.py | 1 + labonneboite/common/load_data.py | 31 +- labonneboite/common/locations.py | 23 +- labonneboite/common/mailjet.py | 90 +- labonneboite/common/mapping.py | 47 +- labonneboite/common/maps/cache.py | 19 +- labonneboite/common/maps/constants.py | 9 +- labonneboite/common/maps/exceptions.py | 4 +- labonneboite/common/maps/precompute.py | 10 +- labonneboite/common/maps/travel.py | 16 +- labonneboite/common/maps/vendors/__init__.py | 14 +- labonneboite/common/maps/vendors/dummy.py | 2 +- labonneboite/common/maps/vendors/ign.py | 60 +- labonneboite/common/maps/vendors/mocks/ign.py | 23 +- .../common/maps/vendors/mocks/navitia.py | 31 +- .../common/maps/vendors/mocks/utils.py | 5 +- 
labonneboite/common/maps/vendors/navitia.py | 52 +- labonneboite/common/models/__init__.py | 11 +- labonneboite/common/models/auth.py | 34 +- labonneboite/common/models/base.py | 5 +- labonneboite/common/models/office.py | 150 +- labonneboite/common/models/office_admin.py | 159 +- labonneboite/common/models/office_mixin.py | 40 +- .../common/models/recruiter_message.py | 36 +- .../common/models/user_favorite_offices.py | 41 +- labonneboite/common/offers.py | 92 +- labonneboite/common/pagination.py | 25 +- labonneboite/common/pdf.py | 29 +- labonneboite/common/pro.py | 24 +- labonneboite/common/rome_mobilities.py | 3486 +++-------------- labonneboite/common/scoring.py | 58 +- labonneboite/common/search.py | 303 +- labonneboite/common/siret.py | 1 - labonneboite/common/sortedcollection.py | 117 +- labonneboite/common/sorting.py | 15 +- labonneboite/common/user_util.py | 6 +- labonneboite/common/util.py | 56 +- labonneboite/conf/__init__.py | 9 +- .../conf/common/overrides/bonaparte.py | 7 +- .../conf/common/overrides/development.py | 15 +- labonneboite/conf/common/overrides/test.py | 41 +- labonneboite/conf/common/settings_common.py | 148 +- labonneboite/importer/compute_score.py | 199 +- labonneboite/importer/conf/bonaparte.py | 3 +- labonneboite/importer/conf/development.py | 2 +- labonneboite/importer/conf/test.py | 3 +- labonneboite/importer/debug.py | 101 +- labonneboite/importer/jobs/base.py | 18 +- labonneboite/importer/jobs/check_dpae.py | 8 +- .../importer/jobs/check_etablissements.py | 5 +- labonneboite/importer/jobs/common.py | 5 +- labonneboite/importer/jobs/compute_scores.py | 14 +- labonneboite/importer/jobs/extract_dpae.py | 54 +- .../importer/jobs/extract_etablissements.py | 163 +- labonneboite/importer/jobs/geocode.py | 176 +- labonneboite/importer/jobs/populate_flags.py | 37 +- labonneboite/importer/models/computing.py | 49 +- labonneboite/importer/sanity.py | 28 +- .../importer/scripts/clean_csv_city_codes.py | 58 +- labonneboite/importer/settings.py | 20 +- labonneboite/importer/util.py | 135 +- labonneboite/scripts/create_index.py | 389 +- labonneboite/scripts/create_sitemap.py | 21 +- labonneboite/scripts/loadtesting.py | 31 +- labonneboite/scripts/prepare_mailing_data.py | 114 +- .../rebuild_simplified_rome_naf_mapping.py | 108 +- .../scripts/update_metiers_tension.py | 61 +- labonneboite/tests/__init__.py | 3 +- labonneboite/tests/app/maps/test_cache.py | 16 +- labonneboite/tests/app/maps/test_ign.py | 95 +- labonneboite/tests/app/maps/test_navitia.py | 52 +- labonneboite/tests/app/maps/test_travel.py | 46 +- labonneboite/tests/app/test_activity.py | 15 +- labonneboite/tests/app/test_api_scripts.py | 60 +- labonneboite/tests/app/test_contact_mode.py | 26 +- labonneboite/tests/app/test_datagouv.py | 11 +- labonneboite/tests/app/test_geocoding.py | 118 +- labonneboite/tests/app/test_locations.py | 17 +- labonneboite/tests/app/test_models.py | 49 +- labonneboite/tests/app/test_scoring.py | 1 - labonneboite/tests/app/test_search.py | 19 +- labonneboite/tests/app/test_slug.py | 108 +- .../tests/app/test_suggest_locations.py | 9 +- labonneboite/tests/app/test_url.py | 27 +- labonneboite/tests/importer/test_base.py | 6 +- .../tests/importer/test_compute_score.py | 66 +- labonneboite/tests/importer/test_dpae.py | 9 +- .../tests/importer/test_etablissements.py | 62 +- labonneboite/tests/importer/test_geocode.py | 18 +- labonneboite/tests/importer/test_importer.py | 9 +- labonneboite/tests/importer/test_scoring.py | 8 +- .../tests/integration/test_synchro.py | 5 +- 
.../tests/scripts/test_create_index.py | 413 +- .../scripts/test_prepare_mailing_data.py | 3 +- labonneboite/tests/selenium/__init__.py | 3 +- labonneboite/tests/selenium/base.py | 31 +- .../test_make_a_new_search_on_search_page.py | 101 +- labonneboite/tests/selenium/test_reset_naf.py | 47 +- labonneboite/tests/selenium/test_results.py | 34 +- .../selenium/test_search_selecting_car.py | 52 +- labonneboite/tests/selenium/test_simple.py | 19 +- labonneboite/tests/test_base.py | 30 +- labonneboite/tests/web/api/test_api.py | 2004 +++++----- labonneboite/tests/web/api/test_api_base.py | 725 ++-- .../tests/web/api/test_api_office_details.py | 103 +- labonneboite/tests/web/front/__init__.py | 3 +- labonneboite/tests/web/front/test_admin.py | 19 +- labonneboite/tests/web/front/test_auth.py | 43 +- .../tests/web/front/test_autocomplete.py | 57 +- .../tests/web/front/test_companies.py | 67 +- .../tests/web/front/test_contact_form.py | 243 +- .../tests/web/front/test_favorites.py | 155 +- labonneboite/tests/web/front/test_health.py | 9 +- .../tests/web/front/test_jepostule.py | 28 +- .../tests/web/front/test_pagination.py | 12 +- .../tests/web/front/test_peam_pipeline.py | 58 +- .../tests/web/front/test_pro_version.py | 59 +- labonneboite/tests/web/front/test_root.py | 18 +- labonneboite/tests/web/front/test_routes.py | 148 +- .../tests/web/front/test_static_pages.py | 1 - .../tests/web/front/test_user_account.py | 51 +- .../tests/web/integration/test_maps.py | 89 +- labonneboite/web/admin/forms.py | 9 +- labonneboite/web/admin/utils.py | 10 +- labonneboite/web/admin/views/index.py | 2 +- .../web/admin/views/office_admin_add.py | 278 +- .../views/office_admin_extra_geolocation.py | 101 +- .../web/admin/views/office_admin_remove.py | 190 +- .../web/admin/views/office_admin_update.py | 635 ++- labonneboite/web/admin/views/user.py | 28 +- labonneboite/web/api/util.py | 24 +- labonneboite/web/api/views.py | 361 +- labonneboite/web/app.py | 254 +- labonneboite/web/auth/backends/peam.py | 50 +- .../web/auth/backends/peam_recruiter.py | 94 +- labonneboite/web/auth/utils.py | 6 +- labonneboite/web/auth/views.py | 156 +- labonneboite/web/config.py | 52 +- labonneboite/web/contact_form/forms.py | 109 +- labonneboite/web/contact_form/mail.py | 78 +- labonneboite/web/contact_form/views.py | 278 +- labonneboite/web/data/forms.py | 39 +- labonneboite/web/data/views.py | 95 +- labonneboite/web/health/util.py | 35 +- labonneboite/web/health/views.py | 23 +- labonneboite/web/jepostule/utils.py | 5 +- labonneboite/web/jepostule/views.py | 61 +- labonneboite/web/maps/views.py | 61 +- labonneboite/web/office/views.py | 66 +- labonneboite/web/root/views.py | 112 +- labonneboite/web/search/forms.py | 100 +- labonneboite/web/search/views.py | 387 +- labonneboite/web/tilkee/utils.py | 122 +- labonneboite/web/tilkee/views.py | 27 +- labonneboite/web/user/forms.py | 1 - labonneboite/web/user/views.py | 130 +- labonneboite/web/utils.py | 5 +- setup.py | 42 +- 224 files changed, 8144 insertions(+), 11269 deletions(-) diff --git a/ROME_NAF/analyses/print_array.py b/ROME_NAF/analyses/print_array.py index 30b24a82c..e83bace62 100644 --- a/ROME_NAF/analyses/print_array.py +++ b/ROME_NAF/analyses/print_array.py @@ -1,14 +1,22 @@ import pandas as pd -# Affichage des codes ROME les plus fréquents -ROME_df = pd.read_csv('../referentiels/referentiel_ROME/20150921_arboprincipale28427_ROME.csv', index_col=0, sep='|', dtype=str) -OGR_df = pd.read_csv('../referentiels/referentiel_OGR/20150921_arboprincipale28427_OGR.csv', sep='|', 
dtype=str).set_index('OGR') -NAF_df = pd.read_csv('../referentiels/referentiel_NAF/naf2008_liste_n5_nouveau_header.csv', sep='|', encoding="utf-8").set_index(['NAF']) +# Affichage des codes ROME les plus fréquents -stats_lbb = pd.read_csv('../stats_lbb/stats_lbb.csv') +ROME_df = pd.read_csv( + "../referentiels/referentiel_ROME/20150921_arboprincipale28427_ROME.csv", index_col=0, sep="|", dtype=str +) +OGR_df = pd.read_csv( + "../referentiels/referentiel_OGR/20150921_arboprincipale28427_OGR.csv", sep="|", dtype=str +).set_index("OGR") +NAF_df = pd.read_csv( + "../referentiels/referentiel_NAF/naf2008_liste_n5_nouveau_header.csv", sep="|", encoding="utf-8" +).set_index(["NAF"]) + +stats_lbb = pd.read_csv("../stats_lbb/stats_lbb.csv") top_slugs = stats_lbb[:34] + def pprint_ROME(array): for i, line in enumerate(top_slugs.iterrows()): slugs, visite, uniq = line[1] @@ -22,10 +30,10 @@ def pprint_ROME(array): poids_NAF = array[:, ROME_index, :].sum(1) top_NAF_indexes = (-poids_NAF).argsort()[:20] - print('{}. {} ({})'.format(i, ROME_label, ROME_code)) + print("{}. {} ({})".format(i, ROME_label, ROME_code)) for NAF_index in top_NAF_indexes: NAF_code = NAF_df.iloc[NAF_index].name NAF_label = NAF_df.loc[NAF_code].label - print('{}, {} : {}'.format(NAF_code, NAF_label, poids_NAF[NAF_index], )) + print("{}, {} : {}".format(NAF_code, NAF_label, poids_NAF[NAF_index])) print() diff --git a/ROME_NAF/referentiels/referentiels.py b/ROME_NAF/referentiels/referentiels.py index abd4a68e9..08c73027a 100644 --- a/ROME_NAF/referentiels/referentiels.py +++ b/ROME_NAF/referentiels/referentiels.py @@ -3,8 +3,11 @@ import pandas as pd -ROME_df = pd.read_csv('referentiel_ROME/20150921_arboprincipale28427_ROME.csv', index_col=0, sep='|', dtype=str) -OGR_df = pd.read_csv('referentiel_OGR/20150921_arboprincipale28427_OGR.csv', sep='|', dtype=str).set_index('OGR') +ROME_df = pd.read_csv("referentiel_ROME/20150921_arboprincipale28427_ROME.csv", index_col=0, sep="|", dtype=str) -NAF_df = pd.read_csv('referentiel_NAF/naf2008_liste_n5_nouveau_header.csv', sep='|', encoding="utf-8").set_index(['NAF']) +OGR_df = pd.read_csv("referentiel_OGR/20150921_arboprincipale28427_OGR.csv", sep="|", dtype=str).set_index("OGR") + +NAF_df = pd.read_csv("referentiel_NAF/naf2008_liste_n5_nouveau_header.csv", sep="|", encoding="utf-8").set_index( + ["NAF"] +) diff --git a/labonneboite/alembic/env.py b/labonneboite/alembic/env.py index 061e5c8ac..499dc4357 100644 --- a/labonneboite/alembic/env.py +++ b/labonneboite/alembic/env.py @@ -1,4 +1,3 @@ - from logging.config import fileConfig from alembic import context @@ -44,10 +43,7 @@ def run_migrations_online(): connectable = create_engine(get_db_string()) with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/labonneboite/alembic/versions/013a2cb893fc_add_flag_pmsmp_and_flag_afpr_poe.py b/labonneboite/alembic/versions/013a2cb893fc_add_flag_pmsmp_and_flag_afpr_poe.py index fd553887b..a144e559a 100644 --- a/labonneboite/alembic/versions/013a2cb893fc_add_flag_pmsmp_and_flag_afpr_poe.py +++ b/labonneboite/alembic/versions/013a2cb893fc_add_flag_pmsmp_and_flag_afpr_poe.py @@ -5,70 +5,68 @@ Revises: c5cd5037cb31 Create Date: 2019-03-01 11:39:05.272684 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, 
used by Alembic. -revision = '013a2cb893fc' -down_revision = 'c5cd5037cb31' +revision = "013a2cb893fc" +down_revision = "c5cd5037cb31" branch_labels = None depends_on = None def upgrade(): op.add_column( - 'etablissements_raw', - sa.Column('flag_poe_afpr', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_raw", + sa.Column("flag_poe_afpr", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements_raw', - sa.Column('flag_pmsmp', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_raw", + sa.Column("flag_pmsmp", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements', - sa.Column('flag_poe_afpr', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements", + sa.Column("flag_poe_afpr", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements', - sa.Column('flag_pmsmp', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements", sa.Column("flag_pmsmp", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) ) op.add_column( - 'etablissements_admin_add', - sa.Column('flag_poe_afpr', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_admin_add", + sa.Column("flag_poe_afpr", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements_admin_add', - sa.Column('flag_pmsmp', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_admin_add", + sa.Column("flag_pmsmp", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements_exportable', - sa.Column('flag_poe_afpr', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_exportable", + sa.Column("flag_poe_afpr", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements_exportable', - sa.Column('flag_pmsmp', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_exportable", + sa.Column("flag_pmsmp", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements_backoffice', - sa.Column('flag_poe_afpr', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_backoffice", + sa.Column("flag_poe_afpr", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) op.add_column( - 'etablissements_backoffice', - sa.Column('flag_pmsmp', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_backoffice", + sa.Column("flag_pmsmp", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) - def downgrade(): - op.drop_column('etablissements_raw', 'flag_poe_afpr') - op.drop_column('etablissements_raw', 'flag_pmsmp') - op.drop_column('etablissements', 'flag_poe_afpr') - op.drop_column('etablissements', 'flag_pmsmp') - op.drop_column('etablissements_admin_add', 'flag_poe_afpr') - op.drop_column('etablissements_admin_add', 'flag_pmsmp') - op.drop_column('etablissements_exportable', 'flag_poe_afpr') - op.drop_column('etablissements_exportable', 'flag_pmsmp') - op.drop_column('etablissements_backoffice', 'flag_poe_afpr') - op.drop_column('etablissements_backoffice', 'flag_pmsmp') + op.drop_column("etablissements_raw", "flag_poe_afpr") + op.drop_column("etablissements_raw", "flag_pmsmp") + 
op.drop_column("etablissements", "flag_poe_afpr") + op.drop_column("etablissements", "flag_pmsmp") + op.drop_column("etablissements_admin_add", "flag_poe_afpr") + op.drop_column("etablissements_admin_add", "flag_pmsmp") + op.drop_column("etablissements_exportable", "flag_poe_afpr") + op.drop_column("etablissements_exportable", "flag_pmsmp") + op.drop_column("etablissements_backoffice", "flag_poe_afpr") + op.drop_column("etablissements_backoffice", "flag_pmsmp") diff --git a/labonneboite/alembic/versions/025bcb5f723e_update_index_for_office_offer_matching.py b/labonneboite/alembic/versions/025bcb5f723e_update_index_for_office_offer_matching.py index 8d8a03c36..429549836 100644 --- a/labonneboite/alembic/versions/025bcb5f723e_update_index_for_office_offer_matching.py +++ b/labonneboite/alembic/versions/025bcb5f723e_update_index_for_office_offer_matching.py @@ -5,35 +5,38 @@ Revises: 8f1ef5533cc7 Create Date: 2018-12-13 12:18:58.492180 """ -from alembic import op - import sqlalchemy as sa +from alembic import op # Revision identifiers, used by Alembic. -revision = '025bcb5f723e' -down_revision = '8f1ef5533cc7' +revision = "025bcb5f723e" +down_revision = "8f1ef5533cc7" branch_labels = None depends_on = None def upgrade(): - op.drop_constraint('_raisonsociale_codecommune', 'etablissements', type_='unique') - op.drop_constraint('_raisonsociale_codecommune', 'etablissements_exportable', type_='unique') - op.drop_constraint('_enseigne_codecommune', 'etablissements', type_='unique') - op.drop_constraint('_enseigne_codecommune', 'etablissements_exportable', type_='unique') - op.create_index('_raisonsociale_departement', 'etablissements', ['raisonsociale', 'departement'], unique=False) - op.create_index('_raisonsociale_departement', 'etablissements_exportable', ['raisonsociale', 'departement'], unique=False) - op.create_index('_enseigne_departement', 'etablissements', ['enseigne', 'departement'], unique=False) - op.create_index('_enseigne_departement', 'etablissements_exportable', ['enseigne', 'departement'], unique=False) + op.drop_constraint("_raisonsociale_codecommune", "etablissements", type_="unique") + op.drop_constraint("_raisonsociale_codecommune", "etablissements_exportable", type_="unique") + op.drop_constraint("_enseigne_codecommune", "etablissements", type_="unique") + op.drop_constraint("_enseigne_codecommune", "etablissements_exportable", type_="unique") + op.create_index("_raisonsociale_departement", "etablissements", ["raisonsociale", "departement"], unique=False) + op.create_index( + "_raisonsociale_departement", "etablissements_exportable", ["raisonsociale", "departement"], unique=False + ) + op.create_index("_enseigne_departement", "etablissements", ["enseigne", "departement"], unique=False) + op.create_index("_enseigne_departement", "etablissements_exportable", ["enseigne", "departement"], unique=False) def downgrade(): - op.drop_constraint('_raisonsociale_departement', 'etablissements', type_='unique') - op.drop_constraint('_raisonsociale_departement', 'etablissements_exportable', type_='unique') - op.drop_constraint('_enseigne_departement', 'etablissements', type_='unique') - op.drop_constraint('_enseigne_departement', 'etablissements_exportable', type_='unique') - op.create_index('_raisonsociale_codecommune', 'etablissements', ['raisonsociale', 'codecommune'], unique=False) - op.create_index('_raisonsociale_codecommune', 'etablissements_exportable', ['raisonsociale', 'codecommune'], unique=False) - op.create_index('_enseigne_codecommune', 'etablissements', ['enseigne', 
'codecommune'], unique=False) - op.create_index('_enseigne_codecommune', 'etablissements_exportable', ['enseigne', 'codecommune'], unique=False) + op.drop_constraint("_raisonsociale_departement", "etablissements", type_="unique") + op.drop_constraint("_raisonsociale_departement", "etablissements_exportable", type_="unique") + op.drop_constraint("_enseigne_departement", "etablissements", type_="unique") + op.drop_constraint("_enseigne_departement", "etablissements_exportable", type_="unique") + op.create_index("_raisonsociale_codecommune", "etablissements", ["raisonsociale", "codecommune"], unique=False) + op.create_index( + "_raisonsociale_codecommune", "etablissements_exportable", ["raisonsociale", "codecommune"], unique=False + ) + op.create_index("_enseigne_codecommune", "etablissements", ["enseigne", "codecommune"], unique=False) + op.create_index("_enseigne_codecommune", "etablissements_exportable", ["enseigne", "codecommune"], unique=False) diff --git a/labonneboite/alembic/versions/0592646101eb_create_geolocations_and_etablissements_backoffice.py b/labonneboite/alembic/versions/0592646101eb_create_geolocations_and_etablissements_backoffice.py index 50fc97de2..ae7453cf7 100644 --- a/labonneboite/alembic/versions/0592646101eb_create_geolocations_and_etablissements_backoffice.py +++ b/labonneboite/alembic/versions/0592646101eb_create_geolocations_and_etablissements_backoffice.py @@ -5,67 +5,68 @@ Revises: 240900fabe59 Create Date: 2017-11-13 13:52:23.414532 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. -revision = '0592646101eb' -down_revision = '240900fabe59' +revision = "0592646101eb" +down_revision = "240900fabe59" branch_labels = None depends_on = None def upgrade(): - op.create_table('geolocations', - sa.Column('full_address', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('coordinates_x', mysql.FLOAT(), nullable=True), - sa.Column('coordinates_y', mysql.FLOAT(), nullable=True), - sa.PrimaryKeyConstraint('full_address'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "geolocations", + sa.Column("full_address", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("coordinates_x", mysql.FLOAT(), nullable=True), + sa.Column("coordinates_y", mysql.FLOAT(), nullable=True), + sa.PrimaryKeyConstraint("full_address"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) - op.create_table('etablissements_backoffice', - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('raisonsociale', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('enseigne', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('codenaf', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('trancheeffectif', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('numerorue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('libellerue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('codepostal', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=11), nullable=True), - sa.Column('tel', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), 
nullable=True), - sa.Column('email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('website', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('flag_alternance', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_junior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_senior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_handicap', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('has_multi_geolocations', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('codecommune', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('coordinates_x', mysql.FLOAT(), nullable=True), - sa.Column('coordinates_y', mysql.FLOAT(), nullable=True), - sa.Column('departement', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=11), nullable=True), - sa.Column('score', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('semester-1', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('semester-2', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('semester-3', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('semester-4', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('semester-5', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('semester-6', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('semester-7', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('effectif', mysql.DOUBLE(asdecimal=True), nullable=True), - sa.Column('score_regr', mysql.FLOAT(), nullable=True), - sa.PrimaryKeyConstraint('siret'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "etablissements_backoffice", + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("raisonsociale", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("enseigne", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("codenaf", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("trancheeffectif", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("numerorue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("libellerue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("codepostal", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=11), nullable=True), + sa.Column("tel", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("website", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("flag_alternance", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_junior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_senior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_handicap", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("has_multi_geolocations", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + 
sa.Column("codecommune", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("coordinates_x", mysql.FLOAT(), nullable=True), + sa.Column("coordinates_y", mysql.FLOAT(), nullable=True), + sa.Column("departement", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=11), nullable=True), + sa.Column("score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("semester-1", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("semester-2", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("semester-3", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("semester-4", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("semester-5", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("semester-6", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("semester-7", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("effectif", mysql.DOUBLE(asdecimal=True), nullable=True), + sa.Column("score_regr", mysql.FLOAT(), nullable=True), + sa.PrimaryKeyConstraint("siret"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) def downgrade(): - op.drop_table('geolocations') - op.drop_table('etablissements_backoffice') - - + op.drop_table("geolocations") + op.drop_table("etablissements_backoffice") diff --git a/labonneboite/alembic/versions/090e86dfc304_add_reason_column.py b/labonneboite/alembic/versions/090e86dfc304_add_reason_column.py index 638c82e3b..ff049415e 100644 --- a/labonneboite/alembic/versions/090e86dfc304_add_reason_column.py +++ b/labonneboite/alembic/versions/090e86dfc304_add_reason_column.py @@ -5,24 +5,23 @@ Revises: eaba5c094998 Create Date: 2017-08-17 12:15:08.841724 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '090e86dfc304' -down_revision = 'eaba5c094998' +revision = "090e86dfc304" +down_revision = "eaba5c094998" branch_labels = None depends_on = None def upgrade(): op.add_column( - 'etablissements_admin_update', - sa.Column('reason', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False) + "etablissements_admin_update", sa.Column("reason", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False) ) def downgrade(): - op.drop_column('etablissements_admin_update', 'reason') + op.drop_column("etablissements_admin_update", "reason") diff --git a/labonneboite/alembic/versions/0da6b26c212f_create_contact_form_table.py b/labonneboite/alembic/versions/0da6b26c212f_create_contact_form_table.py index 80cd439d5..d241fb3d7 100644 --- a/labonneboite/alembic/versions/0da6b26c212f_create_contact_form_table.py +++ b/labonneboite/alembic/versions/0da6b26c212f_create_contact_form_table.py @@ -7,72 +7,68 @@ """ import enum +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql from sqlalchemy.types import Enum -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '0da6b26c212f' -down_revision = 'd441dca1e974' +revision = "0da6b26c212f" +down_revision = "d441dca1e974" branch_labels = None depends_on = None def create_table(name, *columns): common_colums = [ - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('siret', mysql.TINYTEXT(), nullable=True), - sa.Column('requested_by_first_name', mysql.TINYTEXT()), - sa.Column('requested_by_last_name', mysql.TINYTEXT(), nullable=True), - sa.Column('requested_by_email', mysql.TINYTEXT(), nullable=True), - sa.Column('requested_by_phone', mysql.TINYTEXT(), nullable=True), - sa.Column('date_created', mysql.DATETIME(), nullable=True), - sa.Column('date_updated', mysql.DATETIME(), nullable=True), + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("siret", mysql.TINYTEXT(), nullable=True), + sa.Column("requested_by_first_name", mysql.TINYTEXT()), + sa.Column("requested_by_last_name", mysql.TINYTEXT(), nullable=True), + sa.Column("requested_by_email", mysql.TINYTEXT(), nullable=True), + sa.Column("requested_by_phone", mysql.TINYTEXT(), nullable=True), + sa.Column("date_created", mysql.DATETIME(), nullable=True), + sa.Column("date_updated", mysql.DATETIME(), nullable=True), ] - args = [name] + common_colums + list(columns) + [sa.PrimaryKeyConstraint('id')] - op.create_table(*args, mysql_collate='utf8mb4_unicode_ci', mysql_default_charset='utf8mb4', mysql_engine='InnoDB') + args = [name] + common_colums + list(columns) + [sa.PrimaryKeyConstraint("id")] + op.create_table(*args, mysql_collate="utf8mb4_unicode_ci", mysql_default_charset="utf8mb4", mysql_engine="InnoDB") def upgrade(): create_table( - 'other_recruiter_message', - sa.Column('comment', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=True), + "other_recruiter_message", sa.Column("comment", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=True) ) create_table( - 'remove_recruiter_message', - sa.Column('remove_lba', mysql.BOOLEAN(), default=True), - sa.Column('remove_lbb', mysql.BOOLEAN(), default=True), + "remove_recruiter_message", + sa.Column("remove_lba", mysql.BOOLEAN(), default=True), + sa.Column("remove_lbb", mysql.BOOLEAN(), default=True), ) - create_table( - 'update_coordinates_recruiter_message', - sa.Column('new_website', mysql.TINYTEXT(), nullable=True), - sa.Column('new_email', mysql.TINYTEXT(), nullable=True), - sa.Column('new_phone', mysql.TINYTEXT(), nullable=True), - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True), - sa.Column('new_email_alternance', mysql.TINYTEXT(), nullable=True), - sa.Column('new_phone_alternance', mysql.TINYTEXT(), nullable=True), - sa.Column('social_network', mysql.TINYTEXT(), nullable=True), + "update_coordinates_recruiter_message", + sa.Column("new_website", mysql.TINYTEXT(), nullable=True), + sa.Column("new_email", mysql.TINYTEXT(), nullable=True), + sa.Column("new_phone", mysql.TINYTEXT(), nullable=True), + sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True), + sa.Column("new_email_alternance", mysql.TINYTEXT(), nullable=True), + sa.Column("new_phone_alternance", mysql.TINYTEXT(), nullable=True), + sa.Column("social_network", mysql.TINYTEXT(), nullable=True), ) - create_table( - 'update_jobs_recruiter_message', - + "update_jobs_recruiter_message", # Update jobs : comma-separated rome ids - sa.Column('romes_to_add', mysql.TINYTEXT(), nullable=True), - sa.Column('romes_to_remove', mysql.TINYTEXT(), nullable=True), - sa.Column('romes_alternance_to_add', mysql.TINYTEXT(), nullable=True), - 
sa.Column('romes_alternance_to_remove', mysql.TINYTEXT(), nullable=True), + sa.Column("romes_to_add", mysql.TINYTEXT(), nullable=True), + sa.Column("romes_to_remove", mysql.TINYTEXT(), nullable=True), + sa.Column("romes_alternance_to_add", mysql.TINYTEXT(), nullable=True), + sa.Column("romes_alternance_to_remove", mysql.TINYTEXT(), nullable=True), ) def downgrade(): - op.drop_table('other_recruiter_message') - op.drop_table('remove_recruiter_message') - op.drop_table('update_coordinates_recruiter_message') - op.drop_table('update_jobs_recruiter_message') + op.drop_table("other_recruiter_message") + op.drop_table("remove_recruiter_message") + op.drop_table("update_coordinates_recruiter_message") + op.drop_table("update_jobs_recruiter_message") diff --git a/labonneboite/alembic/versions/1124e80be448_modify_etablissements_admin_update.py b/labonneboite/alembic/versions/1124e80be448_modify_etablissements_admin_update.py index 463c907d1..8f150e5e6 100644 --- a/labonneboite/alembic/versions/1124e80be448_modify_etablissements_admin_update.py +++ b/labonneboite/alembic/versions/1124e80be448_modify_etablissements_admin_update.py @@ -9,42 +9,51 @@ Revises: f03cff523555 Create Date: 2017-09-19 09:25:28.681977 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '1124e80be448' -down_revision = 'f03cff523555' +revision = "1124e80be448" +down_revision = "f03cff523555" branch_labels = None depends_on = None def upgrade(): - op.add_column('etablissements_admin_update', sa.Column('romes_to_boost', - mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) + op.add_column( + "etablissements_admin_update", + sa.Column("romes_to_boost", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) - op.add_column('etablissements_admin_update', sa.Column('boost', mysql.TINYINT(display_width=1), - server_default=sa.text("'0'"), autoincrement=False, nullable=False)) + op.add_column( + "etablissements_admin_update", + sa.Column( + "boost", mysql.TINYINT(display_width=1), server_default=sa.text("'0'"), autoincrement=False, nullable=False + ), + ) conn = op.get_bind() for item in conn.execute("SELECT * FROM `etablissements_admin_update`;"): if item.new_score == 100: conn.execute("UPDATE `etablissements_admin_update` SET `boost` = 1 WHERE id = %s;" % item.id) - op.drop_column('etablissements_admin_update', 'new_score') + op.drop_column("etablissements_admin_update", "new_score") + def downgrade(): - op.add_column('etablissements_admin_update', sa.Column('new_score', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=True)) + op.add_column( + "etablissements_admin_update", + sa.Column("new_score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + ) conn = op.get_bind() for item in conn.execute("SELECT * FROM `etablissements_admin_update`;"): if item.boost: conn.execute("UPDATE `etablissements_admin_update` SET `new_score` = 100 WHERE id = %s;" % item.id) - op.drop_column('etablissements_admin_update', 'romes_to_boost') - op.drop_column('etablissements_admin_update', 'boost') + op.drop_column("etablissements_admin_update", "romes_to_boost") + op.drop_column("etablissements_admin_update", "boost") diff --git a/labonneboite/alembic/versions/11fc7f39f7fc_create_email_alternance_column.py b/labonneboite/alembic/versions/11fc7f39f7fc_create_email_alternance_column.py index fdf8f5c59..959817695 100644 --- 
a/labonneboite/alembic/versions/11fc7f39f7fc_create_email_alternance_column.py +++ b/labonneboite/alembic/versions/11fc7f39f7fc_create_email_alternance_column.py @@ -5,32 +5,42 @@ Revises: 4db5a80597e7 Create Date: 2017-11-20 11:19:59.347665 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. -revision = '11fc7f39f7fc' -down_revision = '4db5a80597e7' +revision = "11fc7f39f7fc" +down_revision = "4db5a80597e7" branch_labels = None depends_on = None def upgrade(): - op.add_column('etablissements_admin_update', - sa.Column('email_alternance', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) - op.add_column('etablissements_admin_add', - sa.Column('email_alternance', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) - op.add_column('etablissements_exportable', - sa.Column('email_alternance', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) - op.add_column('etablissements_raw', - sa.Column('email_alternance', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) - op.add_column('etablissements', - sa.Column('email_alternance', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) + op.add_column( + "etablissements_admin_update", + sa.Column("email_alternance", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) + op.add_column( + "etablissements_admin_add", + sa.Column("email_alternance", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) + op.add_column( + "etablissements_exportable", + sa.Column("email_alternance", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) + op.add_column( + "etablissements_raw", sa.Column("email_alternance", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False) + ) + op.add_column( + "etablissements", sa.Column("email_alternance", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False) + ) + def downgrade(): - op.drop_column('etablissements_admin_update', 'email_alternance') - op.drop_column('etablissements_admin_add', 'email_alternance') - op.drop_column('etablissements_exportable', 'email_alternance') - op.drop_column('etablissements_raw', 'email_alternance') - op.drop_column('etablissements', 'email_alternance') + op.drop_column("etablissements_admin_update", "email_alternance") + op.drop_column("etablissements_admin_add", "email_alternance") + op.drop_column("etablissements_exportable", "email_alternance") + op.drop_column("etablissements_raw", "email_alternance") + op.drop_column("etablissements", "email_alternance") diff --git a/labonneboite/alembic/versions/200d176f96b6_harmonize_indexes.py b/labonneboite/alembic/versions/200d176f96b6_harmonize_indexes.py index 567f2a210..5bda769cf 100644 --- a/labonneboite/alembic/versions/200d176f96b6_harmonize_indexes.py +++ b/labonneboite/alembic/versions/200d176f96b6_harmonize_indexes.py @@ -5,35 +5,34 @@ Revises: 2df1845b3dc5 Create Date: 2018-11-30 15:12:23.807521 """ -from alembic import op - import sqlalchemy as sa +from alembic import op # Revision identifiers, used by Alembic. 
-revision = '200d176f96b6' -down_revision = 'e305ab1e864e' +revision = "200d176f96b6" +down_revision = "e305ab1e864e" branch_labels = None depends_on = None def upgrade(): - op.drop_constraint('dept_i', 'etablissements', type_='unique') - op.drop_constraint('dept_i', 'etablissements_exportable', type_='unique') - op.drop_constraint('dept_i', 'etablissements_raw', type_='unique') - op.drop_constraint('dept_i', 'hirings', type_='unique') - op.create_index('_departement', 'etablissements', ['departement'], unique=False) - op.create_index('_departement', 'etablissements_exportable', ['departement'], unique=False) - op.create_index('_departement', 'etablissements_raw', ['departement'], unique=False) - op.create_index('_departement', 'hirings', ['departement'], unique=False) + op.drop_constraint("dept_i", "etablissements", type_="unique") + op.drop_constraint("dept_i", "etablissements_exportable", type_="unique") + op.drop_constraint("dept_i", "etablissements_raw", type_="unique") + op.drop_constraint("dept_i", "hirings", type_="unique") + op.create_index("_departement", "etablissements", ["departement"], unique=False) + op.create_index("_departement", "etablissements_exportable", ["departement"], unique=False) + op.create_index("_departement", "etablissements_raw", ["departement"], unique=False) + op.create_index("_departement", "hirings", ["departement"], unique=False) def downgrade(): - op.drop_constraint('_departement', 'etablissements', type_='unique') - op.drop_constraint('_departement', 'etablissements_exportable', type_='unique') - op.drop_constraint('_departement', 'etablissements_raw', type_='unique') - op.drop_constraint('_departement', 'hirings', type_='unique') - op.create_index('dept_i', 'etablissements', ['departement'], unique=False) - op.create_index('dept_i', 'etablissements_exportable', ['departement'], unique=False) - op.create_index('dept_i', 'etablissements_raw', ['departement'], unique=False) - op.create_index('dept_i', 'hirings', ['departement'], unique=False) + op.drop_constraint("_departement", "etablissements", type_="unique") + op.drop_constraint("_departement", "etablissements_exportable", type_="unique") + op.drop_constraint("_departement", "etablissements_raw", type_="unique") + op.drop_constraint("_departement", "hirings", type_="unique") + op.create_index("dept_i", "etablissements", ["departement"], unique=False) + op.create_index("dept_i", "etablissements_exportable", ["departement"], unique=False) + op.create_index("dept_i", "etablissements_raw", ["departement"], unique=False) + op.create_index("dept_i", "hirings", ["departement"], unique=False) diff --git a/labonneboite/alembic/versions/240900fabe59_create_importer_tables.py b/labonneboite/alembic/versions/240900fabe59_create_importer_tables.py index ea5c9b710..3488ae40e 100644 --- a/labonneboite/alembic/versions/240900fabe59_create_importer_tables.py +++ b/labonneboite/alembic/versions/240900fabe59_create_importer_tables.py @@ -5,118 +5,126 @@ Revises: bde9330b83fa Create Date: 2017-11-10 15:25:28.929846 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. 
-revision = '240900fabe59' -down_revision = 'bde9330b83fa' +revision = "240900fabe59" +down_revision = "bde9330b83fa" branch_labels = None depends_on = None def upgrade(): - op.create_table('import_tasks', - sa.Column('id', mysql.BIGINT(display_width=20), nullable=False), - sa.Column('filename', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('state', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('import_type', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('created_date', mysql.DATETIME(), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "import_tasks", + sa.Column("id", mysql.BIGINT(display_width=20), nullable=False), + sa.Column("filename", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("state", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("import_type", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("created_date", mysql.DATETIME(), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) - op.create_table('dpae_statistics', - sa.Column('id', mysql.BIGINT(display_width=20), nullable=False), - sa.Column('last_import', mysql.DATETIME(), nullable=True), - sa.Column('most_recent_data_date', mysql.DATETIME(), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "dpae_statistics", + sa.Column("id", mysql.BIGINT(display_width=20), nullable=False), + sa.Column("last_import", mysql.DATETIME(), nullable=True), + sa.Column("most_recent_data_date", mysql.DATETIME(), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) - op.create_table('etablissements_raw', - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('raisonsociale', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('enseigne', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codenaf', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False), - sa.Column('numerorue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('libellerue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codecommune', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codepostal', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False), - sa.Column('email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('tel', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('departement', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False), - sa.Column('trancheeffectif', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=2), nullable=True), - sa.Column('website1', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('website2', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.PrimaryKeyConstraint('siret'), - 
mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "etablissements_raw", + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("raisonsociale", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("enseigne", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codenaf", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False), + sa.Column("numerorue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("libellerue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codecommune", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codepostal", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False), + sa.Column("email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("tel", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("departement", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False), + sa.Column("trancheeffectif", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=2), nullable=True), + sa.Column("website1", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("website2", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.PrimaryKeyConstraint("siret"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) - op.create_table('etablissements_exportable', - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('raisonsociale', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('enseigne', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('codenaf', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('trancheeffectif', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('numerorue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('libellerue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('codepostal', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=11), nullable=True), - sa.Column('tel', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('website', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('flag_alternance', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_junior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_senior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_handicap', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('has_multi_geolocations', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('codecommune', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('coordinates_x', mysql.FLOAT(), nullable=True), - sa.Column('coordinates_y', mysql.FLOAT(), nullable=True), - 
sa.Column('departement', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=11), nullable=True), - sa.Column('score', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('siret'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "etablissements_exportable", + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("raisonsociale", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("enseigne", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("codenaf", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("trancheeffectif", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("numerorue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("libellerue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("codepostal", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=11), nullable=True), + sa.Column("tel", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("website", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("flag_alternance", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_junior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_senior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_handicap", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("has_multi_geolocations", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("codecommune", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("coordinates_x", mysql.FLOAT(), nullable=True), + sa.Column("coordinates_y", mysql.FLOAT(), nullable=True), + sa.Column("departement", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=11), nullable=True), + sa.Column("score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("siret"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) - op.create_table('flag_alternance', - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.PrimaryKeyConstraint('siret'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "flag_alternance", + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.PrimaryKeyConstraint("siret"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) - op.create_table('dpae', - sa.Column('id', mysql.BIGINT(display_width=20), nullable=False), - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('hiring_date', mysql.DATETIME(), nullable=True), - sa.Column('zipcode', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=True), - sa.Column('contract_type', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('departement', 
mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=True), - sa.Column('contract_duration', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('iiann', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('tranche_age', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('handicap_label', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "dpae", + sa.Column("id", mysql.BIGINT(display_width=20), nullable=False), + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("hiring_date", mysql.DATETIME(), nullable=True), + sa.Column("zipcode", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=True), + sa.Column("contract_type", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("departement", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=True), + sa.Column("contract_duration", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("iiann", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("tranche_age", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("handicap_label", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) + def downgrade(): - op.drop_table('dpae') - op.drop_table('flag_alternance') - op.drop_table('etablissements_raw') - op.drop_table('etablissements_exportable') - op.drop_table('dpae_statistics') - op.drop_table('import_tasks') + op.drop_table("dpae") + op.drop_table("flag_alternance") + op.drop_table("etablissements_raw") + op.drop_table("etablissements_exportable") + op.drop_table("dpae_statistics") + op.drop_table("import_tasks") diff --git a/labonneboite/alembic/versions/2426e6b89deb_switch_to_utf8mb4.py b/labonneboite/alembic/versions/2426e6b89deb_switch_to_utf8mb4.py index a2513713f..e86d2404d 100644 --- a/labonneboite/alembic/versions/2426e6b89deb_switch_to_utf8mb4.py +++ b/labonneboite/alembic/versions/2426e6b89deb_switch_to_utf8mb4.py @@ -8,65 +8,48 @@ Revises: 428e168fdf0c Create Date: 2017-07-20 13:19:58.132736 """ -from alembic import op - import sqlalchemy as sa +from alembic import op # Revision identifiers, used by Alembic. 
-revision = '2426e6b89deb' -down_revision = '428e168fdf0c' +revision = "2426e6b89deb" +down_revision = "428e168fdf0c" branch_labels = None depends_on = None TABLES = [ - 'alembic_version', - 'social_auth_association', - 'social_auth_code', - 'social_auth_nonce', - 'social_auth_partial', - 'social_auth_usersocialauth', - 'users', - 'user_favorite_offices', - 'etablissements', + "alembic_version", + "social_auth_association", + "social_auth_code", + "social_auth_nonce", + "social_auth_partial", + "social_auth_usersocialauth", + "users", + "user_favorite_offices", + "etablissements", ] TABLES_WITH_VARCHAR_COLUMNS = { - 'etablissements': ( - 'siret', - 'raisonsociale', - 'enseigne', - 'codenaf', - 'trancheeffectif', - 'numerorue', - 'libellerue', - 'tel', - 'email', - 'website', - 'codecommune', - ), - 'social_auth_association': ( - 'server_url', - 'handle', - 'secret', - ), - 'social_auth_nonce': ( - 'server_url', - ), - 'social_auth_usersocialauth': ( - 'uid', - ), - 'user_favorite_offices': ( - 'office_siret', - ), - 'users': ( - 'email', - 'gender', - 'first_name', - 'last_name', - 'external_id', - ), + "etablissements": ( + "siret", + "raisonsociale", + "enseigne", + "codenaf", + "trancheeffectif", + "numerorue", + "libellerue", + "tel", + "email", + "website", + "codecommune", + ), + "social_auth_association": ("server_url", "handle", "secret"), + "social_auth_nonce": ("server_url",), + "social_auth_usersocialauth": ("uid",), + "user_favorite_offices": ("office_siret",), + "users": ("email", "gender", "first_name", "last_name", "external_id"), } @@ -98,12 +81,14 @@ def create_constraints(conn): conn.execute( "ALTER TABLE user_favorite_offices " + "ADD CONSTRAINT user_favorite_offices_ibfk_2 FOREIGN KEY (office_siret) REFERENCES etablissements(siret) " - + "ON DELETE CASCADE;") + + "ON DELETE CASCADE;" + ) # Create foreign key on `user_id` to `users(user_id)` on the `user_favorite_offices` table. conn.execute( "ALTER TABLE user_favorite_offices " + "ADD CONSTRAINT `user_favorite_offices_ibfk_1` FOREIGN KEY (user_id) REFERENCES users(id) " - + "ON DELETE CASCADE;") + + "ON DELETE CASCADE;" + ) # Create unique key (`user_id`, `office_siret`) on the `user_favorite_offices` table. conn.execute("ALTER TABLE user_favorite_offices ADD UNIQUE KEY `_user_fav_office` (user_id, office_siret);") diff --git a/labonneboite/alembic/versions/2aca2291700f_add_social_network_link_in_office.py b/labonneboite/alembic/versions/2aca2291700f_add_social_network_link_in_office.py index 8cc380214..a55fca732 100644 --- a/labonneboite/alembic/versions/2aca2291700f_add_social_network_link_in_office.py +++ b/labonneboite/alembic/versions/2aca2291700f_add_social_network_link_in_office.py @@ -5,37 +5,33 @@ Revises: 80194630f4fe Create Date: 2018-06-18 16:28:57.263048 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '2aca2291700f' -down_revision = '80194630f4fe' +revision = "2aca2291700f" +down_revision = "80194630f4fe" branch_labels = None depends_on = None + def upgrade(): - op.add_column('etablissements_admin_update', - sa.Column('social_network', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_admin_update", sa.Column("social_network", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_admin_add', - sa.Column('social_network', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_admin_add", sa.Column("social_network", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_exportable', - sa.Column('social_network', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_exportable", sa.Column("social_network", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_raw', - sa.Column('social_network', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_raw", sa.Column("social_network", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements', - sa.Column('social_network', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements", sa.Column("social_network", mysql.TINYTEXT(), nullable=True)) def downgrade(): - op.drop_column('etablissements_admin_update', 'social_network') - op.drop_column('etablissements_admin_add', 'social_network') - op.drop_column('etablissements_exportable', 'social_network') - op.drop_column('etablissements_raw', 'social_network') - op.drop_column('etablissements', 'social_network') + op.drop_column("etablissements_admin_update", "social_network") + op.drop_column("etablissements_admin_add", "social_network") + op.drop_column("etablissements_exportable", "social_network") + op.drop_column("etablissements_raw", "social_network") + op.drop_column("etablissements", "social_network") diff --git a/labonneboite/alembic/versions/2df1845b3dc5_add_alternate_index_for_offers_on_.py b/labonneboite/alembic/versions/2df1845b3dc5_add_alternate_index_for_offers_on_.py index e55fd7bb5..791470e2d 100644 --- a/labonneboite/alembic/versions/2df1845b3dc5_add_alternate_index_for_offers_on_.py +++ b/labonneboite/alembic/versions/2df1845b3dc5_add_alternate_index_for_offers_on_.py @@ -9,18 +9,17 @@ # Revision identifiers, used by Alembic. 
-revision = '2df1845b3dc5' -down_revision = '7de6af1a7088' +revision = "2df1845b3dc5" +down_revision = "7de6af1a7088" branch_labels = None depends_on = None def upgrade(): - op.create_index('_enseigne_codecommune', 'etablissements', ['enseigne', 'codecommune'], unique=False) - op.create_index('_enseigne_codecommune', 'etablissements_exportable', ['enseigne', 'codecommune'], unique=False) + op.create_index("_enseigne_codecommune", "etablissements", ["enseigne", "codecommune"], unique=False) + op.create_index("_enseigne_codecommune", "etablissements_exportable", ["enseigne", "codecommune"], unique=False) def downgrade(): - op.drop_constraint('_enseigne_codecommune', 'etablissements', type_='unique') - op.drop_constraint('_enseigne_codecommune', 'etablissements_exportable', type_='unique') - + op.drop_constraint("_enseigne_codecommune", "etablissements", type_="unique") + op.drop_constraint("_enseigne_codecommune", "etablissements_exportable", type_="unique") diff --git a/labonneboite/alembic/versions/2e6781936bae_drop_irrelevant_etablissements_raw_.py b/labonneboite/alembic/versions/2e6781936bae_drop_irrelevant_etablissements_raw_.py index dcc9a694d..69a874689 100644 --- a/labonneboite/alembic/versions/2e6781936bae_drop_irrelevant_etablissements_raw_.py +++ b/labonneboite/alembic/versions/2e6781936bae_drop_irrelevant_etablissements_raw_.py @@ -5,32 +5,31 @@ Revises: d441dca1e974 Create Date: 2018-08-23 17:14:11.736661 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '2e6781936bae' -down_revision = '0da6b26c212f' +revision = "2e6781936bae" +down_revision = "0da6b26c212f" branch_labels = None depends_on = None def upgrade(): - op.drop_column('etablissements_raw', 'phone_alternance') - op.drop_column('etablissements_raw', 'website_alternance') - op.drop_column('etablissements_raw', 'social_network') - op.drop_column('etablissements_raw', 'email_alternance') - op.drop_column('etablissements_raw', 'contact_mode') + op.drop_column("etablissements_raw", "phone_alternance") + op.drop_column("etablissements_raw", "website_alternance") + op.drop_column("etablissements_raw", "social_network") + op.drop_column("etablissements_raw", "email_alternance") + op.drop_column("etablissements_raw", "contact_mode") def downgrade(): - op.add_column('etablissements_raw', sa.Column('phone_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_raw', sa.Column('website_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_raw', - sa.Column('social_network', mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_raw', - sa.Column('email_alternance', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) - op.add_column('etablissements_raw', - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_raw", sa.Column("phone_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_raw", sa.Column("website_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_raw", sa.Column("social_network", mysql.TINYTEXT(), nullable=True)) + op.add_column( + "etablissements_raw", sa.Column("email_alternance", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False) + ) + op.add_column("etablissements_raw", sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True)) diff --git a/labonneboite/alembic/versions/33e80a1c96e0_add_index_on_dpae_departement.py 
b/labonneboite/alembic/versions/33e80a1c96e0_add_index_on_dpae_departement.py index 680983594..f5903f221 100644 --- a/labonneboite/alembic/versions/33e80a1c96e0_add_index_on_dpae_departement.py +++ b/labonneboite/alembic/versions/33e80a1c96e0_add_index_on_dpae_departement.py @@ -7,16 +7,17 @@ """ from alembic import op + # Revision identifiers, used by Alembic. -revision = '33e80a1c96e0' -down_revision = 'd80c910949e4' +revision = "33e80a1c96e0" +down_revision = "d80c910949e4" branch_labels = None depends_on = None def upgrade(): - op.create_index('dept_i', 'dpae', ['departement'], unique=False) + op.create_index("dept_i", "dpae", ["departement"], unique=False) def downgrade(): - op.drop_constraint('dept_i', 'dpae', type_='unique') + op.drop_constraint("dept_i", "dpae", type_="unique") diff --git a/labonneboite/alembic/versions/38fad89a549c_create_remove_flag_alternance_in_office_.py b/labonneboite/alembic/versions/38fad89a549c_create_remove_flag_alternance_in_office_.py index 481f64e00..ac0ca5a9b 100644 --- a/labonneboite/alembic/versions/38fad89a549c_create_remove_flag_alternance_in_office_.py +++ b/labonneboite/alembic/versions/38fad89a549c_create_remove_flag_alternance_in_office_.py @@ -5,22 +5,24 @@ Revises: 11fc7f39f7fc Create Date: 2017-12-13 09:58:01.163137 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '38fad89a549c' -down_revision = '11fc7f39f7fc' +revision = "38fad89a549c" +down_revision = "11fc7f39f7fc" branch_labels = None depends_on = None + def upgrade(): op.add_column( - 'etablissements_admin_update', - sa.Column('remove_flag_alternance', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_admin_update", + sa.Column("remove_flag_alternance", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) + def downgrade(): - op.drop_column('etablissements_admin_update', 'remove_flag_alternance') + op.drop_column("etablissements_admin_update", "remove_flag_alternance") diff --git a/labonneboite/alembic/versions/39042f1317e3_update_etablissements_backoffice_table.py b/labonneboite/alembic/versions/39042f1317e3_update_etablissements_backoffice_table.py index faa9d2dc4..ba2f4d3fe 100644 --- a/labonneboite/alembic/versions/39042f1317e3_update_etablissements_backoffice_table.py +++ b/labonneboite/alembic/versions/39042f1317e3_update_etablissements_backoffice_table.py @@ -5,71 +5,74 @@ Revises: 47a2a34d87e5 Create Date: 2018-04-18 14:30:00.654100 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '39042f1317e3' -down_revision = '47a2a34d87e5' +revision = "39042f1317e3" +down_revision = "47a2a34d87e5" branch_labels = None depends_on = None def upgrade(): - op.drop_column('etablissements_backoffice', 'semester-1') - op.drop_column('etablissements_backoffice', 'semester-2') - op.drop_column('etablissements_backoffice', 'semester-3') - op.drop_column('etablissements_backoffice', 'semester-4') - op.drop_column('etablissements_backoffice', 'semester-5') - op.drop_column('etablissements_backoffice', 'semester-6') - op.drop_column('etablissements_backoffice', 'semester-7') + op.drop_column("etablissements_backoffice", "semester-1") + op.drop_column("etablissements_backoffice", "semester-2") + op.drop_column("etablissements_backoffice", "semester-3") + op.drop_column("etablissements_backoffice", "semester-4") + op.drop_column("etablissements_backoffice", "semester-5") + op.drop_column("etablissements_backoffice", "semester-6") + op.drop_column("etablissements_backoffice", "semester-7") + + op.add_column("etablissements_backoffice", sa.Column("dpae-period-7", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("dpae-period-6", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("dpae-period-5", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("dpae-period-4", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("dpae-period-3", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("dpae-period-2", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("dpae-period-1", mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-7', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-6', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-5', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-4', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-3', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-2', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('dpae-period-1', mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column( + "etablissements_backoffice", + sa.Column("score_alternance", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + ) + op.add_column("etablissements_backoffice", sa.Column("score_alternance_regr", mysql.FLOAT(), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('score_alternance', mysql.INTEGER(display_width=11), - autoincrement=False, nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('score_alternance_regr', mysql.FLOAT(), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("alt-period-7", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("alt-period-6", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("alt-period-5", mysql.DOUBLE(asdecimal=True), nullable=True)) + 
op.add_column("etablissements_backoffice", sa.Column("alt-period-4", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("alt-period-3", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("alt-period-2", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("alt-period-1", mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-7', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-6', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-5', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-4', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-3', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-2', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('alt-period-1', mysql.DOUBLE(asdecimal=True), nullable=True)) def downgrade(): - op.add_column('etablissements_backoffice', sa.Column('semester-1', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('semester-2', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('semester-3', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('semester-4', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('semester-5', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('semester-6', mysql.DOUBLE(asdecimal=True), nullable=True)) - op.add_column('etablissements_backoffice', sa.Column('semester-7', mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-1", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-2", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-3", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-4", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-5", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-6", mysql.DOUBLE(asdecimal=True), nullable=True)) + op.add_column("etablissements_backoffice", sa.Column("semester-7", mysql.DOUBLE(asdecimal=True), nullable=True)) - op.drop_column('etablissements_backoffice', 'dpae-period-7') - op.drop_column('etablissements_backoffice', 'dpae-period-6') - op.drop_column('etablissements_backoffice', 'dpae-period-5') - op.drop_column('etablissements_backoffice', 'dpae-period-4') - op.drop_column('etablissements_backoffice', 'dpae-period-3') - op.drop_column('etablissements_backoffice', 'dpae-period-2') - op.drop_column('etablissements_backoffice', 'dpae-period-1') + op.drop_column("etablissements_backoffice", "dpae-period-7") + op.drop_column("etablissements_backoffice", "dpae-period-6") + op.drop_column("etablissements_backoffice", "dpae-period-5") + 
op.drop_column("etablissements_backoffice", "dpae-period-4") + op.drop_column("etablissements_backoffice", "dpae-period-3") + op.drop_column("etablissements_backoffice", "dpae-period-2") + op.drop_column("etablissements_backoffice", "dpae-period-1") - op.drop_column('etablissements_backoffice', 'score_alternance') - op.drop_column('etablissements_backoffice', 'score_alternance_regr') + op.drop_column("etablissements_backoffice", "score_alternance") + op.drop_column("etablissements_backoffice", "score_alternance_regr") - op.drop_column('etablissements_backoffice', 'alt-period-7') - op.drop_column('etablissements_backoffice', 'alt-period-6') - op.drop_column('etablissements_backoffice', 'alt-period-5') - op.drop_column('etablissements_backoffice', 'alt-period-4') - op.drop_column('etablissements_backoffice', 'alt-period-3') - op.drop_column('etablissements_backoffice', 'alt-period-2') - op.drop_column('etablissements_backoffice', 'alt-period-1') + op.drop_column("etablissements_backoffice", "alt-period-7") + op.drop_column("etablissements_backoffice", "alt-period-6") + op.drop_column("etablissements_backoffice", "alt-period-5") + op.drop_column("etablissements_backoffice", "alt-period-4") + op.drop_column("etablissements_backoffice", "alt-period-3") + op.drop_column("etablissements_backoffice", "alt-period-2") + op.drop_column("etablissements_backoffice", "alt-period-1") diff --git a/labonneboite/alembic/versions/3a70adf06a8_add_boost_global_or_by_for_rome_for_.py b/labonneboite/alembic/versions/3a70adf06a8_add_boost_global_or_by_for_rome_for_.py index 9b4dd4b58..9a1c64504 100644 --- a/labonneboite/alembic/versions/3a70adf06a8_add_boost_global_or_by_for_rome_for_.py +++ b/labonneboite/alembic/versions/3a70adf06a8_add_boost_global_or_by_for_rome_for_.py @@ -5,27 +5,40 @@ Revises: a3c7706b467b Create Date: 2018-05-04 11:12:51.494739 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '3a70adf06a8' -down_revision = 'a3c7706b467b' +revision = "3a70adf06a8" +down_revision = "a3c7706b467b" branch_labels = None depends_on = None + def upgrade(): - op.add_column('etablissements_admin_update', sa.Column('romes_alternance_to_boost', - mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) - op.add_column('etablissements_admin_update', sa.Column('boost_alternance', mysql.TINYINT(display_width=1), - server_default=sa.text("'0'"), autoincrement=False, nullable=False)) - op.add_column('etablissements_admin_update', sa.Column('romes_alternance_to_remove', - mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) + op.add_column( + "etablissements_admin_update", + sa.Column("romes_alternance_to_boost", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) + op.add_column( + "etablissements_admin_update", + sa.Column( + "boost_alternance", + mysql.TINYINT(display_width=1), + server_default=sa.text("'0'"), + autoincrement=False, + nullable=False, + ), + ) + op.add_column( + "etablissements_admin_update", + sa.Column("romes_alternance_to_remove", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) -def downgrade(): - op.drop_column('etablissements_admin_update', 'romes_alternance_to_boost') - op.drop_column('etablissements_admin_update', 'boost_alternance') - op.drop_column('etablissements_admin_update', 'romes_alternance_to_remove') +def downgrade(): + op.drop_column("etablissements_admin_update", "romes_alternance_to_boost") + op.drop_column("etablissements_admin_update", "boost_alternance") + op.drop_column("etablissements_admin_update", "romes_alternance_to_remove") diff --git a/labonneboite/alembic/versions/4041eac5f52f_create_phone_alternance_and_website.py b/labonneboite/alembic/versions/4041eac5f52f_create_phone_alternance_and_website.py index 7a81d4272..7e691a93a 100644 --- a/labonneboite/alembic/versions/4041eac5f52f_create_phone_alternance_and_website.py +++ b/labonneboite/alembic/versions/4041eac5f52f_create_phone_alternance_and_website.py @@ -5,41 +5,41 @@ Revises: 2aca2291700f Create Date: 2018-05-22 17:59:15.718908 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '4041eac5f52f' -down_revision = '2aca2291700f' +revision = "4041eac5f52f" +down_revision = "2aca2291700f" branch_labels = None depends_on = None def upgrade(): - op.add_column('etablissements_admin_update', sa.Column('phone_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements', sa.Column('phone_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_admin_add', sa.Column('phone_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_raw', sa.Column('phone_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_exportable', sa.Column('phone_alternance', mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_admin_update", sa.Column("phone_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements", sa.Column("phone_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_admin_add", sa.Column("phone_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_raw", sa.Column("phone_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_exportable", sa.Column("phone_alternance", mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_admin_update', sa.Column('website_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements', sa.Column('website_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_admin_add', sa.Column('website_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_raw', sa.Column('website_alternance', mysql.TINYTEXT, nullable=True)) - op.add_column('etablissements_exportable', sa.Column('website_alternance', mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_admin_update", sa.Column("website_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements", sa.Column("website_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_admin_add", sa.Column("website_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_raw", sa.Column("website_alternance", mysql.TINYTEXT, nullable=True)) + op.add_column("etablissements_exportable", sa.Column("website_alternance", mysql.TINYTEXT, nullable=True)) def downgrade(): - op.drop_column('etablissements_admin_update', 'phone_alternance') - op.drop_column('etablissements_admin_add', 'phone_alternance') - op.drop_column('etablissements_exportable', 'phone_alternance') - op.drop_column('etablissements_raw', 'phone_alternance') - op.drop_column('etablissements', 'phone_alternance') - - op.drop_column('etablissements_admin_update', 'website_alternance') - op.drop_column('etablissements_admin_add', 'website_alternance') - op.drop_column('etablissements_exportable', 'website_alternance') - op.drop_column('etablissements_raw', 'website_alternance') - op.drop_column('etablissements', 'website_alternance') + op.drop_column("etablissements_admin_update", "phone_alternance") + op.drop_column("etablissements_admin_add", "phone_alternance") + op.drop_column("etablissements_exportable", "phone_alternance") + op.drop_column("etablissements_raw", "phone_alternance") + op.drop_column("etablissements", "phone_alternance") + + op.drop_column("etablissements_admin_update", "website_alternance") + op.drop_column("etablissements_admin_add", "website_alternance") + op.drop_column("etablissements_exportable", "website_alternance") + op.drop_column("etablissements_raw", "website_alternance") + op.drop_column("etablissements", 
"website_alternance") diff --git a/labonneboite/alembic/versions/428e168fdf0c_update_users.py b/labonneboite/alembic/versions/428e168fdf0c_update_users.py index 1a0c2108b..bcee6b3b8 100644 --- a/labonneboite/alembic/versions/428e168fdf0c_update_users.py +++ b/labonneboite/alembic/versions/428e168fdf0c_update_users.py @@ -8,23 +8,25 @@ Revises: 94d10e069fea Create Date: 2017-06-09 08:22:13.869705 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. -revision = '428e168fdf0c' -down_revision = '94d10e069fea' +revision = "428e168fdf0c" +down_revision = "94d10e069fea" branch_labels = None depends_on = None def upgrade(): - op.alter_column('users', 'email', nullable=False, existing_type=mysql.VARCHAR(length=255)) - op.add_column('users', sa.Column('external_id', mysql.VARCHAR(length=255), nullable=True)) - op.create_index('_email', 'users', ['email'], unique=False) + op.alter_column("users", "email", nullable=False, existing_type=mysql.VARCHAR(length=255)) + op.add_column("users", sa.Column("external_id", mysql.VARCHAR(length=255), nullable=True)) + op.create_index("_email", "users", ["email"], unique=False) + def downgrade(): - op.alter_column('users', 'email', nullable=True, existing_type=mysql.VARCHAR(length=255)) - op.drop_column('users', 'external_id') - op.drop_constraint('_email', 'users', type_='unique') + op.alter_column("users", "email", nullable=True, existing_type=mysql.VARCHAR(length=255)) + op.drop_column("users", "external_id") + op.drop_constraint("_email", "users", type_="unique") diff --git a/labonneboite/alembic/versions/47a2a34d87e5_drop_column_hirings_zipcode.py b/labonneboite/alembic/versions/47a2a34d87e5_drop_column_hirings_zipcode.py index 323cae6c9..45bae66d9 100644 --- a/labonneboite/alembic/versions/47a2a34d87e5_drop_column_hirings_zipcode.py +++ b/labonneboite/alembic/versions/47a2a34d87e5_drop_column_hirings_zipcode.py @@ -5,26 +5,23 @@ Revises: 5d43d4cf1847 Create Date: 2018-03-20 09:45:56.347451 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '47a2a34d87e5' -down_revision = '5d43d4cf1847' +revision = "47a2a34d87e5" +down_revision = "5d43d4cf1847" branch_labels = None depends_on = None def upgrade(): - op.drop_column('hirings', 'zipcode') + op.drop_column("hirings", "zipcode") def downgrade(): op.add_column( - 'hirings', - sa.Column('zipcode', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False) + "hirings", sa.Column("zipcode", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False) ) - - diff --git a/labonneboite/alembic/versions/4db5a80597e7_add_extra_naf_column_in_.py b/labonneboite/alembic/versions/4db5a80597e7_add_extra_naf_column_in_.py index 3ac16b818..59e186713 100644 --- a/labonneboite/alembic/versions/4db5a80597e7_add_extra_naf_column_in_.py +++ b/labonneboite/alembic/versions/4db5a80597e7_add_extra_naf_column_in_.py @@ -5,21 +5,24 @@ Revises: c9f0246b91ef Create Date: 2017-12-04 13:49:36.321269 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '4db5a80597e7' -down_revision = 'c9f0246b91ef' +revision = "4db5a80597e7" +down_revision = "c9f0246b91ef" branch_labels = None depends_on = None def upgrade(): - op.add_column('etablissements_admin_update', sa.Column('nafs_to_add', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) + op.add_column( + "etablissements_admin_update", + sa.Column("nafs_to_add", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) def downgrade(): - op.drop_column('etablissements_admin_update', 'nafs_to_add') + op.drop_column("etablissements_admin_update", "nafs_to_add") diff --git a/labonneboite/alembic/versions/5d43d4cf1847_rename_dpae_table_to_hirings.py b/labonneboite/alembic/versions/5d43d4cf1847_rename_dpae_table_to_hirings.py index 83a52da9e..fd909b705 100644 --- a/labonneboite/alembic/versions/5d43d4cf1847_rename_dpae_table_to_hirings.py +++ b/labonneboite/alembic/versions/5d43d4cf1847_rename_dpae_table_to_hirings.py @@ -9,15 +9,15 @@ # Revision identifiers, used by Alembic. -revision = '5d43d4cf1847' -down_revision = 'db2fdfb935ec' +revision = "5d43d4cf1847" +down_revision = "db2fdfb935ec" branch_labels = None depends_on = None def upgrade(): - op.rename_table('dpae', 'hirings') + op.rename_table("dpae", "hirings") def downgrade(): - op.rename_table('hirings', 'dpae') + op.rename_table("hirings", "dpae") diff --git a/labonneboite/alembic/versions/7de6af1a7088_add_index_for_offers_on_etablissements.py b/labonneboite/alembic/versions/7de6af1a7088_add_index_for_offers_on_etablissements.py index 52785fd0a..f5de394a6 100644 --- a/labonneboite/alembic/versions/7de6af1a7088_add_index_for_offers_on_etablissements.py +++ b/labonneboite/alembic/versions/7de6af1a7088_add_index_for_offers_on_etablissements.py @@ -9,17 +9,19 @@ # Revision identifiers, used by Alembic. -revision = '7de6af1a7088' -down_revision = 'c519ecaf1fa6' +revision = "7de6af1a7088" +down_revision = "c519ecaf1fa6" branch_labels = None depends_on = None def upgrade(): - op.create_index('_raisonsociale_codecommune', 'etablissements', ['raisonsociale', 'codecommune'], unique=False) - op.create_index('_raisonsociale_codecommune', 'etablissements_exportable', ['raisonsociale', 'codecommune'], unique=False) + op.create_index("_raisonsociale_codecommune", "etablissements", ["raisonsociale", "codecommune"], unique=False) + op.create_index( + "_raisonsociale_codecommune", "etablissements_exportable", ["raisonsociale", "codecommune"], unique=False + ) def downgrade(): - op.drop_constraint('_raisonsociale_codecommune', 'etablissements', type_='unique') - op.drop_constraint('_raisonsociale_codecommune', 'etablissements_exportable', type_='unique') + op.drop_constraint("_raisonsociale_codecommune", "etablissements", type_="unique") + op.drop_constraint("_raisonsociale_codecommune", "etablissements_exportable", type_="unique") diff --git a/labonneboite/alembic/versions/80194630f4fe_rename_remove_flag_alternance_et_create_.py b/labonneboite/alembic/versions/80194630f4fe_rename_remove_flag_alternance_et_create_.py index a72158cd7..31c5132b5 100644 --- a/labonneboite/alembic/versions/80194630f4fe_rename_remove_flag_alternance_et_create_.py +++ b/labonneboite/alembic/versions/80194630f4fe_rename_remove_flag_alternance_et_create_.py @@ -5,47 +5,46 @@ Revises: a3c7706b467b Create Date: 2018-05-17 15:53:02.360349 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = '80194630f4fe' -down_revision = '3a70adf06a8' +revision = "80194630f4fe" +down_revision = "3a70adf06a8" branch_labels = None depends_on = None def upgrade(): - op.add_column('etablissements_admin_update', - sa.Column('score', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True) + op.add_column( + "etablissements_admin_update", + sa.Column("score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), ) - op.add_column('etablissements_admin_update', - sa.Column('score_alternance', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True) + op.add_column( + "etablissements_admin_update", + sa.Column("score_alternance", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), ) # Set 'score_alternance = 0' where 'remove_flag_alternance=true' conn = op.get_bind() conn.execute("UPDATE `etablissements_admin_update` SET `score_alternance` = 0 WHERE remove_flag_alternance = 1") - - op.drop_column('etablissements_admin_update', 'remove_flag_alternance') + op.drop_column("etablissements_admin_update", "remove_flag_alternance") def downgrade(): # Recreate column 'remove_flag_alternance' op.add_column( - 'etablissements_admin_update', - sa.Column('remove_flag_alternance', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False) + "etablissements_admin_update", + sa.Column("remove_flag_alternance", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), ) # Set 'remove_flag_alternance = 1' where 'score_alternance=0' conn = op.get_bind() conn.execute("UPDATE `etablissements_admin_update` SET `remove_flag_alternance` = 1 WHERE score_alternance = 0") - - op.drop_column('etablissements_admin_update', 'score') - op.drop_column('etablissements_admin_update', 'score_alternance') - + op.drop_column("etablissements_admin_update", "score") + op.drop_column("etablissements_admin_update", "score_alternance") diff --git a/labonneboite/alembic/versions/881f590506b5_drop_indexes_used_for_api_offres_v1.py b/labonneboite/alembic/versions/881f590506b5_drop_indexes_used_for_api_offres_v1.py index c3e2dbd34..1dde8a427 100644 --- a/labonneboite/alembic/versions/881f590506b5_drop_indexes_used_for_api_offres_v1.py +++ b/labonneboite/alembic/versions/881f590506b5_drop_indexes_used_for_api_offres_v1.py @@ -5,26 +5,28 @@ Revises: 013a2cb893fc Create Date: 2019-05-30 14:17:47.453223 """ -from alembic import op - import sqlalchemy as sa +from alembic import op # Revision identifiers, used by Alembic. 
-revision = '881f590506b5' -down_revision = '013a2cb893fc' +revision = "881f590506b5" +down_revision = "013a2cb893fc" branch_labels = None depends_on = None def upgrade(): - op.drop_constraint('_raisonsociale_departement', 'etablissements', type_='unique') - op.drop_constraint('_raisonsociale_departement', 'etablissements_exportable', type_='unique') - op.drop_constraint('_enseigne_departement', 'etablissements', type_='unique') - op.drop_constraint('_enseigne_departement', 'etablissements_exportable', type_='unique') + op.drop_constraint("_raisonsociale_departement", "etablissements", type_="unique") + op.drop_constraint("_raisonsociale_departement", "etablissements_exportable", type_="unique") + op.drop_constraint("_enseigne_departement", "etablissements", type_="unique") + op.drop_constraint("_enseigne_departement", "etablissements_exportable", type_="unique") + def downgrade(): - op.create_index('_raisonsociale_departement', 'etablissements', ['raisonsociale', 'departement'], unique=False) - op.create_index('_raisonsociale_departement', 'etablissements_exportable', ['raisonsociale', 'departement'], unique=False) - op.create_index('_enseigne_departement', 'etablissements', ['enseigne', 'departement'], unique=False) - op.create_index('_enseigne_departement', 'etablissements_exportable', ['enseigne', 'departement'], unique=False) + op.create_index("_raisonsociale_departement", "etablissements", ["raisonsociale", "departement"], unique=False) + op.create_index( + "_raisonsociale_departement", "etablissements_exportable", ["raisonsociale", "departement"], unique=False + ) + op.create_index("_enseigne_departement", "etablissements", ["enseigne", "departement"], unique=False) + op.create_index("_enseigne_departement", "etablissements_exportable", ["enseigne", "departement"], unique=False) diff --git a/labonneboite/alembic/versions/8f1ef5533cc7_add_index_on_hirings_siret_for_.py b/labonneboite/alembic/versions/8f1ef5533cc7_add_index_on_hirings_siret_for_.py index 0fb23d4ab..17952f630 100644 --- a/labonneboite/alembic/versions/8f1ef5533cc7_add_index_on_hirings_siret_for_.py +++ b/labonneboite/alembic/versions/8f1ef5533cc7_add_index_on_hirings_siret_for_.py @@ -9,15 +9,15 @@ # Revision identifiers, used by Alembic. -revision = '8f1ef5533cc7' -down_revision = '99473cb51564' +revision = "8f1ef5533cc7" +down_revision = "99473cb51564" branch_labels = None depends_on = None def upgrade(): - op.create_index('_siret', 'hirings', ['siret'], unique=False) + op.create_index("_siret", "hirings", ["siret"], unique=False) def downgrade(): - op.drop_constraint('_siret', 'hirings', type_='unique') + op.drop_constraint("_siret", "hirings", type_="unique") diff --git a/labonneboite/alembic/versions/91ee6fb3e7fd_add_index_on_departement_etablissements_.py b/labonneboite/alembic/versions/91ee6fb3e7fd_add_index_on_departement_etablissements_.py index e8cabe76c..fed9f2e9e 100644 --- a/labonneboite/alembic/versions/91ee6fb3e7fd_add_index_on_departement_etablissements_.py +++ b/labonneboite/alembic/versions/91ee6fb3e7fd_add_index_on_departement_etablissements_.py @@ -7,18 +7,19 @@ """ from alembic import op + # Revision identifiers, used by Alembic. 
-revision = '91ee6fb3e7fd' -down_revision = '2e6781936bae' +revision = "91ee6fb3e7fd" +down_revision = "2e6781936bae" branch_labels = None depends_on = None def upgrade(): - op.create_index('dept_i', 'etablissements', ['departement'], unique=False) - op.create_index('dept_i', 'etablissements_exportable', ['departement'], unique=False) + op.create_index("dept_i", "etablissements", ["departement"], unique=False) + op.create_index("dept_i", "etablissements_exportable", ["departement"], unique=False) def downgrade(): - op.drop_constraint('dept_i', 'etablissements', type_='unique') - op.drop_constraint('dept_i', 'etablissements_exportable', type_='unique') + op.drop_constraint("dept_i", "etablissements", type_="unique") + op.drop_constraint("dept_i", "etablissements_exportable", type_="unique") diff --git a/labonneboite/alembic/versions/94d10e069fea_create_user_favorite_offices.py b/labonneboite/alembic/versions/94d10e069fea_create_user_favorite_offices.py index c2219ab00..1542af26e 100644 --- a/labonneboite/alembic/versions/94d10e069fea_create_user_favorite_offices.py +++ b/labonneboite/alembic/versions/94d10e069fea_create_user_favorite_offices.py @@ -5,34 +5,36 @@ Revises: e4bce598b236 Create Date: 2017-05-03 14:26:18.151997 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '94d10e069fea' -down_revision = 'e4bce598b236' +revision = "94d10e069fea" +down_revision = "e4bce598b236" branch_labels = None depends_on = None def upgrade(): op.create_table( - 'user_favorite_offices', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('user_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), - sa.Column('office_siret', mysql.VARCHAR(length=191), nullable=False), - sa.Column('date_created', mysql.DATETIME(), nullable=False), - sa.ForeignKeyConstraint(['office_siret'], ['etablissements.siret'], name='user_favorite_offices_ibfk_2', - ondelete='CASCADE'), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='user_favorite_offices_ibfk_1', ondelete='CASCADE'), - sa.UniqueConstraint('user_id', 'office_siret', name='_user_fav_office'), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "user_favorite_offices", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("user_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), + sa.Column("office_siret", mysql.VARCHAR(length=191), nullable=False), + sa.Column("date_created", mysql.DATETIME(), nullable=False), + sa.ForeignKeyConstraint( + ["office_siret"], ["etablissements.siret"], name="user_favorite_offices_ibfk_2", ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], name="user_favorite_offices_ibfk_1", ondelete="CASCADE"), + sa.UniqueConstraint("user_id", "office_siret", name="_user_fav_office"), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) + def downgrade(): - op.drop_table('user_favorite_offices') + op.drop_table("user_favorite_offices") diff --git a/labonneboite/alembic/versions/963bf41f07fa_add_office_admin_tables.py b/labonneboite/alembic/versions/963bf41f07fa_add_office_admin_tables.py index 4ce3f4cf7..64a795746 100644 --- a/labonneboite/alembic/versions/963bf41f07fa_add_office_admin_tables.py +++ 
b/labonneboite/alembic/versions/963bf41f07fa_add_office_admin_tables.py @@ -5,82 +5,96 @@ Revises: 2426e6b89deb Create Date: 2017-07-27 14:10:26.763571 """ -import os import codecs +import os +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = '963bf41f07fa' -down_revision = '2426e6b89deb' +revision = "963bf41f07fa" +down_revision = "2426e6b89deb" branch_labels = None depends_on = None def upgrade(): - op.add_column('users', sa.Column('is_admin', mysql.TINYINT(display_width=1), server_default=sa.text("'0'"), - autoincrement=False, nullable=False)) + op.add_column( + "users", + sa.Column( + "is_admin", + mysql.TINYINT(display_width=1), + server_default=sa.text("'0'"), + autoincrement=False, + nullable=False, + ), + ) op.create_table( - 'etablissements_admin_remove', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('reason', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False), - sa.Column('initiative', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('date_follow_up_phone_call', mysql.DATETIME(), nullable=True), - sa.Column('requested_by_email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('requested_by_first_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), - nullable=False), - sa.Column('requested_by_last_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('requested_by_phone', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('date_created', mysql.DATETIME(), nullable=False), - sa.Column('date_updated', mysql.DATETIME(), nullable=True), - sa.Column('created_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('updated_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], name='etablissements_admin_remove_ibfk_1', - ondelete='SET NULL'), - sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], name='etablissements_admin_remove_ibfk_2', - ondelete='SET NULL'), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "etablissements_admin_remove", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("reason", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + sa.Column("initiative", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("date_follow_up_phone_call", mysql.DATETIME(), nullable=True), + sa.Column("requested_by_email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column( + "requested_by_first_name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False + ), + sa.Column("requested_by_last_name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("requested_by_phone", mysql.VARCHAR(collation="utf8mb4_unicode_ci", 
length=191), nullable=False), + sa.Column("date_created", mysql.DATETIME(), nullable=False), + sa.Column("date_updated", mysql.DATETIME(), nullable=True), + sa.Column("created_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("updated_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["created_by_id"], ["users.id"], name="etablissements_admin_remove_ibfk_1", ondelete="SET NULL" + ), + sa.ForeignKeyConstraint( + ["updated_by_id"], ["users.id"], name="etablissements_admin_remove_ibfk_2", ondelete="SET NULL" + ), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) op.create_table( - 'etablissements_admin_update', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('new_score', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('new_email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('new_phone', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('new_website', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('remove_email', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('remove_phone', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('remove_website', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('requested_by_email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('requested_by_first_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), - nullable=False), - sa.Column('requested_by_last_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('requested_by_phone', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('date_created', mysql.DATETIME(), nullable=False), - sa.Column('date_updated', mysql.DATETIME(), nullable=True), - sa.Column('created_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('updated_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], name='etablissements_admin_update_ibfk_1', - ondelete='SET NULL'), - sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], name='etablissements_admin_update_ibfk_2', - ondelete='SET NULL'), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "etablissements_admin_update", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("new_score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("new_email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("new_phone", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("new_website", 
mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("remove_email", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("remove_phone", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("remove_website", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("requested_by_email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column( + "requested_by_first_name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False + ), + sa.Column("requested_by_last_name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("requested_by_phone", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("date_created", mysql.DATETIME(), nullable=False), + sa.Column("date_updated", mysql.DATETIME(), nullable=True), + sa.Column("created_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("updated_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["created_by_id"], ["users.id"], name="etablissements_admin_update_ibfk_1", ondelete="SET NULL" + ), + sa.ForeignKeyConstraint( + ["updated_by_id"], ["users.id"], name="etablissements_admin_update_ibfk_2", ondelete="SET NULL" + ), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) # Load initial SQL dumps. @@ -88,18 +102,18 @@ def upgrade(): CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) PARENT_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir)) - initial_remove_sql = os.path.join(PARENT_DIR, 'sql/initial_etablissements_admin_remove.sql') - with codecs.open(initial_remove_sql, encoding='utf-8') as f: + initial_remove_sql = os.path.join(PARENT_DIR, "sql/initial_etablissements_admin_remove.sql") + with codecs.open(initial_remove_sql, encoding="utf-8") as f: sql = f.read() conn.execute(sa.sql.text(sql)) - initial_update_sql = os.path.join(PARENT_DIR, 'sql/initial_etablissements_admin_update.sql') - with codecs.open(initial_update_sql, encoding='utf-8') as f: + initial_update_sql = os.path.join(PARENT_DIR, "sql/initial_etablissements_admin_update.sql") + with codecs.open(initial_update_sql, encoding="utf-8") as f: sql = f.read() conn.execute(sa.sql.text(sql)) def downgrade(): - op.drop_column('users', 'is_admin') - op.drop_table('etablissements_admin_update') - op.drop_table('etablissements_admin_remove') + op.drop_column("users", "is_admin") + op.drop_table("etablissements_admin_update") + op.drop_table("etablissements_admin_remove") diff --git a/labonneboite/alembic/versions/99473cb51564_add_index_on_etablissements_email.py b/labonneboite/alembic/versions/99473cb51564_add_index_on_etablissements_email.py index c2324a939..b49609adb 100644 --- a/labonneboite/alembic/versions/99473cb51564_add_index_on_etablissements_email.py +++ b/labonneboite/alembic/versions/99473cb51564_add_index_on_etablissements_email.py @@ -9,17 +9,17 @@ # Revision identifiers, used by Alembic. 
-revision = '99473cb51564' -down_revision = '200d176f96b6' +revision = "99473cb51564" +down_revision = "200d176f96b6" branch_labels = None depends_on = None def upgrade(): - op.create_index('_email', 'etablissements', ['email'], unique=False) - op.create_index('_email', 'etablissements_exportable', ['email'], unique=False) + op.create_index("_email", "etablissements", ["email"], unique=False) + op.create_index("_email", "etablissements_exportable", ["email"], unique=False) def downgrade(): - op.drop_constraint('_email', 'etablissements', type_='unique') - op.drop_constraint('_email', 'etablissements_exportable', type_='unique') + op.drop_constraint("_email", "etablissements", type_="unique") + op.drop_constraint("_email", "etablissements_exportable", type_="unique") diff --git a/labonneboite/alembic/versions/a3c7706b467b_bugfix_save_tables.py b/labonneboite/alembic/versions/a3c7706b467b_bugfix_save_tables.py index 6f4a9d2f2..5e9cd24ca 100644 --- a/labonneboite/alembic/versions/a3c7706b467b_bugfix_save_tables.py +++ b/labonneboite/alembic/versions/a3c7706b467b_bugfix_save_tables.py @@ -7,38 +7,41 @@ """ from alembic import op from sqlalchemy.dialects import mysql + + # import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'a3c7706b467b' -down_revision = '39042f1317e3' +revision = "a3c7706b467b" +down_revision = "39042f1317e3" branch_labels = None depends_on = None def upgrade(): # ensure siret unicity in SAVE tables to avoid potential issues - op.create_index('siret_unique', 'etablissements_admin_add', ['siret'], unique=True) - op.create_index('siret_unique', 'etablissements_admin_remove', ['siret'], unique=True) - op.create_index('siret_unique', 'etablissements_admin_extra_geolocations', ['siret'], unique=True) + op.create_index("siret_unique", "etablissements_admin_add", ["siret"], unique=True) + op.create_index("siret_unique", "etablissements_admin_remove", ["siret"], unique=True) + op.create_index("siret_unique", "etablissements_admin_extra_geolocations", ["siret"], unique=True) # primary key should be on 'id' only - op.drop_constraint('siret_id', 'etablissements_admin_add', type_='primary') - op.create_primary_key('id', 'etablissements_admin_add', ['id'], schema=None) + op.drop_constraint("siret_id", "etablissements_admin_add", type_="primary") + op.create_primary_key("id", "etablissements_admin_add", ["id"], schema=None) # enable autoincrement for 'id' - op.alter_column('etablissements_admin_add', 'id', autoincrement=True, - existing_type=mysql.INTEGER(display_width=11)) + op.alter_column( + "etablissements_admin_add", "id", autoincrement=True, existing_type=mysql.INTEGER(display_width=11) + ) def downgrade(): - op.drop_constraint('siret_unique', 'etablissements_admin_add', type_='unique') - op.drop_constraint('siret_unique', 'etablissements_admin_remove', type_='unique') - op.drop_constraint('siret_unique', 'etablissements_admin_extra_geolocations', type_='unique') - - op.alter_column('etablissements_admin_add', 'id', autoincrement=False, - existing_type=mysql.INTEGER(display_width=11)) - op.drop_constraint('id', 'etablissements_admin_add', type_='primary') - op.create_primary_key('siret_id', 'etablissements_admin_add', ['siret', 'id'], schema=None) - + op.drop_constraint("siret_unique", "etablissements_admin_add", type_="unique") + op.drop_constraint("siret_unique", "etablissements_admin_remove", type_="unique") + op.drop_constraint("siret_unique", "etablissements_admin_extra_geolocations", type_="unique") + + op.alter_column( + "etablissements_admin_add", 
"id", autoincrement=False, existing_type=mysql.INTEGER(display_width=11) + ) + op.drop_constraint("id", "etablissements_admin_add", type_="primary") + op.create_primary_key("siret_id", "etablissements_admin_add", ["siret", "id"], schema=None) diff --git a/labonneboite/alembic/versions/a6ff4a27b063_merge_website1_and_website2_columns.py b/labonneboite/alembic/versions/a6ff4a27b063_merge_website1_and_website2_columns.py index f14db7480..495629b72 100644 --- a/labonneboite/alembic/versions/a6ff4a27b063_merge_website1_and_website2_columns.py +++ b/labonneboite/alembic/versions/a6ff4a27b063_merge_website1_and_website2_columns.py @@ -5,43 +5,34 @@ Revises: 91ee6fb3e7fd Create Date: 2018-09-13 14:35:05.886339 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'a6ff4a27b063' -down_revision = '91ee6fb3e7fd' +revision = "a6ff4a27b063" +down_revision = "91ee6fb3e7fd" branch_labels = None depends_on = None def upgrade(): - op.drop_column('etablissements_raw', 'website1') - op.drop_column('etablissements_raw', 'website2') - op.add_column('etablissements_raw', - sa.Column( - 'website', - mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), - nullable=True - ) + op.drop_column("etablissements_raw", "website1") + op.drop_column("etablissements_raw", "website2") + op.add_column( + "etablissements_raw", + sa.Column("website", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), ) def downgrade(): - op.drop_column('etablissements_raw', 'website') - op.add_column('etablissements_raw', - sa.Column( - 'website1', - mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), - nullable=True - ) + op.drop_column("etablissements_raw", "website") + op.add_column( + "etablissements_raw", + sa.Column("website1", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), ) - op.add_column('etablissements_raw', - sa.Column( - 'website2', - mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), - nullable=True - ) + op.add_column( + "etablissements_raw", + sa.Column("website2", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), ) diff --git a/labonneboite/alembic/versions/ba4492177099_switch_to_utf8mb4_bis.py b/labonneboite/alembic/versions/ba4492177099_switch_to_utf8mb4_bis.py index 90073e98f..95efd2c3f 100644 --- a/labonneboite/alembic/versions/ba4492177099_switch_to_utf8mb4_bis.py +++ b/labonneboite/alembic/versions/ba4492177099_switch_to_utf8mb4_bis.py @@ -5,23 +5,18 @@ Revises: 963bf41f07fa Create Date: 2017-08-04 12:49:44.849038 """ -from alembic import op - import sqlalchemy as sa +from alembic import op # Revision identifiers, used by Alembic. 
-revision = 'ba4492177099' -down_revision = '963bf41f07fa' +revision = "ba4492177099" +down_revision = "963bf41f07fa" branch_labels = None depends_on = None -TABLES_WITH_VARCHAR_COLUMNS = { - 'social_auth_code': ( - 'email', - ), -} +TABLES_WITH_VARCHAR_COLUMNS = {"social_auth_code": ("email",)} def upgrade(): diff --git a/labonneboite/alembic/versions/bde9330b83fa_add_romes_to_remove_column.py b/labonneboite/alembic/versions/bde9330b83fa_add_romes_to_remove_column.py index 8a3fadc29..cdcd94c88 100644 --- a/labonneboite/alembic/versions/bde9330b83fa_add_romes_to_remove_column.py +++ b/labonneboite/alembic/versions/bde9330b83fa_add_romes_to_remove_column.py @@ -6,20 +6,24 @@ Create Date: 2017-10-03 15:28:07.825231 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'bde9330b83fa' -down_revision = '1124e80be448' +revision = "bde9330b83fa" +down_revision = "1124e80be448" branch_labels = None depends_on = None + def upgrade(): - op.add_column('etablissements_admin_update', sa.Column('romes_to_remove', - mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False)) + op.add_column( + "etablissements_admin_update", + sa.Column("romes_to_remove", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + ) + def downgrade(): - op.drop_column('etablissements_admin_update', 'romes_to_remove') + op.drop_column("etablissements_admin_update", "romes_to_remove") diff --git a/labonneboite/alembic/versions/c4c5f7a025c4_add_field_new_name_to_etablissements_.py b/labonneboite/alembic/versions/c4c5f7a025c4_add_field_new_name_to_etablissements_.py index 804174b07..85de7a403 100644 --- a/labonneboite/alembic/versions/c4c5f7a025c4_add_field_new_name_to_etablissements_.py +++ b/labonneboite/alembic/versions/c4c5f7a025c4_add_field_new_name_to_etablissements_.py @@ -5,15 +5,14 @@ Revises: 881f590506b5 Create Date: 2019-06-04 11:22:15.221381 """ -from alembic import op - import sqlalchemy as sa +from alembic import op from sqlalchemy.dialects import mysql # Revision identifiers, used by Alembic. -revision = 'c4c5f7a025c4' -down_revision = '881f590506b5' +revision = "c4c5f7a025c4" +down_revision = "881f590506b5" branch_labels = None depends_on = None @@ -21,15 +20,16 @@ def upgrade(): # New "raison sociale". op.add_column( - 'etablissements_admin_update', - sa.Column('new_company_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), + "etablissements_admin_update", + sa.Column("new_company_name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), ) # New "enseigne". 
op.add_column( - 'etablissements_admin_update', - sa.Column('new_office_name', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), + "etablissements_admin_update", + sa.Column("new_office_name", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), ) + def downgrade(): - op.drop_column('etablissements_admin_update', 'new_company_name') - op.drop_column('etablissements_admin_update', 'new_office_name') + op.drop_column("etablissements_admin_update", "new_company_name") + op.drop_column("etablissements_admin_update", "new_office_name") diff --git a/labonneboite/alembic/versions/c519ecaf1fa6_deduplicate_users.py b/labonneboite/alembic/versions/c519ecaf1fa6_deduplicate_users.py index d2b1f370c..1ba7b8a08 100644 --- a/labonneboite/alembic/versions/c519ecaf1fa6_deduplicate_users.py +++ b/labonneboite/alembic/versions/c519ecaf1fa6_deduplicate_users.py @@ -11,8 +11,8 @@ # Revision identifiers, used by Alembic. -revision = 'c519ecaf1fa6' -down_revision = 'a6ff4a27b063' +revision = "c519ecaf1fa6" +down_revision = "a6ff4a27b063" branch_labels = None depends_on = None @@ -23,33 +23,42 @@ def upgrade(): except KeyboardInterrupt: pass + def downgrade(): # This migration can be run as many times as we need: just rollback # (alembic downgrade -1) and re-apply (alembic upgrade HEAD). pass + def deduplicate_users(): # We import the app to initialize the social models - import labonneboite.web.app # pylint: disable=unused-import,unused-variable + import labonneboite.web.app # pylint: disable=unused-import,unused-variable from labonneboite.common.database import db_session from labonneboite.common.models import auth from labonneboite.common.models.user_favorite_offices import UserFavoriteOffice - # Iterate on duplicated users - for user in auth.User.query.group_by('external_id').having(sa.func.count(auth.User.external_id) > 1): + for user in auth.User.query.group_by("external_id").having(sa.func.count(auth.User.external_id) > 1): duplicate_user_ids = [] favorite_count = 0 # Create favorites, if necessary - for duplicate_user in auth.User.query.filter(auth.User.external_id == user.external_id, auth.User.id != user.id): + for duplicate_user in auth.User.query.filter( + auth.User.external_id == user.external_id, auth.User.id != user.id + ): duplicate_user_ids.append(duplicate_user.id) for favorite in duplicate_user.favorite_offices: _, created = UserFavoriteOffice.get_or_create(user_id=user.id, office_siret=favorite.office_siret) if created: favorite_count += 1 - print("Removing {} duplicates for user #{} ({} favorite added to original user)".format(len(duplicate_user_ids), user.id, favorite_count)) + print( + "Removing {} duplicates for user #{} ({} favorite added to original user)".format( + len(duplicate_user_ids), user.id, favorite_count + ) + ) # Remove duplicate social user - db_session.query(auth.UserSocialAuth).filter(auth.UserSocialAuth.user_id.in_(duplicate_user_ids)).delete(synchronize_session=False) + db_session.query(auth.UserSocialAuth).filter(auth.UserSocialAuth.user_id.in_(duplicate_user_ids)).delete( + synchronize_session=False + ) # Remove duplicate user auth.User.query.filter(auth.User.id.in_(duplicate_user_ids)).delete(synchronize_session=False) diff --git a/labonneboite/alembic/versions/c5cd5037cb31_increase_rome_lists_fields_length.py b/labonneboite/alembic/versions/c5cd5037cb31_increase_rome_lists_fields_length.py index d9912a6dd..7eb17b51e 100644 --- a/labonneboite/alembic/versions/c5cd5037cb31_increase_rome_lists_fields_length.py +++ 
b/labonneboite/alembic/versions/c5cd5037cb31_increase_rome_lists_fields_length.py @@ -5,35 +5,45 @@ Revises: 025bcb5f723e Create Date: 2019-02-05 16:22:03.849183 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'c5cd5037cb31' -down_revision = '025bcb5f723e' +revision = "c5cd5037cb31" +down_revision = "025bcb5f723e" branch_labels = None depends_on = None def upgrade(): - op.alter_column('update_jobs_recruiter_message', - 'romes_to_add', type_=mysql.TEXT(collation='utf8mb4_unicode_ci')) - op.alter_column('update_jobs_recruiter_message', - 'romes_to_remove', type_=mysql.TEXT(collation='utf8mb4_unicode_ci')) - op.alter_column('update_jobs_recruiter_message', - 'romes_alternance_to_add', type_=mysql.TEXT(collation='utf8mb4_unicode_ci')) - op.alter_column('update_jobs_recruiter_message', - 'romes_alternance_to_remove', type_=mysql.TEXT(collation='utf8mb4_unicode_ci')) + op.alter_column("update_jobs_recruiter_message", "romes_to_add", type_=mysql.TEXT(collation="utf8mb4_unicode_ci")) + op.alter_column( + "update_jobs_recruiter_message", "romes_to_remove", type_=mysql.TEXT(collation="utf8mb4_unicode_ci") + ) + op.alter_column( + "update_jobs_recruiter_message", "romes_alternance_to_add", type_=mysql.TEXT(collation="utf8mb4_unicode_ci") + ) + op.alter_column( + "update_jobs_recruiter_message", "romes_alternance_to_remove", type_=mysql.TEXT(collation="utf8mb4_unicode_ci") + ) def downgrade(): - op.alter_column('update_jobs_recruiter_message', - 'romes_to_add', type_=mysql.TINYTEXT(collation='utf8mb4_unicode_ci')) - op.alter_column('update_jobs_recruiter_message', - 'romes_to_remove', type_=mysql.TINYTEXT(collation='utf8mb4_unicode_ci')) - op.alter_column('update_jobs_recruiter_message', - 'romes_alternance_to_add', type_=mysql.TINYTEXT(collation='utf8mb4_unicode_ci')) - op.alter_column('update_jobs_recruiter_message', - 'romes_alternance_to_remove', type_=mysql.TINYTEXT(collation='utf8mb4_unicode_ci')) + op.alter_column( + "update_jobs_recruiter_message", "romes_to_add", type_=mysql.TINYTEXT(collation="utf8mb4_unicode_ci") + ) + op.alter_column( + "update_jobs_recruiter_message", "romes_to_remove", type_=mysql.TINYTEXT(collation="utf8mb4_unicode_ci") + ) + op.alter_column( + "update_jobs_recruiter_message", + "romes_alternance_to_add", + type_=mysql.TINYTEXT(collation="utf8mb4_unicode_ci"), + ) + op.alter_column( + "update_jobs_recruiter_message", + "romes_alternance_to_remove", + type_=mysql.TINYTEXT(collation="utf8mb4_unicode_ci"), + ) diff --git a/labonneboite/alembic/versions/c9f0246b91ef_add_index_on_etablissements_raw_.py b/labonneboite/alembic/versions/c9f0246b91ef_add_index_on_etablissements_raw_.py index 337054a39..2ea5e37be 100644 --- a/labonneboite/alembic/versions/c9f0246b91ef_add_index_on_etablissements_raw_.py +++ b/labonneboite/alembic/versions/c9f0246b91ef_add_index_on_etablissements_raw_.py @@ -7,17 +7,17 @@ """ from alembic import op + # Revision identifiers, used by Alembic. 
-revision = 'c9f0246b91ef' -down_revision = '33e80a1c96e0' +revision = "c9f0246b91ef" +down_revision = "33e80a1c96e0" branch_labels = None depends_on = None - def upgrade(): - op.create_index('dept_i', 'etablissements_raw', ['departement'], unique=False) + op.create_index("dept_i", "etablissements_raw", ["departement"], unique=False) def downgrade(): - op.drop_constraint('dept_i', 'etablissements_raw', type_='unique') + op.drop_constraint("dept_i", "etablissements_raw", type_="unique") diff --git a/labonneboite/alembic/versions/d0c07945abc1_initial_migration_create_etablissements.py b/labonneboite/alembic/versions/d0c07945abc1_initial_migration_create_etablissements.py index 0160f9405..3fb050585 100644 --- a/labonneboite/alembic/versions/d0c07945abc1_initial_migration_create_etablissements.py +++ b/labonneboite/alembic/versions/d0c07945abc1_initial_migration_create_etablissements.py @@ -5,45 +5,48 @@ Revises: None Create Date: 2018-04-16 17:45:37.243833 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. -revision = 'd0c07945abc1' +revision = "d0c07945abc1" down_revision = None branch_labels = None depends_on = None + def upgrade(): - op.create_table('etablissements', - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('raisonsociale', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('enseigne', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('codenaf', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('trancheeffectif', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('numerorue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('libellerue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('codepostal', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=11), nullable=True), - sa.Column('tel', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('website', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('flag_alternance', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_junior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_senior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_handicap', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('has_multi_geolocations', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('codecommune', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=True), - sa.Column('coordinates_x', mysql.FLOAT(), nullable=True), - sa.Column('coordinates_y', mysql.FLOAT(), nullable=True), - sa.Column('departement', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=11), nullable=True), - sa.Column('score', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('siret'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + op.create_table( + "etablissements", + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), 
nullable=False), + sa.Column("raisonsociale", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("enseigne", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("codenaf", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("trancheeffectif", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("numerorue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("libellerue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("codepostal", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=11), nullable=True), + sa.Column("tel", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("website", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("flag_alternance", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_junior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_senior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_handicap", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("has_multi_geolocations", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("codecommune", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=True), + sa.Column("coordinates_x", mysql.FLOAT(), nullable=True), + sa.Column("coordinates_y", mysql.FLOAT(), nullable=True), + sa.Column("departement", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=11), nullable=True), + sa.Column("score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("siret"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) def downgrade(): - op.drop_table('etablissements') + op.drop_table("etablissements") diff --git a/labonneboite/alembic/versions/d441dca1e974_add_contact_mode_column.py b/labonneboite/alembic/versions/d441dca1e974_add_contact_mode_column.py index 2e354447c..01bcaf781 100644 --- a/labonneboite/alembic/versions/d441dca1e974_add_contact_mode_column.py +++ b/labonneboite/alembic/versions/d441dca1e974_add_contact_mode_column.py @@ -5,38 +5,33 @@ Revises: 4041eac5f52f Create Date: 2018-06-04 14:22:49.829461 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = 'd441dca1e974' -down_revision = '4041eac5f52f' +revision = "d441dca1e974" +down_revision = "4041eac5f52f" branch_labels = None depends_on = None def upgrade(): - op.add_column('etablissements_admin_update', - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_admin_update", sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_admin_add', - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_admin_add", sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_exportable', - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_exportable", sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements_raw', - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements_raw", sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True)) - op.add_column('etablissements', - sa.Column('contact_mode', mysql.TINYTEXT(), nullable=True)) + op.add_column("etablissements", sa.Column("contact_mode", mysql.TINYTEXT(), nullable=True)) def downgrade(): - op.drop_column('etablissements_admin_update', 'contact_mode') - op.drop_column('etablissements_admin_add', 'contact_mode') - op.drop_column('etablissements_exportable', 'contact_mode') - op.drop_column('etablissements_raw', 'contact_mode') - op.drop_column('etablissements', 'contact_mode') + op.drop_column("etablissements_admin_update", "contact_mode") + op.drop_column("etablissements_admin_add", "contact_mode") + op.drop_column("etablissements_exportable", "contact_mode") + op.drop_column("etablissements_raw", "contact_mode") + op.drop_column("etablissements", "contact_mode") diff --git a/labonneboite/alembic/versions/d80c910949e4_add_column_dpae_duree_pec.py b/labonneboite/alembic/versions/d80c910949e4_add_column_dpae_duree_pec.py index c34cdcdde..f788b7411 100644 --- a/labonneboite/alembic/versions/d80c910949e4_add_column_dpae_duree_pec.py +++ b/labonneboite/alembic/versions/d80c910949e4_add_column_dpae_duree_pec.py @@ -5,24 +5,21 @@ Revises: 0592646101eb Create Date: 2017-11-20 14:53:20.499202 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'd80c910949e4' -down_revision = '0592646101eb' +revision = "d80c910949e4" +down_revision = "0592646101eb" branch_labels = None depends_on = None def upgrade(): - op.add_column( - 'dpae', - sa.Column('duree_pec', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True) - ) + op.add_column("dpae", sa.Column("duree_pec", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True)) def downgrade(): - op.drop_column('dpae', 'duree_pec') + op.drop_column("dpae", "duree_pec") diff --git a/labonneboite/alembic/versions/db2fdfb935ec_add_score_alternance_field.py b/labonneboite/alembic/versions/db2fdfb935ec_add_score_alternance_field.py index 1d84eeb68..2c3c6cef0 100644 --- a/labonneboite/alembic/versions/db2fdfb935ec_add_score_alternance_field.py +++ b/labonneboite/alembic/versions/db2fdfb935ec_add_score_alternance_field.py @@ -5,22 +5,25 @@ Revises: e21ab8255e02 Create Date: 2018-03-26 12:41:51.155213 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. 
-revision = 'db2fdfb935ec' -down_revision = 'e21ab8255e02' +revision = "db2fdfb935ec" +down_revision = "e21ab8255e02" branch_labels = None depends_on = None + def upgrade(): - op.add_column('etablissements_admin_add', sa.Column('score_alternance', mysql.INTEGER, default=0, nullable=False)) - op.add_column('etablissements_exportable', sa.Column('score_alternance', mysql.INTEGER, default=0, nullable=False)) - op.add_column('etablissements', sa.Column('score_alternance', mysql.INTEGER, default=0, nullable=False)) + op.add_column("etablissements_admin_add", sa.Column("score_alternance", mysql.INTEGER, default=0, nullable=False)) + op.add_column("etablissements_exportable", sa.Column("score_alternance", mysql.INTEGER, default=0, nullable=False)) + op.add_column("etablissements", sa.Column("score_alternance", mysql.INTEGER, default=0, nullable=False)) + def downgrade(): - op.drop_column('etablissements_admin_add', 'score_alternance') - op.drop_column('etablissements_exportable', 'score_alternance') - op.drop_column('etablissements', 'score_alternance') + op.drop_column("etablissements_admin_add", "score_alternance") + op.drop_column("etablissements_exportable", "score_alternance") + op.drop_column("etablissements", "score_alternance") diff --git a/labonneboite/alembic/versions/e21ab8255e02_convert_officeadminupdate_siret_in_text.py b/labonneboite/alembic/versions/e21ab8255e02_convert_officeadminupdate_siret_in_text.py index 224c15ac6..937131ee7 100644 --- a/labonneboite/alembic/versions/e21ab8255e02_convert_officeadminupdate_siret_in_text.py +++ b/labonneboite/alembic/versions/e21ab8255e02_convert_officeadminupdate_siret_in_text.py @@ -5,14 +5,14 @@ Revises: 38fad89a549c Create Date: 2018-01-11 14:10:58.780076 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'e21ab8255e02' -down_revision = '38fad89a549c' +revision = "e21ab8255e02" +down_revision = "38fad89a549c" branch_labels = None depends_on = None @@ -22,7 +22,6 @@ def upgrade(): conn.execute("ALTER TABLE `etablissements_admin_update` CHANGE siret sirets TEXT;") - def downgrade(): conn = op.get_bind() conn.execute("ALTER TABLE `etablissements_admin_update` CHANGE sirets siret VARCHAR(191);") diff --git a/labonneboite/alembic/versions/e305ab1e864e_add_certified_and_unique_id_in_save_.py b/labonneboite/alembic/versions/e305ab1e864e_add_certified_and_unique_id_in_save_.py index a03a6fc0d..a3344ef39 100644 --- a/labonneboite/alembic/versions/e305ab1e864e_add_certified_and_unique_id_in_save_.py +++ b/labonneboite/alembic/versions/e305ab1e864e_add_certified_and_unique_id_in_save_.py @@ -5,51 +5,53 @@ Revises: 2df1845b3dc5 Create Date: 2018-11-28 18:15:01.908123 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. 
-revision = 'e305ab1e864e' -down_revision = '2df1845b3dc5' +revision = "e305ab1e864e" +down_revision = "2df1845b3dc5" branch_labels = None depends_on = None def upgrade(): # Save form - op.add_column('etablissements_admin_update', sa.Column('certified_recruiter', mysql.BOOLEAN(), default=False)) - op.add_column('etablissements_admin_update', sa.Column('recruiter_uid', mysql.TINYTEXT(), default='')) + op.add_column("etablissements_admin_update", sa.Column("certified_recruiter", mysql.BOOLEAN(), default=False)) + op.add_column("etablissements_admin_update", sa.Column("recruiter_uid", mysql.TINYTEXT(), default="")) # Recruiter messages - op.add_column('other_recruiter_message', sa.Column('certified_recruiter', mysql.BOOLEAN(), default=False)) - op.add_column('other_recruiter_message', sa.Column('recruiter_uid', mysql.TINYTEXT(), default='')) + op.add_column("other_recruiter_message", sa.Column("certified_recruiter", mysql.BOOLEAN(), default=False)) + op.add_column("other_recruiter_message", sa.Column("recruiter_uid", mysql.TINYTEXT(), default="")) - op.add_column('remove_recruiter_message', sa.Column('certified_recruiter', mysql.BOOLEAN(), default=False)) - op.add_column('remove_recruiter_message', sa.Column('recruiter_uid', mysql.TINYTEXT(), default='')) + op.add_column("remove_recruiter_message", sa.Column("certified_recruiter", mysql.BOOLEAN(), default=False)) + op.add_column("remove_recruiter_message", sa.Column("recruiter_uid", mysql.TINYTEXT(), default="")) - op.add_column('update_coordinates_recruiter_message', sa.Column('certified_recruiter', mysql.BOOLEAN(), default=False)) - op.add_column('update_coordinates_recruiter_message', sa.Column('recruiter_uid', mysql.TINYTEXT(), default='')) + op.add_column( + "update_coordinates_recruiter_message", sa.Column("certified_recruiter", mysql.BOOLEAN(), default=False) + ) + op.add_column("update_coordinates_recruiter_message", sa.Column("recruiter_uid", mysql.TINYTEXT(), default="")) - op.add_column('update_jobs_recruiter_message', sa.Column('certified_recruiter', mysql.BOOLEAN(), default=False)) - op.add_column('update_jobs_recruiter_message', sa.Column('recruiter_uid', mysql.TINYTEXT(), default='')) + op.add_column("update_jobs_recruiter_message", sa.Column("certified_recruiter", mysql.BOOLEAN(), default=False)) + op.add_column("update_jobs_recruiter_message", sa.Column("recruiter_uid", mysql.TINYTEXT(), default="")) def downgrade(): # Save form - op.drop_column('etablissements_admin_update', 'certified_recruiter') - op.drop_column('etablissements_admin_update', 'recruiter_uid') + op.drop_column("etablissements_admin_update", "certified_recruiter") + op.drop_column("etablissements_admin_update", "recruiter_uid") # Recruiter messages - op.drop_column('other_recruiter_message', 'certified_recruiter') - op.drop_column('other_recruiter_message', 'recruiter_uid') + op.drop_column("other_recruiter_message", "certified_recruiter") + op.drop_column("other_recruiter_message", "recruiter_uid") - op.drop_column('remove_recruiter_message', 'certified_recruiter') - op.drop_column('remove_recruiter_message', 'recruiter_uid') + op.drop_column("remove_recruiter_message", "certified_recruiter") + op.drop_column("remove_recruiter_message", "recruiter_uid") - op.drop_column('update_coordinates_recruiter_message', 'certified_recruiter') - op.drop_column('update_coordinates_recruiter_message', 'recruiter_uid') + op.drop_column("update_coordinates_recruiter_message", "certified_recruiter") + op.drop_column("update_coordinates_recruiter_message", "recruiter_uid") - 
op.drop_column('update_jobs_recruiter_message', 'certified_recruiter') - op.drop_column('update_jobs_recruiter_message', 'recruiter_uid') + op.drop_column("update_jobs_recruiter_message", "certified_recruiter") + op.drop_column("update_jobs_recruiter_message", "recruiter_uid") diff --git a/labonneboite/alembic/versions/e4bce598b236_user_and_social_auth.py b/labonneboite/alembic/versions/e4bce598b236_user_and_social_auth.py index 8982349de..fc55cd28b 100644 --- a/labonneboite/alembic/versions/e4bce598b236_user_and_social_auth.py +++ b/labonneboite/alembic/versions/e4bce598b236_user_and_social_auth.py @@ -5,13 +5,14 @@ Revises: d0c07945abc1 Create Date: 2017-03-15 14:05:53.077840 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa + # Revision identifiers, used by Alembic. -revision = 'e4bce598b236' -down_revision = 'd0c07945abc1' +revision = "e4bce598b236" +down_revision = "d0c07945abc1" branch_labels = None depends_on = None @@ -19,89 +20,90 @@ def upgrade(): op.create_table( - 'users', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('email', mysql.VARCHAR(length=255), nullable=True), - sa.Column('gender', mysql.VARCHAR(length=255), nullable=True), - sa.Column('first_name', mysql.VARCHAR(length=255), nullable=True), - sa.Column('last_name', mysql.VARCHAR(length=255), nullable=True), - sa.Column('date_created', mysql.DATETIME(), nullable=True), - sa.Column('active', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "users", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("email", mysql.VARCHAR(length=255), nullable=True), + sa.Column("gender", mysql.VARCHAR(length=255), nullable=True), + sa.Column("first_name", mysql.VARCHAR(length=255), nullable=True), + sa.Column("last_name", mysql.VARCHAR(length=255), nullable=True), + sa.Column("date_created", mysql.DATETIME(), nullable=True), + sa.Column("active", mysql.TINYINT(display_width=1), autoincrement=False, nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) op.create_table( - 'social_auth_usersocialauth', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('provider', mysql.VARCHAR(length=32), nullable=True), - sa.Column('extra_data', mysql.TEXT(), nullable=True), - sa.Column('uid', mysql.VARCHAR(length=255), nullable=True), - sa.Column('user_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='social_auth_usersocialauth_ibfk_1'), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "social_auth_usersocialauth", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("provider", mysql.VARCHAR(length=32), nullable=True), + sa.Column("extra_data", mysql.TEXT(), nullable=True), + sa.Column("uid", mysql.VARCHAR(length=255), nullable=True), + sa.Column("user_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], name="social_auth_usersocialauth_ibfk_1"), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) 
op.create_table( - 'social_auth_nonce', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('server_url', mysql.VARCHAR(length=255), nullable=True), - sa.Column('timestamp', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('salt', mysql.VARCHAR(length=40), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "social_auth_nonce", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("server_url", mysql.VARCHAR(length=255), nullable=True), + sa.Column("timestamp", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("salt", mysql.VARCHAR(length=40), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) op.create_table( - 'social_auth_partial', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('token', mysql.VARCHAR(length=32), nullable=True), - sa.Column('data', mysql.TEXT(), nullable=True), - sa.Column('next_step', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('backend', mysql.VARCHAR(length=32), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "social_auth_partial", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("token", mysql.VARCHAR(length=32), nullable=True), + sa.Column("data", mysql.TEXT(), nullable=True), + sa.Column("next_step", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("backend", mysql.VARCHAR(length=32), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) op.create_table( - 'social_auth_code', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('email', mysql.VARCHAR(length=200), nullable=True), - sa.Column('code', mysql.VARCHAR(length=32), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "social_auth_code", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("email", mysql.VARCHAR(length=200), nullable=True), + sa.Column("code", mysql.VARCHAR(length=32), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) op.create_table( - 'social_auth_association', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('server_url', mysql.VARCHAR(length=255), nullable=True), - sa.Column('handle', mysql.VARCHAR(length=255), nullable=True), - sa.Column('secret', mysql.VARCHAR(length=255), nullable=True), - sa.Column('issued', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('lifetime', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('assoc_type', mysql.VARCHAR(length=64), nullable=True), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "social_auth_association", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("server_url", mysql.VARCHAR(length=255), nullable=True), + sa.Column("handle", mysql.VARCHAR(length=255), 
nullable=True), + sa.Column("secret", mysql.VARCHAR(length=255), nullable=True), + sa.Column("issued", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("lifetime", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("assoc_type", mysql.VARCHAR(length=64), nullable=True), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) + def downgrade(): - op.drop_table('social_auth_association') - op.drop_table('social_auth_code') - op.drop_table('social_auth_partial') - op.drop_table('social_auth_nonce') - op.drop_table('social_auth_usersocialauth') - op.drop_table('users') + op.drop_table("social_auth_association") + op.drop_table("social_auth_code") + op.drop_table("social_auth_partial") + op.drop_table("social_auth_nonce") + op.drop_table("social_auth_usersocialauth") + op.drop_table("users") diff --git a/labonneboite/alembic/versions/eaba5c094998_create_etablissements_admin_add.py b/labonneboite/alembic/versions/eaba5c094998_create_etablissements_admin_add.py index a296540a2..a601b5741 100644 --- a/labonneboite/alembic/versions/eaba5c094998_create_etablissements_admin_add.py +++ b/labonneboite/alembic/versions/eaba5c094998_create_etablissements_admin_add.py @@ -5,14 +5,14 @@ Revises: ba4492177099 Create Date: 2017-08-08 13:21:40.075614 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'eaba5c094998' -down_revision = 'ba4492177099' +revision = "eaba5c094998" +down_revision = "ba4492177099" branch_labels = None depends_on = None @@ -20,43 +20,46 @@ def upgrade(): op.create_table( - 'etablissements_admin_add', - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('raisonsociale', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('enseigne', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codenaf', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False), - sa.Column('numerorue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('libellerue', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codecommune', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codepostal', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False), - sa.Column('email', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('tel', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('website', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('flag_alternance', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_junior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_senior', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('flag_handicap', mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), - sa.Column('departement', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=8), nullable=False), - sa.Column('trancheeffectif', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=2), nullable=True), - sa.Column('score', mysql.INTEGER(display_width=11), 
autoincrement=False, nullable=False), - sa.Column('coordinates_x', mysql.FLOAT(), nullable=False), - sa.Column('coordinates_y', mysql.FLOAT(), nullable=False), - sa.Column('id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), - sa.Column('reason', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False), - sa.Column('date_created', mysql.DATETIME(), nullable=False), - sa.Column('date_updated', mysql.DATETIME(), nullable=True), - sa.Column('created_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('updated_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], name='etablissements_admin_add_ibfk_1', - ondelete='SET NULL'), - sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], name='etablissements_admin_add_ibfk_2', - ondelete='SET NULL'), + "etablissements_admin_add", + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("raisonsociale", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("enseigne", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codenaf", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False), + sa.Column("numerorue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("libellerue", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codecommune", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codepostal", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False), + sa.Column("email", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("tel", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("website", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("flag_alternance", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_junior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_senior", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("flag_handicap", mysql.TINYINT(display_width=1), autoincrement=False, nullable=False), + sa.Column("departement", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=8), nullable=False), + sa.Column("trancheeffectif", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=2), nullable=True), + sa.Column("score", mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), + sa.Column("coordinates_x", mysql.FLOAT(), nullable=False), + sa.Column("coordinates_y", mysql.FLOAT(), nullable=False), + sa.Column("id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), + sa.Column("reason", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + sa.Column("date_created", mysql.DATETIME(), nullable=False), + sa.Column("date_updated", mysql.DATETIME(), nullable=True), + sa.Column("created_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("updated_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["created_by_id"], ["users.id"], name="etablissements_admin_add_ibfk_1", ondelete="SET NULL" + ), + sa.ForeignKeyConstraint( + ["updated_by_id"], ["users.id"], 
name="etablissements_admin_add_ibfk_2", ondelete="SET NULL" + ), # this is a mistake, fixed by a later migration - primary key should be ID only - sa.PrimaryKeyConstraint('siret', 'id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + sa.PrimaryKeyConstraint("siret", "id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) + def downgrade(): - op.drop_table('etablissements_admin_add') + op.drop_table("etablissements_admin_add") diff --git a/labonneboite/alembic/versions/f03cff523555_create_etablissements_admin_extra_geolocations.py b/labonneboite/alembic/versions/f03cff523555_create_etablissements_admin_extra_geolocations.py index 43179a2d6..cca7abd57 100644 --- a/labonneboite/alembic/versions/f03cff523555_create_etablissements_admin_extra_geolocations.py +++ b/labonneboite/alembic/versions/f03cff523555_create_etablissements_admin_extra_geolocations.py @@ -5,41 +5,43 @@ Revises: 090e86dfc304 Create Date: 2017-08-22 12:19:46.926977 """ +import sqlalchemy as sa from alembic import op from sqlalchemy.dialects import mysql -import sqlalchemy as sa # Revision identifiers, used by Alembic. -revision = 'f03cff523555' -down_revision = '090e86dfc304' +revision = "f03cff523555" +down_revision = "090e86dfc304" branch_labels = None depends_on = None def upgrade(): op.create_table( - 'etablissements_admin_extra_geolocations', - sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), - sa.Column('siret', mysql.VARCHAR(collation='utf8mb4_unicode_ci', length=191), nullable=False), - sa.Column('codes', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False), - sa.Column('geolocations', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False), - sa.Column('date_end', mysql.DATETIME(), nullable=False), - sa.Column('reason', mysql.TEXT(collation='utf8mb4_unicode_ci'), nullable=False), - sa.Column('date_created', mysql.DATETIME(), nullable=False), - sa.Column('date_updated', mysql.DATETIME(), nullable=True), - sa.Column('created_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.Column('updated_by_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint(['created_by_id'], ['users.id'], - name='etablissements_admin_extra_geolocations_ibfk_1', ondelete='SET NULL'), - sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], - name='etablissements_admin_extra_geolocations_ibfk_2', ondelete='SET NULL'), - sa.PrimaryKeyConstraint('id'), - mysql_collate='utf8mb4_unicode_ci', - mysql_default_charset='utf8mb4', - mysql_engine='InnoDB' + "etablissements_admin_extra_geolocations", + sa.Column("id", mysql.INTEGER(display_width=11), nullable=False), + sa.Column("siret", mysql.VARCHAR(collation="utf8mb4_unicode_ci", length=191), nullable=False), + sa.Column("codes", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + sa.Column("geolocations", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + sa.Column("date_end", mysql.DATETIME(), nullable=False), + sa.Column("reason", mysql.TEXT(collation="utf8mb4_unicode_ci"), nullable=False), + sa.Column("date_created", mysql.DATETIME(), nullable=False), + sa.Column("date_updated", mysql.DATETIME(), nullable=True), + sa.Column("created_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.Column("updated_by_id", mysql.INTEGER(display_width=11), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["created_by_id"], ["users.id"], 
name="etablissements_admin_extra_geolocations_ibfk_1", ondelete="SET NULL" + ), + sa.ForeignKeyConstraint( + ["updated_by_id"], ["users.id"], name="etablissements_admin_extra_geolocations_ibfk_2", ondelete="SET NULL" + ), + sa.PrimaryKeyConstraint("id"), + mysql_collate="utf8mb4_unicode_ci", + mysql_default_charset="utf8mb4", + mysql_engine="InnoDB", ) def downgrade(): - op.drop_table('etablissements_admin_extra_geolocations') + op.drop_table("etablissements_admin_extra_geolocations") diff --git a/labonneboite/common/activity.py b/labonneboite/common/activity.py index b6239e49e..0df03a3f1 100644 --- a/labonneboite/common/activity.py +++ b/labonneboite/common/activity.py @@ -1,14 +1,15 @@ -from collections import OrderedDict -from datetime import datetime import json import logging import socket +from collections import OrderedDict +from datetime import datetime from flask import has_request_context, request from flask_login import current_user from labonneboite.conf import settings + # Produce json-formatter logs about user activity. This can be used for debugging # and analytics, but stats are mostly used to be dumped in the Pôle Emploi data # lake. Thus, in production activity logs must be dumped to files that will be @@ -16,7 +17,7 @@ # Because fields are parsed by the data lake teams, we MUST NOT modify them # without telling them about it. -logger = logging.getLogger('useractivity') +logger = logging.getLogger("useractivity") logger.setLevel(settings.LOG_LEVEL_USER_ACTIVITY) settings.LOGGING_HANDLER_USER_ACTIVITY.setFormatter(logging.Formatter(settings.LOG_FORMAT_USER_ACTIVITY)) logger.addHandler(settings.LOGGING_HANDLER_USER_ACTIVITY) @@ -25,23 +26,20 @@ def log(event_name, user=None, source=None, **properties): if not user and current_user and not current_user.is_anonymous: user = current_user - source = source or 'site' + source = source or "site" data = OrderedDict() - data['dateheure'] = datetime.isoformat(datetime.now()) - data['nom'] = event_name - data['source'] = source - data['hote'] = socket.gethostname() - data['idutilisateur'] = user.id if user else None - data['idutilisateur-peconnect'] = user.external_id if user else None - data['url'] = request.full_path if has_request_context() else None - data['proprietes'] = properties + data["dateheure"] = datetime.isoformat(datetime.now()) + data["nom"] = event_name + data["source"] = source + data["hote"] = socket.gethostname() + data["idutilisateur"] = user.id if user else None + data["idutilisateur-peconnect"] = user.external_id if user else None + data["url"] = request.full_path if has_request_context() else None + data["proprietes"] = properties logger.info(json.dumps(data)) + def log_search(sirets=None, count=None, page=None, source=None, **properties): - resultats = { - 'page': page, - 'total': count, - 'sirets': sirets, - } - log('recherche', source=source, resultats=resultats, **properties) + resultats = {"page": page, "total": count, "sirets": sirets} + log("recherche", source=source, resultats=resultats, **properties) diff --git a/labonneboite/common/autocomplete.py b/labonneboite/common/autocomplete.py index d696746aa..ad77f84e4 100644 --- a/labonneboite/common/autocomplete.py +++ b/labonneboite/common/autocomplete.py @@ -1,43 +1,31 @@ from functools import lru_cache -from slugify import slugify + import unidecode +from slugify import slugify + from labonneboite.common.es import Elasticsearch from labonneboite.conf import settings + MAX_JOBS = 10 MAX_LOCATIONS = 10 # This file is a fallback which uses ES, we normally 
use the "address API" from beta.gouv.fr + @lru_cache(maxsize=8 * 1024) def build_location_suggestions(term): - if term.strip() == '': + if term.strip() == "": return [] term = term.title() es = Elasticsearch() - zipcode_match = [{ - "prefix": { - "zipcode": term - } - }, ] - - city_match = [{ - "match": { - "city_name.autocomplete": { - "query": term, - } - }}, { - "match": { - "city_name.stemmed": { - "query": term, - "boost": 1, - } - }}, { - "match_phrase_prefix": { - "city_name.stemmed": { - "query": term, - } - }}] + zipcode_match = [{"prefix": {"zipcode": term}}] + + city_match = [ + {"match": {"city_name.autocomplete": {"query": term}}}, + {"match": {"city_name.stemmed": {"query": term, "boost": 1}}}, + {"match_phrase_prefix": {"city_name.stemmed": {"query": term}}}, + ] filters = zipcode_match @@ -50,17 +38,10 @@ def build_location_suggestions(term): "query": { # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html "function_score": { - "query": { - "bool": { - "should": filters, - }, - }, + "query": {"bool": {"should": filters}}, # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-function-score-query.html#function-field-value-factor - "field_value_factor": { - "field": "population", - "modifier": "log1p", - } - }, + "field_value_factor": {"field": "population", "modifier": "log1p"}, + } }, "size": MAX_LOCATIONS, } @@ -69,19 +50,19 @@ def build_location_suggestions(term): suggestions = [] first_score = None - for hit in res['hits']['hits']: + for hit in res["hits"]["hits"]: if not first_score: - first_score = hit['_score'] - source = hit['_source'] - if source['zipcode']: # and hit['_score'] > 0.1 * first_score: - city_name = source['city_name'].replace('"', '') - label = '%s (%s)' % (city_name, source['zipcode']) + first_score = hit["_score"] + source = hit["_source"] + if source["zipcode"]: # and hit['_score'] > 0.1 * first_score: + city_name = source["city_name"].replace('"', "") + label = "%s (%s)" % (city_name, source["zipcode"]) city = { - 'city': source['slug'], - 'zipcode': source['zipcode'], - 'label': label, - 'latitude': source['location']['lat'], - 'longitude': source['location']['lon'], + "city": source["slug"], + "zipcode": source["zipcode"], + "label": label, + "latitude": source["location"]["lat"], + "longitude": source["location"]["lon"], } suggestions.append(city) return suggestions @@ -93,15 +74,12 @@ def enrich_job_term_with_thesaurus(term): for specific keywords which give poor results otherwise. For example "ios" would match "Vendeur en kiosque". """ - thesaurus = { - 'ios': 'informatique', - 'android': 'informatique', - } - words = term.split(' ') + thesaurus = {"ios": "informatique", "android": "informatique"} + words = term.split(" ") for idx, word in enumerate(words): if word.lower() in thesaurus: words[idx] = thesaurus[word.lower()] - term = ' '.join(words) + term = " ".join(words) return term @@ -117,7 +95,7 @@ def build_job_label_suggestions(term, size=MAX_JOBS): "match": { # Query for multiple words or multiple parts of words across multiple fields. 
# Based on https://qbox.io/blog/an-introduction-to-ngrams-in-elasticsearch - "_all": unidecode.unidecode(term), + "_all": unidecode.unidecode(term) } }, "aggs": { @@ -126,7 +104,6 @@ def build_job_label_suggestions(term, size=MAX_JOBS): "field": "rome_code", "size": 0, # Note: a maximum of 550 buckets will be fetched, as we have 550 unique ROME codes - # FIXME `order` cannot work without a computed `max_score`, see the `max_score` comment below. # Order results by sub-aggregation named 'max_score' # "order": {"max_score": "desc"}, @@ -136,18 +113,15 @@ def build_job_label_suggestions(term, size=MAX_JOBS): # Another way of saying this is that for all OGR matching a given ROME, we only # keep the most relevant OGR. "by_top_hit": {"top_hits": {"size": 1}}, - # FIXME `max_score` below does not work with Elasticsearch 1.7. # Fixed in elasticsearch 2.0+: # https://github.com/elastic/elasticsearch/issues/10091#issuecomment-193676966 - # FTR @vermeer made another try to find a workaround as of Feb 2018, and failed. # The only way out is to upgrade to elasticsearch 2.0+ - # Set max score among all members of this bucket # "max_score": {"max": {"lang": "expression", "script": "_score"}}, }, - }, + } }, "size": 0, } @@ -158,35 +132,35 @@ def build_job_label_suggestions(term, size=MAX_JOBS): # Since ordering cannot be done easily through Elasticsearch 1.7 (`max_score` not working), # we do it in Python at this time. - results = res['aggregations']['by_rome_code']['buckets'] - results.sort(key=lambda e: e['by_top_hit']['hits']['max_score'], reverse=True) + results = res["aggregations"]["by_rome_code"]["buckets"] + results.sort(key=lambda e: e["by_top_hit"]["hits"]["max_score"], reverse=True) for hit in results: if len(suggestions) < size: - hit = hit['by_top_hit']['hits']['hits'][0] - source = hit['_source'] - highlight = hit.get('highlight', {}) + hit = hit["by_top_hit"]["hits"]["hits"][0] + source = hit["_source"] + highlight = hit.get("highlight", {}) try: - rome_description = highlight['rome_description.autocomplete'][0] + rome_description = highlight["rome_description.autocomplete"][0] except KeyError: - rome_description = source['rome_description'] + rome_description = source["rome_description"] try: - ogr_description = highlight['ogr_description.autocomplete'][0] + ogr_description = highlight["ogr_description.autocomplete"][0] except KeyError: - ogr_description = source['ogr_description'] + ogr_description = source["ogr_description"] label = "%s (%s, ...)" % (rome_description, ogr_description) value = "%s (%s, ...)" % (source["rome_description"], source["ogr_description"]) - score = round(hit['_score'], 1) - suggestions.append({ - 'id': source['rome_code'], - 'label': label, - 'value': value, - 'occupation': slugify(source['rome_description'].lower()), - 'score': score, - }) + score = round(hit["_score"], 1) + suggestions.append( + { + "id": source["rome_code"], + "label": label, + "value": value, + "occupation": slugify(source["rome_description"].lower()), + "score": score, + } + ) else: break return suggestions - - diff --git a/labonneboite/common/chunks.py b/labonneboite/common/chunks.py index f86e4f631..8efa542a0 100644 --- a/labonneboite/common/chunks.py +++ b/labonneboite/common/chunks.py @@ -1,7 +1,6 @@ - def chunks(l, n): """ Yield successive n-sized chunks from l. 
""" for i in range(0, len(l), n): - yield l[i:i + n] + yield l[i : i + n] diff --git a/labonneboite/common/contact_mode.py b/labonneboite/common/contact_mode.py index 4da4b7d6e..63d2bf13e 100644 --- a/labonneboite/common/contact_mode.py +++ b/labonneboite/common/contact_mode.py @@ -1,4 +1,3 @@ - # Contact modes CONTACT_MODE_STAGES = { "Se présenter spontanément": [ @@ -8,15 +7,19 @@ "Déclarer votre reprise d'emploi à Pôle emploi :-)", ], "Envoyer un CV et une lettre de motivation": [ - ("Rechercher le nom d'un contact dans l'entreprise (google, kompass, linkedin, viadeo, votre réseau) " - "pour lui adresser votre courrier/email"), - ("Rechercher des informations économiques (projet, évolution) sur l'entreprise afin de personnaliser " - "votre lettre de motivation"), + ( + "Rechercher le nom d'un contact dans l'entreprise (google, kompass, linkedin, viadeo, votre réseau) " + "pour lui adresser votre courrier/email" + ), + ( + "Rechercher des informations économiques (projet, évolution) sur l'entreprise afin de personnaliser " + "votre lettre de motivation" + ), "Envoyer votre CV et votre lettre de motivation", "Relancer votre interlocuteur par téléphone", "Préparer votre entretien", "Déclarer votre reprise d'emploi à Pôle emploi :-)", - ] + ], } CONTACT_MODE_DEFAULT = "Envoyer un CV et une lettre de motivation" diff --git a/labonneboite/common/data/ogr_rome_mapping_update/extract_ogr_rome_data.py b/labonneboite/common/data/ogr_rome_mapping_update/extract_ogr_rome_data.py index d60c783a2..521cbc960 100644 --- a/labonneboite/common/data/ogr_rome_mapping_update/extract_ogr_rome_data.py +++ b/labonneboite/common/data/ogr_rome_mapping_update/extract_ogr_rome_data.py @@ -14,21 +14,21 @@ def write_csv_for_rome_labels(rome_labels): - with open('rome_labels.csv', 'wb') as f: + with open("rome_labels.csv", "wb") as f: f.write("rome_id|rome_label\n") for rome_id in sorted(rome_labels.keys()): f.write("%s|%s\n" % (rome_id, rome_labels[rome_id])) def write_csv_for_ogr_labels(ogr_labels): - with open('ogr_labels.csv', 'wb') as f: + with open("ogr_labels.csv", "wb") as f: f.write("ogr_id|ogr_label\r\n") for ogr_id in sorted(ogr_labels.keys()): f.write("%s|%s\r\n" % (ogr_id, ogr_labels[ogr_id])) def write_csv_for_ogr_rome_mapping(ogr_rome_mapping): - with open('ogr_rome_mapping.csv', 'wb') as f: + with open("ogr_rome_mapping.csv", "wb") as f: f.write("ogr_id|rome_id\n") for ogr_id in sorted(ogr_rome_mapping.keys()): f.write("%s|%s\n" % (ogr_id, ogr_rome_mapping[ogr_id])) @@ -38,35 +38,35 @@ def extract_ogr_rome_data(): rome_labels = {} ogr_labels = {} ogr_rome_mapping = {} - with open('ogr_rome_mapping_raw.csv', 'rb') as f: - rows = csv.reader(f, delimiter=',', quotechar='"') + with open("ogr_rome_mapping_raw.csv", "rb") as f: + rows = csv.reader(f, delimiter=",", quotechar='"') for row in rows: if len(row) != 5: - raise ValueError('this row does not have 5 fields') + raise ValueError("this row does not have 5 fields") rome_id = row[0] + row[1] + row[2] ogr_id = row[4] if len(rome_id) == 5: - if ogr_id == '': + if ogr_id == "": # this line defines a rome label rome_label = row[3] if rome_id in rome_labels: - raise ValueError('unexpected duplicate rome label') + raise ValueError("unexpected duplicate rome label") rome_labels[rome_id] = rome_label else: # this line defines an ogr plus its mapping to a rome ogr_label = row[3] if ogr_id in ogr_labels: - raise ValueError('unexpected duplicate ogr label') + raise ValueError("unexpected duplicate ogr label") ogr_labels[ogr_id] = ogr_label if ogr_id in 
ogr_rome_mapping: - raise ValueError('unexpected duplicate ogr rome mapping') + raise ValueError("unexpected duplicate ogr rome mapping") else: ogr_rome_mapping[ogr_id] = rome_id - + write_csv_for_rome_labels(rome_labels) write_csv_for_ogr_labels(ogr_labels) write_csv_for_ogr_rome_mapping(ogr_rome_mapping) -if __name__ == '__main__': +if __name__ == "__main__": extract_ogr_rome_data() diff --git a/labonneboite/common/database.py b/labonneboite/common/database.py index a5e623106..fb3e0453a 100644 --- a/labonneboite/common/database.py +++ b/labonneboite/common/database.py @@ -1,4 +1,3 @@ - # http://flask.pocoo.org/docs/0.12/patterns/sqlalchemy/#declarative # http://docs.sqlalchemy.org/en/rel_1_1/ import logging @@ -7,8 +6,8 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import scoped_session, sessionmaker +from labonneboite.common.env import ENV_DEVELOPMENT, ENV_TEST, get_current_env from labonneboite.conf import settings -from labonneboite.common.env import get_current_env, ENV_DEVELOPMENT, ENV_TEST CURRENT_ENV = get_current_env() @@ -17,13 +16,14 @@ # ----------------------------------------------------------------------------- DATABASE = { - 'HOST': settings.DB_HOST, - 'PORT': settings.DB_PORT, - 'NAME': settings.DB_NAME, - 'USER': settings.DB_USER, - 'PASSWORD': settings.DB_PASSWORD, + "HOST": settings.DB_HOST, + "PORT": settings.DB_PORT, + "NAME": settings.DB_NAME, + "USER": settings.DB_USER, + "PASSWORD": settings.DB_PASSWORD, } + def get_db_string(db_params=None): """ Returns the database URI that should be used for the connection. @@ -32,27 +32,20 @@ def get_db_string(db_params=None): db_params = db_params or DATABASE return "mysql://{USER}:{PASSWORD}@{HOST}:{PORT}/{NAME}?charset=utf8mb4".format(**db_params) -ENGINE_PARAMS = { - 'convert_unicode': True, - 'echo': False, - 'pool_recycle': 30, -} + +ENGINE_PARAMS = {"convert_unicode": True, "echo": False, "pool_recycle": 30} engine = create_engine(get_db_string(), **ENGINE_PARAMS) # Session # ----------------------------------------------------------------------------- -SESSIONMAKER_PARAMS = { - 'autocommit': False, - 'autoflush': False, - 'bind': engine, -} +SESSIONMAKER_PARAMS = {"autocommit": False, "autoflush": False, "bind": engine} if CURRENT_ENV == ENV_TEST: # Used in unit tests to avoid a `DetachedInstanceError: Instance is not bound to a Session`. # http://www.dangtrinh.com/2014/03/i-got-this-error-when-trying-to.html - SESSIONMAKER_PARAMS['expire_on_commit'] = False + SESSIONMAKER_PARAMS["expire_on_commit"] = False db_session = scoped_session(sessionmaker(**SESSIONMAKER_PARAMS)) @@ -82,6 +75,7 @@ def init_db(): # FIXME import importer models as well !? # pylint:disable=unused-variable from labonneboite.common import models + # pylint:enable=unused-variable Base.metadata.create_all(bind=engine) @@ -93,6 +87,7 @@ def init_db(): # Imports are used by SQLAlchemy `metadata.create_all()` to know what tables to create. from social_flask_sqlalchemy.models import PSABase from social_flask_sqlalchemy.models import UserSocialAuth, Nonce, Association, Code, Partial + # pylint:enable=unused-variable # InnoDB has a maximum index length of 767 bytes, so for utf8mb4 we can index a maximum of 191 characters. Code.email.property.columns[0].type.length = 191 @@ -113,6 +108,7 @@ def delete_db(): # Imports are used by SQLAlchemy `metadata.create_all()` to know what tables to create. 
from social_flask_sqlalchemy.models import PSABase from social_flask_sqlalchemy.models import UserSocialAuth, Nonce, Association, Code, Partial + # pylint:enable=unused-variable PSABase.metadata.drop_all(engine) @@ -121,6 +117,7 @@ def delete_db(): # Import all models so that metadata can be filled in and SQLAlchemy knows what tables to deal with. # pylint:disable=unused-variable from labonneboite.common import models + # pylint:enable=unused-variable # for t in Base.metadata.sorted_tables: # print("drop table %s" % t.name) diff --git a/labonneboite/common/departements.py b/labonneboite/common/departements.py index 9911304fe..e210df4b5 100644 --- a/labonneboite/common/departements.py +++ b/labonneboite/common/departements.py @@ -1,10 +1,10 @@ def get_departements(largest_ones_first=False): - departements = ["{:02d}".format(d) for d in range(1, 96)] + ['97'] + departements = ["{:02d}".format(d) for d in range(1, 96)] + ["97"] if largest_ones_first: - largest_departements = ['75', '13', '97', '69', '59', '33', '92', '06', '34'] + largest_departements = ["75", "13", "97", "69", "59", "33", "92", "06", "34"] for d in largest_departements: if d not in departements: - raise ValueError('unknown departement %s' % d) + raise ValueError("unknown departement %s" % d) departements.remove(d) departements[:0] = [d] return departements diff --git a/labonneboite/common/doorbell.py b/labonneboite/common/doorbell.py index a4974c056..e3b34f6d3 100644 --- a/labonneboite/common/doorbell.py +++ b/labonneboite/common/doorbell.py @@ -2,8 +2,8 @@ def get_tags(tag): - if tag not in ['faq', 'help', 'press', 'results']: - raise Exception('unknown page') + if tag not in ["faq", "help", "press", "results"]: + raise Exception("unknown page") if user_is_pro(): - return ['conseiller', tag] - return ['de', tag] + return ["conseiller", tag] + return ["de", tag] diff --git a/labonneboite/common/encoding.py b/labonneboite/common/encoding.py index 2345192d0..e305d418b 100644 --- a/labonneboite/common/encoding.py +++ b/labonneboite/common/encoding.py @@ -1,6 +1,6 @@ - import unidecode + def sanitize_string(s): """ Returns a correctly decoded unicode from any given string @@ -11,7 +11,7 @@ def sanitize_string(s): if isinstance(s, bytes): # the main optimistic case : UTF-8 try: - return s.decode('utf-8') + return s.decode("utf-8") except UnicodeDecodeError: pass # a trick to deal with french accents found in unknown encoding @@ -20,23 +20,23 @@ def sanitize_string(s): # é : \xc3\xa3\xa9 unknown encoding => \xc3\xa9 utf-8 # à : \xc3\xa3\xa0 unknown encoding => \xc3\xa0 utf-8 try: - return s.replace('\xa3', '').decode('utf-8') + return s.replace("\xa3", "").decode("utf-8") except UnicodeDecodeError: pass # a special character often seen is the degree sign (e.g. 
in N°) # see http://www.codetable.net/hex/b0 # which matches the latin1 encoding try: - return s.decode('latin1') + return s.decode("latin1") except UnicodeDecodeError: pass # last resort # 'ignore' will removed any unrecognized character - return s.decode('utf-8', 'ignore') + return s.decode("utf-8", "ignore") elif isinstance(s, str): return s elif s is None: - return s # leave None value untouched + return s # leave None value untouched raise Exception("not a string nor bytes nor None") diff --git a/labonneboite/common/env.py b/labonneboite/common/env.py index e2a3390e1..a8bf2b564 100644 --- a/labonneboite/common/env.py +++ b/labonneboite/common/env.py @@ -1,20 +1,21 @@ import os + # Environment # ----------- -ENV_DEVELOPMENT = 'development' -ENV_TEST = 'test' -ENV_BONAPARTE = 'bonaparte' +ENV_DEVELOPMENT = "development" +ENV_TEST = "test" +ENV_BONAPARTE = "bonaparte" ENVS = [ENV_DEVELOPMENT, ENV_TEST, ENV_BONAPARTE] def get_current_env(): - current_env = os.getenv('LBB_ENV') + current_env = os.getenv("LBB_ENV") if current_env and current_env not in ENVS: raise Exception( "To identify the current environment, an `LBB_ENV` environment variable must be set " - "with one of those values: %s." % ', '.join(ENVS) + "with one of those values: %s." % ", ".join(ENVS) ) return current_env diff --git a/labonneboite/common/es.py b/labonneboite/common/es.py index 69e9cf01b..3b85df125 100644 --- a/labonneboite/common/es.py +++ b/labonneboite/common/es.py @@ -1,14 +1,15 @@ -from datetime import datetime import random import string +from datetime import datetime import elasticsearch from labonneboite.conf import settings -OFFICE_TYPE = 'office' -OGR_TYPE = 'ogr' -LOCATION_TYPE = 'location' + +OFFICE_TYPE = "office" +OGR_TYPE = "ogr" +LOCATION_TYPE = "location" class ConnectionPool(object): @@ -31,10 +32,7 @@ def new_elasticsearch_instance(): In some cases e.g. parallel jobs you may need a dedicated es connection for each of your threads. """ - return elasticsearch.Elasticsearch( - hosts=[settings.ES_HOST], - timeout=settings.ES_TIMEOUT - ) + return elasticsearch.Elasticsearch(hosts=[settings.ES_HOST], timeout=settings.ES_TIMEOUT) def drop_and_create_index(): @@ -61,7 +59,7 @@ def drop_indexes_of_alias(name=settings.ES_INDEX): def drop_index(index): - Elasticsearch().indices.delete(index=index, params={'ignore': [400, 404]}) + Elasticsearch().indices.delete(index=index, params={"ignore": [400, 404]}) def get_new_index_name(): @@ -70,7 +68,7 @@ def get_new_index_name(): appended to avoid collisions for indexes created at the same second (it happens in tests). """ - suffix = ''.join([random.choice(string.ascii_lowercase) for _ in range(5)]) + suffix = "".join([random.choice(string.ascii_lowercase) for _ in range(5)]) return datetime.now().strftime(settings.ES_INDEX + "-%Y%m%d%H%M%S-" + suffix) @@ -83,75 +81,32 @@ def create_index(index): Create index with the right settings. 
""" filters = { - "stop_francais": { - "type": "stop", - "stopwords": ["_french_"], - }, - "fr_stemmer": { - "type": "stemmer", - "name": "light_french", - }, - "elision": { - "type": "elision", - "articles": ["c", "l", "m", "t", "qu", "n", "s", "j", "d"], - }, - "ngram_filter": { - "type": "ngram", - "min_gram": 2, - "max_gram": 20, - }, - "edge_ngram_filter": { - "type": "edge_ngram", - "min_gram": 1, - "max_gram": 20, - }, + "stop_francais": {"type": "stop", "stopwords": ["_french_"]}, + "fr_stemmer": {"type": "stemmer", "name": "light_french"}, + "elision": {"type": "elision", "articles": ["c", "l", "m", "t", "qu", "n", "s", "j", "d"]}, + "ngram_filter": {"type": "ngram", "min_gram": 2, "max_gram": 20}, + "edge_ngram_filter": {"type": "edge_ngram", "min_gram": 1, "max_gram": 20}, } analyzers = { "stemmed": { "type": "custom", "tokenizer": "standard", - "filter": [ - "asciifolding", - "lowercase", - "stop_francais", - "elision", - "fr_stemmer", - ], - }, - "autocomplete": { - "type": "custom", - "tokenizer": "standard", - "filter": [ - "lowercase", - "edge_ngram_filter", - ], + "filter": ["asciifolding", "lowercase", "stop_francais", "elision", "fr_stemmer"], }, + "autocomplete": {"type": "custom", "tokenizer": "standard", "filter": ["lowercase", "edge_ngram_filter"]}, "ngram_analyzer": { "type": "custom", "tokenizer": "standard", - "filter": [ - "asciifolding", - "lowercase", - "stop_francais", - "elision", - "ngram_filter", - ], + "filter": ["asciifolding", "lowercase", "stop_francais", "elision", "ngram_filter"], }, } mapping_ogr = { # https://www.elastic.co/guide/en/elasticsearch/reference/1.7/mapping-all-field.html - "_all": { - "type": "string", - "index_analyzer": "ngram_analyzer", - "search_analyzer": "standard", - }, + "_all": {"type": "string", "index_analyzer": "ngram_analyzer", "search_analyzer": "standard"}, "properties": { - "ogr_code": { - "type": "string", - "index": "not_analyzed", - }, + "ogr_code": {"type": "string", "index": "not_analyzed"}, "ogr_description": { "type": "string", "include_in_all": True, @@ -159,10 +114,7 @@ def create_index(index): "index_analyzer": "ngram_analyzer", "search_analyzer": "standard", }, - "rome_code": { - "type": "string", - "index": "not_analyzed", - }, + "rome_code": {"type": "string", "index": "not_analyzed"}, "rome_description": { "type": "string", "include_in_all": True, @@ -178,122 +130,47 @@ def create_index(index): "city_name": { "type": "multi_field", "fields": { - "raw": { - "type": "string", - "index": "not_analyzed", - }, - "autocomplete" : { - "type": "string", - "analyzer": "autocomplete", - }, - "stemmed": { - "type": "string", - "analyzer": "stemmed", - "store": "yes", - "term_vector": "yes", - }, + "raw": {"type": "string", "index": "not_analyzed"}, + "autocomplete": {"type": "string", "analyzer": "autocomplete"}, + "stemmed": {"type": "string", "analyzer": "stemmed", "store": "yes", "term_vector": "yes"}, }, }, - "coordinates": { - "type": "geo_point", - }, - "population": { - "type": "integer", - }, - "slug": { - "type": "string", - "index": "not_analyzed", - }, - "zipcode": { - "type": "string", - "index": "not_analyzed", - }, - }, + "coordinates": {"type": "geo_point"}, + "population": {"type": "integer"}, + "slug": {"type": "string", "index": "not_analyzed"}, + "zipcode": {"type": "string", "index": "not_analyzed"}, + } } mapping_office = { "properties": { - "naf": { - "type": "string", - "index": "not_analyzed", - }, - "siret": { - "type": "string", - "index": "not_analyzed", - }, - "name": { - "type": "string", - 
"index": "not_analyzed", - }, - "email": { - "type": "string", - "index": "not_analyzed", - }, - "tel": { - "type": "string", - "index": "not_analyzed", - }, - "website": { - "type": "string", - "index": "not_analyzed", - }, - "score": { - "type": "integer", - "index": "not_analyzed", - }, - "scores_by_rome": { - "type": "object", - "index": "not_analyzed", - }, - "score_alternance": { - "type": "integer", - "index": "not_analyzed", - }, - "scores_alternance_by_rome": { - "type": "object", - "index": "not_analyzed", - }, - "boosted_romes": { - "type": "object", - "index": "not_analyzed", - }, - "boosted_alternance_romes": { - "type": "object", - "index": "not_analyzed", - }, - "headcount": { - "type": "integer", - "index": "not_analyzed", - }, - "department": { - "type": "string", - "index": "not_analyzed" - }, - "locations": { - "type": "geo_point", - }, - }, + "naf": {"type": "string", "index": "not_analyzed"}, + "siret": {"type": "string", "index": "not_analyzed"}, + "name": {"type": "string", "index": "not_analyzed"}, + "email": {"type": "string", "index": "not_analyzed"}, + "tel": {"type": "string", "index": "not_analyzed"}, + "website": {"type": "string", "index": "not_analyzed"}, + "score": {"type": "integer", "index": "not_analyzed"}, + "scores_by_rome": {"type": "object", "index": "not_analyzed"}, + "score_alternance": {"type": "integer", "index": "not_analyzed"}, + "scores_alternance_by_rome": {"type": "object", "index": "not_analyzed"}, + "boosted_romes": {"type": "object", "index": "not_analyzed"}, + "boosted_alternance_romes": {"type": "object", "index": "not_analyzed"}, + "headcount": {"type": "integer", "index": "not_analyzed"}, + "department": {"type": "string", "index": "not_analyzed"}, + "locations": {"type": "geo_point"}, + } } create_body = { - "settings": { - "index": { - "analysis": { - "filter": filters, - "analyzer": analyzers, - }, - }, - }, - "mappings": { - "ogr": mapping_ogr, - "location": mapping_location, - "office": mapping_office, - }, + "settings": {"index": {"analysis": {"filter": filters, "analyzer": analyzers}}}, + "mappings": {"ogr": mapping_ogr, "location": mapping_location, "office": mapping_office}, } Elasticsearch().indices.create(index=index, body=create_body) fake_doc = fake_office() - Elasticsearch().index(index=index, doc_type=OFFICE_TYPE, id=fake_doc['siret'], body=fake_doc) + Elasticsearch().index(index=index, doc_type=OFFICE_TYPE, id=fake_doc["siret"], body=fake_doc) + # This fake office having a zero but existing score for each rome is designed # as a workaround of the following bug: @@ -316,14 +193,14 @@ def create_index(index): # This fake office ensures no rome will ever be orphaned. 
def fake_office(): doc = { - 'siret': "0", + "siret": "0", # fields required even if not used by function_score - 'score': 0, - 'score_alternance': 0, + "score": 0, + "score_alternance": 0, } # all fields used by function_score which could potentially be orphaned and thus cause the bug - doc['scores_by_rome'] = {rome: 0 for rome in settings.ROME_DESCRIPTIONS} - doc['scores_alternance_by_rome'] = {rome: 0 for rome in settings.ROME_DESCRIPTIONS} + doc["scores_by_rome"] = {rome: 0 for rome in settings.ROME_DESCRIPTIONS} + doc["scores_alternance_by_rome"] = {rome: 0 for rome in settings.ROME_DESCRIPTIONS} return doc diff --git a/labonneboite/common/esd.py b/labonneboite/common/esd.py index f7754477f..bf7449ced 100644 --- a/labonneboite/common/esd.py +++ b/labonneboite/common/esd.py @@ -1,12 +1,15 @@ -import logging import datetime +import logging import time -import requests from urllib.parse import urlencode + +import requests from requests.exceptions import ConnectionError, ReadTimeout + from labonneboite.conf import settings -logger = logging.getLogger('main') + +logger = logging.getLogger("main") ESD_TOKEN_ENDPOINT_URL = "%s/connexion/oauth2/access_token" % settings.PEAM_TOKEN_BASE_URL ESD_TIMEOUT = 5 @@ -37,41 +40,34 @@ def get_token(cls): cls.prepare_token() return cls.VALUE - @classmethod def is_token_valid(cls): if not cls.EXPIRATION_DATE: return False return cls.EXPIRATION_DATE > datetime.datetime.now() - @classmethod def prepare_token(cls): - data = urlencode([ - ('realm', '/partenaire'), - ('grant_type', 'client_credentials'), - ('client_id', settings.PEAM_CLIENT_ID), - ('client_secret', settings.PEAM_CLIENT_SECRET), - ('scope', "application_%s" % settings.PEAM_CLIENT_ID) - ]) + data = urlencode( + [ + ("realm", "/partenaire"), + ("grant_type", "client_credentials"), + ("client_id", settings.PEAM_CLIENT_ID), + ("client_secret", settings.PEAM_CLIENT_SECRET), + ("scope", "application_%s" % settings.PEAM_CLIENT_ID), + ] + ) data += "%20api_offresdemploiv2 o2dsoffre" data += " qos_silver_offresdemploiv2" - headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - } + headers = {"Content-Type": "application/x-www-form-urlencoded"} - response = _get_response( - url=ESD_TOKEN_ENDPOINT_URL, - headers=headers, - method='POST', - data=data, - ) - if 'access_token' in response: - cls.VALUE = response['access_token'] + response = _get_response(url=ESD_TOKEN_ENDPOINT_URL, headers=headers, method="POST", data=data) + if "access_token" in response: + cls.VALUE = response["access_token"] # Wait slightly less than instructed before requesting a new token, # to avoid random 401 errors. - expires_in = int(0.75 * response['expires_in']) + expires_in = int(0.75 * response["expires_in"]) cls.EXPIRATION_DATE = datetime.datetime.now() + datetime.timedelta(seconds=expires_in) else: raise TokenFailure @@ -81,50 +77,31 @@ def get_response(url, params): """ Get a response for a request to one of the ESD APIs. 
""" - headers = { - 'Authorization': 'Bearer {}'.format(EsdToken.get_token()), - 'Content-Type': 'application/json', - } + headers = {"Authorization": "Bearer {}".format(EsdToken.get_token()), "Content-Type": "application/json"} attempts = 1 - response = {'results': []} + response = {"results": []} while attempts <= ESD_OFFERS_MAX_ATTEMPTS: try: - return _get_response( - url=url, - params=params, - headers=headers, - method='GET', - ) + return _get_response(url=url, params=params, headers=headers, method="GET") except TooManyRequests: time.sleep(ESD_OFFERS_THROTTLE_IN_SECONDS) attempts += 1 return response -def _get_response(url, headers, params=None, method='GET', data=None): +def _get_response(url, headers, params=None, method="GET", data=None): """ Generic method fetching the response for a GET/POST request to a given url with a given data object. """ try: - if method == 'GET': + if method == "GET": if data: raise ValueError("data should be None for a GET request") - response = requests.get( - url=url, - params=params, - headers=headers, - timeout=ESD_TIMEOUT, - ) - elif method == 'POST': - response = requests.post( - url=url, - params=params, - headers=headers, - data=data, - timeout=ESD_TIMEOUT, - ) + response = requests.get(url=url, params=params, headers=headers, timeout=ESD_TIMEOUT) + elif method == "POST": + response = requests.post(url=url, params=params, headers=headers, data=data, timeout=ESD_TIMEOUT) else: raise ValueError("unknown HTTP method") except (ConnectionError, ReadTimeout) as e: @@ -135,11 +112,7 @@ def _get_response(url, headers, params=None, method='GET', data=None): if response.status_code == http_too_many_requests: raise TooManyRequests elif response.status_code >= 400: - error = '{} responded with a {} error: {}'.format( - url, - response.status_code, - response.content, - ) + error = "{} responded with a {} error: {}".format(url, response.status_code, response.content) log_level = logging.WARNING if response.status_code >= 500 else logging.ERROR logger.log(log_level, error) raise RequestFailed("response={}".format(response.content)) diff --git a/labonneboite/common/fetcher.py b/labonneboite/common/fetcher.py index 002269ec9..a3d25f67c 100644 --- a/labonneboite/common/fetcher.py +++ b/labonneboite/common/fetcher.py @@ -1,14 +1,10 @@ - - class InvalidFetcherArgument(Exception): pass class Fetcher(object): - def get_offices(self): raise NotImplementedError() - def get_office_count(self): return self.office_count diff --git a/labonneboite/common/geocoding/__init__.py b/labonneboite/common/geocoding/__init__.py index 58cdd9059..32ce12ea5 100644 --- a/labonneboite/common/geocoding/__init__.py +++ b/labonneboite/common/geocoding/__init__.py @@ -1,13 +1,14 @@ -from functools import wraps import collections import json import os -import geopy.distance +from functools import wraps +import geopy.distance from slugify import slugify from labonneboite.common import departements from labonneboite.common.util import unique_elements + from . import datagouv @@ -15,27 +16,24 @@ def city_as_dict(item): - first_zipcode = item['codesPostaux'][0] + first_zipcode = item["codesPostaux"][0] # Use the "main" zipcode for cities that are subdivided into arrondissements. 
- if item['nom'] == 'Lyon': + if item["nom"] == "Lyon": first_zipcode = "69000" - elif item['nom'] == 'Marseille': + elif item["nom"] == "Marseille": first_zipcode = "13000" - elif item['nom'] == 'Paris': + elif item["nom"] == "Paris": first_zipcode = "75000" return { - 'name': item['nom'], - 'slug': slugify(item['nom']), - 'commune_id': item['code'], - 'zipcodes': item['codesPostaux'], - 'zipcode': first_zipcode, - 'population': item['population'], - 'coords': { - 'lon': item['centre']['coordinates'][0], - 'lat': item['centre']['coordinates'][1], - }, + "name": item["nom"], + "slug": slugify(item["nom"]), + "commune_id": item["code"], + "zipcodes": item["codesPostaux"], + "zipcode": first_zipcode, + "population": item["population"], + "coords": {"lon": item["centre"]["coordinates"][0], "lat": item["centre"]["coordinates"][1]}, } @@ -65,43 +63,64 @@ def load_cities_cache(): # Communes without `population` attribute. # ---------------------------------------- # Meuse (starting with 55), because there is no more inhabitants. - "55039", "55050", "55139", "55189", "55239", "55307", + "55039", + "55050", + "55139", + "55189", + "55239", + "55307", # Mayotte (starting with 976), because there is no accurate population data available yet. - "97601", "97602", "97603", "97604", "97605", "97606", "97607", "97608", "97609", - "97610", "97611", "97612", "97613", "97614", "97615", "97616", "97617", + "97601", + "97602", + "97603", + "97604", + "97605", + "97606", + "97607", + "97608", + "97609", + "97610", + "97611", + "97612", + "97613", + "97614", + "97615", + "97616", + "97617", ] cities = [] json_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data/cities.json") - with open(json_file, 'r') as json_data: + with open(json_file, "r") as json_data: for item in json.load(json_data): - if item['code'] not in COMMUNES_TO_SKIP: + if item["code"] not in COMMUNES_TO_SKIP: cities.append(city_as_dict(item)) json_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data/arrondissements_as_cities.json") - with open(json_file, 'r') as json_data: + with open(json_file, "r") as json_data: for item in json.load(json_data): cities.append(city_as_dict(item)) - CACHE['cities'] = cities + CACHE["cities"] = cities # Create a dict where each "code commune (INSEE)" is mapped to its corresponding city. # This works because a code commune is unique for each city. - CACHE['cities_by_commune_id'] = {city['commune_id']: city for city in cities} + CACHE["cities_by_commune_id"] = {city["commune_id"]: city for city in cities} # Create a dict where each "zipcode" is mapped to its corresponding cities. # Since the zipcode is not unique, it can be mapped to several cities. - CACHE['cities_by_zipcode'] = collections.defaultdict(list) + CACHE["cities_by_zipcode"] = collections.defaultdict(list) for city in cities: - key = city['zipcode'] - CACHE['cities_by_zipcode'][key].append(city) + key = city["zipcode"] + CACHE["cities_by_zipcode"][key].append(city) def cities_cache_required(function): """ A decorator that ensures that cities cache is loaded. 
""" + @wraps(function) def decorated(*args, **kwargs): if not CACHE: @@ -113,7 +132,7 @@ def decorated(*args, **kwargs): @cities_cache_required def get_cities(): - return CACHE['cities'] + return CACHE["cities"] @cities_cache_required @@ -123,21 +142,21 @@ def get_city_by_commune_id(commune_id): """ if isinstance(commune_id, int): commune_id = str(commune_id) - return CACHE['cities_by_commune_id'].get(commune_id) + return CACHE["cities_by_commune_id"].get(commune_id) @cities_cache_required -def get_city_by_zipcode(zipcode, slug=''): +def get_city_by_zipcode(zipcode, slug=""): """ Returns the city corresponding to the given `zipcode` string and `city_name_slug`. `city_name_slug` is required to deal with situations where a zipcode is not unique for a city. """ - cities = CACHE['cities_by_zipcode'].get(zipcode) + cities = CACHE["cities_by_zipcode"].get(zipcode) if not cities: return None if len(cities) > 1: for city in cities: - if not slug or city['slug'] == slug: + if not slug or city["slug"] == slug: return city return cities[0] @@ -148,9 +167,7 @@ def get_all_cities_from_departement(departement): Returns a list of all cities for the given departement. """ return [ - city - for commune_id, city in list(CACHE['cities_by_commune_id'].items()) - if commune_id.startswith(departement) + city for commune_id, city in list(CACHE["cities_by_commune_id"].items()) if commune_id.startswith(departement) ] @@ -160,7 +177,7 @@ def get_distance_between_commune_id_and_coordinates(commune_id, latitude, longit Return distance (float, kilometers) from commune_id to gps coordinates """ city = get_city_by_commune_id(commune_id) - coords_1 = (city['coords']['lat'], city['coords']['lon']) + coords_1 = (city["coords"]["lat"], city["coords"]["lon"]) coords_2 = (latitude, longitude) return geopy.distance.geodesic(coords_1, coords_2).km @@ -170,7 +187,7 @@ def is_commune_id(value): """ Returns true if the given string is a "code commune (INSEE)", false otherwise. """ - return value in CACHE['cities_by_commune_id'] + return value in CACHE["cities_by_commune_id"] def is_departement(value): @@ -197,22 +214,22 @@ def get_coordinates(address, limit=10): for result in datagouv.search(address, limit=limit): try: feature = { - 'latitude': result['geometry']['coordinates'][1], - 'longitude': result['geometry']['coordinates'][0], - 'label': result['properties']['label'], - 'zipcode': result['properties']['postcode'], - 'city': result['properties']['city'] + "latitude": result["geometry"]["coordinates"][1], + "longitude": result["geometry"]["coordinates"][0], + "label": result["properties"]["label"], + "zipcode": result["properties"]["postcode"], + "city": result["properties"]["city"], } # The zipcode is normally always present in the label, # but sometimes is inconsistently absent from it (e.g. Saint-Paul) # thus we add it if necessary. 
- if feature['zipcode'] not in feature['label']: - feature['label'] += " %s" % feature['zipcode'] + if feature["zipcode"] not in feature["label"]: + feature["label"] += " %s" % feature["zipcode"] features.append(feature) except KeyError: continue - return unique_elements(features, key=lambda x: (x['latitude'], x['longitude'])) + return unique_elements(features, key=lambda x: (x["latitude"], x["longitude"])) def get_address(latitude, longitude, limit=10): @@ -226,11 +243,13 @@ def get_address(latitude, longitude, limit=10): features = [] for result in datagouv.reverse(latitude, longitude, limit=limit): try: - features.append({ - 'label': result['properties']['label'], - 'zipcode': result['properties']['postcode'], - 'city': result['properties']['city'] - }) + features.append( + { + "label": result["properties"]["label"], + "zipcode": result["properties"]["postcode"], + "city": result["properties"]["city"], + } + ) except KeyError: pass return unique_elements(features) diff --git a/labonneboite/common/geocoding/datagouv.py b/labonneboite/common/geocoding/datagouv.py index dcfc0d86c..263931546 100644 --- a/labonneboite/common/geocoding/datagouv.py +++ b/labonneboite/common/geocoding/datagouv.py @@ -1,18 +1,17 @@ import logging - from functools import lru_cache + import requests from requests.exceptions import ConnectionError, ReadTimeout from labonneboite.conf import settings -logger = logging.getLogger('main') +logger = logging.getLogger("main") BAN_TIMEOUT = 3 - def search(address, limit=10): """ Return a list of locations with latitude/longitude coordinates that @@ -26,10 +25,7 @@ def search(address, limit=10): # Longer requests cause a 413 error address = address[:200] - return get_features('/search', **{ - 'q': address, - 'limit': limit - }) + return get_features("/search", **{"q": address, "limit": limit}) def reverse(latitude, longitude, limit=10): @@ -37,11 +33,7 @@ def reverse(latitude, longitude, limit=10): Find the candidate addresses associated to given latitude/longitude coordinates. """ - return get_features('/reverse', **{ - 'lat': latitude, - 'lon': longitude, - 'limit': limit - }) + return get_features("/reverse", **{"lat": latitude, "lon": longitude, "limit": limit}) @lru_cache(1000) @@ -55,22 +47,16 @@ def get_features(endpoint, **params): params (dict): key/value dictionary to pass as query string """ try: - response = requests.get( - settings.API_ADRESSE_BASE_URL + endpoint, - params=params, - timeout=BAN_TIMEOUT, - ) + response = requests.get(settings.API_ADRESSE_BASE_URL + endpoint, params=params, timeout=BAN_TIMEOUT) except (ConnectionError, ReadTimeout): # FIXME log BAN DOWN event return [] if response.status_code >= 400: - error = 'adresse-api.data.gouv.fr responded with a {} error: {}'.format( - response.status_code, response.content - ) + error = "adresse-api.data.gouv.fr responded with a {} error: {}".format(response.status_code, response.content) # We log an error only if we made an incorrect request # FIXME Where does this log go? Not found in uwsgi log nor sentry. 
log_level = logging.WARNING if response.status_code >= 500 else logging.ERROR logger.log(log_level, error) return [] - return response.json()['features'] + return response.json()["features"] diff --git a/labonneboite/common/hiring_type_util.py b/labonneboite/common/hiring_type_util.py index 44bd5b055..f07563209 100644 --- a/labonneboite/common/hiring_type_util.py +++ b/labonneboite/common/hiring_type_util.py @@ -1,11 +1,10 @@ - -DPAE = 'dpae' -ALTERNANCE = 'alt' +DPAE = "dpae" +ALTERNANCE = "alt" VALUES = [DPAE, ALTERNANCE] DEFAULT = DPAE -CONTRACT_DPAE = 'dpae' -CONTRACT_ALTERNANCE = 'alternance' +CONTRACT_DPAE = "dpae" +CONTRACT_ALTERNANCE = "alternance" CONTRACT_TO_HIRING_TYPE = {CONTRACT_DPAE: DPAE, CONTRACT_ALTERNANCE: ALTERNANCE} CONTRACT_VALUES = sorted(CONTRACT_TO_HIRING_TYPE.keys()) CONTRACT_DEFAULT = CONTRACT_DPAE diff --git a/labonneboite/common/hotjar.py b/labonneboite/common/hotjar.py index 7419110b9..2d1cb3407 100644 --- a/labonneboite/common/hotjar.py +++ b/labonneboite/common/hotjar.py @@ -1,5 +1,6 @@ from .pro import user_is_pro + def get_hotjar_tag(): if user_is_pro(): return "conseiller" diff --git a/labonneboite/common/load_data.py b/labonneboite/common/load_data.py index 3c5f97724..392a9a195 100644 --- a/labonneboite/common/load_data.py +++ b/labonneboite/common/load_data.py @@ -1,9 +1,9 @@ +import csv import os import pickle -import csv - -from functools import lru_cache from collections import defaultdict +from functools import lru_cache + USE_ROME_SLICING_DATASET = False # Rome slicing dataset is not ready yet @@ -18,20 +18,20 @@ def load_file(func, filename): - full_filename = os.path.join(os.path.dirname( - os.path.realpath(__file__)), "data/%s" % filename) + full_filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data/%s" % filename) return func(full_filename) def load_pickle_file(filename): def f(full_filename): return pickle.load(open(full_filename, "r")) + return load_file(f, filename) -def load_csv_file(filename, delimiter='|'): +def load_csv_file(filename, delimiter="|"): def f(full_filename): - csv_file = open(full_filename, 'r') + csv_file = open(full_filename, "r") reader = csv.reader(csv_file, delimiter=delimiter) return reader @@ -46,8 +46,7 @@ def f(full_filename): # skip empty rows continue elif len(row) != len_previous_row: - raise IndexError( - "found row with abnormal number of fields : %s" % row) + raise IndexError("found row with abnormal number of fields : %s" % row) rows.append(row) else: # first line of CSV file: headers should be ignored @@ -106,7 +105,7 @@ def load_city_codes(): def load_contact_modes(): # use comma delimiter instead of pipe so that it is recognized by github # and can easily be edited online by the intrapreneurs - rows = load_csv_file("contact_modes.csv", delimiter=',') + rows = load_csv_file("contact_modes.csv", delimiter=",") naf_prefix_to_rome_to_contact_mode = load_rows_as_dict_of_dict(rows) return naf_prefix_to_rome_to_contact_mode @@ -163,19 +162,19 @@ def load_naf_labels(): @lru_cache(maxsize=None) def load_rome_naf_mapping(): - return load_csv_file(ROME_NAF_FILE, delimiter=',') + return load_csv_file(ROME_NAF_FILE, delimiter=",") @lru_cache(maxsize=None) def load_metiers_tension(): - csv_metiers_tension = load_csv_file("metiers_tension.csv", ',') + csv_metiers_tension = load_csv_file("metiers_tension.csv", ",") rome_to_tension = defaultdict(int) for row in csv_metiers_tension: tension_pct = row[2] rome_code = row[3] # FIXME : remove rows where tension is #N/A in the dataset, to remove this ugly 
check ? - if tension_pct != '#N/A': + if tension_pct != "#N/A": tension_pct = float(tension_pct) if 0 <= tension_pct <= 100: # As a single ROME can have multiple tensions, @@ -185,13 +184,15 @@ def load_metiers_tension(): raise ValueError return rome_to_tension -#Used for PSE study, it returns a list of SIRET that must not b be seen on LBB + +# Used for PSE study, it returns a list of SIRET that must not b be seen on LBB @lru_cache(maxsize=None) def load_siret_to_remove(): - rows = load_csv_file("untreated_BB.csv", ',') + rows = load_csv_file("untreated_BB.csv", ",") sirets_to_remove = load_rows_as_set(rows) return sirets_to_remove + OGR_ROME_CODES = load_ogr_rome_mapping() ROME_CODES = list(OGR_ROME_CODES.values()) diff --git a/labonneboite/common/locations.py b/labonneboite/common/locations.py index 3f2d54f1e..f20eb95ec 100644 --- a/labonneboite/common/locations.py +++ b/labonneboite/common/locations.py @@ -1,12 +1,12 @@ -from collections import namedtuple import logging +from collections import namedtuple from slugify import slugify from labonneboite.common import geocoding -logger = logging.getLogger('main') +logger = logging.getLogger("main") class Location(object): @@ -19,8 +19,7 @@ def __repr__(self): class CityLocation(object): - - def __init__(self, zipcode, name=''): + def __init__(self, zipcode, name=""): self.zipcode = zipcode # Location attribute may be None if slug/zipcode combination is incorrect @@ -32,22 +31,18 @@ def __init__(self, zipcode, name=''): if not city: logger.debug("unable to retrieve a city for zipcode `%s` and slug `%s`", self.zipcode, self.slug) else: - coordinates = city['coords'] - self.location = Location(coordinates['lat'], coordinates['lon']) - self.slug = city['slug'] - self.name = city['name'] + coordinates = city["coords"] + self.location = Location(coordinates["lat"], coordinates["lon"]) + self.slug = city["slug"] + self.name = city["name"] @property def full_name(self): - return '%s (%s)' % (self.name, self.zipcode) + return "%s (%s)" % (self.name, self.zipcode) @property def is_location_correct(self): return self.location is not None -NamedLocation = namedtuple('NamedLocation', [ - 'zipcode', - 'city', - 'name' -]) +NamedLocation = namedtuple("NamedLocation", ["zipcode", "city", "name"]) diff --git a/labonneboite/common/mailjet.py b/labonneboite/common/mailjet.py index 1cda907e4..5c9513e95 100644 --- a/labonneboite/common/mailjet.py +++ b/labonneboite/common/mailjet.py @@ -3,6 +3,7 @@ from mailjet_rest import Client + """ This code is duplicated in LBB and JP If you update one, please keep the other in sync @@ -13,20 +14,23 @@ Also when more code is shared between JP and LBB we will probably want to use pip modules """ + class MailJetClient: def __init__(self, MAILJET_API_KEY, MAILJET_API_SECRET): self.MAILJET_API_KEY = MAILJET_API_KEY self.MAILJET_API_SECRET = MAILJET_API_SECRET - def send(self, - subject, - html_content, - from_email, - recipients, - from_name=None, - reply_to=None, - attachments=None, - monitoring_category=None): + def send( + self, + subject, + html_content, + from_email, + recipients, + from_name=None, + reply_to=None, + attachments=None, + monitoring_category=None, + ): data = { "Messages": [ { @@ -35,29 +39,30 @@ def send(self, "Subject": subject, "HTMLPart": html_content, "Attachments": self.encode_attachments(attachments), - }, - ], + } + ] } if from_name: - data['Messages'][0]['From']['Name'] = from_name + data["Messages"][0]["From"]["Name"] = from_name if reply_to: - data['Messages'][0]['ReplyTo'] = {"Email": reply_to} 
+ data["Messages"][0]["ReplyTo"] = {"Email": reply_to} if monitoring_category: - data['Messages'][0]['MonitoringCategory'] = monitoring_category + data["Messages"][0]["MonitoringCategory"] = monitoring_category json_response = self.post_api(data) return self.extract_ids(json_response) - - def send_using_template(self, - subject, - mailjet_template_id, - mailjet_template_data, - from_email, - recipients, - from_name=None, - reply_to=None, - attachments=None, - monitoring_category=None): + def send_using_template( + self, + subject, + mailjet_template_id, + mailjet_template_data, + from_email, + recipients, + from_name=None, + reply_to=None, + attachments=None, + monitoring_category=None, + ): data = { "Messages": [ { @@ -68,38 +73,41 @@ def send_using_template(self, "TemplateID": mailjet_template_id, "TemplateLanguage": True, "Variables": mailjet_template_data, - }, - ], + } + ] } if from_name: - data['Messages'][0]['From']['Name'] = from_name + data["Messages"][0]["From"]["Name"] = from_name if reply_to: - data['Messages'][0]['ReplyTo'] = {"Email": reply_to} + data["Messages"][0]["ReplyTo"] = {"Email": reply_to} if monitoring_category: - data['Messages'][0]['MonitoringCategory'] = monitoring_category + data["Messages"][0]["MonitoringCategory"] = monitoring_category json_response = self.post_api(data) return self.extract_ids(json_response) - def extract_ids(self, json_response): return [message["MessageID"] for message in json_response["Messages"][0]["To"]] - def encode_attachments(self, attachments): - return [{ - "ContentType": self.mimetype(attachment[0]), - "Filename": attachment[0], - "Base64Content": base64.encodebytes(attachment[1]).decode().strip(), - } for attachment in attachments] if attachments else [] - + return ( + [ + { + "ContentType": self.mimetype(attachment[0]), + "Filename": attachment[0], + "Base64Content": base64.encodebytes(attachment[1]).decode().strip(), + } + for attachment in attachments + ] + if attachments + else [] + ) def mimetype(self, url): types = mimetypes.guess_type(url, strict=False) - return types[0] if types and types[0] else 'application/octet-stream' - + return types[0] if types and types[0] else "application/octet-stream" def post_api(self, data): - client = Client(auth=(self.MAILJET_API_KEY, self.MAILJET_API_SECRET), version='v3.1') + client = Client(auth=(self.MAILJET_API_KEY, self.MAILJET_API_SECRET), version="v3.1") response = client.send.create(data=data) # In case of error, throw so that the caller can retry sending the email # When there is a queue - i.e. 
JP, it's important that the task queue diff --git a/labonneboite/common/mapping.py b/labonneboite/common/mapping.py index f072ee34c..7835d569b 100644 --- a/labonneboite/common/mapping.py +++ b/labonneboite/common/mapping.py @@ -1,6 +1,5 @@ - -from collections import namedtuple import logging +from collections import namedtuple from functools import lru_cache from slugify import slugify @@ -9,7 +8,7 @@ from labonneboite.conf import settings -logger = logging.getLogger('main') +logger = logging.getLogger("main") SLUGIFIED_ROME_LABELS = {slugify(v): k for k, v in list(settings.ROME_DESCRIPTIONS.items())} @@ -46,20 +45,16 @@ def populate_rome_naf_mapping(): if ENSURE_LABELS_IN_MAPPING_MATCH: - if naf_label != settings.NAF_CODES[naf].encode('utf8'): - raise Exception("labels '%s' and '%s' do not match for NAF %s" % ( - naf_label, - settings.NAF_CODES[naf].encode('utf8'), - naf, - ) + if naf_label != settings.NAF_CODES[naf].encode("utf8"): + raise Exception( + "labels '%s' and '%s' do not match for NAF %s" + % (naf_label, settings.NAF_CODES[naf].encode("utf8"), naf) ) - if rome_label != settings.ROME_DESCRIPTIONS[rome].encode('utf8'): - raise Exception("labels '%s' and '%s' do not match for ROME %s" % ( - rome_label, - settings.ROME_DESCRIPTIONS[rome].encode('utf8'), - rome, - ) + if rome_label != settings.ROME_DESCRIPTIONS[rome].encode("utf8"): + raise Exception( + "labels '%s' and '%s' do not match for ROME %s" + % (rome_label, settings.ROME_DESCRIPTIONS[rome].encode("utf8"), rome) ) MANUAL_ROME_NAF_MAPPING.setdefault(rome, {}) @@ -89,7 +84,7 @@ def get_total_naf_hirings(naf): return sum(MANUAL_NAF_ROME_MAPPING[naf][rome] for rome in romes) -@lru_cache(maxsize=32*1024) +@lru_cache(maxsize=32 * 1024) def get_affinity_between_rome_and_naf(rome_code, naf_code): """ Ratio of hirings of this NAF made by this ROME. @@ -108,11 +103,11 @@ def map_romes_to_nafs(rome_codes, optional_naf_codes=None): naf_codes = set() for rome in rome_codes: if rome not in settings.ROME_DESCRIPTIONS: - raise ValueError('bad rome code : %s' % rome) + raise ValueError("bad rome code : %s" % rome) try: naf_codes_with_hirings = MANUAL_ROME_NAF_MAPPING[rome] except KeyError: - logger.error('soft fail: no NAF codes for ROME %s', rome) + logger.error("soft fail: no NAF codes for ROME %s", rome) naf_codes_with_hirings = {} for naf, _ in naf_codes_with_hirings.items(): if optional_naf_codes: @@ -137,11 +132,11 @@ def romes_for_naf(naf): """ romes = {k: v for (k, v) in list(MANUAL_ROME_NAF_MAPPING.items()) if naf in v} romes = sorted(list(romes.items()), key=lambda k_v: k_v[1][naf], reverse=True) - Rome = namedtuple('Rome', ['code', 'name', 'nafs']) + Rome = namedtuple("Rome", ["code", "name", "nafs"]) return [Rome(rome[0], settings.ROME_DESCRIPTIONS[rome[0]], rome[1]) for rome in romes] -@lru_cache(maxsize=8*1024) # about 500 rome_codes in current dataset and 5000 in sliced dataset +@lru_cache(maxsize=8 * 1024) # about 500 rome_codes in current dataset and 5000 in sliced dataset def nafs_for_rome(rome): """ Returns NAF codes matching the given ROME code as a list of named tuples ordered by the number of hires. 
@@ -155,13 +150,11 @@ def nafs_for_rome(rome): """ nafs = MANUAL_ROME_NAF_MAPPING.get(rome, {}) nafs = sorted(list(nafs.items()), key=lambda k_v1: k_v1[1], reverse=True) - Naf = namedtuple('Naf', ['code', 'name', 'hirings', 'affinity']) - return [Naf( - naf[0], - settings.NAF_CODES[naf[0]], - naf[1], - get_affinity_between_rome_and_naf(rome, naf[0]), - ) for naf in nafs] + Naf = namedtuple("Naf", ["code", "name", "hirings", "affinity"]) + return [ + Naf(naf[0], settings.NAF_CODES[naf[0]], naf[1], get_affinity_between_rome_and_naf(rome, naf[0])) + for naf in nafs + ] def rome_is_valid(rome): diff --git a/labonneboite/common/maps/cache.py b/labonneboite/common/maps/cache.py index 191fbb223..c752344b6 100644 --- a/labonneboite/common/maps/cache.py +++ b/labonneboite/common/maps/cache.py @@ -1,18 +1,18 @@ import json + import redis import redis.sentinel - from flask import current_app from labonneboite.conf import settings + # Here we define caches to store results provided by vendor APIs, so that # we don't exceed our allowed quotas. The various classes have different storage # backends. class BaseCache(object): - def get(self, key, default=None): raise NotImplementedError @@ -112,10 +112,7 @@ def connect(cls): # Connect directly to redis if cls.CONNECTION_POOL is None: # Share one connection pool for all RedisCache instances - cls.CONNECTION_POOL = redis.ConnectionPool( - host=settings.REDIS_HOST, - port=settings.REDIS_PORT - ) + cls.CONNECTION_POOL = redis.ConnectionPool(host=settings.REDIS_HOST, port=settings.REDIS_PORT) return redis.StrictRedis(connection_pool=cls.CONNECTION_POOL) def get(self, key, default=None): @@ -128,19 +125,17 @@ def get(self, key, default=None): return json.loads(value) def set(self, key, value): - self.__safe( - self.__redis.set, key, json.dumps(value), ex=self.EXPIRES_IN_SECONDS - ) + self.__safe(self.__redis.set, key, json.dumps(value), ex=self.EXPIRES_IN_SECONDS) def clear(self): self.__safe(self.__redis.flushdb) -if settings.TRAVEL_CACHE == 'dummy': +if settings.TRAVEL_CACHE == "dummy": Cache = DummyCache -elif settings.TRAVEL_CACHE == 'local': +elif settings.TRAVEL_CACHE == "local": Cache = LocalCache -elif settings.TRAVEL_CACHE == 'redis': +elif settings.TRAVEL_CACHE == "redis": Cache = RedisCache else: raise ValueError("Invalid TRAVEL_CACHE setting: {}".format(settings.TRAVEL_CACHE)) diff --git a/labonneboite/common/maps/constants.py b/labonneboite/common/maps/constants.py index d0e8b4470..e04530fc8 100644 --- a/labonneboite/common/maps/constants.py +++ b/labonneboite/common/maps/constants.py @@ -3,8 +3,8 @@ ISOCHRONE_DURATIONS_MINUTES = (15, 30, 45) -CAR_MODE = 'car' -PUBLIC_MODE = 'public' +CAR_MODE = "car" +PUBLIC_MODE = "public" TRAVEL_MODES = () if ENABLE_PUBLIC_MODE: @@ -17,7 +17,4 @@ else: DEFAULT_TRAVEL_MODE = PUBLIC_MODE -TRAVEL_MODES_FRENCH = { - CAR_MODE: 'Voiture', - PUBLIC_MODE: 'Transports en commun', -} +TRAVEL_MODES_FRENCH = {CAR_MODE: "Voiture", PUBLIC_MODE: "Transports en commun"} diff --git a/labonneboite/common/maps/exceptions.py b/labonneboite/common/maps/exceptions.py index 19e2397c8..681801778 100644 --- a/labonneboite/common/maps/exceptions.py +++ b/labonneboite/common/maps/exceptions.py @@ -1,4 +1,3 @@ - def error_catcher(exception_class, value_on_error=None): """ Decorator that catches NavitiaUnreachable exceptions. On error, it returns @@ -10,13 +9,16 @@ def error_catcher(exception_class, value_on_error=None): def do_network_stuff(): ... 
""" + def decorator(func): def decorated(*args, **kwargs): try: return func(*args, **kwargs) except exception_class: return value_on_error + return decorated + return decorator diff --git a/labonneboite/common/maps/precompute.py b/labonneboite/common/maps/precompute.py index 3b970c84d..573851828 100644 --- a/labonneboite/common/maps/precompute.py +++ b/labonneboite/common/maps/precompute.py @@ -1,12 +1,10 @@ +import redis from flask import current_app from huey import RedisHuey -import redis from labonneboite.conf import settings -from . import cache -from . import constants -from . import travel +from . import cache, constants, travel def create_huey(): @@ -23,11 +21,14 @@ class DummyHuey: """ Dummy task scheduler that just trashes tasks. Useful for testing. """ + def task(self, *args, **kwargs): def patched(func): def dummy(*args, **kwargs): pass + return dummy + return patched @@ -36,6 +37,7 @@ def dummy(*args, **kwargs): # Asynchronous version of the `isochrone` function isochrone = huey.task()(travel.isochrone) + def isochrones(location): """ Compute isochrones asynchronously for all durations and modes. Each isochrone diff --git a/labonneboite/common/maps/travel.py b/labonneboite/common/maps/travel.py index da4569367..8ea1fae23 100644 --- a/labonneboite/common/maps/travel.py +++ b/labonneboite/common/maps/travel.py @@ -1,11 +1,10 @@ - -import logging import json +import logging +from . import exceptions, vendors from .cache import Cache from .constants import DEFAULT_TRAVEL_MODE, TRAVEL_MODES -from . import exceptions -from . import vendors + logger = logging.getLogger(__name__) @@ -16,6 +15,7 @@ # Note: everywhere we deal with coordinates, they are assumed to be float tuples of the # form (latitude, longitude). + def isochrone(location, duration, mode=None): """ Compute the isochrone around a given location. @@ -29,7 +29,7 @@ def isochrone(location, duration, mode=None): A list of polygons; each polygon is itself a list of coordinates. Thus, the result is a list of list of coordinates. """ - return backend_cached_func(ISOCHRONE_CACHE, 'isochrone', mode, location, duration) + return backend_cached_func(ISOCHRONE_CACHE, "isochrone", mode, location, duration) def durations(origin, destinations, mode=None): @@ -51,8 +51,8 @@ def durations(origin, destinations, mode=None): durations_by_destination = {} destinations_to_fetch = [] - backend_name, mode = backend_info('durations', mode) - func_name = 'durations' + backend_name, mode = backend_info("durations", mode) + func_name = "durations" # Fetch results from cache for destination in set(destinations): @@ -133,7 +133,7 @@ def backend_info(func_name, mode): """ mode = mode or DEFAULT_TRAVEL_MODE if mode not in TRAVEL_MODES: - raise ValueError('Invalid travel mode: {}'.format(mode)) + raise ValueError("Invalid travel mode: {}".format(mode)) backend_name = vendors.backend_name(func_name, mode) return backend_name, mode diff --git a/labonneboite/common/maps/vendors/__init__.py b/labonneboite/common/maps/vendors/__init__.py index 6cbf15b07..6cd23d8b7 100644 --- a/labonneboite/common/maps/vendors/__init__.py +++ b/labonneboite/common/maps/vendors/__init__.py @@ -1,20 +1,22 @@ from labonneboite.conf import settings + def backend_name(function_name, mode): return settings.TRAVEL_VENDOR_BACKENDS[function_name][mode] + def backend(name): - if name == 'dummy': + if name == "dummy": from . import dummy as back - elif name == 'ign': + elif name == "ign": from . import ign as back - elif name == 'navitia': + elif name == "navitia": from . 
import navitia as back - elif name == 'ign_mock': + elif name == "ign_mock": from .mocks import ign as back - elif name == 'navitia_mock': + elif name == "navitia_mock": from .mocks import navitia as back else: - raise ValueError('Invalid backend name: {}'.format(name)) + raise ValueError("Invalid backend name: {}".format(name)) return back diff --git a/labonneboite/common/maps/vendors/dummy.py b/labonneboite/common/maps/vendors/dummy.py index 12c07ab6f..d827a6c54 100644 --- a/labonneboite/common/maps/vendors/dummy.py +++ b/labonneboite/common/maps/vendors/dummy.py @@ -13,4 +13,4 @@ def durations(origin, destinations): Returns: list of float durations, or None when duration could not be computed. The list has the same length as the destinations argument. """ - return [None]*len(destinations) + return [None] * len(destinations) diff --git a/labonneboite/common/maps/vendors/ign.py b/labonneboite/common/maps/vendors/ign.py index afeb67d21..e0fe2ab1d 100644 --- a/labonneboite/common/maps/vendors/ign.py +++ b/labonneboite/common/maps/vendors/ign.py @@ -1,13 +1,13 @@ import re from timeit import default_timer as timer -from requests.auth import HTTPBasicAuth import requests -from requests.exceptions import ConnectionError, Timeout - from flask import current_app +from requests.auth import HTTPBasicAuth +from requests.exceptions import ConnectionError, Timeout from labonneboite.conf import settings + from ..exceptions import BackendUnreachable @@ -17,11 +17,11 @@ def isochrone(origin, duration): # Documentation: https://geoservices.ign.fr/documentation/geoservices/isochrones.html - endpoint = 'isochrone/isochrone.json' + endpoint = "isochrone/isochrone.json" params = { - 'location': '{:.7f},{:.7f}'.format(origin[1], origin[0]), - 'time': int(duration * 60), - 'smoothing': 'true', + "location": "{:.7f},{:.7f}".format(origin[1], origin[0]), + "time": int(duration * 60), + "smoothing": "true", # Settings holes=true yields really beautiful maps, but very hard to # debug. Also, requires more data transfer. # 'holes': 'true', @@ -29,15 +29,13 @@ def isochrone(origin, duration): data = request_json_api(endpoint, params, timeout=REQUEST_TIMEOUT_SECONDS) # geometry is a string of the form: 'POLYGON ((3.504869 45.910195, ...), (...))' - geometry = data['wktGeometry'] - polygons = re.findall(r'\([-*0-9. ,]+\)', geometry) + geometry = data["wktGeometry"] + polygons = re.findall(r"\([-*0-9. ,]+\)", geometry) isochrones = [] for polygon in polygons: - coordinates = polygon.strip(')').strip('(').split(', ') - lon_lat = [coords.split(' ') for coords in coordinates] - isochrones.append([ - (float(lat), float(lon)) for lon, lat in lon_lat - ]) + coordinates = polygon.strip(")").strip("(").split(", ") + lon_lat = [coords.split(" ") for coords in coordinates] + isochrones.append([(float(lat), float(lon)) for lon, lat in lon_lat]) return isochrones @@ -51,7 +49,7 @@ def durations(origin, destinations): end = timer() time_spent = end - start time_left -= time_spent - result.append(float(data['durationSeconds'])) + result.append(float(data["durationSeconds"])) return result @@ -65,11 +63,11 @@ def get_journey(origin, destination, timeout=REQUEST_TIMEOUT_SECONDS): Return: the JSON data returned by the IGN "itineraire" API. 
""" - endpoint = 'itineraire/rest/route.json' + endpoint = "itineraire/rest/route.json" params = { - 'origin': '{:.7f},{:.7f}'.format(origin[1], origin[0]), - 'destination': '{:.7f},{:.7f}'.format(destination[1], destination[0]), - 'graphName': 'Voiture' + "origin": "{:.7f},{:.7f}".format(origin[1], origin[0]), + "destination": "{:.7f},{:.7f}".format(destination[1], destination[0]), + "graphName": "Voiture", } data = request_json_api(endpoint, params, timeout=timeout) return data @@ -77,19 +75,19 @@ def get_journey(origin, destination, timeout=REQUEST_TIMEOUT_SECONDS): def request_json_api(endpoint, params, timeout): ign_credentials = settings.IGN_CREDENTIALS - url = 'https://wxs.ign.fr/{}/{}'.format(ign_credentials['key'], endpoint) - auth = HTTPBasicAuth( - ign_credentials['username'], ign_credentials['password'] - ) if 'username' in ign_credentials else None - headers = { - "Referer": ign_credentials['referer'] - } if 'referer' in ign_credentials else None + url = "https://wxs.ign.fr/{}/{}".format(ign_credentials["key"], endpoint) + auth = ( + HTTPBasicAuth(ign_credentials["username"], ign_credentials["password"]) + if "username" in ign_credentials + else None + ) + headers = {"Referer": ign_credentials["referer"]} if "referer" in ign_credentials else None try: response = requests.get(url, params=params, auth=auth, timeout=timeout, headers=headers) except Timeout: # This occurs frequently so we don't trigger a timeout error - current_app.logger.warning('IGN API timeout') + current_app.logger.warning("IGN API timeout") raise BackendUnreachable except ConnectionError as e: if str(e) == "HTTPSConnectionPool(host='wxs.ign.fr', port=443): Read timed out.": @@ -101,12 +99,12 @@ def request_json_api(endpoint, params, timeout): if response.status_code == 200: return response.json() if response.status_code == 401: - current_app.logger.error('Invalid IGN API user/password') + current_app.logger.error("Invalid IGN API user/password") elif response.status_code == 403: - current_app.logger.error('Invalid IGN API key: %s', response.content) + current_app.logger.error("Invalid IGN API key: %s", response.content) elif response.status_code == 500: # A 500 error from the IGN API is quite common - current_app.logger.warning('IGN API 500 error: %s', response.content) + current_app.logger.warning("IGN API 500 error: %s", response.content) else: - current_app.logger.warning('IGN API %d error', response.status_code) + current_app.logger.warning("IGN API %d error", response.status_code) raise BackendUnreachable diff --git a/labonneboite/common/maps/vendors/mocks/ign.py b/labonneboite/common/maps/vendors/mocks/ign.py index 484136781..874fe5397 100644 --- a/labonneboite/common/maps/vendors/mocks/ign.py +++ b/labonneboite/common/maps/vendors/mocks/ign.py @@ -6,10 +6,11 @@ from unittest import mock from labonneboite.common.maps.vendors import ign + from .utils import mock_response_from_json -FIXTURES_ROOT = os.path.join(os.path.dirname(__file__), 'fixtures', 'ign') +FIXTURES_ROOT = os.path.join(os.path.dirname(__file__), "fixtures", "ign") def isochrone(origin, duration): @@ -19,21 +20,16 @@ def isochrone(origin, duration): origin: tuple(latitude, longitude) duration: integer """ - file = open(os.path.join( - FIXTURES_ROOT, - 'isochrones', - f'metz_{duration}_minutes.json' - )).read() + file = open(os.path.join(FIXTURES_ROOT, "isochrones", f"metz_{duration}_minutes.json")).read() response = mock_response_from_json(file) - with mock.patch.object(ign.requests, 'get', response): + with mock.patch.object(ign.requests, 
"get", response): isochrone = ign.isochrone(origin, duration) return isochrone - def durations(origin, destinations): """ Return commute time from an origin to several destinations. @@ -50,18 +46,13 @@ def durations(origin, destinations): result = [] for destination in destinations: - file = open(os.path.join( - FIXTURES_ROOT, - 'destinations', - f'{destination[0]}_{destination[1]}.json' - )).read() + file = open(os.path.join(FIXTURES_ROOT, "destinations", f"{destination[0]}_{destination[1]}.json")).read() response = mock_response_from_json(file) - - with mock.patch.object(ign.requests, 'get', response): + with mock.patch.object(ign.requests, "get", response): data = ign.get_journey(origin, destination) - result.append(float(data['durationSeconds'])) + result.append(float(data["durationSeconds"])) return result diff --git a/labonneboite/common/maps/vendors/mocks/navitia.py b/labonneboite/common/maps/vendors/mocks/navitia.py index 805567b9e..fecaaf09c 100644 --- a/labonneboite/common/maps/vendors/mocks/navitia.py +++ b/labonneboite/common/maps/vendors/mocks/navitia.py @@ -6,10 +6,11 @@ from unittest import mock from labonneboite.common.maps.vendors import navitia + from .utils import mock_response_from_json -FIXTURES_ROOT = os.path.join(os.path.dirname(__file__), 'fixtures', 'navitia') +FIXTURES_ROOT = os.path.join(os.path.dirname(__file__), "fixtures", "navitia") def isochrone(origin, duration): @@ -19,11 +20,7 @@ def isochrone(origin, duration): origin: tuple(latitude, longitude) duration: integer """ - file = open(os.path.join( - FIXTURES_ROOT, - 'isochrones', - f'metz_{duration}_minutes.json' - )).read() + file = open(os.path.join(FIXTURES_ROOT, "isochrones", f"metz_{duration}_minutes.json")).read() response = mock_response_from_json(file) @@ -31,14 +28,13 @@ def isochrone(origin, duration): # one to get a coverage id and a second one to get isochrones. # For more details on the coverage_id, see the full request answer here: # fixtures/navitia_metz_coverage.json - with mock.patch.object(navitia, 'get_coverage', return_value='fr-ne'): - with mock.patch.object(navitia.requests, 'get', response): + with mock.patch.object(navitia, "get_coverage", return_value="fr-ne"): + with mock.patch.object(navitia.requests, "get", response): isochrone = navitia.isochrone(origin, duration) return isochrone - def durations(origin, destinations): """ Return commute time from an origin to several destinations. @@ -52,27 +48,20 @@ def durations(origin, destinations): Output: List of durations in seconds (float). 
""" - coverage_response_file = open(os.path.join( - FIXTURES_ROOT, - f'metz_coverage.json' - )).read() + coverage_response_file = open(os.path.join(FIXTURES_ROOT, f"metz_coverage.json")).read() coverage_response = mock_response_from_json(coverage_response_file) - with mock.patch.object(navitia.requests, 'get', coverage_response): - endpoint = navitia.get_coverage_endpoint('journeys', origin) + with mock.patch.object(navitia.requests, "get", coverage_response): + endpoint = navitia.get_coverage_endpoint("journeys", origin) results = [] for destination in destinations: - file = open(os.path.join( - FIXTURES_ROOT, - 'destinations', - f'{destination[0]}_{destination[1]}.json' - )).read() + file = open(os.path.join(FIXTURES_ROOT, "destinations", f"{destination[0]}_{destination[1]}.json")).read() response = mock_response_from_json(file) - with mock.patch.object(navitia.requests, 'get', response): + with mock.patch.object(navitia.requests, "get", response): duration = navitia.get_duration(endpoint, origin, destination) results.append(duration) diff --git a/labonneboite/common/maps/vendors/mocks/utils.py b/labonneboite/common/maps/vendors/mocks/utils.py index 3ba573369..cdd2be529 100644 --- a/labonneboite/common/maps/vendors/mocks/utils.py +++ b/labonneboite/common/maps/vendors/mocks/utils.py @@ -11,8 +11,5 @@ def mock_response_from_json(file): Mock an HTTP request based on a JSON file. Return a 200 status code and the response. """ - response = mock.Mock( - status_code=200, - json=mock.Mock(return_value=json.loads(file)) - ) + response = mock.Mock(status_code=200, json=mock.Mock(return_value=json.loads(file))) return mock.Mock(return_value=response) diff --git a/labonneboite/common/maps/vendors/navitia.py b/labonneboite/common/maps/vendors/navitia.py index 66115ec7c..164e35010 100644 --- a/labonneboite/common/maps/vendors/navitia.py +++ b/labonneboite/common/maps/vendors/navitia.py @@ -1,9 +1,10 @@ from functools import lru_cache -import requests +import requests from flask import current_app from labonneboite.conf import settings + from ..exceptions import BackendUnreachable @@ -11,16 +12,20 @@ def isochrone(origin, duration): - data = request_location_api('isochrones', origin, { - 'from': '{:.7f};{:.7f}'.format(origin[1], origin[0]), - 'max_duration': int(duration * 60), - # Activate transport by car only (although navitia was clearly not made - # for car transport) - # 'first_section_mode[]': 'car', - # 'last_section_mode[]': 'car', - }) - - coordinates = data['isochrones'][0]['geojson']['coordinates'] + data = request_location_api( + "isochrones", + origin, + { + "from": "{:.7f};{:.7f}".format(origin[1], origin[0]), + "max_duration": int(duration * 60), + # Activate transport by car only (although navitia was clearly not made + # for car transport) + # 'first_section_mode[]': 'car', + # 'last_section_mode[]': 'car', + }, + ) + + coordinates = data["isochrones"][0]["geojson"]["coordinates"] # Each polygon is a pair of (lon, lat) that we convert to (lat, lon) return [[(coords[1], coords[0]) for coords in polygon[0]] for polygon in coordinates] @@ -32,7 +37,7 @@ def durations(origin, destinations): """ # Endpoint must be computed just once so that we don't make too many # coverage requests to navitia - endpoint = get_coverage_endpoint('journeys', origin) + endpoint = get_coverage_endpoint("journeys", origin) results = [] for destination in destinations: duration = get_duration(endpoint, origin, destination) @@ -42,18 +47,19 @@ def durations(origin, destinations): # Auxiliary functions + def 
get_duration(endpoint, origin, destination): params = { - 'from': '{:.7f};{:.7f}'.format(origin[1], origin[0]), - 'to': '{:.7f};{:.7f}'.format(destination[1], destination[0]), + "from": "{:.7f};{:.7f}".format(origin[1], origin[0]), + "to": "{:.7f};{:.7f}".format(destination[1], destination[0]), } try: data = request_json_api(endpoint, params=params) - if data.get('error') and data['error']['id'] == 'no_solution': + if data.get("error") and data["error"]["id"] == "no_solution": duration = None else: - duration = data['journeys'][0]['duration'] + duration = data["journeys"][0]["duration"] except BackendUnreachable: duration = None @@ -67,33 +73,31 @@ def request_location_api(endpoint, location, params): def get_coverage_endpoint(endpoint, location): coverage_id = get_coverage(location) - return 'coverage/{}/{}'.format(coverage_id, endpoint) + return "coverage/{}/{}".format(coverage_id, endpoint) @lru_cache(1000) def get_coverage(location): - endpoint = 'coverage/{:.7f};{:.7f}'.format(location[1], location[0]) + endpoint = "coverage/{:.7f};{:.7f}".format(location[1], location[0]) data = request_json_api(endpoint) if data is None: # This is a serious error but we cannot crash the entire app raise BackendUnreachable try: - region_id = data['regions'][0]['id'] + region_id = data["regions"][0]["id"] except (KeyError, IndexError): # This is a serious error that should not happen -- unless invalid # coordinates were used? We'll have to examine logs to find out. - current_app.logger.error('Coverage region could not be found for location %s', location) + current_app.logger.error("Coverage region could not be found for location %s", location) raise BackendUnreachable return region_id def request_json_api(endpoint, params=None): - url = 'https://api.navitia.io/v1/{}'.format(endpoint) - headers = { - 'Authorization': settings.NAVITIA_API_TOKEN - } + url = "https://api.navitia.io/v1/{}".format(endpoint) + headers = {"Authorization": settings.NAVITIA_API_TOKEN} try: response = requests.get(url, params=params, headers=headers, timeout=TIMEOUT_SECONDS) diff --git a/labonneboite/common/models/__init__.py b/labonneboite/common/models/__init__.py index 4f506d8d1..62206d83f 100644 --- a/labonneboite/common/models/__init__.py +++ b/labonneboite/common/models/__init__.py @@ -18,11 +18,12 @@ Warning: make sure the order of the imports matches the order of which the dependent tables should be created. 
""" -# pylint: disable=wildcard-import -from labonneboite.common.models.office_mixin import * -from labonneboite.common.models.office_admin import * -from labonneboite.common.models.office import * from labonneboite.common.models.auth import * -from labonneboite.common.models.user_favorite_offices import * +from labonneboite.common.models.office import * +from labonneboite.common.models.office_admin import * +from labonneboite.common.models.office_mixin import * from labonneboite.common.models.recruiter_message import * +from labonneboite.common.models.user_favorite_offices import * + + # pylint: enable=wildcard-import diff --git a/labonneboite/common/models/auth.py b/labonneboite/common/models/auth.py index 91f406d02..b0eab1e1b 100644 --- a/labonneboite/common/models/auth.py +++ b/labonneboite/common/models/auth.py @@ -1,17 +1,14 @@ - import datetime -from sqlalchemy import Boolean, Column, DateTime, Integer, String, Unicode -from sqlalchemy import desc -from sqlalchemy.orm import relationship -from sqlalchemy_utils import ChoiceType - from flask_login import UserMixin from social_flask_sqlalchemy.models import UserSocialAuth +from sqlalchemy import Boolean, Column, DateTime, Integer, String, Unicode, desc +from sqlalchemy.orm import relationship +from sqlalchemy_utils import ChoiceType +from labonneboite.common import user_util from labonneboite.common.database import Base, db_session from labonneboite.common.models.base import CRUDMixin -from labonneboite.common import user_util class User(CRUDMixin, UserMixin, Base): @@ -22,13 +19,9 @@ class User(CRUDMixin, UserMixin, Base): expects user objects to have. """ - __tablename__ = 'users' + __tablename__ = "users" - GENDERS = [ - (user_util.GENDER_MALE, 'Homme'), - (user_util.GENDER_FEMALE, 'Femme'), - (user_util.GENDER_OTHER, 'Autre'), - ] + GENDERS = [(user_util.GENDER_MALE, "Homme"), (user_util.GENDER_FEMALE, "Femme"), (user_util.GENDER_OTHER, "Autre")] id = Column(Integer, primary_key=True) # E-mail may not be unique or may not be available for some third party auth providers, e.g. `PEAM/PE Connect`. @@ -42,7 +35,7 @@ class User(CRUDMixin, UserMixin, Base): active = Column(Boolean, default=True) # The ID used by third party auth providers (if available). external_id = Column(String(191), nullable=True) - favorite_offices = relationship('UserFavoriteOffice', order_by=desc('date_created')) + favorite_offices = relationship("UserFavoriteOffice", order_by=desc("date_created")) # Designates whether this user can access the admin site. is_admin = Column(Boolean, default=False) @@ -57,12 +50,7 @@ def get_user_social_auth(user_id): """ Return the latest `UserSocialAuth` instance for the given `user_id`. """ - return ( - db_session.query(UserSocialAuth) - .filter_by(user_id=user_id) - .order_by(desc(UserSocialAuth.id)) - .first() - ) + return db_session.query(UserSocialAuth).filter_by(user_id=user_id).order_by(desc(UserSocialAuth.id)).first() def find_user(strategy, details, backend, *args, user=None, **kwargs): @@ -74,7 +62,5 @@ def find_user(strategy, details, backend, *args, user=None, **kwargs): function, each backend will create a new user. Thus, a single user may be subscribed twice, with different favorites. 
""" - user = user or User.query.filter_by(external_id=details.get('external_id')).first() - return { - 'user': user - } + user = user or User.query.filter_by(external_id=details.get("external_id")).first() + return {"user": user} diff --git a/labonneboite/common/models/base.py b/labonneboite/common/models/base.py index b914a404f..809af9f1b 100644 --- a/labonneboite/common/models/base.py +++ b/labonneboite/common/models/base.py @@ -1,12 +1,11 @@ - -from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.exc import IntegrityError +from sqlalchemy.orm.exc import NoResultFound from labonneboite.common.database import db_session class CRUDMixin(object): - __table_args__ = {'extend_existing': True} + __table_args__ = {"extend_existing": True} @classmethod def create(cls, commit=True, **kwargs): diff --git a/labonneboite/common/models/office.py b/labonneboite/common/models/office.py index cc8317111..096de6751 100644 --- a/labonneboite/common/models/office.py +++ b/labonneboite/common/models/office.py @@ -1,27 +1,29 @@ +import logging from functools import lru_cache from urllib.parse import urlencode -import logging from babel.dates import format_date from flask import url_for from slugify import slugify -from sqlalchemy import PrimaryKeyConstraint, Index +from sqlalchemy import Index, PrimaryKeyConstraint from werkzeug import cached_property -from labonneboite.common import encoding as encoding_util -from labonneboite.common import hiring_type_util -from labonneboite.common import mapping as mapping_util -from labonneboite.common import scoring as scoring_util -from labonneboite.common import util -from labonneboite.common.database import Base, db_session, DATABASE +from labonneboite.common import ( + encoding as encoding_util, + hiring_type_util, + mapping as mapping_util, + scoring as scoring_util, + util, +) +from labonneboite.common.database import DATABASE, Base, db_session from labonneboite.common.load_data import load_city_codes, load_groupements_employeurs +from labonneboite.common.models import FinalOfficeMixin, OfficeAdminUpdate from labonneboite.common.models.base import CRUDMixin from labonneboite.conf import settings from labonneboite.importer import settings as importer_settings -from labonneboite.common.models import FinalOfficeMixin, OfficeAdminUpdate -logger = logging.getLogger('main') +logger = logging.getLogger("main") CITY_NAMES = load_city_codes() @@ -56,15 +58,13 @@ class Office(FinalOfficeMixin, CRUDMixin, Base): __tablename__ = settings.OFFICE_TABLE __table_args__ = ( - PrimaryKeyConstraint('siret'), - + PrimaryKeyConstraint("siret"), # Improve performance of create_index.py parallel jobs # by quickly fetching all offices of any given departement. - Index('_departement', 'departement'), - + Index("_departement", "departement"), # Improve performance of create_index.py remove_scam_emails() # by quickly locating offices having a given scam email. - Index('_email', 'email'), + Index("_email", "email"), ) # You should normally *not* add any column here - see documentation above. @@ -72,13 +72,7 @@ class Office(FinalOfficeMixin, CRUDMixin, Base): def __unicode__(self): return "%s - %s" % (self.siret, self.name) - def as_json(self, - rome_codes=None, - hiring_type=None, - distance=None, - zipcode=None, - extra_query_string=None, - ): + def as_json(self, rome_codes=None, hiring_type=None, distance=None, zipcode=None, extra_query_string=None): """ `rome_codes`: optional parameter, used only in case of being in the context of a search by ROME codes (single rome or multi rome). 
@@ -97,57 +91,57 @@ def as_json(self, `extra_query_string` (dict): extra query string to be added to the API urls for each office. Typically some Google Analytics trackers. """ - if rome_codes is None: # no rome search context + if rome_codes is None: # no rome search context rome_code = None - elif len(rome_codes) == 1: # single rome search context + elif len(rome_codes) == 1: # single rome search context rome_code = rome_codes[0] - else: # multi rome search context + else: # multi rome search context rome_code = self.matched_rome alternance = hiring_type == hiring_type_util.ALTERNANCE extra_query_string = extra_query_string or {} json = { - 'address': self.address_as_text, - 'city': self.city, - 'headcount_text': self.headcount_text, - 'lat': self.y, - 'lon': self.x, - 'naf': self.naf, - 'naf_text': self.naf_text, - 'name': self.name, - 'siret': self.siret, - 'stars': self.get_stars_for_rome_code(rome_code, hiring_type), - 'url': self.get_url_for_rome_code(rome_code, alternance, **extra_query_string), - 'contact_mode': util.get_contact_mode_for_rome_and_office(rome_code, self), - 'social_network': self.social_network or '', - 'alternance': self.qualifies_for_alternance(), + "address": self.address_as_text, + "city": self.city, + "headcount_text": self.headcount_text, + "lat": self.y, + "lon": self.x, + "naf": self.naf, + "naf_text": self.naf_text, + "name": self.name, + "siret": self.siret, + "stars": self.get_stars_for_rome_code(rome_code, hiring_type), + "url": self.get_url_for_rome_code(rome_code, alternance, **extra_query_string), + "contact_mode": util.get_contact_mode_for_rome_and_office(rome_code, self), + "social_network": self.social_network or "", + "alternance": self.qualifies_for_alternance(), } # Warning: the `distance`, `boost` and `matched_rome` fields are added by `get_offices_from_es_and_db`, # they are NOT model fields or properties! - if hasattr(self, 'distance'): - json['distance'] = self.distance + if hasattr(self, "distance"): + json["distance"] = self.distance - if hasattr(self, 'boost'): - json['boosted'] = self.boost + if hasattr(self, "boost"): + json["boosted"] = self.boost if rome_code: - json['matched_rome_code'] = rome_code - json['matched_rome_label'] = settings.ROME_DESCRIPTIONS[rome_code] - json['matched_rome_slug'] = slugify(settings.ROME_DESCRIPTIONS[rome_code]) + json["matched_rome_code"] = rome_code + json["matched_rome_label"] = settings.ROME_DESCRIPTIONS[rome_code] + json["matched_rome_slug"] = slugify(settings.ROME_DESCRIPTIONS[rome_code]) # offers* fields are added by VisibleMarketFetcher.get_offices, # they are NOT model fields or properties - if hasattr(self, 'offers_count'): - json['offers_count'] = self.offers_count - if hasattr(self, 'offers'): - json['offers'] = self.offers + if hasattr(self, "offers_count"): + json["offers_count"] = self.offers_count + if hasattr(self, "offers"): + json["offers"] = self.offers # This message should concern only a small number of companies who explicitly requested # to appear in extra geolocations. - if any([distance, zipcode]) and json['address'] and self.show_multi_geolocations_msg(distance, zipcode): - json['address'] += ", Cette entreprise recrute aussi dans votre région." + if any([distance, zipcode]) and json["address"] and self.show_multi_geolocations_msg(distance, zipcode): + json["address"] += ", Cette entreprise recrute aussi dans votre région." 
return json @property @@ -156,8 +150,8 @@ def address_fields(self): if not self.is_small: result.append("Service des ressources humaines") if self.street_name: - result.append('%s %s' % (self.street_number, self.street_name)) - result.append('%s %s' % (self.zipcode, self.city)) + result.append("%s %s" % (self.street_number, self.street_name)) + result.append("%s %s" % (self.zipcode, self.city)) return result @property @@ -171,9 +165,9 @@ def phone(self): has_phone = self.tel and not self.tel.isspace() if has_phone: # not sure why, the import botched the phone number... - if self.tel[-2] == '.': - s = '0%s' % self.tel[:-2] - return " ".join(s[i:i + 2] for i in range(0, len(s), 2)) + if self.tel[-2] == ".": + s = "0%s" % self.tel[:-2] + return " ".join(s[i : i + 2] for i in range(0, len(s), 2)) return self.tel return None @@ -184,12 +178,12 @@ def name(self): elif self.company_name: result = self.company_name.upper() else: - result = 'sans nom' + result = "sans nom" return encoding_util.sanitize_string(result) @property def google_url(self): - google_search = "%s+%s" % (self.name.replace(' ', '+'), self.city.replace(' ', '+')) + google_search = "%s+%s" % (self.name.replace(" ", "+"), self.city.replace(" ", "+")) return "https://www.google.fr/search?q=%s" % google_search @property @@ -217,9 +211,11 @@ def is_removed_from_lba(self): # such a score. # We still have to check that there actually is a SAVE record # specifically asking for this removal. - office_admin_update = OfficeAdminUpdate.query.filter( - OfficeAdminUpdate.sirets.like("%{}%".format(self.siret)) - ).filter_by(score_alternance=0).first() + office_admin_update = ( + OfficeAdminUpdate.query.filter(OfficeAdminUpdate.sirets.like("%{}%".format(self.siret))) + .filter_by(score_alternance=0) + .first() + ) return bool(office_admin_update) @property @@ -227,7 +223,7 @@ def headcount_text(self): try: return settings.HEADCOUNT_INSEE[self.headcount] except KeyError: - return '' + return "" @property def is_small(self): @@ -240,7 +236,7 @@ def has_city(self): try: city = bool(CITY_NAMES[self.city_code]) except KeyError: - if self.city_code.startswith('75'): + if self.city_code.startswith("75"): city = True else: city = None @@ -251,8 +247,8 @@ def city(self): try: return CITY_NAMES[self.city_code] except KeyError: - if self.city_code.startswith('75'): - return 'Paris' + if self.city_code.startswith("75"): + return "Paris" else: raise @@ -284,10 +280,8 @@ def get_score_for_rome_code(self, rome_code, hiring_type=None): raise ValueError("Unknown hiring_type") raw_score = self.score if hiring_type == hiring_type_util.DPAE else self.score_alternance return scoring_util.get_score_adjusted_to_rome_code_and_naf_code( - score=raw_score, - rome_code=rome_code, - naf_code=self.naf - ) + score=raw_score, rome_code=rome_code, naf_code=self.naf + ) def get_stars_for_rome_code(self, rome_code, hiring_type=None): score = self.get_score_for_rome_code(rome_code, hiring_type) @@ -314,16 +308,16 @@ def url_alternance(self): """ Returns the URL of `La Bonne Alternance` page or `None` if we are outside of a Flask's application context. 
""" - return 'https://labonnealternance.pole-emploi.fr/details-entreprises/{}'.format(self.siret) + return "https://labonnealternance.pole-emploi.fr/details-entreprises/{}".format(self.siret) def get_url_for_rome_code(self, rome_code, alternance=False, **query_string): if alternance: - return '{}?{}'.format(self.url_alternance, urlencode(query_string)) + return "{}?{}".format(self.url_alternance, urlencode(query_string)) try: if rome_code: - return url_for('office.details', siret=self.siret, rome_code=rome_code, _external=True, **query_string) - return url_for('office.details', siret=self.siret, _external=True, **query_string) + return url_for("office.details", siret=self.siret, rome_code=rome_code, _external=True, **query_string) + return url_for("office.details", siret=self.siret, _external=True, **query_string) except RuntimeError: # RuntimeError is raised when we are outside of a Flask's application context. # Here, we cannot properly generate an URL via url_for. @@ -343,6 +337,7 @@ def show_multi_geolocations_msg(self, distance=None, zipcode=None): return False # If the given `distance` is too far: the message is unnecessary. from labonneboite.web.search.forms import CompanySearchForm + if distance and int(distance) > int(CompanySearchForm.DISTANCE_S): return False return True @@ -361,7 +356,10 @@ def get_date_of_last_data_deploy(cls): FROM information_schema.tables WHERE TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s'; - """ % (DATABASE['NAME'], settings.OFFICE_TABLE) + """ % ( + DATABASE["NAME"], + settings.OFFICE_TABLE, + ) last_data_deploy_date = db_session.execute(query).first() @@ -373,5 +371,5 @@ def get_date_of_last_data_deploy(cls): # Formatting date in french format using locale.setlocale is strongly discouraged. # Using babel instead is the recommended way. # See https://stackoverflow.com/questions/985505/locale-date-formatting-in-python - last_data_deploy_date_formated_as_french = format_date(last_data_deploy_date, locale='fr', format='long') + last_data_deploy_date_formated_as_french = format_date(last_data_deploy_date, locale="fr", format="long") return last_data_deploy_date_formated_as_french diff --git a/labonneboite/common/models/office_admin.py b/labonneboite/common/models/office_admin.py index 75b9760ef..721293684 100644 --- a/labonneboite/common/models/office_admin.py +++ b/labonneboite/common/models/office_admin.py @@ -3,10 +3,7 @@ import re from dateutil.relativedelta import relativedelta -from sqlalchemy import Boolean, DateTime, Integer, String, Text -from sqlalchemy.dialects import mysql -from sqlalchemy import Column, ForeignKey -from sqlalchemy import desc +from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, Text, desc from sqlalchemy.dialects import mysql from sqlalchemy.event import listens_for from sqlalchemy.orm import relationship @@ -29,32 +26,31 @@ class OfficeAdminAdd(OfficeMixin, CRUDMixin, Base): are provided by the `OfficeMixin`. """ - __tablename__ = 'etablissements_admin_add' + __tablename__ = "etablissements_admin_add" def __init__(self, *args, **kwargs): # The `headcount` field must be different form the one of `Office` # to be able to provide a clean