From 88d2543a841896731725cb5f2e5cd2b7b34d2a6a Mon Sep 17 00:00:00 2001
From: MarkLark86 <mark.pittaway@gmail.com>
Date: Wed, 31 Jul 2024 09:48:58 +1000
Subject: [PATCH] [SDESK-7325] Move flask imports to superdesk module (#2650)

* Changes to apps

* Changes to content_api

* Changes to prod_api

* Changes to superdesk

* Changes to docs

* Changes to tests

* run black
---
 apps/archive/archive.py                       |  97 +++---
 apps/archive/archive_correction.py            |  21 +-
 apps/archive/archive_link.py                  |   8 +-
 apps/archive/archive_lock.py                  |   2 +-
 apps/archive/archive_media.py                 |  17 +-
 apps/archive/archive_rewrite.py               |  45 +--
 apps/archive/archive_spike.py                 |  39 ++-
 apps/archive/autocomplete.py                  |  18 +-
 apps/archive/commands.py                      |  59 ++--
 apps/archive/common.py                        |  42 +--
 apps/archive/ingest.py                        |  12 +-
 apps/archive/related.py                       |   2 +-
 apps/archive_broadcast/broadcast.py           |  51 ++-
 apps/archive_history/service.py               |  22 +-
 apps/archived/archived.py                     |  57 ++-
 apps/auth/__init__.py                         |  12 +-
 apps/auth/auth.py                             |  31 +-
 apps/auth/db/commands.py                      |   7 +-
 apps/auth/db/db.py                            |   8 +-
 apps/auth/db/reset_password.py                |  10 +-
 apps/auth/errors.py                           |   2 +-
 apps/auth/oidc/auth.py                        |   6 +-
 apps/auth/service.py                          |  21 +-
 apps/auth/session_purge.py                    |   4 +-
 apps/auth/xmpp/auth.py                        |   8 +-
 apps/client_config.py                         |   4 +-
 apps/comments/comments.py                     |   7 +-
 apps/comments/inline_comments.py              |   4 +-
 apps/comments/user_mentions.py                |   5 +-
 apps/common/models/io/base_proxy.py           |   7 +-
 apps/contacts/service.py                      |   8 +-
 apps/content/__init__.py                      |  10 +-
 .../content_filter/content_filter_service.py  |  20 +-
 .../content_filter/content_filter_test.py     |   4 +-
 .../filter_condition_field.py                 |   3 +-
 .../filter_condition_parameters.py            |  12 +-
 .../filter_condition_resource.py              |  10 +-
 apps/content_types/content_types.py           |   9 +-
 apps/desks.py                                 |  50 +--
 apps/dictionaries/service.py                  |  10 +-
 apps/duplication/archive_copy.py              |   9 +-
 apps/duplication/archive_duplication.py       |  10 +-
 apps/duplication/archive_fetch.py             |  12 +-
 apps/duplication/archive_move.py              |  23 +-
 apps/duplication/archive_translate.py         |   4 +-
 apps/export/service.py                        |   4 +-
 apps/highlights/generate.py                   |  30 +-
 apps/highlights/service.py                    |   7 +-
 apps/io/feeding_services/reuters.py           |   9 +-
 apps/io/search_ingest.py                      |  12 +-
 apps/item_lock/components/item_lock.py        |  28 +-
 apps/keywords/alchemy.py                      |  11 +-
 apps/ldap/commands.py                         |  17 +-
 apps/ldap/ldap.py                             |  24 +-
 apps/ldap/users_service.py                    |   5 +-
 apps/legal_archive/commands.py                |  76 ++--
 apps/legal_archive/service.py                 |  29 +-
 apps/links.py                                 |   6 +-
 apps/macros/macro_register.py                 |   6 +-
 apps/marked_desks/service.py                  |  10 +-
 apps/packages/package_service.py              |  26 +-
 apps/picture_crop/__init__.py                 |   6 +-
 apps/picture_renditions.py                    |   3 +-
 apps/preferences.py                           |  10 +-
 apps/prepopulate/app_initialize.py            |  18 +-
 apps/prepopulate/app_prepopulate.py           |  22 +-
 apps/prepopulate/app_scaffold_data.py         |  11 +-
 apps/products/service.py                      |   8 +-
 apps/publish/content/common.py                | 114 +++---
 apps/publish/content/correct.py               |   7 +-
 apps/publish/content/kill.py                  |  17 +-
 apps/publish/content/publish.py               |   3 +-
 .../content/published_package_items.py        |  10 +-
 apps/publish/content/resend.py                |  17 +-
 apps/publish/content/tests.py                 |  63 ++--
 apps/publish/enqueue/__init__.py              |  17 +-
 apps/publish/enqueue/enqueue_corrected.py     |   4 +-
 apps/publish/enqueue/enqueue_killed.py        |  13 +-
 apps/publish/enqueue/enqueue_published.py     |   6 +-
 apps/publish/enqueue/enqueue_service.py       | 324 ++++++++++--------
 apps/publish/publish_content_tests.py         |   6 +-
 apps/publish/published_item.py                |  51 ++-
 apps/rules/routing_rules.py                   |   5 +-
 apps/rules/rule_handlers.py                   |   9 +-
 apps/rundowns/export.py                       |   7 +-
 apps/rundowns/formatters/html.py              |   3 +-
 apps/rundowns/rundowns.py                     |   2 +-
 apps/rundowns/tasks.py                        |   7 +-
 apps/rundowns/utils.py                        |  12 +-
 apps/saved_searches/__init__.py               |  16 +-
 apps/search/__init__.py                       |  24 +-
 apps/search_providers/proxy.py                |   5 +-
 apps/search_providers/service.py              |   6 +-
 apps/stages.py                                |  15 +-
 apps/system_message/service.py                |   9 +-
 apps/tasks.py                                 |   5 +-
 apps/templates/content_templates.py           |  47 +--
 apps/templates/filters.py                     |   9 +-
 apps/validate/validate.py                     |  11 +-
 apps/video_edit/__init__.py                   |   8 +-
 apps/vidible/vidible.py                       |   2 +-
 apps/workspace/workspace.py                   |   2 +-
 content_api/__init__.py                       |   4 +-
 content_api/api_audit/service.py              |   9 +-
 content_api/app/__init__.py                   |   4 +-
 content_api/assets/__init__.py                |   6 +-
 content_api/assets/service.py                 |   3 +-
 content_api/commands/remove_expired_items.py  |   6 +-
 content_api/items/service.py                  |  12 +-
 content_api/items_versions/service.py         |   9 +-
 content_api/publish/service.py                |  12 +-
 content_api/search/service.py                 |   2 +-
 content_api/tests/items_service_test.py       |   4 +-
 content_api/tests/packages_service_test.py    |   2 +-
 content_api/tests/search_service_test.py      |   3 +-
 content_api/tokens/__init__.py                |   5 +-
 content_api/users/service.py                  |   4 +-
 docs/settings.rst                             |   6 +-
 features/steps/highlights_steps.py            |   2 +-
 features/steps/steps_users.py                 |   2 +-
 prod_api/app/__init__.py                      |   5 +-
 prod_api/assets/__init__.py                   |   7 +-
 prod_api/auth.py                              |   8 +-
 prod_api/conftest.py                          |   2 +-
 prod_api/service.py                           |   3 +-
 prod_api/tests/test_auth.py                   |   2 +-
 prod_api/tests/test_contacts.py               |   2 +-
 prod_api/tests/test_desks.py                  |   2 +-
 prod_api/tests/test_items.py                  |   2 +-
 prod_api/tests/test_users.py                  |   2 +-
 superdesk/__init__.py                         |   9 +-
 superdesk/activity.py                         |  10 +-
 superdesk/allowed_values.py                   |   4 +-
 superdesk/attachments.py                      |   8 +-
 superdesk/audit/audit.py                      |   2 +-
 superdesk/audit/commands.py                   |  12 +-
 superdesk/auth/__init__.py                    |   6 +-
 superdesk/auth/decorator.py                   |   7 +-
 superdesk/auth/oauth.py                       |  13 +-
 superdesk/auth/saml.py                        |  12 +-
 superdesk/auth_server/oauth2.py               |   4 +-
 superdesk/backend_meta/backend_meta.py        |  12 +-
 superdesk/cache.py                            |   4 +-
 superdesk/celery_app.py                       |  14 +-
 superdesk/commands/clean_images.py            |   7 +-
 superdesk/commands/data_manipulation.py       |  11 +-
 superdesk/commands/data_updates.py            |   9 +-
 .../commands/delete_archived_document.py      |  12 +-
 superdesk/commands/flush_elastic_index.py     |  10 +-
 superdesk/commands/index_from_mongo.py        |  24 +-
 superdesk/commands/rebuild_elastic_index.py   |  10 +-
 superdesk/commands/remove_exported_files.py   |   6 +-
 superdesk/commands/schema.py                  |   6 +-
 superdesk/core/__init__.py                    |  11 +
 superdesk/core/app.py                         |  30 +-
 superdesk/core/resources/service.py           |   5 +-
 superdesk/core/resources/validators.py        |   6 +-
 superdesk/core/web/types.py                   |  77 +++++
 .../00001_20160722-111630_users.py            |   6 +-
 .../00003_20170814-114652_audit.py            |   7 +-
 .../00006_20171124-195408_content_types.py    |   5 +-
 .../00009_20180425-010702_vocabularies.py     |   4 +-
 .../00012_20180605-151019_vocabularies.py     |   4 +-
 .../00016_20181227-160331_archive.py          |   4 +-
 .../00023_20200513-180314_content_types.py    |   4 +-
 .../00024_20200909-142600_vocabularies.py     |   6 +-
 .../00028_20210211-020113_contacts.py         |   4 +-
 .../00029_20210305-132352_contacts.py         |   4 +-
 .../00030_20231127-142300_content_types.py    |   6 +-
 superdesk/datalayer.py                        |  14 +-
 superdesk/download.py                         |  14 +-
 superdesk/editor_utils.py                     |   4 +-
 superdesk/emails/__init__.py                  |  51 +--
 superdesk/errors.py                           |  15 +-
 superdesk/es_utils.py                         |  11 +-
 superdesk/etree.py                            |   5 +-
 superdesk/eve_backend.py                      |  70 ++--
 superdesk/factory/app.py                      |  29 +-
 superdesk/factory/elastic_apm.py              |   6 +-
 superdesk/factory/manager.py                  |   2 +-
 superdesk/filemeta.py                         |   2 +-
 superdesk/flask.py                            |  52 +++
 superdesk/ftp.py                              |   6 +-
 superdesk/geonames.py                         |  10 +-
 superdesk/http_proxy.py                       |  22 +-
 superdesk/io/commands/add_provider.py         |   9 +-
 .../io/commands/remove_expired_content.py     |   3 +-
 superdesk/io/commands/update_ingest.py        |  37 +-
 superdesk/io/feed_parsers/__init__.py         |   6 +-
 superdesk/io/feed_parsers/ana_mpe_newsml.py   |   4 +-
 superdesk/io/feed_parsers/ap_anpa.py          |   4 +-
 superdesk/io/feed_parsers/ap_media.py         |   6 +-
 superdesk/io/feed_parsers/efe_nitf.py         |   4 +-
 superdesk/io/feed_parsers/image_iptc.py       |  19 +-
 superdesk/io/feed_parsers/newsml_2_0.py       |   5 +-
 superdesk/io/feed_parsers/rfc822.py           |  15 +-
 superdesk/io/feed_parsers/scoop_newsml_2_0.py |   3 +-
 superdesk/io/feeding_services/__init__.py     |  13 +-
 superdesk/io/feeding_services/ap_media.py     |   4 +-
 superdesk/io/feeding_services/email.py        |   5 +-
 superdesk/io/feeding_services/file_service.py |   4 +-
 superdesk/io/feeding_services/ftp.py          |   4 +-
 superdesk/io/feeding_services/http_service.py |   5 +-
 superdesk/io/format_document_for_preview.py   |   7 +-
 superdesk/io/ingest.py                        |  14 +-
 superdesk/io/ingest_provider_model.py         |  20 +-
 superdesk/io/iptc.py                          |   2 +-
 superdesk/io/subjectcodes.py                  |   8 +-
 superdesk/io/webhooks/__init__.py             |   2 +-
 superdesk/json_utils.py                       |   6 +-
 superdesk/locales.py                          |   9 +-
 superdesk/locators/locators.py                |   6 +-
 superdesk/lock.py                             |  16 +-
 superdesk/macros/assign_status.py             |   4 +-
 .../internal_destination_auto_publish.py      |  10 +-
 .../macros/set_default_template_metadata.py   |   7 +-
 superdesk/macros/validate_for_publish.py      |   5 +-
 superdesk/media/crop.py                       |  18 +-
 superdesk/media/image.py                      |   2 +-
 superdesk/media/media_editor.py               |  14 +-
 superdesk/media/media_operations.py           |   9 +-
 superdesk/media/renditions.py                 |  17 +-
 superdesk/media/video.py                      |   2 +-
 superdesk/media/video_editor.py               |   4 +-
 superdesk/metadata/item.py                    |   4 +-
 superdesk/metadata/utils.py                   |  14 +-
 superdesk/notification.py                     |   3 +-
 superdesk/places/places_autocomplete.py       |   8 +-
 superdesk/profiling/__init__.py               |   8 +-
 superdesk/profiling/service.py                |   6 +-
 .../publish/formatters/email_formatter.py     |   2 +-
 .../formatters/newsml_1_2_formatter.py        |  15 +-
 .../publish/formatters/newsml_g2_formatter.py |  11 +-
 .../publish/formatters/ninjs_formatter.py     |  15 +-
 .../publish/formatters/ninjs_ftp_formatter.py |   4 +-
 .../formatters/ninjs_newsroom_formatter.py    |   8 +-
 .../publish/formatters/nitf_formatter.py      |   4 +-
 superdesk/publish/publish_content.py          |  27 +-
 superdesk/publish/publish_queue.py            |   4 +-
 superdesk/publish/publish_service.py          |  10 +-
 superdesk/publish/subscribers.py              |  23 +-
 superdesk/publish/transmitters/email.py       |   6 +-
 superdesk/publish/transmitters/ftp.py         |   3 +-
 superdesk/publish/transmitters/http_push.py   |   5 +-
 superdesk/publish/transmitters/imatrics.py    |   5 +-
 superdesk/publish/transmitters/odbc.py        |   4 +-
 superdesk/resource.py                         |   6 +-
 superdesk/resource_fields.py                  |  26 ++
 superdesk/resource_locking.py                 |   4 +-
 superdesk/roles/roles.py                      |   3 +-
 superdesk/sams/__init__.py                    |   2 +-
 superdesk/sams/assets.py                      |  10 +-
 superdesk/sams/client.py                      |  13 +-
 superdesk/sams/media_storage.py               |  11 +-
 superdesk/sams/sets.py                        |   8 +-
 superdesk/sams/storage_destinations.py        |   4 +-
 superdesk/sams/utils.py                       |   4 +-
 superdesk/server_config.py                    |   4 +-
 superdesk/services.py                         |  23 +-
 superdesk/storage/__init__.py                 |   4 +-
 superdesk/storage/desk_media_storage.py       |   8 +-
 superdesk/storage/fix_links.py                |   3 +-
 superdesk/storage/migrate.py                  |   4 +-
 superdesk/storage/superdesk_file.py           |   5 +-
 superdesk/system/health.py                    |  14 +-
 superdesk/tests/__init__.py                   |   5 +-
 superdesk/tests/async_test_client.py          |   3 +-
 superdesk/tests/asyncio.py                    |   3 +
 superdesk/tests/environment.py                |   2 +-
 superdesk/tests/mocks/search_provider.py      |   2 +-
 superdesk/tests/publish_steps.py              |   2 +-
 superdesk/tests/steps.py                      |   8 +-
 superdesk/text_checkers/ai/imatrics.py        |  15 +-
 superdesk/text_utils.py                       |   5 +-
 superdesk/upload.py                           |  17 +-
 superdesk/users/services.py                   |  53 +--
 superdesk/utils.py                            |  35 +-
 superdesk/validator.py                        |  19 +-
 superdesk/vocabularies/keywords.py            |   4 +-
 superdesk/vocabularies/vocabularies.py        |  16 +-
 superdesk/websockets_comms.py                 |   2 +-
 tests/archive/archive_test.py                 |   2 +-
 tests/auth/auth_test.py                       |   2 +-
 tests/auth/saml_test.py                       |  23 +-
 tests/backend_meta_test.py                    |  25 +-
 tests/backend_test.py                         |  18 +-
 tests/config_test.py                          |   5 +-
 tests/content_api_test.py                     |   4 +-
 tests/editor_utils_test.py                    |   5 +-
 tests/emails/superdesk_message_test.py        |   5 +-
 tests/http_proxy_tests.py                     |   7 +-
 tests/io/feed_parsers/newsml2_parser_test.py  |   5 +-
 tests/io/feed_parsers/nitf_tests.py           |   8 +-
 tests/io/update_ingest_tests.py               |   6 +-
 tests/media/media_editor_test.py              |   7 +-
 tests/media/media_operations_test.py          |   4 +-
 tests/pagination_test.py                      |   2 +-
 tests/prepopulate/app_initialization_test.py  |   2 +-
 tests/publish/get_queue_items_tests.py        |   5 +-
 .../http_push_transmitter_tests.py            |  24 +-
 tests/publish/transmitters/imatrics_test.py   |   8 +-
 tests/push_notification/push_content_test.py  |   4 +-
 tests/sentry_tests.py                         |   5 +-
 tests/storage/proxy_test.py                   |   6 +-
 tests/subjectcodes_test.py                    |   5 +-
 tests/templates/filters_test.py               |   2 +-
 tests/templates/render_templates_test.py      |   5 +-
 .../spellcheckers/default_test.py             |   3 +-
 .../spellcheckers/grammalecte_test.py         |   3 +-
 .../spellcheckers/leuven_dutch_test.py        |   3 +-
 tests/users/privileges_test.py                |   4 +-
 tests/video_edit/video_edit_test.py           |  12 +-
 312 files changed, 2202 insertions(+), 1779 deletions(-)
 create mode 100644 superdesk/flask.py
 create mode 100644 superdesk/resource_fields.py

diff --git a/apps/archive/archive.py b/apps/archive/archive.py
index 16a2298570..a15f6fa324 100644
--- a/apps/archive/archive.py
+++ b/apps/archive/archive.py
@@ -8,14 +8,16 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import logging
 import datetime
+from copy import copy, deepcopy
+
 import superdesk
 import superdesk.signals as signals
 from superdesk import editor_utils
-
-from copy import copy, deepcopy
+from superdesk.core import json, get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, ITEMS, VERSION, LAST_UPDATED, DATE_CREATED, ETAG
+from superdesk.flask import request, abort
 from superdesk.resource import Resource
 from superdesk.metadata.utils import (
     extra_response_fields,
@@ -57,12 +59,11 @@
     transtype_metadata,
 )
 from superdesk.media.crop import CropService
-from flask import current_app as app, json, request
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
 from eve.versioning import resolve_document_version, versioned_id_field
 from superdesk.activity import add_activity, notify_and_add_activity, ACTIVITY_CREATE, ACTIVITY_UPDATE, ACTIVITY_DELETE
-from eve.utils import parse_request, config, date_to_str, ParsedRequest
+from eve.utils import parse_request, ParsedRequest
 from superdesk.services import BaseService
 from superdesk.users.services import current_user_has_privilege, is_admin
 from superdesk.metadata.item import (
@@ -114,7 +115,7 @@ def private_content_filter(req=None):
 
     Also filter out content of stages not visible to current user (if any).
     """
-    user = getattr(flask.g, "user", None)
+    user = get_current_app().get_current_user_dict()
     query = {
         "bool": {
             "must": [
@@ -250,7 +251,7 @@ def flush_renditions(updates, original):
 
 
 def remove_is_queued(item):
-    if config.PUBLISH_ASSOCIATED_ITEMS:
+    if get_app_config("PUBLISH_ASSOCIATED_ITEMS"):
         associations = item.get("associations") or {}
         for associations_key, associated_item in associations.items():
             if not associated_item:
@@ -340,7 +341,7 @@ def on_fetched(self, docs):
         """
         Overriding this to handle existing data in Mongo & Elastic
         """
-        self.enhance_items(docs[config.ITEMS])
+        self.enhance_items(docs[ITEMS])
 
     def on_fetched_item(self, doc):
         self.enhance_items([doc])
@@ -383,7 +384,7 @@ def on_create(self, docs):
 
             # let client create version 0 docs
             if doc.get("version") == 0:
-                doc[config.VERSION] = doc["version"]
+                doc[VERSION] = doc["version"]
 
             convert_task_attributes_to_objectId(doc)
             transtype_metadata(doc)
@@ -399,6 +400,7 @@ def on_created(self, docs):
         if packages:
             self.packageService.on_created(packages)
 
+        app = get_current_app().as_any()
         profiles = set()
         for doc in docs:
             subject = get_subject(doc)
@@ -453,7 +455,7 @@ def on_update(self, updates, original):
         self._validate_updates(original, updates, user)
 
         if self.__is_req_for_save(updates):
-            publish_from_personal = flask.request.args.get("publish_from_personal") if flask.request else False
+            publish_from_personal = request.args.get("publish_from_personal") if request else False
             update_state(original, updates, publish_from_personal)
 
         remove_unwanted(updates)
@@ -476,16 +478,16 @@ def _handle_media_updates(self, updates, original, user):
 
         # iterate over associations. Validate and process them if they are stored in database
         for item_name, item_obj in updates.get(ASSOCIATIONS).items():
-            if not (item_obj and config.ID_FIELD in item_obj):
+            if not (item_obj and ID_FIELD in item_obj):
                 continue
 
-            item_id = item_obj[config.ID_FIELD]
+            item_id = item_obj[ID_FIELD]
             media_item = self.find_one(req=None, _id=item_id)
             parent = (original.get(ASSOCIATIONS) or {}).get(item_name) or item_obj
             if (
-                app.settings.get("COPY_METADATA_FROM_PARENT")
+                get_app_config("COPY_METADATA_FROM_PARENT")
                 and item_obj.get(ITEM_TYPE) in MEDIA_TYPES
-                and item_id == parent.get(config.ID_FIELD)
+                and item_id == parent.get(ID_FIELD)
             ):
                 stored_item = parent
             else:
@@ -545,13 +547,13 @@ def on_updated(self, updates, original):
         updated = copy(original)
         updated.update(updates)
 
-        if config.VERSION in updates:
+        if VERSION in updates:
             add_activity(
                 ACTIVITY_UPDATE,
                 'created new version {{ version }} for item {{ type }} about "{{ subject }}"',
                 self.datasource,
                 item=updated,
-                version=updates[config.VERSION],
+                version=updates[VERSION],
                 subject=get_subject(updates, original),
                 type=updated[ITEM_TYPE],
             )
@@ -608,8 +610,9 @@ def on_deleted(self, doc):
             type=doc[ITEM_TYPE],
             subject=get_subject(doc),
         )
-        push_expired_notification([doc.get(config.ID_FIELD)])
+        push_expired_notification([doc.get(ID_FIELD)])
 
+        app = get_current_app().as_any()
         app.on_archive_item_deleted(doc)
 
     def replace(self, id, document, original):
@@ -647,7 +650,7 @@ def restore_version(self, id, doc, original):
         if curr is None:
             raise SuperdeskApiError.notFoundError(_("Invalid item id {item_id}").format(item_id=item_id))
 
-        if curr[config.VERSION] != last_version:
+        if curr[VERSION] != last_version:
             raise SuperdeskApiError.preconditionFailedError(
                 _("Invalid last version {last_version}").format(last_version=last_version)
             )
@@ -715,6 +718,8 @@ def duplicate_item(self, original_doc, state=None, extra_fields=None, operation=
         get_model(ItemModel).create([new_doc])
         self._duplicate_versions(original_doc["_id"], new_doc)
         self._duplicate_history(original_doc["_id"], new_doc)
+
+        app = get_current_app().as_any()
         app.on_archive_item_updated({"duplicate_id": new_doc["guid"]}, original_doc, operation or ITEM_DUPLICATE)
 
         if original_doc.get("task"):
@@ -740,16 +745,14 @@ def remove_after_copy(self, copied_item, extra_fields=None, delete_keys=None):
         :param extra_fields: extra fields to copy besides content fields
         """
         # get the archive schema keys
-        archive_schema_keys = list(app.config["DOMAIN"][SOURCE]["schema"].keys())
-        archive_schema_keys.extend(
-            [config.ID_FIELD, config.LAST_UPDATED, config.DATE_CREATED, config.VERSION, config.ETAG]
-        )
+        archive_schema_keys = list(get_app_config("DOMAIN")[SOURCE]["schema"].keys())
+        archive_schema_keys.extend([ID_FIELD, LAST_UPDATED, DATE_CREATED, VERSION, ETAG])
 
         # Delete the keys that are not part of archive schema.
         keys_to_delete = [key for key in copied_item.keys() if key not in archive_schema_keys]
         keys_to_delete.extend(
             [
-                config.ID_FIELD,
+                ID_FIELD,
                 "guid",
                 LINKED_IN_PACKAGES,
                 EMBARGO,
@@ -802,20 +805,20 @@ def _duplicate_versions(self, old_id, new_doc):
         :param old_id: identifier to fetch versions
         :param new_doc: identifiers from this doc will be used to create versions for the duplicated item.
         """
-        resource_def = app.config["DOMAIN"]["archive"]
+        resource_def = get_app_config("DOMAIN")["archive"]
         version_id = versioned_id_field(resource_def)
         old_versions = get_resource_service("archive_versions").get_from_mongo(req=None, lookup={version_id: old_id})
 
         new_versions = []
         for old_version in old_versions:
-            old_version[version_id] = new_doc[config.ID_FIELD]
-            del old_version[config.ID_FIELD]
+            old_version[version_id] = new_doc[ID_FIELD]
+            del old_version[ID_FIELD]
 
             old_version["guid"] = new_doc["guid"]
             old_version["unique_name"] = new_doc["unique_name"]
             old_version["unique_id"] = new_doc["unique_id"]
             old_version["versioncreated"] = utcnow()
-            if old_version[config.VERSION] == new_doc[config.VERSION]:
+            if old_version[VERSION] == new_doc[VERSION]:
                 old_version[ITEM_OPERATION] = new_doc[ITEM_OPERATION]
             new_versions.append(old_version)
 
@@ -839,7 +842,7 @@ def _duplicate_history(self, old_id, new_doc):
 
         new_history_items = []
         for old_history_item in old_history_items:
-            del old_history_item[config.ID_FIELD]
+            del old_history_item[ID_FIELD]
             old_history_item["item_id"] = new_doc["guid"]
             if not old_history_item.get("original_item_id"):
                 old_history_item["original_item_id"] = old_id
@@ -896,18 +899,18 @@ def deschedule_item(self, updates, original):
         get_resource_service("published").delete_by_article_id(original["_id"])
 
         # deschedule scheduled associations
-        if config.PUBLISH_ASSOCIATED_ITEMS:
+        if get_app_config("PUBLISH_ASSOCIATED_ITEMS"):
             associations = original.get(ASSOCIATIONS) or {}
             archive_service = get_resource_service("archive")
             for associations_key, associated_item in associations.items():
                 if not associated_item:
                     continue
-                orig_associated_item = archive_service.find_one(req=None, _id=associated_item[config.ID_FIELD])
+                orig_associated_item = archive_service.find_one(req=None, _id=associated_item[ID_FIELD])
                 if orig_associated_item and orig_associated_item.get("state") == CONTENT_STATE.SCHEDULED:
                     # deschedule associated item itself
-                    archive_service.patch(id=associated_item[config.ID_FIELD], updates={PUBLISH_SCHEDULE: None})
+                    archive_service.patch(id=associated_item[ID_FIELD], updates={PUBLISH_SCHEDULE: None})
                     # update associated item info in the original
-                    orig_associated_item = archive_service.find_one(req=None, _id=associated_item[config.ID_FIELD])
+                    orig_associated_item = archive_service.find_one(req=None, _id=associated_item[ID_FIELD])
                     orig_associated_item[PUBLISH_SCHEDULE] = None
                     orig_associated_item[SCHEDULE_SETTINGS] = {}
                     updates.setdefault(ASSOCIATIONS, {})[associations_key] = orig_associated_item
@@ -939,17 +942,17 @@ def delete_by_article_ids(self, ids):
 
         :param list ids: list of ids to be removed
         """
-        version_field = versioned_id_field(app.config["DOMAIN"]["archive_versions"])
+        version_field = versioned_id_field(get_app_config("DOMAIN")["archive_versions"])
         get_resource_service("archive_versions").delete_action(lookup={version_field: {"$in": ids}})
-        super().delete_action({config.ID_FIELD: {"$in": ids}})
+        super().delete_action({ID_FIELD: {"$in": ids}})
 
     def _set_association_timestamps(self, assoc_item, updates, new=True):
         if isinstance(assoc_item, dict):
-            assoc_item[config.LAST_UPDATED] = updates.get(config.LAST_UPDATED, datetime.datetime.now())
+            assoc_item[LAST_UPDATED] = updates.get(LAST_UPDATED, datetime.datetime.now())
             if new:
-                assoc_item[config.DATE_CREATED] = datetime.datetime.now()
-            elif config.DATE_CREATED in assoc_item:
-                del assoc_item[config.DATE_CREATED]
+                assoc_item[DATE_CREATED] = datetime.datetime.now()
+            elif DATE_CREATED in assoc_item:
+                del assoc_item[DATE_CREATED]
 
     def __is_req_for_save(self, doc):
         """Checks if doc contains req_for_save key.
@@ -1014,7 +1017,7 @@ def _test_readonly_stage(self, item, updates=None):
         def abort_if_readonly_stage(stage_id):
             stage = superdesk.get_resource_service("stages").find_one(req=None, _id=stage_id)
             if stage.get("local_readonly"):
-                flask.abort(403, response={"readonly": True})
+                abort(403, response={"readonly": True})
 
         orig_stage_id = item.get("task", {}).get("stage")
         if orig_stage_id and get_user() and not item.get(INGEST_ID):
@@ -1058,7 +1061,7 @@ def _validate_updates(self, original, updates, user):
 
         lock_user = original.get("lock_user", None)
         force_unlock = updates.get("force_unlock", False)
-        str_user_id = str(user.get(config.ID_FIELD)) if user else None
+        str_user_id = str(user.get(ID_FIELD)) if user else None
 
         if lock_user and str(lock_user) != str_user_id and not force_unlock:
             raise SuperdeskApiError.forbiddenError(_("The item was locked by another user"))
@@ -1145,7 +1148,7 @@ def _add_system_updates(self, original, updates, user):
         updates[ITEM_OPERATION] = ITEM_UPDATE
         updates.setdefault("original_creator", original.get("original_creator"))
         updates["versioncreated"] = utcnow()
-        updates["version_creator"] = str(user.get(config.ID_FIELD)) if user else None
+        updates["version_creator"] = str(user.get(ID_FIELD)) if user else None
 
         update_word_count(updates, original)
         update_version(updates, original)
@@ -1174,7 +1177,7 @@ def get_expired_items(self, expiry_datetime, last_id=None, invalid_only=False):
         :param bool invalid_only: True only invalid items
         :return pymongo.cursor: expired non published items.
         """
-        for i in range(app.config["MAX_EXPIRY_LOOPS"]):  # avoid blocking forever just in case
+        for i in range(get_app_config("MAX_EXPIRY_LOOPS")):  # avoid blocking forever just in case
             query = {
                 "$and": [
                     {"expiry": {"$lte": expiry_datetime}},
@@ -1192,7 +1195,7 @@ def get_expired_items(self, expiry_datetime, last_id=None, invalid_only=False):
 
             req = ParsedRequest()
             req.sort = "_id"
-            req.max_results = app.config["MAX_EXPIRY_QUERY_LIMIT"]
+            req.max_results = get_app_config("MAX_EXPIRY_QUERY_LIMIT")
             req.where = json.dumps(query)
 
             items = list(self.get_from_mongo(req=req, lookup={}))
@@ -1205,7 +1208,7 @@ def get_expired_items(self, expiry_datetime, last_id=None, invalid_only=False):
                 last_id = items[-1]["_id"]
 
         else:
-            logger.warning("get_expired_items did not finish in %d loops", app.config["MAX_EXPIRY_LOOPS"])
+            logger.warning("get_expired_items did not finish in %d loops", get_app_config("MAX_EXPIRY_LOOPS"))
 
     def handle_mark_user_notifications(self, updates, original, add_activity=True):
         """Notify user when item is marked or unmarked
@@ -1284,7 +1287,7 @@ def _send_mark_user_notifications(
             # and item_id for published media items as _id or guid does not match _id in archive for media items
             link_id = item.get("item_id") if item.get("state") in PUBLISH_STATES else item.get("_id")
 
-        client_url = app.config.get("CLIENT_URL", "").rstrip("/")
+        client_url = get_app_config("CLIENT_URL", "").rstrip("/")
         link = "{}/#/workspace?item={}&action=view".format(client_url, link_id)
 
         if add_activity:
@@ -1299,9 +1302,7 @@ def _send_mark_user_notifications(
                 **data,
             )
         # send separate notification for markForUser extension
-        push_notification(
-            activity_name, item_id=item.get(config.ID_FIELD), user_list=user_list, extension="markForUser"
-        )
+        push_notification(activity_name, item_id=item.get(ID_FIELD), user_list=user_list, extension="markForUser")
 
     def get_items_chain(self, item):
         """
diff --git a/apps/archive/archive_correction.py b/apps/archive/archive_correction.py
index 7fc807416c..65484bdc92 100644
--- a/apps/archive/archive_correction.py
+++ b/apps/archive/archive_correction.py
@@ -10,8 +10,11 @@
 
 import logging
 from apps.auth import get_user
-from flask import request, current_app as app
-from superdesk import get_resource_service, Service, config
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
+from superdesk import get_resource_service, Service
 from superdesk.metadata.item import ITEM_STATE, CONTENT_STATE, metadata_schema
 from superdesk.resource import Resource
 from apps.archive.common import ARCHIVE, ITEM_CANCEL_CORRECTION, ITEM_CORRECTION
@@ -46,7 +49,7 @@ def on_update(self, updates, original):
         self._validate_correction(original)
         archive_service = get_resource_service(ARCHIVE)
         published_service = get_resource_service("published")
-        archive_item = archive_service.find_one(req=None, _id=original.get(config.ID_FIELD))
+        archive_item = archive_service.find_one(req=None, _id=original.get(ID_FIELD))
 
         if remove_correction:
             published_article = published_service.find_one(
@@ -96,7 +99,7 @@ def on_update(self, updates, original):
         try:
             # modify item in published.
             _published_item = published_service.system_update(
-                published_article.get(config.ID_FIELD), published_item_updates, published_article
+                published_article.get(ID_FIELD), published_item_updates, published_article
             )
             assert (
                 remove_correction
@@ -108,7 +111,8 @@ def on_update(self, updates, original):
             ), "Being corrected is not generated"
 
             # modify item in archive.
-            archive_service.system_update(archive_item.get(config.ID_FIELD), archive_item_updates, archive_item)
+            archive_service.system_update(archive_item.get(ID_FIELD), archive_item_updates, archive_item)
+            app = get_current_app().as_any()
             app.on_archive_item_updated(archive_item_updates, archive_item, ITEM_CORRECTION)
 
         except Exception as e:
@@ -118,7 +122,7 @@ def on_update(self, updates, original):
             )
 
         user = get_user(required=True)
-        push_notification("item:correction", item=original.get(config.ID_FIELD), user=str(user.get(config.ID_FIELD)))
+        push_notification("item:correction", item=original.get(ID_FIELD), user=str(user.get(ID_FIELD)))
 
     def _validate_correction(self, original):
         """Validates the article to be corrected.
@@ -129,8 +133,7 @@ def _validate_correction(self, original):
         if not original:
             raise SuperdeskApiError.notFoundError(message=_("Cannot find the article"))
 
-        if (
-            not is_workflow_state_transition_valid("correction", original[ITEM_STATE])
-            and not config.ALLOW_UPDATING_SCHEDULED_ITEMS
+        if not is_workflow_state_transition_valid("correction", original[ITEM_STATE]) and not get_app_config(
+            "ALLOW_UPDATING_SCHEDULED_ITEMS"
         ):
             raise InvalidStateTransitionError()
diff --git a/apps/archive/archive_link.py b/apps/archive/archive_link.py
index ff632d7c5e..e9ba1de0bb 100644
--- a/apps/archive/archive_link.py
+++ b/apps/archive/archive_link.py
@@ -7,9 +7,10 @@
 # For the full copyright and license information, please see the
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
-from eve.utils import config
-from flask import request, current_app as app
 
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 from superdesk import get_resource_service, Service
 from superdesk.metadata.item import GUID_TAG
 from superdesk.resource import Resource
@@ -72,5 +73,6 @@ def delete(self, lookup):
 
         archive_service.system_update(target_id, updates, target)
         user = get_user(required=True)
-        push_notification("item:unlink", item=target_id, user=str(user.get(config.ID_FIELD)))
+        push_notification("item:unlink", item=target_id, user=str(user.get(ID_FIELD)))
+        app = get_current_app().as_any()
         app.on_archive_item_updated(updates, target, ITEM_UNLINK)
diff --git a/apps/archive/archive_lock.py b/apps/archive/archive_lock.py
index ee414535f0..13a90c53d4 100644
--- a/apps/archive/archive_lock.py
+++ b/apps/archive/archive_lock.py
@@ -8,7 +8,7 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from flask import request
+from superdesk.flask import request
 from superdesk.resource import Resource, build_custom_hateoas
 from superdesk.metadata.item import get_schema
 from superdesk.metadata.utils import item_url
diff --git a/apps/archive/archive_media.py b/apps/archive/archive_media.py
index 9b887bbb8d..75517c72f3 100644
--- a/apps/archive/archive_media.py
+++ b/apps/archive/archive_media.py
@@ -10,8 +10,9 @@
 
 import logging
 
-from flask import abort, current_app as app
-from eve.utils import config
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, VERSION
+from superdesk.flask import abort
 from apps.archive.common import copy_metadata_from_user_preferences
 from superdesk.media.media_operations import process_file_from_stream, decode_metadata
 from superdesk.media.renditions import generate_renditions, delete_file_on_error, get_renditions_spec
@@ -41,6 +42,7 @@ class ArchiveMediaService:
     def on_create(self, docs):
         """Create corresponding item on file upload."""
 
+        app = get_current_app()
         for doc in docs:
             if "media" not in doc or doc["media"] is None:
                 abort(400, description="No media found")
@@ -48,7 +50,7 @@ def on_create(self, docs):
             content_type = app.media._get_mimetype(doc["media"])
             doc["media"].seek(0)
             file_type = content_type.split("/")[0]
-            if file_type == "video" and app.config.get("VIDEO_SERVER_ENABLED"):
+            if file_type == "video" and get_app_config("VIDEO_SERVER_ENABLED"):
                 # upload media to video server
                 res, renditions, metadata = self.upload_file_to_video_server(doc)
                 # get thumbnails for timeline bar
@@ -58,7 +60,7 @@ def on_create(self, docs):
                 inserted = [doc["media"]]
                 # if no_custom_crops is set to False the custom crops are generated automatically on media upload
                 # see (SDESK-4742)
-                rendition_spec = get_renditions_spec(no_custom_crops=app.config.get("NO_CUSTOM_CROPS"))
+                rendition_spec = get_renditions_spec(no_custom_crops=get_app_config("NO_CUSTOM_CROPS"))
                 with timer("archive:renditions"):
                     renditions = generate_renditions(
                         file, doc["media"], inserted, file_type, content_type, rendition_spec, url_for_media
@@ -94,8 +96,8 @@ def _set_metadata(self, doc):
         update_dates_for(doc)
         generate_unique_id_and_name(doc)
         doc.setdefault("guid", generate_guid(type=GUID_TAG))
-        doc.setdefault(config.ID_FIELD, doc["guid"])
-        doc[config.VERSION] = 1
+        doc.setdefault(ID_FIELD, doc["guid"])
+        doc[VERSION] = 1
         set_item_expiry({}, doc)
 
         if not doc.get("_import", None):
@@ -104,7 +106,7 @@ def _set_metadata(self, doc):
         doc.setdefault(ITEM_STATE, CONTENT_STATE.DRAFT)
 
         if not doc.get("ingest_provider"):
-            doc["source"] = app.config.get("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
+            doc["source"] = get_app_config("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
 
         copy_metadata_from_user_preferences(doc)
 
@@ -118,6 +120,7 @@ def get_file_from_document(self, doc):
             file_name, content_type, metadata = res
             logger.debug("Going to save media file with %s " % file_name)
             content.seek(0)
+            app = get_current_app()
             with timer("media:put.original"):
                 doc["media"] = app.media.put(content, filename=file_name, content_type=content_type, metadata=metadata)
             return content, content_type, decode_metadata(metadata)
diff --git a/apps/archive/archive_rewrite.py b/apps/archive/archive_rewrite.py
index 6ab9293f3b..c9594e75c0 100644
--- a/apps/archive/archive_rewrite.py
+++ b/apps/archive/archive_rewrite.py
@@ -11,9 +11,12 @@
 import logging
 from apps.auth import get_user, get_user_id
 from eve.versioning import resolve_document_version
-from flask import request, current_app as app
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 from apps.archive import ArchiveSpikeService
-from superdesk import get_resource_service, Service, config
+from superdesk import get_resource_service, Service
 from superdesk.metadata.item import (
     ITEM_STATE,
     EMBARGO,
@@ -105,13 +108,14 @@ def create(self, docs, **kwargs):
 
         # signal
         item_rewrite.send(self, item=rewrite, original=original)
+        app = get_current_app().as_any()
 
         if update_document:
             # process the existing story
-            archive_service.patch(update_document[config.ID_FIELD], rewrite)
+            archive_service.patch(update_document[ID_FIELD], rewrite)
             app.on_archive_item_updated(rewrite, update_document, ITEM_LINK)
-            rewrite[config.ID_FIELD] = update_document[config.ID_FIELD]
-            ids = [update_document[config.ID_FIELD]]
+            rewrite[ID_FIELD] = update_document[ID_FIELD]
+            ids = [update_document[ID_FIELD]]
         else:
             # Set the version.
             resolve_document_version(rewrite, ARCHIVE, "POST")
@@ -150,16 +154,15 @@ def _validate_rewrite(self, original, update):
         if original.get("rewritten_by"):
             raise SuperdeskApiError.badRequestError(message=_("Article has been rewritten before !"))
 
-        if (
-            not is_workflow_state_transition_valid("rewrite", original[ITEM_STATE])
-            and not config.ALLOW_UPDATING_SCHEDULED_ITEMS
+        if not is_workflow_state_transition_valid("rewrite", original[ITEM_STATE]) and not get_app_config(
+            "ALLOW_UPDATING_SCHEDULED_ITEMS"
         ):
             raise InvalidStateTransitionError()
 
         if (
             original.get("rewrite_of")
             and not (original.get(ITEM_STATE) in PUBLISH_STATES)
-            and not app.config["WORKFLOW_ALLOW_MULTIPLE_UPDATES"]
+            and not get_app_config("WORKFLOW_ALLOW_MULTIPLE_UPDATES")
         ):
             raise SuperdeskApiError.badRequestError(
                 message=_("Rewrite is not published. Cannot rewrite the story again.")
@@ -216,8 +219,8 @@ def _create_rewrite_article(self, original, existing_item=None, desk_id=None):
         ]
         existing_item_preserve_fields = (ASSOCIATIONS, "flags", "extra")
 
-        if app.config.get("COPY_ON_REWRITE_FIELDS"):
-            fields.extend(app.config["COPY_ON_REWRITE_FIELDS"])
+        if get_app_config("COPY_ON_REWRITE_FIELDS"):
+            fields.extend(get_app_config("COPY_ON_REWRITE_FIELDS"))
 
         if existing_item:
             # for associate an existing file as update merge subjects
@@ -276,11 +279,11 @@ def _create_rewrite_article(self, original, existing_item=None, desk_id=None):
             rewrite["flags"]["marked_for_sms"] = False
 
         # SD-4595 - Default value for the update article to be set based on the system config.
-        if config.RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES:
+        if get_app_config("RESET_PRIORITY_VALUE_FOR_UPDATE_ARTICLES"):
             # if True then reset to the default priority value.
-            rewrite["priority"] = int(config.DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES)
+            rewrite["priority"] = int(get_app_config("DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES"))
 
-        rewrite["rewrite_of"] = original[config.ID_FIELD]
+        rewrite["rewrite_of"] = original[ID_FIELD]
         rewrite["rewrite_sequence"] = (original.get("rewrite_sequence") or 0) + 1
         rewrite.pop(PROCESSED_FROM, None)
 
@@ -309,15 +312,12 @@ def _add_rewritten_flag(self, original, rewrite):
         :param dict original: item on which rewrite is triggered
         :param dict rewrite: rewritten document
         """
-        get_resource_service("published").update_published_items(
-            original[config.ID_FIELD], "rewritten_by", rewrite[config.ID_FIELD]
-        )
+        get_resource_service("published").update_published_items(original[ID_FIELD], "rewritten_by", rewrite[ID_FIELD])
 
         # modify the original item as well.
-        get_resource_service(ARCHIVE).system_update(
-            original[config.ID_FIELD], {"rewritten_by": rewrite[config.ID_FIELD]}, original
-        )
-        app.on_archive_item_updated({"rewritten_by": rewrite[config.ID_FIELD]}, original, ITEM_REWRITE)
+        get_resource_service(ARCHIVE).system_update(original[ID_FIELD], {"rewritten_by": rewrite[ID_FIELD]}, original)
+        app = get_current_app().as_any()
+        app.on_archive_item_updated({"rewritten_by": rewrite[ID_FIELD]}, original, ITEM_REWRITE)
 
     def _set_take_key(self, rewrite):
         """Sets the anpa take key of the rewrite with ordinal.
@@ -368,5 +368,6 @@ def delete(self, lookup):
 
         archive_service.system_update(target_id, updates, target)
         user = get_user(required=True)
-        push_notification("item:unlink", item=target_id, user=str(user.get(config.ID_FIELD)))
+        push_notification("item:unlink", item=target_id, user=str(user.get(ID_FIELD)))
+        app = get_current_app().as_any()
         app.on_archive_item_updated(updates, target, ITEM_UNLINK)
diff --git a/apps/archive/archive_spike.py b/apps/archive/archive_spike.py
index 3a8fbcaee9..b0000c190b 100644
--- a/apps/archive/archive_spike.py
+++ b/apps/archive/archive_spike.py
@@ -11,10 +11,10 @@
 
 import logging
 
-from flask import current_app as app
-
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD
 import superdesk
-from superdesk import get_resource_service, config
+from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError, InvalidStateTransitionError
 from superdesk.metadata.item import (
     ITEM_STATE,
@@ -97,7 +97,7 @@ def _validate_item(self, original):
         """
         packages = [x.get(PACKAGE) for x in original.get(LINKED_IN_PACKAGES, [])]
         if packages:
-            query = {"$and": [{config.ID_FIELD: {"$in": packages}}]}
+            query = {"$and": [{ID_FIELD: {"$in": packages}}]}
             cursor = get_resource_service(ARCHIVE).get_from_mongo(req=None, lookup=query)
             if cursor.count() > 0:
                 raise SuperdeskApiError.badRequestError(
@@ -108,7 +108,7 @@ def _validate_item(self, original):
     def update_rewrite(self, original):
         """Removes the reference from the rewritten story in published collection."""
         if original.get("rewrite_of") and original.get(ITEM_EVENT_ID):
-            clear_rewritten_flag(original.get(ITEM_EVENT_ID), original[config.ID_FIELD], "rewritten_by")
+            clear_rewritten_flag(original.get(ITEM_EVENT_ID), original[ID_FIELD], "rewritten_by")
 
         # write the rewritten_by to the story before spiked
         archive_service = get_resource_service(ARCHIVE)
@@ -119,6 +119,7 @@ def update_rewrite(self, original):
             rewrite_id = original.get("rewritten_by")
             rewritten_by = archive_service.find_one(req=None, _id=rewrite_id)
             archive_service.system_update(rewrite_id, {"rewrite_of": None, "rewrite_sequence": 0}, rewritten_by)
+            app = get_current_app().as_any()
             app.on_archive_item_updated({"rewrite_of": None, "rewrite_sequence": 0}, original, ITEM_UNLINK)
 
     def _removed_refs_from_package(self, item):
@@ -138,10 +139,10 @@ def _get_spike_expiry(self, desk_id, stage_id):
         :return:
         """
         # If no maximum spike expiry is set then return the desk/stage values
-        if app.settings["SPIKE_EXPIRY_MINUTES"] is None:
+        if get_app_config("SPIKE_EXPIRY_MINUTES") is None:
             return get_expiry(desk_id=desk_id, stage_id=stage_id)
         else:
-            return get_expiry_date(app.settings["SPIKE_EXPIRY_MINUTES"])
+            return get_expiry_date(get_app_config("SPIKE_EXPIRY_MINUTES"))
 
     def update(self, id, updates, original):
         original_state = original[ITEM_STATE]
@@ -181,12 +182,12 @@ def update(self, id, updates, original):
             updates["translated_from"] = None
             updates["translation_id"] = None
 
-            id_to_remove = original.get(config.ID_FIELD)
+            id_to_remove = original.get(ID_FIELD)
 
             # Remove the translated item from the list of translations in the original item
             # where orignal item can be in archive or in both archive and published resource as well
             translated_from = archive_service.find_one(req=None, _id=original.get("translated_from"))
-            translated_from_id = translated_from.get(config.ID_FIELD)
+            translated_from_id = translated_from.get(ID_FIELD)
             self._remove_translations(archive_service, translated_from, id_to_remove)
 
             if translated_from.get("state") in PUBLISH_STATES:
@@ -219,10 +220,10 @@ def update(self, id, updates, original):
                     linked_in_packages = [
                         linked
                         for linked in package_item.get(LINKED_IN_PACKAGES, [])
-                        if linked.get(PACKAGE) != original.get(config.ID_FIELD)
+                        if linked.get(PACKAGE) != original.get(ID_FIELD)
                     ]
                     super().system_update(
-                        package_item[config.ID_FIELD], {LINKED_IN_PACKAGES: linked_in_packages}, package_item
+                        package_item[ID_FIELD], {LINKED_IN_PACKAGES: linked_in_packages}, package_item
                     )
 
             # keep the structure of old group in order to be able to unspike the package
@@ -231,11 +232,12 @@ def update(self, id, updates, original):
             updates["groups"] = []
 
         item = self.backend.update(self.datasource, id, updates, original)
-        push_notification("item:spike", item=str(id), user=str(user.get(config.ID_FIELD)))
+        push_notification("item:spike", item=str(id), user=str(user.get(ID_FIELD)))
 
         history_updates = dict(updates)
         if original.get("task"):
             history_updates["task"] = original.get("task")
+        app = get_current_app().as_any()
         app.on_archive_item_updated(history_updates, original, ITEM_SPIKE)
         self._removed_refs_from_package(id)
         return item
@@ -260,7 +262,7 @@ def _remove_translations(self, service, article, id_to_remove):
         """
 
         translations = article.get("translations")
-        article_id = article.get(config.ID_FIELD)
+        article_id = article.get(ID_FIELD)
 
         if ObjectId.is_valid(article_id) and not isinstance(article_id, ObjectId):
             article_id = ObjectId(article_id)
@@ -298,7 +300,7 @@ def set_unspike_updates(self, doc, updates):
             stage_id = None
 
         if not stage_id and desk_id:  # get incoming stage for selected desk
-            desk = app.data.find_one("desks", None, _id=desk_id)
+            desk = get_current_app().data.find_one("desks", None, _id=desk_id)
             stage_id = desk["incoming_stage"] if desk else stage_id
 
         updates["task"] = {"desk": desk_id, "stage": stage_id, "user": None}
@@ -320,11 +322,11 @@ def on_updated(self, updates, original):
                     linked_in_packages = [
                         linked
                         for linked in package_item.get(LINKED_IN_PACKAGES, [])
-                        if linked.get(PACKAGE) != original.get(config.ID_FIELD)
+                        if linked.get(PACKAGE) != original.get(ID_FIELD)
                     ]
-                    linked_in_packages.append({PACKAGE: original.get(config.ID_FIELD)})
+                    linked_in_packages.append({PACKAGE: original.get(ID_FIELD)})
                     super().system_update(
-                        package_item[config.ID_FIELD], {LINKED_IN_PACKAGES: linked_in_packages}, package_item
+                        package_item[ID_FIELD], {LINKED_IN_PACKAGES: linked_in_packages}, package_item
                     )
 
     def on_update(self, updates, original):
@@ -345,7 +347,8 @@ def update(self, id, updates, original):
         self.backend.update(self.datasource, id, updates, original)
 
         item = get_resource_service(ARCHIVE).find_one(req=None, _id=id)
-        push_notification("item:unspike", item=str(id), user=str(user.get(config.ID_FIELD)))
+        push_notification("item:unspike", item=str(id), user=str(user.get(ID_FIELD)))
+        app = get_current_app().as_any()
         app.on_archive_item_updated(updates, original, ITEM_UNSPIKE)
 
         return item
diff --git a/apps/archive/autocomplete.py b/apps/archive/autocomplete.py
index 59e93f7ae6..c9aa554694 100644
--- a/apps/archive/autocomplete.py
+++ b/apps/archive/autocomplete.py
@@ -1,10 +1,12 @@
-from typing import List, Dict, Callable
+from typing import List, Dict, Callable, cast
 import warnings
 import superdesk
 
-from flask import current_app as app, request
 from flask_babel import _
 from datetime import timedelta
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.flask import request
 from superdesk.utils import ListCursor
 from superdesk.utc import utcnow
 from superdesk.errors import SuperdeskApiError
@@ -41,7 +43,7 @@ def get(self, req, lookup):
             else request.args.get("resources").split(",")
         )
         field: str = request.args.get("field", "slugline")
-        language: str = request.args.get("language", app.config.get("DEFAULT_LANGUAGE", "en"))
+        language: str = request.args.get("language", get_app_config("DEFAULT_LANGUAGE", "en"))
 
         all_suggestions: Dict[str, int] = {}
         for resource in resources:
@@ -59,7 +61,7 @@ def get(self, req, lookup):
 
 
 def get_archive_suggestions(field: str, language: str) -> Dict[str, int]:
-    if not app.config.get(SETTING_ENABLED):
+    if not get_app_config(SETTING_ENABLED):
         raise SuperdeskApiError(_("Archive autocomplete is not enabled"), 404)
 
     field_mapping = {"slugline": "slugline.keyword"}
@@ -70,8 +72,8 @@ def get_archive_suggestions(field: str, language: str) -> Dict[str, int]:
     versioncreated_min = (
         utcnow()
         - timedelta(
-            days=app.config[SETTING_DAYS],
-            hours=app.config[SETTING_HOURS],
+            days=cast(int, get_app_config(SETTING_DAYS)),
+            hours=cast(int, get_app_config(SETTING_HOURS)),
         )
     ).replace(
         microsecond=0
@@ -91,13 +93,13 @@ def get_archive_suggestions(field: str, language: str) -> Dict[str, int]:
             "values": {
                 "terms": {
                     "field": field_mapping[field],
-                    "size": app.config[SETTING_LIMIT],
+                    "size": get_app_config(SETTING_LIMIT),
                     "order": {"_key": "asc"},
                 },
             },
         },
     }
-    res = app.data.elastic.search(query, "archive", params={"size": 0})
+    res = get_current_app().data.elastic.search(query, "archive", params={"size": 0})
     return {bucket["key"]: bucket["doc_count"] for bucket in res.hits["aggregations"]["values"]["buckets"]}
 
 
diff --git a/apps/archive/commands.py b/apps/archive/commands.py
index 7d91103d1e..11653cba43 100644
--- a/apps/archive/commands.py
+++ b/apps/archive/commands.py
@@ -11,10 +11,11 @@
 import functools as ft
 import logging
 import superdesk
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD, VERSION
 import superdesk.signals as signals
 
-from flask import current_app as app
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 from copy import deepcopy
 from apps.packages import PackageService
 from superdesk.celery_task_utils import get_lock_id
@@ -107,7 +108,7 @@ def run(self):
 
     @log_exeption
     def _remove_expired_publish_queue_items(self, now):
-        expire_interval = app.config.get("PUBLISH_QUEUE_EXPIRY_MINUTES", 0)
+        expire_interval = get_app_config("PUBLISH_QUEUE_EXPIRY_MINUTES", 0)
         if expire_interval:
             expire_time = now - timedelta(minutes=expire_interval)
             logger.info("{} Removing publish queue items created before {}".format(self.log_msg, str(expire_time)))
@@ -126,7 +127,7 @@ def _remove_expired_items(self, expiry_datetime):
         archive_service = get_resource_service(ARCHIVE)
         published_service = get_resource_service("published")
         preserve_published_desks = {
-            desk.get(config.ID_FIELD): 1
+            desk.get(ID_FIELD): 1
             for desk in get_resource_service("desks").find(where={"preserve_published_content": True})
         }
 
@@ -153,7 +154,7 @@ def _remove_expired_items(self, expiry_datetime):
 
             # get killed items
             killed_items = {
-                item.get(config.ID_FIELD): item
+                item.get(ID_FIELD): item
                 for item in expired_items
                 if item.get(ITEM_STATE) in {CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED}
             }
@@ -168,7 +169,7 @@ def _remove_expired_items(self, expiry_datetime):
 
             # Get the not killed and spiked items
             not_killed_items = {
-                item.get(config.ID_FIELD): item
+                item.get(ID_FIELD): item
                 for item in expired_items
                 if item.get(ITEM_STATE) not in {CONTENT_STATE.KILLED, CONTENT_STATE.SPIKED, CONTENT_STATE.RECALLED}
             }
@@ -180,7 +181,7 @@ def _remove_expired_items(self, expiry_datetime):
 
             # Processing items to expire
             for item_id, item in not_killed_items.items():
-                item.setdefault(config.VERSION, 1)
+                item.setdefault(VERSION, 1)
                 item.setdefault("expiry", expiry_datetime)
                 item.setdefault("unique_name", "")
                 expiry_msg = log_msg_format.format(**item)
@@ -263,15 +264,15 @@ def _remove_expired_items(self, expiry_datetime):
             for _item_id, item in items_having_issues.items():
                 msg = log_msg_format.format(**item)
                 try:
-                    archive_service.system_update(item.get(config.ID_FIELD), {"expiry_status": "invalid"}, item)
+                    archive_service.system_update(item.get(ID_FIELD), {"expiry_status": "invalid"}, item)
                     logger.info("{} Setting item expiry status. {}".format(self.log_msg, msg))
                 except Exception:
                     logger.exception("{} Failed to set expiry status for item. {}".format(self.log_msg, msg))
 
     @log_exeption
     def _remove_expired_archived_items(self, now):
-        EXPIRY_MINUTES = app.config.get("ARCHIVED_EXPIRY_MINUTES")
-        EXPIRY_LIMIT = app.config.get("MAX_EXPIRY_QUERY_LIMIT", 100)
+        EXPIRY_MINUTES = get_app_config("ARCHIVED_EXPIRY_MINUTES")
+        EXPIRY_LIMIT = get_app_config("MAX_EXPIRY_QUERY_LIMIT", 100)
         if not EXPIRY_MINUTES:
             return
         logger.info("%s Starting to remove expired items from archived.", self.log_msg)
@@ -288,7 +289,7 @@ def _remove_expired_archived_items(self, now):
                 logger.error("%s Item was not removed from archived item=%s", self.log_msg, item["item_id"])
                 continue
             signals.archived_item_removed.send(archived_service, item=item)
-            if not app.config.get("LEGAL_ARCHIVE") and not archived_service.find_one(req=None, item_id=item["item_id"]):
+            if not get_app_config("LEGAL_ARCHIVE") and not archived_service.find_one(req=None, item_id=item["item_id"]):
                 remove_media_files(item, published=True)
 
     def _can_remove_item(self, item, now, processed_item=None, preserve_published_desks=None):
@@ -314,7 +315,7 @@ def _can_remove_item(self, item, now, processed_item=None, preserve_published_de
             broadcast_items = get_resource_service("archive_broadcast").get_broadcast_items_from_master_story(item)
             # If master story expires then check if broadcast item is included in a package.
             # If included in a package then check the package expiry.
-            item_refs.extend([broadcast_item.get(config.ID_FIELD) for broadcast_item in broadcast_items])
+            item_refs.extend([broadcast_item.get(ID_FIELD) for broadcast_item in broadcast_items])
 
             if item.get("rewrite_of"):
                 item_refs.append(item.get("rewrite_of"))
@@ -354,10 +355,10 @@ def _can_remove_item(self, item, now, processed_item=None, preserve_published_de
 
         if is_expired:
             # now check recursively for all references
-            if item[config.ID_FIELD] in processed_item:
+            if item[ID_FIELD] in processed_item:
                 return is_expired
 
-            processed_item[item[config.ID_FIELD]] = item
+            processed_item[item[ID_FIELD]] = item
             if item_refs:
                 archive_items = archive_service.get_from_mongo(req=None, lookup={"_id": {"$in": item_refs}})
                 for archive_item in archive_items:
@@ -368,9 +369,7 @@ def _can_remove_item(self, item, now, processed_item=None, preserve_published_de
 
         # If this item is not expired then it is potentially keeping it's parent alive.
         if not is_expired:
-            logger.info(
-                "{} Item ID: [{}] has not expired. Reason: {}".format(self.log_msg, item[config.ID_FIELD], reason)
-            )
+            logger.info("{} Item ID: [{}] has not expired. Reason: {}".format(self.log_msg, item[ID_FIELD], reason))
         return is_expired
 
     def _get_associated_media_id(self, item):
@@ -381,7 +380,7 @@ def _get_associated_media_id(self, item):
         ids = []
         for _key, associated_item in (item.get(ASSOCIATIONS) or {}).items():
             if associated_item:
-                ids.append(associated_item.get(config.ID_FIELD))
+                ids.append(associated_item.get(ID_FIELD))
         return ids
 
     def _get_associated_items(self, item):
@@ -393,7 +392,7 @@ def _get_associated_items(self, item):
             return []
 
         associated_items = list(
-            get_resource_service("media_references").get(req=None, lookup={"associated_id": item.get(config.ID_FIELD)})
+            get_resource_service("media_references").get(req=None, lookup={"associated_id": item.get(ID_FIELD)})
         )
 
         ids = set()
@@ -403,7 +402,7 @@ def _get_associated_items(self, item):
         # extra query just to ensure that item is not deleted
         # get the associated items from the archive collection
         archive_docs = list(get_resource_service(ARCHIVE).get_from_mongo(req=None, lookup={"_id": {"$in": list(ids)}}))
-        return [doc.get(config.ID_FIELD) for doc in archive_docs]
+        return [doc.get(ID_FIELD) for doc in archive_docs]
 
     def _move_to_archived(self, item, filter_conditions):
         """Moves all the published version of an article to archived.
@@ -416,7 +415,7 @@ def _move_to_archived(self, item, filter_conditions):
         published_service = get_resource_service("published")
         archived_service = get_resource_service("archived")
         archive_service = get_resource_service("archive")
-        item_id = item.get(config.ID_FIELD)
+        item_id = item.get(ID_FIELD)
         moved_to_archived = self._conforms_to_archived_filter(item, filter_conditions)
         published_items = list(published_service.get_from_mongo(req=None, lookup={"item_id": item_id}))
 
@@ -441,7 +440,7 @@ def _move_to_archived(self, item, filter_conditions):
             archive_service.delete_by_article_ids([item_id])
             logger.info("{} Deleted archive item. {}".format(self.log_msg, item_id))
         except Exception:
-            failed_items = [item.get(config.ID_FIELD) for item in published_items]
+            failed_items = [item.get(ID_FIELD) for item in published_items]
             logger.exception("{} Failed to move to archived. {}".format(self.log_msg, failed_items))
 
     def _conforms_to_archived_filter(self, item, filter_conditions):
@@ -453,23 +452,17 @@ def _conforms_to_archived_filter(self, item, filter_conditions):
         """
         if not filter_conditions:
             logger.info(
-                "{} No filter conditions specified for Archiving item {}.".format(
-                    self.log_msg, item.get(config.ID_FIELD)
-                )
+                "{} No filter conditions specified for Archiving item {}.".format(self.log_msg, item.get(ID_FIELD))
             )
             return True
 
         filter_service = get_resource_service("content_filters")
         for fc in filter_conditions:
             if filter_service.does_match(fc, item):
-                logger.info(
-                    "{} Filter conditions {} matched for item {}.".format(self.log_msg, fc, item.get(config.ID_FIELD))
-                )
+                logger.info("{} Filter conditions {} matched for item {}.".format(self.log_msg, fc, item.get(ID_FIELD)))
                 return False
 
-        logger.info(
-            "{} No filter conditions matched Archiving item {}.".format(self.log_msg, item.get(config.ID_FIELD))
-        )
+        logger.info("{} No filter conditions matched Archiving item {}.".format(self.log_msg, item.get(ID_FIELD)))
         return True
 
     def delete_spiked_items(self, items):
@@ -480,7 +473,7 @@ def delete_spiked_items(self, items):
         try:
             logger.info("{} deleting spiked items.".format(self.log_msg))
             spiked_ids = [
-                item.get(config.ID_FIELD)
+                item.get(ID_FIELD)
                 for item in items
                 if item.get(ITEM_STATE) == CONTENT_STATE.SPIKED and not self._get_associated_items(item)
             ]
@@ -497,7 +490,7 @@ def check_if_items_imported_to_legal_archive(self, items_to_expire):
         :param dict items_to_expire:
         :return dict: dict of items having issues.
         """
-        if not app.config.get("LEGAL_ARCHIVE"):
+        if not get_app_config("LEGAL_ARCHIVE"):
             return []
 
         logger.info("{} checking for items in legal archive. Items: {}".format(self.log_msg, items_to_expire.keys()))
diff --git a/apps/archive/common.py b/apps/archive/common.py
index 93d1f32a49..1f7df7bf38 100644
--- a/apps/archive/common.py
+++ b/apps/archive/common.py
@@ -12,15 +12,15 @@
 from bson import ObjectId
 
 import logging
-from eve.utils import config
 from datetime import datetime
 from dateutil.parser import parse as date_parse
-from flask import current_app as app
 from eve.versioning import insert_versioning_documents
 from pytz import timezone
 from copy import deepcopy
 from dateutil.parser import parse
 
+from superdesk.core import get_app_config, get_current_app
+from superdesk.resource_fields import ID_FIELD, VERSION
 import superdesk
 from superdesk import editor_utils
 from superdesk.users.services import get_sign_off
@@ -157,13 +157,13 @@
 
 
 def get_default_source():
-    return app.config.get("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES", "")
+    return get_app_config("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES", "")
 
 
 def update_version(updates, original):
     """Increment version number if possible."""
-    if config.VERSION in updates and original.get("version", 0) == 0:
-        updates.setdefault("version", updates[config.VERSION])
+    if VERSION in updates and original.get("version", 0) == 0:
+        updates.setdefault("version", updates[VERSION])
 
 
 def on_create_item(docs, repo_type=ARCHIVE, media_service=None):
@@ -190,7 +190,7 @@ def on_create_item(docs, repo_type=ARCHIVE, media_service=None):
             doc["event_id"] = generate_guid(type=GUID_TAG)
 
         set_default_state(doc, CONTENT_STATE.DRAFT)
-        doc.setdefault(config.ID_FIELD, doc[GUID_FIELD])
+        doc.setdefault(ID_FIELD, doc[GUID_FIELD])
 
         if repo_type == ARCHIVE:
             # set the source for the article
@@ -202,15 +202,15 @@ def on_create_item(docs, repo_type=ARCHIVE, media_service=None):
         if (
             doc.get("profile") in ignore_profiles
             and doc.get("type") == "text"
-            and app.config.get("DEFAULT_CONTENT_TYPE", None)
+            and get_app_config("DEFAULT_CONTENT_TYPE", None)
         ):
-            doc["profile"] = app.config["DEFAULT_CONTENT_TYPE"]
+            doc["profile"] = get_app_config("DEFAULT_CONTENT_TYPE")
 
         copy_metadata_from_profile(doc)
         copy_metadata_from_user_preferences(doc, repo_type)
 
         if "language" not in doc:
-            doc["language"] = app.config.get("DEFAULT_LANGUAGE", "en")
+            doc["language"] = get_app_config("DEFAULT_LANGUAGE", "en")
 
             if doc.get("task", None) and doc["task"].get("desk", None):
                 desk = superdesk.get_resource_service("desks").find_one(req=None, _id=doc["task"]["desk"])
@@ -350,8 +350,9 @@ def clear_rewritten_flag(event_id, rewrite_id, rewrite_field):
         event_id, rewrite_id, rewrite_field
     )
     processed_items = set()
+    app = get_current_app().as_any()
     for doc in published_rewritten_stories:
-        doc_id = doc.get(config.ID_FIELD)
+        doc_id = doc.get(ID_FIELD)
         publish_service.update_published_items(doc_id, rewrite_field, None)
         if doc_id not in processed_items:
             # clear the flag from the archive as well.
@@ -398,7 +399,7 @@ def set_sign_off(updates, original=None, repo_type=ARCHIVE, user=None):
         return
 
     # remove the sign off from the list if already there
-    if not app.config.get("FULL_SIGN_OFF"):
+    if not get_app_config("FULL_SIGN_OFF"):
         current_sign_off = current_sign_off.replace(sign_off + "/", "")
 
     updated_sign_off = "{}/{}".format(current_sign_off, sign_off)
@@ -443,7 +444,7 @@ def insert_into_versions(id_=None, doc=None):
         raise SuperdeskApiError.badRequestError(message=_("Document not found in archive collection"))
 
     remove_unwanted(doc_in_archive_collection)
-    if app.config["VERSION"] in doc_in_archive_collection:
+    if VERSION in doc_in_archive_collection:
         insert_versioning_documents(ARCHIVE, doc_in_archive_collection)
 
 
@@ -471,20 +472,20 @@ def fetch_item(doc, desk_id, stage_id, state=None, target=None):
     if doc.get("guid"):
         dest_doc.setdefault("uri", doc[GUID_FIELD])
 
-    dest_doc[config.ID_FIELD] = new_id
+    dest_doc[ID_FIELD] = new_id
     dest_doc[GUID_FIELD] = new_id
     generate_unique_id_and_name(dest_doc)
 
     # avoid circular import
     from apps.tasks import send_to
 
-    dest_doc[config.VERSION] = 1
+    dest_doc[VERSION] = 1
     dest_doc["versioncreated"] = utcnow()
     send_to(doc=dest_doc, desk_id=desk_id, stage_id=stage_id)
     dest_doc[ITEM_STATE] = state or CONTENT_STATE.FETCHED
 
-    dest_doc[FAMILY_ID] = doc[config.ID_FIELD]
-    dest_doc[INGEST_ID] = doc[config.ID_FIELD]
+    dest_doc[FAMILY_ID] = doc[ID_FIELD]
+    dest_doc[INGEST_ID] = doc[ID_FIELD]
     dest_doc[ITEM_OPERATION] = ITEM_FETCH
 
     remove_unwanted(dest_doc)
@@ -518,6 +519,7 @@ def remove_media_files(doc, published=False):
     if doc.get("guid"):
         remove_media_references(doc["guid"], published)
 
+    app = get_current_app()
     for renditions in references:
         for rendition in renditions.values():
             if not rendition.get("media"):
@@ -569,7 +571,7 @@ def get_item_expiry(desk, stage, offset=None):
     :param datetime offset: datetime passed in case of embargo.
     :return datetime: expiry datetime
     """
-    expiry_minutes = app.settings["CONTENT_EXPIRY_MINUTES"]
+    expiry_minutes = get_app_config("CONTENT_EXPIRY_MINUTES")
     if stage and (stage.get("content_expiry") or 0) > 0:
         expiry_minutes = stage.get("content_expiry")
     elif desk and (desk.get("content_expiry") or 0) > 0:
@@ -839,9 +841,9 @@ def copy_metadata_from_profile(doc):
     :param doc
     """
     defaults = {}
-    defaults.setdefault("priority", config.DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES)
-    defaults.setdefault("urgency", config.DEFAULT_URGENCY_VALUE_FOR_MANUAL_ARTICLES)
-    defaults.setdefault("genre", config.DEFAULT_GENRE_VALUE_FOR_MANUAL_ARTICLES)
+    defaults.setdefault("priority", get_app_config("DEFAULT_PRIORITY_VALUE_FOR_MANUAL_ARTICLES"))
+    defaults.setdefault("urgency", get_app_config("DEFAULT_URGENCY_VALUE_FOR_MANUAL_ARTICLES"))
+    defaults.setdefault("genre", get_app_config("DEFAULT_GENRE_VALUE_FOR_MANUAL_ARTICLES"))
     for field in defaults:
         if field in doc and not doc[field]:
             del doc[field]
diff --git a/apps/archive/ingest.py b/apps/archive/ingest.py
index 2bc9c13faf..7c3ffc15b5 100644
--- a/apps/archive/ingest.py
+++ b/apps/archive/ingest.py
@@ -12,10 +12,10 @@
 from superdesk.workflow import set_default_state
 from .common import on_create_item, handle_existing_data
 from .archive import update_word_count
-from eve.utils import config
 
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ITEMS
 from superdesk.io.ingest import IngestResource, IngestService  # NOQA
-from flask import current_app as app
 from apps.archive.highlights_search_mixin import HighlightsSearchMixin
 
 
@@ -26,15 +26,15 @@ def on_fetched(self, docs):
         Overriding this to handle existing data in Mongo & Elastic
         """
 
-        for item in docs[config.ITEMS]:
+        for item in docs[ITEMS]:
             handle_existing_data(item, doc_type="ingest")
 
     def on_create(self, docs):
         for doc in docs:
             set_default_state(doc, CONTENT_STATE.INGESTED)
-            if not app.config.get("DEFAULT_CONTENT_TYPE", None):
-                doc.setdefault(ITEM_PRIORITY, int(config.DEFAULT_PRIORITY_VALUE_FOR_INGESTED_ARTICLES))
-                doc.setdefault(ITEM_URGENCY, int(config.DEFAULT_URGENCY_VALUE_FOR_INGESTED_ARTICLES))
+            if not get_app_config("DEFAULT_CONTENT_TYPE", None):
+                doc.setdefault(ITEM_PRIORITY, int(get_app_config("DEFAULT_PRIORITY_VALUE_FOR_INGESTED_ARTICLES")))
+                doc.setdefault(ITEM_URGENCY, int(get_app_config("DEFAULT_URGENCY_VALUE_FOR_INGESTED_ARTICLES")))
             handle_existing_data(doc, doc_type="ingest")
             update_word_count(doc)
 
diff --git a/apps/archive/related.py b/apps/archive/related.py
index 4cd3949076..ebbd7da019 100644
--- a/apps/archive/related.py
+++ b/apps/archive/related.py
@@ -1,4 +1,4 @@
-from flask import abort, request
+from superdesk.flask import abort, request
 from superdesk.resource import Resource
 from superdesk.services import Service
 from superdesk.metadata.utils import item_url
diff --git a/apps/archive_broadcast/broadcast.py b/apps/archive_broadcast/broadcast.py
index 78073163fd..0daba53eb3 100644
--- a/apps/archive_broadcast/broadcast.py
+++ b/apps/archive_broadcast/broadcast.py
@@ -13,7 +13,9 @@
 import json
 from eve.utils import ParsedRequest
 from eve.versioning import resolve_document_version
-from flask import request
+
+from superdesk.resource_fields import ID_FIELD, LAST_UPDATED
+from superdesk.flask import request
 from apps.archive.common import CUSTOM_HATEOAS, insert_into_versions, get_user, ITEM_CREATE, BROADCAST_GENRE, is_genre
 from apps.packages import PackageService
 from superdesk.metadata.packages import GROUPS
@@ -21,7 +23,7 @@
 from superdesk.services import BaseService
 from superdesk.metadata.utils import item_url
 from superdesk.metadata.item import CONTENT_TYPE, CONTENT_STATE, ITEM_TYPE, ITEM_STATE, PUBLISH_STATES, metadata_schema
-from superdesk import get_resource_service, config
+from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
 from apps.archive.archive import SOURCE
 from apps.publish.content.common import ITEM_CORRECT, ITEM_PUBLISH
@@ -75,7 +77,7 @@ def create(self, docs):
         doc.pop("desk", None)
         doc["task"] = {}
         if desk:
-            doc["task"]["desk"] = desk.get(config.ID_FIELD)
+            doc["task"]["desk"] = desk.get(ID_FIELD)
             doc["task"]["stage"] = desk.get("working_stage")
 
         doc["task"]["user"] = get_user().get("_id")
@@ -101,9 +103,9 @@ def create(self, docs):
 
         resolve_document_version(document=doc, resource=SOURCE, method="POST")
         service.post(docs)
-        insert_into_versions(id_=doc[config.ID_FIELD])
+        insert_into_versions(id_=doc[ID_FIELD])
         build_custom_hateoas(CUSTOM_HATEOAS, doc)
-        return [doc[config.ID_FIELD]]
+        return [doc[ID_FIELD]]
 
     def _valid_broadcast_item(self, item):
         """Validates item for broadcast.
@@ -160,7 +162,7 @@ def get_broadcast_items_from_master_story(self, item, include_archived_repo=Fals
         if is_genre(item, BROADCAST_GENRE):
             return []
 
-        ids = [str(item.get(config.ID_FIELD))]
+        ids = [str(item.get(ID_FIELD))]
         return list(self._get_broadcast_items(ids, include_archived_repo))
 
     def on_broadcast_master_updated(self, item_event, item, rewrite_id=None):
@@ -205,13 +207,13 @@ def on_broadcast_master_updated(self, item_event, item, rewrite_id=None):
                 if not updates["broadcast"]["rewrite_id"] and rewrite_id:
                     updates["broadcast"]["rewrite_id"] = rewrite_id
 
-                if not broadcast_item.get(config.ID_FIELD) in processed_ids:
+                if not broadcast_item.get(ID_FIELD) in processed_ids:
                     self._update_broadcast_status(broadcast_item, updates)
                     # list of ids that are processed.
-                    processed_ids.add(broadcast_item.get(config.ID_FIELD))
+                    processed_ids.add(broadcast_item.get(ID_FIELD))
             except Exception:
                 logger.exception(
-                    "Failed to update status for the broadcast item {}".format(broadcast_item.get(config.ID_FIELD))
+                    "Failed to update status for the broadcast item {}".format(broadcast_item.get(ID_FIELD))
                 )
 
     def _update_broadcast_status(self, item, updates):
@@ -228,11 +230,11 @@ def _update_broadcast_status(self, item, updates):
             CONTENT_STATE.RECALLED,
         }:
             get_resource_service("published").update_published_items(
-                item.get(config.ID_FIELD), "broadcast", updates.get("broadcast")
+                item.get(ID_FIELD), "broadcast", updates.get("broadcast")
             )
 
-        archive_item = get_resource_service(SOURCE).find_one(req=None, _id=item.get(config.ID_FIELD))
-        get_resource_service(SOURCE).system_update(archive_item.get(config.ID_FIELD), updates, archive_item)
+        archive_item = get_resource_service(SOURCE).find_one(req=None, _id=item.get(ID_FIELD))
+        get_resource_service(SOURCE).system_update(archive_item.get(ID_FIELD), updates, archive_item)
 
     def remove_rewrite_refs(self, item):
         """Remove the rewrite references from the broadcast item if the re-write is spiked.
@@ -247,7 +249,7 @@ def remove_rewrite_refs(self, item):
                 "bool": {
                     "filter": [
                         {"term": {"genre.name": BROADCAST_GENRE}},
-                        {"term": {"broadcast.rewrite_id": item.get(config.ID_FIELD)}},
+                        {"term": {"broadcast.rewrite_id": item.get(ID_FIELD)}},
                     ]
                 }
             }
@@ -269,7 +271,7 @@ def remove_rewrite_refs(self, item):
                 self._update_broadcast_status(broadcast_item, updates)
             except Exception:
                 logger.exception(
-                    "Failed to remove rewrite id for the broadcast item {}".format(broadcast_item.get(config.ID_FIELD))
+                    "Failed to remove rewrite id for the broadcast item {}".format(broadcast_item.get(ID_FIELD))
                 )
 
     def reset_broadcast_status(self, updates, original):
@@ -300,7 +302,7 @@ def spike_item(self, original):
         spike_service = get_resource_service("archive_spike")
 
         for item in broadcast_items:
-            id_ = item.get(config.ID_FIELD)
+            id_ = item.get(ID_FIELD)
             try:
                 self.packageService.remove_spiked_refs_from_package(id_)
                 updates = {ITEM_STATE: CONTENT_STATE.SPIKED}
@@ -331,40 +333,35 @@ def kill_broadcast(self, updates, original, operation):
         kill_service = get_resource_service("archive_{}".format(operation))
 
         for item in broadcast_items:
-            item_id = item.get(config.ID_FIELD)
+            item_id = item.get(ID_FIELD)
             packages = self.packageService.get_packages(item_id)
 
             processed_packages = set()
             for package in packages:
-                if (
-                    str(package[config.ID_FIELD]) in processed_packages
-                    or package.get(ITEM_STATE) == CONTENT_STATE.RECALLED
-                ):
+                if str(package[ID_FIELD]) in processed_packages or package.get(ITEM_STATE) == CONTENT_STATE.RECALLED:
                     continue
                 try:
                     if package.get(ITEM_STATE) in {CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED}:
                         package_updates = {
-                            config.LAST_UPDATED: utcnow(),
+                            LAST_UPDATED: utcnow(),
                             GROUPS: self.packageService.remove_group_ref(package, item_id),
                         }
 
                         refs = self.packageService.get_residrefs(package_updates)
                         if refs:
-                            correct_service.patch(package.get(config.ID_FIELD), package_updates)
+                            correct_service.patch(package.get(ID_FIELD), package_updates)
                         else:
                             package_updates["body_html"] = updates.get("body_html", "")
-                            kill_service.patch(package.get(config.ID_FIELD), package_updates)
+                            kill_service.patch(package.get(ID_FIELD), package_updates)
 
-                        processed_packages.add(package.get(config.ID_FIELD))
+                        processed_packages.add(package.get(ID_FIELD))
                     else:
                         package_list = self.packageService.remove_refs_in_package(package, item_id, processed_packages)
 
                         processed_packages = processed_packages.union(set(package_list))
                 except Exception:
                     logger.exception(
-                        "Failed to remove the broadcast item {} from package {}".format(
-                            item_id, package.get(config.ID_FIELD)
-                        )
+                        "Failed to remove the broadcast item {} from package {}".format(item_id, package.get(ID_FIELD))
                     )
 
             kill_service.kill_item(updates, item)
diff --git a/apps/archive_history/service.py b/apps/archive_history/service.py
index 2a0a2eafe4..42b77c970b 100644
--- a/apps/archive_history/service.py
+++ b/apps/archive_history/service.py
@@ -9,9 +9,9 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
-from eve.utils import config
 from copy import deepcopy
 
+from superdesk.resource_fields import ID_FIELD, VERSION
 from superdesk import get_resource_service
 from superdesk.resource import Resource
 from superdesk.services import BaseService
@@ -77,14 +77,14 @@ def on_item_updated(self, updates, original, operation=None):
             self._save_history(item, updates, operation or ITEM_UPDATE)
 
     def on_item_deleted(self, doc):
-        lookup = {"item_id": doc[config.ID_FIELD]}
+        lookup = {"item_id": doc[ID_FIELD]}
         self.delete(lookup=lookup)
 
     def on_item_locked(self, item, user_id):
         self._save_lock_history(item, "item_lock")
 
     def on_item_unlocked(self, item, user_id):
-        if item.get(config.VERSION, 1) == 0:
+        if item.get(VERSION, 1) == 0:
             # version 0 items get deleted on unlock, along with all history documents
             # so there is no need to record a history item here
             return
@@ -95,7 +95,7 @@ def _save_lock_history(self, item, operation):
         self.post(
             [
                 {
-                    "item_id": item[config.ID_FIELD],
+                    "item_id": item[ID_FIELD],
                     "user_id": self.get_user_id(item),
                     "operation": operation,
                     "update": {
@@ -104,7 +104,7 @@ def _save_lock_history(self, item, operation):
                         LOCK_ACTION: item.get(LOCK_ACTION),
                         LOCK_TIME: item.get(LOCK_TIME),
                     },
-                    "version": item.get(config.VERSION, 1),
+                    "version": item.get(VERSION, 1),
                 }
             ]
         )
@@ -112,7 +112,7 @@ def _save_lock_history(self, item, operation):
     def get_user_id(self, item):
         user = get_user()
         if user:
-            return user.get(config.ID_FIELD)
+            return user.get(ID_FIELD)
 
     def _save_history(self, item, update, operation):
         # in case of auto-routing, if the original_creator exists in our database
@@ -127,11 +127,11 @@ def _save_history(self, item, update, operation):
 
             if user:
                 history = {
-                    "item_id": item[config.ID_FIELD],
-                    "user_id": user.get(config.ID_FIELD),
+                    "item_id": item[ID_FIELD],
+                    "user_id": user.get(ID_FIELD),
                     "operation": ITEM_CREATE,
                     "update": self._remove_unwanted_fields(update, item),
-                    "version": item.get(config.VERSION, 1),
+                    "version": item.get(VERSION, 1),
                     "_created": firstcreated,
                     "_updated": firstcreated,
                 }
@@ -139,11 +139,11 @@ def _save_history(self, item, update, operation):
                 self.post([history])
 
         history = {
-            "item_id": item[config.ID_FIELD],
+            "item_id": item[ID_FIELD],
             "user_id": self.get_user_id(item),
             "operation": operation,
             "update": self._remove_unwanted_fields(update, item),
-            "version": item.get(config.VERSION, 1),
+            "version": item.get(VERSION, 1),
         }
 
         self.post([history])
diff --git a/apps/archived/archived.py b/apps/archived/archived.py
index 76ec515c81..90baccb545 100644
--- a/apps/archived/archived.py
+++ b/apps/archived/archived.py
@@ -10,12 +10,13 @@
 
 from operator import itemgetter
 from copy import deepcopy
-from flask import current_app as app
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 import logging
 
 from eve.versioning import resolve_document_version
 
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD, VERSION, ETAG, LAST_UPDATED
 from apps.legal_archive.commands import import_into_legal_archive
 from apps.legal_archive.resource import LEGAL_PUBLISH_QUEUE_NAME
 from apps.publish.content.common import ITEM_KILL
@@ -98,7 +99,7 @@ def on_create(self, docs):
             doc.pop("lock_session", None)
             doc.pop("highlights", None)
             doc.pop("marked_desks", None)
-            doc["archived_id"] = self._get_archived_id(doc.get("item_id"), doc.get(config.VERSION))
+            doc["archived_id"] = self._get_archived_id(doc.get("item_id"), doc.get(VERSION))
 
             if doc.get(ITEM_TYPE) == CONTENT_TYPE.COMPOSITE:
                 for ref in package_service.get_item_refs(doc):
@@ -124,11 +125,11 @@ def validate_delete_action(self, doc, allow_all_types=False):
 
         bad_req_error = SuperdeskApiError.badRequestError
 
-        id_field = doc[config.ID_FIELD]
+        id_field = doc[ID_FIELD]
         item_id = doc["item_id"]
 
         doc["item_id"] = id_field
-        doc[config.ID_FIELD] = item_id
+        doc[ID_FIELD] = item_id
 
         if not allow_all_types and doc[ITEM_TYPE] != CONTENT_TYPE.TEXT:
             raise bad_req_error(message=_("Only Text articles are allowed to be Killed in Archived repo"))
@@ -153,7 +154,7 @@ def validate_delete_action(self, doc, allow_all_types=False):
                 raise bad_req_error(message=_("Can't Kill as the Digital Story is still available in production"))
 
             req = ParsedRequest()
-            req.sort = '[("%s", -1)]' % config.VERSION
+            req.sort = '[("%s", -1)]' % VERSION
             takes_package = list(self.get(req=req, lookup={"item_id": takes_package_id}))
             if not takes_package:
                 raise bad_req_error(message=_("Digital Story of the article not found in Archived repo"))
@@ -175,13 +176,13 @@ def validate_delete_action(self, doc, allow_all_types=False):
                         raise bad_req_error(message=_("Can't kill as one of Take(s) is part of a Package"))
 
         doc["item_id"] = item_id
-        doc[config.ID_FIELD] = id_field
+        doc[ID_FIELD] = id_field
 
     def on_delete(self, doc):
         self.validate_delete_action(doc)
 
     def delete(self, lookup):
-        if app.testing and len(lookup) == 0:
+        if get_current_app().testing and len(lookup) == 0:
             super().delete(lookup)
             return
 
@@ -241,14 +242,14 @@ def update(self, id, updates, original):
             # An email is sent to all subscribers
             if original.get("flags", {}).get("marked_archived_only", False):
                 super().delete({"item_id": article["item_id"]})
-                logger.info("Delete for article: {}".format(article[config.ID_FIELD]))
+                logger.info("Delete for article: {}".format(article[ID_FIELD]))
                 kill_service.broadcast_kill_email(article, updates_copy)
-                logger.info("Broadcast kill email for article: {}".format(article[config.ID_FIELD]))
+                logger.info("Broadcast kill email for article: {}".format(article[ID_FIELD]))
                 continue
 
             # Step 3(i)
             self._remove_and_set_kill_properties(article, articles_to_kill, updated)
-            logger.info("Removing and setting properties for article: {}".format(article[config.ID_FIELD]))
+            logger.info("Removing and setting properties for article: {}".format(article[ID_FIELD]))
 
             # Step 3(ii)
             transmission_details = list(
@@ -260,11 +261,11 @@ def update(self, id, updates, original):
                     article, transmission_details
                 )
 
-            article[config.ID_FIELD] = article.pop("item_id", article["item_id"])
+            article[ID_FIELD] = article.pop("item_id", article["item_id"])
 
             # Step 3(iv)
-            super().delete({"item_id": article[config.ID_FIELD]})
-            logger.info("Delete for article: {}".format(article[config.ID_FIELD]))
+            super().delete({"item_id": article[ID_FIELD]})
+            logger.info("Delete for article: {}".format(article[ID_FIELD]))
 
             # Step 3(i) - Creating entries in published collection
             docs = [article]
@@ -273,21 +274,19 @@ def update(self, id, updates, original):
             published_doc = deepcopy(article)
             published_doc[QUEUE_STATE] = PUBLISH_STATE.QUEUED
             get_resource_service("published").post([published_doc])
-            logger.info("Insert into archive and published for article: {}".format(article[config.ID_FIELD]))
+            logger.info("Insert into archive and published for article: {}".format(article[ID_FIELD]))
 
             # Step 3(iii)
-            import_into_legal_archive.apply_async(countdown=3, kwargs={"item_id": article[config.ID_FIELD]})
-            logger.info("Legal Archive import for article: {}".format(article[config.ID_FIELD]))
+            import_into_legal_archive.apply_async(countdown=3, kwargs={"item_id": article[ID_FIELD]})
+            logger.info("Legal Archive import for article: {}".format(article[ID_FIELD]))
 
             # Step 3(v)
             kill_service.broadcast_kill_email(article, updates_copy)
-            logger.info("Broadcast kill email for article: {}".format(article[config.ID_FIELD]))
+            logger.info("Broadcast kill email for article: {}".format(article[ID_FIELD]))
 
     def on_updated(self, updates, original):
         user = get_user()
-        push_notification(
-            "item:deleted:archived", item=str(original[config.ID_FIELD]), user=str(user.get(config.ID_FIELD))
-        )
+        push_notification("item:deleted:archived", item=str(original[ID_FIELD]), user=str(user.get(ID_FIELD)))
 
     def on_fetched_item(self, doc):
         doc["_type"] = "archived"
@@ -297,7 +296,7 @@ def _get_archived_id(self, item_id, version):
 
     def get_archived_takes_package(self, package_id, take_id, version, include_other_takes=True):
         req = ParsedRequest()
-        req.sort = '[("%s", -1)]' % config.VERSION
+        req.sort = '[("%s", -1)]' % VERSION
         take_packages = list(self.get(req=req, lookup={"item_id": package_id}))
 
         for take_package in take_packages:
@@ -322,7 +321,7 @@ def find_articles_to_kill(self, lookup, include_other_takes=True):
             return
 
         req = ParsedRequest()
-        req.sort = '[("%s", -1)]' % config.VERSION
+        req.sort = '[("%s", -1)]' % VERSION
         archived_doc = list(self.get(req=req, lookup={"item_id": archived_doc["item_id"]}))[0]
         articles_to_kill = [archived_doc]
         takes_package_id = self._get_take_package_id(archived_doc)
@@ -355,7 +354,7 @@ def _remove_and_set_kill_properties(self, article, articles_to_kill, updates):
         article.pop("_type", None)
         article.pop("_links", None)
         article.pop("queue_state", None)
-        article.pop(config.ETAG, None)
+        article.pop(ETAG, None)
 
         for field in ["headline", "abstract", "body_html"]:
             article[field] = updates.get(field, article.get(field, ""))
@@ -363,10 +362,10 @@ def _remove_and_set_kill_properties(self, article, articles_to_kill, updates):
         article[ITEM_STATE] = CONTENT_STATE.KILLED if updates[ITEM_OPERATION] == ITEM_KILL else CONTENT_STATE.RECALLED
         article[ITEM_OPERATION] = updates[ITEM_OPERATION]
         article["pubstatus"] = PUB_STATUS.CANCELED
-        article[config.LAST_UPDATED] = utcnow()
+        article[LAST_UPDATED] = utcnow()
 
         user = get_user()
-        article["version_creator"] = str(user[config.ID_FIELD])
+        article["version_creator"] = str(user[ID_FIELD])
 
         resolve_document_version(article, ARCHIVE, "PATCH", article)
 
@@ -375,10 +374,10 @@ def _remove_and_set_kill_properties(self, article, articles_to_kill, updates):
             item_refs = package_service.get_item_refs(article)
             for ref in item_refs:
                 item_in_package = [
-                    item for item in articles_to_kill if item.get("item_id", item.get(config.ID_FIELD)) == ref[RESIDREF]
+                    item for item in articles_to_kill if item.get("item_id", item.get(ID_FIELD)) == ref[RESIDREF]
                 ]
                 ref["location"] = ARCHIVE
-                ref[config.VERSION] = item_in_package[0][config.VERSION]
+                ref[VERSION] = item_in_package[0][VERSION]
 
     def _get_take_package_id(self, item):
         """Checks if the item is in a 'takes' package and returns the package id
@@ -391,7 +390,7 @@ def _get_take_package_id(self, item):
             if package.get(PACKAGE_TYPE) == TAKES_PACKAGE
         ]
         if len(takes_package) > 1:
-            message = "Multiple takes found for item: {0}".format(item[config.ID_FIELD])
+            message = "Multiple takes found for item: {0}".format(item[ID_FIELD])
             logger.error(message)
         return takes_package[0] if takes_package else None
 
diff --git a/apps/auth/__init__.py b/apps/auth/__init__.py
index 73d3ea4df2..91bad33a97 100644
--- a/apps/auth/__init__.py
+++ b/apps/auth/__init__.py
@@ -8,11 +8,11 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import logging
 from flask_babel import _
-from eve.utils import config
 
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import g
 import superdesk
 from superdesk.errors import SuperdeskApiError
 from superdesk.services import BaseService
@@ -59,8 +59,8 @@ def get_user(required=False):
 
     :param boolean required: if True and there is no user it will raise an error
     """
-    user = flask.g.get("user", {})
-    if config.ID_FIELD not in user and required:
+    user = g.get("user", {})
+    if ID_FIELD not in user and required:
         raise SuperdeskApiError.notFoundError(_("Invalid user."))
     return user
 
@@ -71,12 +71,12 @@ def get_user_id(required=False):
     :param boolean required: if True and there is no user it will raise an error
     """
     user = get_user(required)
-    return user.get(config.ID_FIELD)
+    return user.get(ID_FIELD)
 
 
 def get_auth():
     """Get authenticated session data."""
-    auth = flask.g.get("auth", {})
+    auth = g.get("auth", {})
     return auth
 
 
diff --git a/apps/auth/auth.py b/apps/auth/auth.py
index aaf5bb2cf2..231cf8a33a 100644
--- a/apps/auth/auth.py
+++ b/apps/auth/auth.py
@@ -8,13 +8,15 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import logging
 import superdesk
 
 from datetime import timedelta
-from flask import request, current_app as app, session
 from eve.auth import TokenAuth
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import LAST_UPDATED
+from superdesk.flask import g, session, request
 from superdesk.resource import Resource
 from superdesk.errors import SuperdeskApiError
 from superdesk import (
@@ -101,7 +103,7 @@ def check_permissions(self, resource, method, user):
             return True
 
         # Step 2: Get User's Privileges
-        get_resource_service("users").set_privileges(user, flask.g.role)
+        get_resource_service("users").set_privileges(user, g.role)
 
         try:
             resource_privileges = get_resource_privileges(resource).get(method, None)
@@ -149,25 +151,20 @@ def check_auth(self, token, allowed_roles, resource, method):
             if session.get("session_token") != token:
                 session["session_token"] = token
             user_id = str(auth_token["user"])
-            flask.g.user = user_service.find_one(req=None, _id=user_id)
-            flask.g.role = user_service.get_role(flask.g.user)
-            flask.g.auth = auth_token
-            flask.g.auth_value = auth_token["user"]
+            g.user = user_service.find_one(req=None, _id=user_id)
+            g.role = user_service.get_role(g.user)
+            g.auth = auth_token
+            g.auth_value = auth_token["user"]
             if method in ("POST", "PUT", "PATCH") or method == "GET" and not request.args.get("auto"):
                 now = utcnow()
                 auth_updated = False
-                if (
-                    auth_token[app.config["LAST_UPDATED"]] + timedelta(seconds=app.config["SESSION_UPDATE_SECONDS"])
-                    < now
-                ):
-                    auth_service.update_session({app.config["LAST_UPDATED"]: now})
+                if auth_token[LAST_UPDATED] + timedelta(seconds=get_app_config("SESSION_UPDATE_SECONDS")) < now:
+                    auth_service.update_session({LAST_UPDATED: now})
                     auth_updated = True
-                if not flask.g.user.get("last_activity_at") or auth_updated:
-                    user_service.system_update(
-                        flask.g.user["_id"], {"last_activity_at": now, "_updated": now}, flask.g.user
-                    )
+                if not g.user.get("last_activity_at") or auth_updated:
+                    user_service.system_update(g.user["_id"], {"last_activity_at": now, "_updated": now}, g.user)
 
-            return self.check_permissions(resource, method, flask.g.user)
+            return self.check_permissions(resource, method, g.user)
 
         # pop invalid session
         session.pop("session_token", None)
diff --git a/apps/auth/db/commands.py b/apps/auth/db/commands.py
index ee0a34ddb3..69634e9474 100644
--- a/apps/auth/db/commands.py
+++ b/apps/auth/db/commands.py
@@ -13,7 +13,7 @@
 import csv
 from pathlib import Path
 from base64 import b64encode
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app
 import superdesk
 from superdesk.utils import get_hash, is_hashed
 
@@ -56,10 +56,11 @@ def run(self, username, password, email, admin=False, support=False):
             "needs_activation": not admin,
         }
 
+        app = get_current_app().as_any()
         with app.test_request_context("/users", method="POST"):
             if userdata.get("password", None) and not is_hashed(userdata.get("password")):
                 userdata["password"] = get_hash(
-                    userdata.get("password"), app.config.get("BCRYPT_GENSALT_WORK_FACTOR", 12)
+                    userdata.get("password"), get_app_config("BCRYPT_GENSALT_WORK_FACTOR", 12)
                 )
 
             user = superdesk.get_resource_service("users").find_one(username=userdata.get("username"), req=None)
@@ -234,7 +235,7 @@ def run(self):
             pwd = user.get("password")
             if not is_hashed(pwd):
                 updates = {}
-                hashed = get_hash(user["password"], app.config.get("BCRYPT_GENSALT_WORK_FACTOR", 12))
+                hashed = get_hash(user["password"], get_app_config("BCRYPT_GENSALT_WORK_FACTOR", 12))
                 user_id = user.get("_id")
                 updates["password"] = hashed
                 superdesk.get_resource_service("users").patch(user_id, updates=updates)
diff --git a/apps/auth/db/db.py b/apps/auth/db/db.py
index 4ecf9dd2e2..ec32b783ab 100644
--- a/apps/auth/db/db.py
+++ b/apps/auth/db/db.py
@@ -9,11 +9,11 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import bcrypt
-from flask import g
+from superdesk.core import get_app_config
+from superdesk.flask import g
 from apps.auth.service import AuthService
 from superdesk import get_resource_service
 from apps.auth.errors import CredentialsAuthError, PasswordExpiredError, ExternalUserError
-from flask import current_app as app
 from superdesk.utc import utcnow
 import datetime
 from flask_babel import _
@@ -40,8 +40,8 @@ def authenticate(self, credentials, ignore_expire=False):
         if not bcrypt.checkpw(password, hashed):
             raise CredentialsAuthError(credentials)
 
-        if not ignore_expire and app.settings.get("PASSWORD_EXPIRY_DAYS", 0) > 0:
-            days = app.settings.get("PASSWORD_EXPIRY_DAYS")
+        if not ignore_expire and get_app_config("PASSWORD_EXPIRY_DAYS", 0) > 0:
+            days = get_app_config("PASSWORD_EXPIRY_DAYS")
             date = user.get("password_changed_on")
             if date is None or (date + datetime.timedelta(days=days)) < utcnow():
                 raise PasswordExpiredError()
diff --git a/apps/auth/db/reset_password.py b/apps/auth/db/reset_password.py
index 4c569ef28b..0d25817a16 100644
--- a/apps/auth/db/reset_password.py
+++ b/apps/auth/db/reset_password.py
@@ -11,7 +11,9 @@
 import logging
 import superdesk
 from datetime import timedelta
-from flask import current_app as app
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import DATE_CREATED, LAST_UPDATED
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.utc import utcnow
@@ -78,8 +80,8 @@ def create(self, docs, **kwargs):
 
     def store_reset_password_token(self, doc, email, days_alive, user_id):
         now = utcnow()
-        doc[app.config["DATE_CREATED"]] = now
-        doc[app.config["LAST_UPDATED"]] = now
+        doc[DATE_CREATED] = now
+        doc[LAST_UPDATED] = now
         doc["expire_time"] = now + timedelta(days=days_alive)
         doc["user"] = user_id
         doc["token"] = get_random_string()
@@ -87,7 +89,7 @@ def store_reset_password_token(self, doc, email, days_alive, user_id):
         return ids
 
     def initialize_reset_password(self, doc, email):
-        token_ttl = app.config["RESET_PASSWORD_TOKEN_TIME_TO_LIVE"]
+        token_ttl = get_app_config("RESET_PASSWORD_TOKEN_TIME_TO_LIVE")
 
         user = superdesk.get_resource_service("users").find_one(req=None, email=email)
         if not user:
diff --git a/apps/auth/errors.py b/apps/auth/errors.py
index 835e981520..e30bfe5562 100644
--- a/apps/auth/errors.py
+++ b/apps/auth/errors.py
@@ -10,7 +10,7 @@
 
 from superdesk.errors import SuperdeskApiError
 import logging
-from flask import json
+from superdesk.core import json
 
 
 logger = logging.getLogger(__name__)
diff --git a/apps/auth/oidc/auth.py b/apps/auth/oidc/auth.py
index 76a1f54856..c8292fb716 100644
--- a/apps/auth/oidc/auth.py
+++ b/apps/auth/oidc/auth.py
@@ -8,15 +8,15 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from flask import g, request
 from flask_babel import _
 
 # from flask_oidc_ex import OpenIDConnect
 
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import g, request
 import superdesk
 from apps.auth.errors import CredentialsAuthError
 from apps.auth.service import AuthService
-from eve.utils import config
 from superdesk import get_resource_service
 from superdesk.resource import Resource
 from superdesk.utils import ignorecase_query
@@ -87,7 +87,7 @@ def authenticate(self, credentials):
             )
             users_service.post([sync_data])
         else:
-            users_service.patch(user[config.ID_FIELD], sync_data)
+            users_service.patch(user[ID_FIELD], sync_data)
 
         user.update(sync_data)
         return user
diff --git a/apps/auth/service.py b/apps/auth/service.py
index c6b839cea2..38777fe9e6 100644
--- a/apps/auth/service.py
+++ b/apps/auth/service.py
@@ -8,11 +8,11 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
-from flask import request, current_app as app
 from flask_babel import _
-from eve.utils import config
 
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request, session as flask_session, g
 from superdesk import utils as utils, get_resource_service, get_resource_privileges
 from superdesk.services import BaseService
 from superdesk.errors import SuperdeskApiError
@@ -34,8 +34,8 @@ def authenticate(self, document):
 
     def on_create(self, docs):
         # Clear the session data when creating a new session
-        if flask.session:
-            flask.session.pop("session_token", None)
+        if flask_session:
+            flask_session.pop("session_token", None)
         for doc in docs:
             user = self.authenticate(doc)
             if not user:
@@ -73,7 +73,7 @@ def update_session(self, updates=None):
         """
         if not updates:
             updates = {}
-        self.system_update(flask.g.auth["_id"], updates, flask.g.auth)
+        self.system_update(g.auth["_id"], updates, g.auth)
 
     def on_fetched_item(self, doc: dict) -> None:
         if str(doc["user"]) != str(auth.get_user_id()):
@@ -86,10 +86,11 @@ def on_deleted(self, doc):
         :return:
         """
         # Clear the session data when session has ended
-        flask.session.pop("session_token", None)
+        flask_session.pop("session_token", None)
 
         # notify that the session has ended
         sessions = self.get(req=None, lookup={"user": doc["user"]})
+        app = get_current_app().as_any()
         app.on_session_end(doc["user"], doc["_id"], is_last_session=not sessions.count())
         self.set_user_last_activity(doc["user"], done=True)
 
@@ -135,13 +136,13 @@ def delete(self, lookup):
         # Delete all the sessions except current session
         current_session_id = auth.get_auth().get("_id")
         for session in sessions:
-            if str(session[config.ID_FIELD]) != str(current_session_id):
-                get_resource_service("auth").delete_action({config.ID_FIELD: str(session[config.ID_FIELD])})
+            if str(session[ID_FIELD]) != str(current_session_id):
+                get_resource_service("auth").delete_action({ID_FIELD: str(session[ID_FIELD])})
 
         # Check if any orphan session_preferences exist for the user
         if user.get("session_preferences"):
             # Delete the orphan sessions
-            users_service.patch(user[config.ID_FIELD], {"session_preferences": {}})
+            users_service.patch(user[ID_FIELD], {"session_preferences": {}})
 
         return [{"complete": True}]
 
diff --git a/apps/auth/session_purge.py b/apps/auth/session_purge.py
index d53c9f3c9f..592bcc2ad6 100644
--- a/apps/auth/session_purge.py
+++ b/apps/auth/session_purge.py
@@ -13,7 +13,7 @@
 from superdesk.utc import utcnow
 from eve.utils import date_to_str
 from superdesk import get_resource_service
-from flask import current_app as app
+from superdesk.core import get_app_config
 import logging
 
 logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ def run(self):
 
     def remove_expired_sessions(self):
         auth_service = get_resource_service("auth")
-        expiry_minutes = app.settings["SESSION_EXPIRY_MINUTES"]
+        expiry_minutes = get_app_config("SESSION_EXPIRY_MINUTES")
         expiration_time = utcnow() - timedelta(minutes=expiry_minutes)
         logger.info("Deleting session not updated since {}".format(expiration_time))
         query = {"_updated": {"$lte": date_to_str(expiration_time)}}
diff --git a/apps/auth/xmpp/auth.py b/apps/auth/xmpp/auth.py
index 8c8d92ac51..6a07f86f8a 100644
--- a/apps/auth/xmpp/auth.py
+++ b/apps/auth/xmpp/auth.py
@@ -8,13 +8,13 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from superdesk.core import get_app_config
 from apps.auth.service import AuthService
 from superdesk import get_resource_service
 from superdesk.resource import Resource
 from apps.auth.errors import CredentialsAuthError
 from superdesk.errors import SuperdeskApiError
 from superdesk import utils
-from flask import current_app as app
 import requests
 import superdesk
 
@@ -36,10 +36,10 @@ class XMPPAuthResource(Resource):
 
 class XMPPAuthService(AuthService):
     def authenticate(self, credentials):
-        auth_url = app.config["XMPP_AUTH_URL"]
+        auth_url = get_app_config("XMPP_AUTH_URL")
         if not auth_url:
             raise SuperdeskApiError.notConfiguredError()
-        domain = app.config["XMPP_AUTH_DOMAIN"]
+        domain = get_app_config("XMPP_AUTH_DOMAIN")
         jid = credentials.get("jid")
         if not jid:
             raise CredentialsAuthError(credentials)
@@ -49,7 +49,7 @@ def authenticate(self, credentials):
 
         try:
             r = requests.post(
-                app.config["XMPP_AUTH_URL"],
+                get_app_config("XMPP_AUTH_URL"),
                 data={"jid": jid, "domain": domain, "transaction_id": credentials.get("transactionId")},
             )
         except Exception:
diff --git a/apps/client_config.py b/apps/client_config.py
index 8bf1d53555..ce567413b7 100644
--- a/apps/client_config.py
+++ b/apps/client_config.py
@@ -1,6 +1,6 @@
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.utils import ListCursor
 from superdesk.default_schema import DEFAULT_SCHEMA, DEFAULT_EDITOR
 
@@ -16,7 +16,7 @@ def get(self, req, lookup):
         return ListCursor()
 
     def on_fetched(self, docs):
-        docs["config"] = getattr(app, "client_config", {})
+        docs["config"] = getattr(get_current_app(), "client_config", {})
 
 
 def init_app(app) -> None:
diff --git a/apps/comments/comments.py b/apps/comments/comments.py
index 4360821073..9e7faae972 100644
--- a/apps/comments/comments.py
+++ b/apps/comments/comments.py
@@ -8,7 +8,7 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from flask import g, current_app as app
+from superdesk.core import get_current_app, get_app_config
 from superdesk.resource import Resource
 from superdesk.notification import push_notification
 from superdesk.services import BaseService
@@ -74,9 +74,10 @@ class CommentsService(BaseService):
     notifications = True
 
     def on_create(self, docs):
+        app = get_current_app()
         for doc in docs:
             sent_user = doc.get("user", None)
-            user = g.user
+            user = app.get_current_user_dict()
             if sent_user and sent_user != str(user["_id"]):
                 message = _("Commenting on behalf of someone else is prohibited.")
                 raise SuperdeskApiError.forbiddenError(message)
@@ -100,7 +101,7 @@ def on_created(self, docs):
             decode_keys(doc, "mentioned_desks")
 
         if self.notifications:
-            notify_mentioned_users(docs, app.config.get("CLIENT_URL", "").rstrip("/"))
+            notify_mentioned_users(docs, get_app_config("CLIENT_URL", "").rstrip("/"))
             notify_mentioned_desks(docs)
 
     def on_updated(self, updates, original):
diff --git a/apps/comments/inline_comments.py b/apps/comments/inline_comments.py
index 5513a92ebe..68a0eb84c4 100644
--- a/apps/comments/inline_comments.py
+++ b/apps/comments/inline_comments.py
@@ -1,7 +1,7 @@
 import re
 import bson
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 
 from .user_mentions import notify_mentioned_users
 
@@ -28,7 +28,7 @@ def handle_inline_mentions(sender, updates, original):
                             "mentioned_users": {user: bson.ObjectId(user) for user in users},
                         }
                     ],
-                    app.config.get("CLIENT_URL", "").rstrip("/"),
+                    get_app_config("CLIENT_URL", "").rstrip("/"),
                     item=updated,
                 )
             comment["notified"] = True
diff --git a/apps/comments/user_mentions.py b/apps/comments/user_mentions.py
index 88dbf45aa1..a1672aeaac 100644
--- a/apps/comments/user_mentions.py
+++ b/apps/comments/user_mentions.py
@@ -8,9 +8,9 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from superdesk.core import get_current_app
 from superdesk.activity import add_activity
 from eve.utils import ParsedRequest
-from flask import g
 from superdesk.emails import send_user_mentioned_email
 import re
 import superdesk
@@ -41,7 +41,8 @@ def send_email_to_mentioned_users(doc, mentioned_users, origin):
             user_doc = superdesk.get_resource_service("users").find_one(req=None, _id=user)
             recipients.append(user_doc["email"])
     if recipients:
-        username = g.user.get("display_name") or g.user.get("username")
+        user = get_current_app().get_current_user_dict() or {}
+        username = user.get("display_name") or user.get("username")
         url = "{}/#/workspace?item={}&action=edit&comments={}".format(origin, doc["item"], doc["_id"])
         send_user_mentioned_email(recipients, username, doc, url)
 
diff --git a/apps/common/models/io/base_proxy.py b/apps/common/models/io/base_proxy.py
index 48a40b4ffa..e3d9a3233d 100644
--- a/apps/common/models/io/base_proxy.py
+++ b/apps/common/models/io/base_proxy.py
@@ -9,8 +9,9 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 from .data_layer import DataLayer
-from eve.utils import ParsedRequest, config, document_etag
-from eve import ID_FIELD
+from eve.utils import ParsedRequest, document_etag
+
+from superdesk.resource_fields import ID_FIELD, ETAG
 
 
 class BaseProxy(DataLayer):
@@ -23,7 +24,7 @@ def __init__(self, data_layer):
         self.data_layer = data_layer
 
     def etag(self, doc):
-        return doc.get(config.ETAG, document_etag(doc))
+        return doc.get(ETAG, document_etag(doc))
 
     def find_one(self, resource, filter, projection):
         req = ParsedRequest()
diff --git a/apps/contacts/service.py b/apps/contacts/service.py
index 5ce9a76428..308d5f545c 100644
--- a/apps/contacts/service.py
+++ b/apps/contacts/service.py
@@ -10,10 +10,10 @@
 
 
 from superdesk import get_resource_service
+from superdesk.resource_fields import ID_FIELD
 from superdesk.services import Service
 from superdesk.notification import push_notification
 from superdesk.errors import SuperdeskApiError
-from eve.utils import config
 from eve.utils import ParsedRequest
 from flask_babel import _
 from copy import deepcopy
@@ -37,7 +37,7 @@ def on_created(self, docs):
         :param docs:
         :return:
         """
-        push_notification("contacts:create", _id=[doc.get(config.ID_FIELD) for doc in docs])
+        push_notification("contacts:create", _id=[doc.get(ID_FIELD) for doc in docs])
 
     def on_update(self, updates, original):
         item = deepcopy(original)
@@ -51,7 +51,7 @@ def on_updated(self, updates, original):
         :param original:
         :return:
         """
-        push_notification("contacts:update", _id=[original.get(config.ID_FIELD)])
+        push_notification("contacts:update", _id=[original.get(ID_FIELD)])
 
     def on_deleted(self, doc):
         """
@@ -59,7 +59,7 @@ def on_deleted(self, doc):
         :param doc:
         :return:
         """
-        push_notification("contacts:deleted", _id=[doc.get(config.ID_FIELD)])
+        push_notification("contacts:deleted", _id=[doc.get(ID_FIELD)])
 
     def _validate_assignable(self, contact):
         """Validates a required email address if the contact_type has assignable flag turned on"""
diff --git a/apps/content/__init__.py b/apps/content/__init__.py
index 6a11cd1a77..8e47e039f4 100644
--- a/apps/content/__init__.py
+++ b/apps/content/__init__.py
@@ -1,7 +1,7 @@
 """Content related helpers and utils.
 """
 
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD, VERSION
 from superdesk.notification import push_notification
 from apps.auth import get_user
 
@@ -26,7 +26,7 @@ def push_content_notification(items, event="content:update"):
         if task.get("stage"):
             stages[str(task.get("stage", ""))] = 1
     user = get_user()
-    push_notification(event, user=str(user.get(config.ID_FIELD, "")), items=ids, desks=desks, stages=stages)
+    push_notification(event, user=str(user.get(ID_FIELD, "")), items=ids, desks=desks, stages=stages)
 
 
 def push_item_move_notification(original, doc, event="item:move"):
@@ -41,9 +41,9 @@ def push_item_move_notification(original, doc, event="item:move"):
     user = get_user()
     push_notification(
         event,
-        user=str(user.get(config.ID_FIELD, "")),
-        item=str(original.get(config.ID_FIELD)),
-        item_version=str(original.get(config.VERSION)),
+        user=str(user.get(ID_FIELD, "")),
+        item=str(original.get(ID_FIELD)),
+        item_version=str(original.get(VERSION)),
         from_desk=str(from_task.get("desk")),
         from_stage=str(from_task.get("stage")),
         to_desk=str(to_task.get("desk")),
diff --git a/apps/content_filters/content_filter/content_filter_service.py b/apps/content_filters/content_filter/content_filter_service.py
index cb70958d6a..04bdacde51 100644
--- a/apps/content_filters/content_filter/content_filter_service.py
+++ b/apps/content_filters/content_filter/content_filter_service.py
@@ -8,9 +8,9 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import logging
 
+from superdesk.flask import g
 from superdesk.services import CacheableService
 from eve.utils import ParsedRequest
 from superdesk.errors import SuperdeskApiError
@@ -197,9 +197,9 @@ def does_match(self, content_filter, article, filters=None):
         if not content_filter:
             return True  # a non-existing filter matches every thing
         cache_id = _cache_id("filter-match", content_filter.get("_id") or content_filter.get("name"), article)
-        if not hasattr(flask.g, cache_id):
-            setattr(flask.g, cache_id, self._does_match(content_filter, article, filters))
-        return getattr(flask.g, cache_id)
+        if not hasattr(g, cache_id):
+            setattr(g, cache_id, self._does_match(content_filter, article, filters))
+        return getattr(g, cache_id)
 
     def _does_match(self, content_filter, article, filters):
         for index, expression in enumerate(content_filter.get("content_filter", [])):
@@ -220,7 +220,7 @@ def _does_filter_condition_match(self, content_filter, article, filters, express
         filter_condition_service = get_resource_service("filter_conditions")
         for f in expression["expression"]["fc"]:
             cache_id = _cache_id("filter-condition-match", f, article)
-            if not hasattr(flask.g, cache_id):
+            if not hasattr(g, cache_id):
                 fc = (
                     filters.get("filter_conditions", {}).get(f, {}).get("fc")
                     if filters
@@ -230,20 +230,20 @@ def _does_filter_condition_match(self, content_filter, article, filters, express
                     logger.error("Missing filter condition %s in content filter %s", f, content_filter.get("name"))
                     return False
                 filter_condition = FilterCondition.parse(fc)
-                setattr(flask.g, cache_id, filter_condition.does_match(article))
-            if not getattr(flask.g, cache_id):
+                setattr(g, cache_id, filter_condition.does_match(article))
+            if not getattr(g, cache_id):
                 return False
         return True
 
     def _does_content_filter_match(self, content_filter, article, filters, expression) -> bool:
         for f in expression["expression"]["pf"]:
             cache_id = _cache_id("content-filter-match", f, article)
-            if not hasattr(flask.g, cache_id):
+            if not hasattr(g, cache_id):
                 current_filter = (
                     filters.get("content_filters", {}).get(f, {}).get("cf") if filters else self.get_cached_by_id(f)
                 )
-                setattr(flask.g, cache_id, self.does_match(current_filter, article, filters=filters))
-            if not getattr(flask.g, cache_id):
+                setattr(g, cache_id, self.does_match(current_filter, article, filters=filters))
+            if not getattr(g, cache_id):
                 return False
         return True
 
diff --git a/apps/content_filters/content_filter/content_filter_test.py b/apps/content_filters/content_filter/content_filter_test.py
index 442f3413a4..a77d22c5c8 100644
--- a/apps/content_filters/content_filter/content_filter_test.py
+++ b/apps/content_filters/content_filter/content_filter_test.py
@@ -9,10 +9,10 @@
 # at https://www.sourcefabric.org/superdesk/license
 import json
 
-from flask import current_app as app
 from eve.utils import ParsedRequest
 from flask_babel import _
 
+from superdesk.core import get_app_config
 from superdesk import get_resource_service
 from superdesk.resource import Resource
 from superdesk.services import BaseService
@@ -51,7 +51,7 @@ def create(self, docs, **kwargs):
             if "article_id" in doc:
                 article_id = doc.get("article_id")
                 article = get_resource_service("archive").find_one(req=None, _id=article_id)
-                if not article and "planning" in app.config.get("INSTALLED_APPS", []):
+                if not article and "planning" in get_app_config("INSTALLED_APPS", []):
                     article = get_resource_service("planning").find_one(None, _id=article_id)
                 if not article:
                     article = get_resource_service("ingest").find_one(req=None, _id=article_id)
diff --git a/apps/content_filters/filter_condition/filter_condition_field.py b/apps/content_filters/filter_condition/filter_condition_field.py
index 26a9a64499..ae35c12218 100644
--- a/apps/content_filters/filter_condition/filter_condition_field.py
+++ b/apps/content_filters/filter_condition/filter_condition_field.py
@@ -11,7 +11,8 @@
 from typing import Dict
 from enum import Enum
 from lxml import etree
-from flask import g
+
+from superdesk.flask import g
 from superdesk.text_utils import get_text
 from superdesk.utc import utcnow
 from superdesk import get_resource_service
diff --git a/apps/content_filters/filter_condition/filter_condition_parameters.py b/apps/content_filters/filter_condition/filter_condition_parameters.py
index 5f74ee6cb7..a91198cc02 100644
--- a/apps/content_filters/filter_condition/filter_condition_parameters.py
+++ b/apps/content_filters/filter_condition/filter_condition_parameters.py
@@ -12,12 +12,14 @@
 import copy
 import logging
 from flask_babel import _
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.utils import ListCursor
-from superdesk import get_resource_service, config
+from superdesk import get_resource_service
 from superdesk.io.subjectcodes import get_subjectcodeitems
-from flask import current_app as app
 from eve.utils import ParsedRequest
 
 
@@ -164,7 +166,7 @@ def get(self, req, lookup):
             },
         ]
 
-        if "planning" in app.config.get("INSTALLED_APPS", []):
+        if "planning" in get_app_config("INSTALLED_APPS", []):
             fields.append(
                 {
                     "field": "agendas",
@@ -182,11 +184,11 @@ def get_from_mongo(self, req, lookup, projection=None):
         return self.get(req, lookup)
 
     def _get_vocabulary_fields(self, values):
-        excluded_vocabularies = copy.copy(app.config.get("EXCLUDED_VOCABULARY_FIELDS", []))
+        excluded_vocabularies = copy.copy(get_app_config("EXCLUDED_VOCABULARY_FIELDS", []))
         excluded_vocabularies.extend(values)
         lookup = {"_id": {"$nin": excluded_vocabularies}, "type": "manageable"}
         for vocabulary in get_resource_service("vocabularies").get_from_mongo(req=None, lookup=lookup):
-            field = {"field": vocabulary[config.ID_FIELD], "label": vocabulary["display_name"]}
+            field = {"field": vocabulary[ID_FIELD], "label": vocabulary["display_name"]}
 
             if vocabulary.get("field_type") and vocabulary.get("field_type", "") != "text":
                 continue
diff --git a/apps/content_filters/filter_condition/filter_condition_resource.py b/apps/content_filters/filter_condition/filter_condition_resource.py
index 2df07e1477..8d266a41a3 100644
--- a/apps/content_filters/filter_condition/filter_condition_resource.py
+++ b/apps/content_filters/filter_condition/filter_condition_resource.py
@@ -9,9 +9,11 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import copy
-from superdesk import get_resource_service, config
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
+from superdesk import get_resource_service
 from superdesk.resource import Resource
-from flask import current_app as app
 
 
 default_allowed_filters = [
@@ -92,7 +94,7 @@ def pre_request_patch(self, request, lookup):
 
     def _init_allowed_filters(self):
         self.schema["field"]["allowed"] = copy.copy(default_allowed_filters)
-        self.schema["field"]["allowed"].extend(app.config.get("EXCLUDED_VOCABULARY_FIELDS", []))
+        self.schema["field"]["allowed"].extend(get_app_config("EXCLUDED_VOCABULARY_FIELDS", []))
         lookup = {"_id": {"$nin": self.schema["field"]["allowed"]}, "type": "manageable"}
         for vocabulary in get_resource_service("vocabularies").get_from_mongo(req=None, lookup=lookup):
-            self.schema["field"]["allowed"].append(vocabulary[config.ID_FIELD])
+            self.schema["field"]["allowed"].append(vocabulary[ID_FIELD])
diff --git a/apps/content_types/content_types.py b/apps/content_types/content_types.py
index b995c3ab68..d286e4e26f 100644
--- a/apps/content_types/content_types.py
+++ b/apps/content_types/content_types.py
@@ -3,7 +3,8 @@
 import superdesk
 
 from copy import deepcopy
-from eve.utils import config
+
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
 from superdesk.default_schema import DEFAULT_SCHEMA, DEFAULT_EDITOR, DEFAULT_SCHEMA_MAP
@@ -147,7 +148,7 @@ def on_update(self, updates, original):
     def on_delete_res_vocabularies(self, doc):
         req = ParsedRequest()
         req.projection = '{"label": 1}'
-        res = self.get(req=req, lookup={"schema." + doc[config.ID_FIELD]: {"$type": 3}})
+        res = self.get(req=req, lookup={"schema." + doc[ID_FIELD]: {"$type": 3}})
         if res.count():
             payload = {"content_types": [doc_hateoas for doc_hateoas in map(self._build_hateoas, res)]}
             message = _("Vocabulary {vocabulary} is used in {count} content type(s)").format(
@@ -615,11 +616,11 @@ def remove_profile_from_templates(item):
     :param item: deleted content profile
     """
     templates = list(
-        superdesk.get_resource_service("content_templates").get_templates_by_profile_id(item.get(config.ID_FIELD))
+        superdesk.get_resource_service("content_templates").get_templates_by_profile_id(item.get(ID_FIELD))
     )
     for template in templates:
         template.get("data", {}).pop("profile", None)
-        superdesk.get_resource_service("content_templates").patch(template[config.ID_FIELD], template)
+        superdesk.get_resource_service("content_templates").patch(template[ID_FIELD], template)
 
 
 def get_profile(_id):
diff --git a/apps/desks.py b/apps/desks.py
index fe7b7a133a..15a3b8dfc6 100644
--- a/apps/desks.py
+++ b/apps/desks.py
@@ -13,12 +13,13 @@
 from typing import Dict, Any, List
 
 import superdesk
-from flask import current_app as app, request
 
+from superdesk.core import get_app_config, get_current_app
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
 from superdesk.resource import Resource
-from superdesk import config
 from superdesk.utils import SuperdeskBaseEnum
 from superdesk.timer import timer
 from bson.objectid import ObjectId
@@ -141,7 +142,7 @@ def create(self, docs, **kwargs):
             self._ensure_unique_members(desk)
 
             if desk.get("content_expiry") == 0:
-                desk["content_expiry"] = app.settings["CONTENT_EXPIRY_MINUTES"]
+                desk["content_expiry"] = get_app_config("CONTENT_EXPIRY_MINUTES")
 
             if "working_stage" not in desk:
                 stages_to_be_linked_with_desk.append("working_stage")
@@ -156,20 +157,20 @@ def create(self, docs, **kwargs):
             desk.setdefault("desk_type", DeskTypes.authoring.value)
             super().create([desk], **kwargs)
             for stage_type in stages_to_be_linked_with_desk:
-                stage_service.patch(desk[stage_type], {"desk": desk[config.ID_FIELD]})
+                stage_service.patch(desk[stage_type], {"desk": desk[ID_FIELD]})
 
             # make the desk available in default content template
             content_templates = get_resource_service("content_templates")
             template = content_templates.find_one(req=None, _id=desk.get("default_content_template"))
             if template:
-                template.setdefault("template_desks", []).append(desk.get(config.ID_FIELD))
+                template.setdefault("template_desks", []).append(desk.get(ID_FIELD))
                 content_templates.patch(desk.get("default_content_template"), template)
 
-        return [doc[config.ID_FIELD] for doc in docs]
+        return [doc[ID_FIELD] for doc in docs]
 
     def on_created(self, docs):
         for doc in docs:
-            push_notification(self.notification_key, created=1, desk_id=str(doc.get(config.ID_FIELD)))
+            push_notification(self.notification_key, created=1, desk_id=str(doc.get(ID_FIELD)))
             get_resource_service("users").update_stage_visibility_for_users()
 
     def on_update(self, updates, original):
@@ -181,8 +182,8 @@ def on_update(self, updates, original):
         if updates.get("desk_type") and updates.get("desk_type") != original.get("desk_type", ""):
             archive_versions_query = {
                 "$or": [
-                    {"task.last_authoring_desk": str(original[config.ID_FIELD])},
-                    {"task.last_production_desk": str(original[config.ID_FIELD])},
+                    {"task.last_authoring_desk": str(original[ID_FIELD])},
+                    {"task.last_production_desk": str(original[ID_FIELD])},
                 ]
             }
 
@@ -210,7 +211,7 @@ def on_delete(self, desk):
             3. The desk is associated with routing rule(s)
         """
 
-        as_default_desk = superdesk.get_resource_service("users").get(req=None, lookup={"desk": desk[config.ID_FIELD]})
+        as_default_desk = superdesk.get_resource_service("users").get(req=None, lookup={"desk": desk[ID_FIELD]})
         if as_default_desk and as_default_desk.count():
             raise SuperdeskApiError.preconditionFailedError(
                 message=_("Cannot delete desk as it is assigned as default desk to user(s).")
@@ -218,8 +219,8 @@ def on_delete(self, desk):
 
         routing_rules_query = {
             "$or": [
-                {"rules.actions.fetch.desk": desk[config.ID_FIELD]},
-                {"rules.actions.publish.desk": desk[config.ID_FIELD]},
+                {"rules.actions.fetch.desk": desk[ID_FIELD]},
+                {"rules.actions.publish.desk": desk[ID_FIELD]},
             ]
         }
         routing_rules = superdesk.get_resource_service("routing_schemes").get(req=None, lookup=routing_rules_query)
@@ -230,9 +231,9 @@ def on_delete(self, desk):
 
         archive_versions_query = {
             "$or": [
-                {"task.desk": str(desk[config.ID_FIELD])},
-                {"task.last_authoring_desk": str(desk[config.ID_FIELD])},
-                {"task.last_production_desk": str(desk[config.ID_FIELD])},
+                {"task.desk": str(desk[ID_FIELD])},
+                {"task.last_authoring_desk": str(desk[ID_FIELD])},
+                {"task.last_production_desk": str(desk[ID_FIELD])},
             ]
         }
 
@@ -257,14 +258,12 @@ def delete(self, lookup):
         Overriding to delete stages before deleting a desk
         """
 
-        superdesk.get_resource_service("stages").delete(lookup={"desk": lookup.get(config.ID_FIELD)})
+        superdesk.get_resource_service("stages").delete(lookup={"desk": lookup.get(ID_FIELD)})
         super().delete(lookup)
 
     def on_deleted(self, doc):
         desk_user_ids = [str(member["user"]) for member in doc.get("members", [])]
-        push_notification(
-            self.notification_key, deleted=1, user_ids=desk_user_ids, desk_id=str(doc.get(config.ID_FIELD))
-        )
+        push_notification(self.notification_key, deleted=1, user_ids=desk_user_ids, desk_id=str(doc.get(ID_FIELD)))
 
     def __compare_members(self, original, updates):
         original_members = set([member["user"] for member in original])
@@ -274,7 +273,7 @@ def __compare_members(self, original, updates):
         return added, removed
 
     def __send_notification(self, updates, desk):
-        desk_id = desk[config.ID_FIELD]
+        desk_id = desk[ID_FIELD]
         users_service = superdesk.get_resource_service("users")
 
         if "members" in updates:
@@ -303,7 +302,7 @@ def __send_notification(self, updates, desk):
                 users_service.update_stage_visibility_for_user(user)
 
         else:
-            push_notification(self.notification_key, updated=1, desk_id=str(desk.get(config.ID_FIELD)))
+            push_notification(self.notification_key, updated=1, desk_id=str(desk.get(ID_FIELD)))
 
     def get_desk_name(self, desk_id):
         """Return the item desk.
@@ -320,7 +319,7 @@ def get_desk_name(self, desk_id):
 
     def on_fetched(self, res):
         members_set = set()
-        db_users = app.data.mongo.pymongo("users").db["users"]
+        db_users = get_current_app().data.mongo.pymongo("users").db["users"]
 
         # find display_name from the users document for each member in desks document
         for desk in res["_items"]:
@@ -627,7 +626,7 @@ def _do_request(self, doc):
             agg_query["aggs"]["overview"]["aggs"] = {"top_docs": {"top_hits": {"size": 100}}}
 
         with timer(timer_label):
-            response = app.data.elastic.search(agg_query, collection, params={"size": 0})
+            response = get_current_app().data.elastic.search(agg_query, collection, params={"size": 0})
 
         doc["_items"] = [
             {
@@ -672,6 +671,7 @@ def _users_aggregation(self, desk_id: str) -> List[Dict]:
         for d in found:
             members.update({m["user"] for m in d.get("members", [])})
 
+        app = get_current_app()
         users_aggregation = app.data.pymongo().db.users.aggregate(
             [
                 {"$match": {"_id": {"$in": list(members)}}},
@@ -766,9 +766,9 @@ def remove_profile_from_desks(item):
     req = ParsedRequest()
     desks = list(superdesk.get_resource_service("desks").get(req=req, lookup={}))
     for desk in desks:
-        if desk.get("default_content_profile") == str(item.get(config.ID_FIELD)):
+        if desk.get("default_content_profile") == str(item.get(ID_FIELD)):
             desk["default_content_profile"] = None
-            superdesk.get_resource_service("desks").patch(desk[config.ID_FIELD], desk)
+            superdesk.get_resource_service("desks").patch(desk[ID_FIELD], desk)
 
 
 def format_buckets(aggs):
diff --git a/apps/dictionaries/service.py b/apps/dictionaries/service.py
index 3ea9621888..4c6b14c3a9 100644
--- a/apps/dictionaries/service.py
+++ b/apps/dictionaries/service.py
@@ -13,10 +13,11 @@
 import logging
 import collections
 
-from flask import json, current_app as app, request
 from simplejson.errors import JSONDecodeError
-from eve.utils import config
 
+from superdesk.core import json, get_current_app
+from superdesk.resource_fields import ID_FIELD, ITEMS
+from superdesk.flask import request
 from superdesk.errors import SuperdeskApiError
 from superdesk.services import BaseService
 from superdesk.notification import push_notification
@@ -88,7 +89,7 @@ def fetch_dict(doc):
     :param doc
     """
     if doc and doc.get(FILE_ID):
-        content_file = app.storage.get(doc[FILE_ID])
+        content_file = get_current_app().storage.get(doc[FILE_ID])
         content = json.loads(content_file.read())
         return content
 
@@ -107,6 +108,7 @@ def store_dict(updates, original):
     :param original
     """
     content = updates.pop("content", {})
+    app = get_current_app()
     if content:
         content_json = json.dumps(content)
         if is_big(content_json):
@@ -323,7 +325,7 @@ def on_fetched_item(self, doc):
         self.__enhance_items([doc])
 
     def on_fetched(self, docs):
-        self.__enhance_items(docs[config.ITEMS])
+        self.__enhance_items(docs[ITEMS])
 
     def __enhance_items(self, docs):
         for doc in docs:
diff --git a/apps/duplication/archive_copy.py b/apps/duplication/archive_copy.py
index 0858044562..cdf2d407ee 100644
--- a/apps/duplication/archive_copy.py
+++ b/apps/duplication/archive_copy.py
@@ -10,10 +10,11 @@
 
 import superdesk
 
-from flask import request, current_app as app
 from flask_babel import _
-from eve.utils import config
 
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError, InvalidStateTransitionError
 from superdesk.metadata.item import ITEM_STATE, CONTENT_STATE
@@ -55,7 +56,7 @@ def create(self, docs, **kwargs):
                 )
 
             current_desk_of_item = archived_doc.get("task", {}).get("desk")
-            if current_desk_of_item and not app.config["WORKFLOW_ALLOW_COPY_TO_PERSONAL"]:
+            if current_desk_of_item and not get_app_config("WORKFLOW_ALLOW_COPY_TO_PERSONAL"):
                 raise SuperdeskApiError.preconditionFailedError(message=_("Copy is not allowed on items in a desk."))
             elif current_desk_of_item:
                 archived_doc["task"] = {}
@@ -69,7 +70,7 @@ def create(self, docs, **kwargs):
 
         if kwargs.get("notify", True):
             user = get_user()
-            push_notification("item:copy", copied=1, user=str(user.get(config.ID_FIELD, "")))
+            push_notification("item:copy", copied=1, user=str(user.get(ID_FIELD, "")))
 
         return guid_of_copied_items
 
diff --git a/apps/duplication/archive_duplication.py b/apps/duplication/archive_duplication.py
index e301caf902..21c5062c1b 100644
--- a/apps/duplication/archive_duplication.py
+++ b/apps/duplication/archive_duplication.py
@@ -9,9 +9,11 @@
 # at https://www.sourcefabric.org/superdesk/license
 import json
 
-from eve.utils import config, ParsedRequest
-from flask import request, current_app as app
+from eve.utils import ParsedRequest
 
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 import superdesk
 from apps.archive.archive import SOURCE as ARCHIVE, remove_is_queued
 from apps.auth import get_user, get_user_id
@@ -50,7 +52,7 @@ class DuplicateResource(Resource):
 class DuplicateService(BaseService):
     def on_create(self, docs):
         for doc in docs:
-            if not doc.get("desk") and not app.config["WORKFLOW_ALLOW_COPY_TO_PERSONAL"]:
+            if not doc.get("desk") and not get_app_config("WORKFLOW_ALLOW_COPY_TO_PERSONAL"):
                 raise SuperdeskApiError.forbiddenError(message=_("Duplicate to Personal space is not allowed."))
 
     def create(self, docs, **kwargs):
@@ -136,7 +138,7 @@ def _validate(self, doc_in_archive, doc, guid_to_duplicate):
         lock_user = doc_in_archive.get("lock_user", None)
         force_unlock = doc_in_archive.get("force_unlock", False)
         user = get_user()
-        str_user_id = str(user.get(config.ID_FIELD)) if user else None
+        str_user_id = str(user.get(ID_FIELD)) if user else None
         if lock_user and str(lock_user) != str_user_id and not force_unlock:
             raise SuperdeskApiError.forbiddenError(_("The item was locked by another user"))
 
diff --git a/apps/duplication/archive_fetch.py b/apps/duplication/archive_fetch.py
index fef3415476..8db40978e3 100644
--- a/apps/duplication/archive_fetch.py
+++ b/apps/duplication/archive_fetch.py
@@ -11,10 +11,10 @@
 import superdesk
 
 from copy import deepcopy
-from flask import request
 from flask_babel import _
-from eve.utils import config
 
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 from apps.archive.usage import update_refs
 from apps.archive.archive import SOURCE as ARCHIVE
 from apps.content import push_item_move_notification
@@ -62,7 +62,7 @@ def fetch(self, docs, id=None, **kwargs):
         id_of_fetched_items = []
 
         for doc in docs:
-            id_of_item_to_be_fetched = doc.get(config.ID_FIELD) if id is None else id
+            id_of_item_to_be_fetched = doc.get(ID_FIELD) if id is None else id
 
             desk_id = doc.get("desk")
             stage_id = doc.get("stage")
@@ -99,10 +99,10 @@ def fetch(self, docs, id=None, **kwargs):
                 target=ingest_doc.get("target", doc.get("target")),
             )
 
-            id_of_fetched_items.append(dest_doc[config.ID_FIELD])
+            id_of_fetched_items.append(dest_doc[ID_FIELD])
             ingest_service.patch(id_of_item_to_be_fetched, {"archived": dest_doc["versioncreated"]})
 
-            dest_doc[FAMILY_ID] = ingest_doc[config.ID_FIELD]
+            dest_doc[FAMILY_ID] = ingest_doc[ID_FIELD]
             dest_doc[INGEST_ID] = self.__strip_version_from_guid(ingest_doc[GUID_FIELD], ingest_doc.get("version"))
             dest_doc[INGEST_VERSION] = ingest_doc.get("version")
 
@@ -160,7 +160,7 @@ def __fetch_items_in_package(self, dest_doc, desk, stage, state):
             ref["location"] = ARCHIVE
 
         refs = [
-            {config.ID_FIELD: ref.get(RESIDREF), "desk": desk, "stage": stage, ITEM_STATE: state}
+            {ID_FIELD: ref.get(RESIDREF), "desk": desk, "stage": stage, ITEM_STATE: state}
             for group in dest_doc.get(GROUPS, [])
             for ref in group.get(REFS, [])
             if ref.get(RESIDREF)
diff --git a/apps/duplication/archive_move.py b/apps/duplication/archive_move.py
index b84e9a0c28..d2fedb080b 100644
--- a/apps/duplication/archive_move.py
+++ b/apps/duplication/archive_move.py
@@ -11,11 +11,12 @@
 import superdesk
 import superdesk.signals as signals
 
-from eve.utils import config
 from eve.versioning import resolve_document_version
-from flask import request, current_app as app
 from copy import deepcopy
 
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD, ETAG
+from superdesk.flask import request
 from apps.tasks import send_to, apply_onstage_rule
 from apps.desks import DeskTypes
 from superdesk import get_resource_service
@@ -91,7 +92,7 @@ def create(self, docs, **kwargs):
             # doc represents the target desk and stage
             doc = docs[0]
             moved_item = self.move_content(guid_of_item_to_be_moved, doc)
-            guid_of_moved_items.append(moved_item.get(config.ID_FIELD))
+            guid_of_moved_items.append(moved_item.get(ID_FIELD))
 
             if moved_item.get("type", None) == "composite" and doc.get("allPackageItems", False):
                 try:
@@ -105,7 +106,7 @@ def create(self, docs, **kwargs):
                         item_lock_id = "item_move {}".format(item_id)
                         if lock(item_lock_id, expire=5):
                             item = self.move_content(item_id, doc)
-                            guid_of_moved_items.append(item.get(config.ID_FIELD))
+                            guid_of_moved_items.append(item.get(ID_FIELD))
                             unlock(item_lock_id, remove=True)
                             item_lock_id = None
                         else:
@@ -146,7 +147,7 @@ def _move(self, archived_doc, doc):
             doc=archived_doc,
             desk_id=doc.get("task", {}).get("desk"),
             stage_id=doc.get("task", {}).get("stage"),
-            user_id=user.get(config.ID_FIELD),
+            user_id=user.get(ID_FIELD),
         )
         if archived_doc[ITEM_STATE] not in (
             {
@@ -166,20 +167,22 @@ def _move(self, archived_doc, doc):
         convert_task_attributes_to_objectId(archived_doc)
         resolve_document_version(archived_doc, ARCHIVE, "PATCH", original)
 
-        del archived_doc[config.ID_FIELD]
-        del archived_doc[config.ETAG]  # force etag update
+        del archived_doc[ID_FIELD]
+        del archived_doc[ETAG]  # force etag update
         archived_doc["versioncreated"] = utcnow()
 
         signals.item_move.send(self, item=archived_doc, original=original)
-        archive_service.update(original[config.ID_FIELD], archived_doc, original)
+        archive_service.update(original[ID_FIELD], archived_doc, original)
 
-        insert_into_versions(id_=original[config.ID_FIELD])
+        insert_into_versions(id_=original[ID_FIELD])
         push_item_move_notification(original, archived_doc)
+
+        app = get_current_app().as_any()
         app.on_archive_item_updated(archived_doc, original, ITEM_MOVE)
 
         # make sure `item._id` is there in signal
         moved_item = archived_doc.copy()
-        moved_item[config.ID_FIELD] = original[config.ID_FIELD]
+        moved_item[ID_FIELD] = original[ID_FIELD]
         signals.item_moved.send(self, item=moved_item, original=original)
 
     def _validate(self, archived_doc, doc):
diff --git a/apps/duplication/archive_translate.py b/apps/duplication/archive_translate.py
index df0584dca7..8e89736494 100644
--- a/apps/duplication/archive_translate.py
+++ b/apps/duplication/archive_translate.py
@@ -10,6 +10,7 @@
 
 
 import superdesk
+from superdesk.core import get_app_config
 from apps.archive.archive import SOURCE as ARCHIVE, remove_is_queued
 from apps.content import push_content_notification
 from apps.auth import get_user_id
@@ -23,7 +24,6 @@
 from superdesk.utc import utcnow
 from apps.packages import PackageService
 from flask_babel import _
-from flask import current_app as app
 
 
 package_service = PackageService()
@@ -86,7 +86,7 @@ def _translate_item(self, guid, language, task=None, service=None, state=None, *
 
         extra_fields = ["translation_id", "translated_from"]
 
-        UPDATE_TRANSLATION_METADATA_MACRO = app.config.get("UPDATE_TRANSLATION_METADATA_MACRO")
+        UPDATE_TRANSLATION_METADATA_MACRO = get_app_config("UPDATE_TRANSLATION_METADATA_MACRO")
 
         if UPDATE_TRANSLATION_METADATA_MACRO and macros_service.get_macro_by_name(UPDATE_TRANSLATION_METADATA_MACRO):
             macros_service.execute_macro(item, UPDATE_TRANSLATION_METADATA_MACRO)
diff --git a/apps/export/service.py b/apps/export/service.py
index 007351951a..b0ea007cc8 100644
--- a/apps/export/service.py
+++ b/apps/export/service.py
@@ -1,5 +1,7 @@
 import logging
 import re
+
+from superdesk.core import get_current_app
 from superdesk.services import BaseService
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
@@ -8,7 +10,6 @@
 from superdesk.validation import ValidationError
 from io import BytesIO
 from zipfile import ZipFile
-from flask import current_app as app
 from flask_babel import _
 
 logger = logging.getLogger(__name__)
@@ -51,6 +52,7 @@ def create(self, docs, **kwargs):
             # Store the zip file on media_storage
             # only if at least one item is formatted successfully
             if unsuccessful_exports < len(doc.get("item_ids")):
+                app = get_current_app()
                 zip_id = app.media.put(
                     in_memory_zip.getvalue(),
                     filename="export_{}.zip".format(get_random_string()),
diff --git a/apps/highlights/generate.py b/apps/highlights/generate.py
index 82e57304fb..56393ba4ad 100644
--- a/apps/highlights/generate.py
+++ b/apps/highlights/generate.py
@@ -1,9 +1,18 @@
 import superdesk
-from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, CONTENT_STATE, ITEM_STATE, get_schema
-from flask import render_template, render_template_string
+from superdesk.metadata.item import CONTENT_TYPE, CONTENT_STATE, get_schema
+
+from superdesk.core import get_current_app
+from superdesk.resource_fields import (
+    ID_FIELD,
+    DATE_CREATED,
+    LAST_UPDATED,
+    ETAG,
+    VERSION,
+    ITEM_STATE,
+    ITEM_TYPE,
+)
+from superdesk.flask import render_template, render_template_string, abort
 from superdesk.errors import SuperdeskApiError
-from eve.utils import config
-from flask import current_app as app
 from .service import get_highlight_name
 from apps.archive.common import ITEM_EXPORT_HIGHLIGHT, ITEM_CREATE_HIGHLIGHT
 
@@ -22,10 +31,10 @@
     "lock_time",
     "lock_user",
     "lock_session",
-    config.ID_FIELD,
-    config.LAST_UPDATED,
-    config.DATE_CREATED,
-    config.ETAG,
+    ID_FIELD,
+    LAST_UPDATED,
+    DATE_CREATED,
+    ETAG,
     "version",
     "_current_version",
     "version_creator",
@@ -57,11 +66,12 @@ def create(self, docs, **kwargs):
         If doc.preview is True it won't save the item, only return.
         """
         service = superdesk.get_resource_service("archive")
+        app = get_current_app().as_any()
         for doc in docs:
             preview = doc.get("preview", False)
             package = service.find_one(req=None, _id=doc["package"])
             if not package:
-                superdesk.abort(404)
+                abort(404)
             export = doc.get("export")
             template = get_template(package.get("highlight"))
             stringTemplate = None
@@ -72,7 +82,7 @@ def create(self, docs, **kwargs):
             doc[ITEM_TYPE] = CONTENT_TYPE.TEXT
             doc["family_id"] = package.get("guid")
             doc[ITEM_STATE] = CONTENT_STATE.SUBMITTED
-            doc[config.VERSION] = 1
+            doc[VERSION] = 1
 
             for field in package:
                 if field not in PACKAGE_FIELDS:
diff --git a/apps/highlights/service.py b/apps/highlights/service.py
index c5dbc6c6a6..0e47da31c5 100644
--- a/apps/highlights/service.py
+++ b/apps/highlights/service.py
@@ -7,14 +7,13 @@
 # package_service, caused by circular dependencies.
 # When that issue is resolved, the workaround should be removed.
 
+from superdesk.core import get_current_app, get_app_config
 import apps.packages.package_service as package
-from flask import current_app as app
 from superdesk import get_resource_service
 from superdesk.services import BaseService
 from eve.utils import ParsedRequest
 from superdesk.notification import push_notification
 from superdesk.utc import get_timezone_offset, utcnow
-from eve.utils import config
 from apps.archive.common import ITEM_MARK, ITEM_UNMARK
 from bson import ObjectId
 
@@ -37,7 +36,7 @@ def get_highlighted_items(highlights_id):
                             "range": {
                                 "versioncreated": {
                                     "gte": highlight.get("auto_insert", "now/d"),
-                                    "time_zone": get_timezone_offset(config.DEFAULT_TIMEZONE, utcnow()),
+                                    "time_zone": get_timezone_offset(get_app_config("DEFAULT_TIMEZONE"), utcnow()),
                                 }
                             }
                         },
@@ -101,6 +100,7 @@ def on_delete(self, doc):
         query = {"query": {"filtered": {"filter": {"term": {"highlights": highlights_id}}}}}
         req = init_parsed_request(query)
         proposedItems = service.get(req=req, lookup=None)
+        app = get_current_app().as_any()
         for item in proposedItems:
             app.on_archive_item_updated(
                 {"highlight_id": highlights_id, "highlight_name": get_highlight_name(highlights_id)}, item, ITEM_UNMARK
@@ -115,6 +115,7 @@ def create(self, docs, **kwargs):
         service = get_resource_service("archive")
         publishedService = get_resource_service("published")
         ids = []
+        app = get_current_app().as_any()
         for doc in docs:
             item = service.find_one(req=None, _id=doc["marked_item"])
             if not item:
diff --git a/apps/io/feeding_services/reuters.py b/apps/io/feeding_services/reuters.py
index f40c9af028..a01e147116 100644
--- a/apps/io/feeding_services/reuters.py
+++ b/apps/io/feeding_services/reuters.py
@@ -12,8 +12,9 @@
 import traceback
 import superdesk
 import requests
-from flask import current_app as app
 
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.errors import IngestApiError
 from superdesk.etree import etree, ParseError
 from superdesk.io.registry import register_feeding_service, register_feeding_service_parser
@@ -71,7 +72,7 @@ def _update(self, provider, update):
         updated = utcnow()
 
         last_updated = provider.get("last_updated")
-        ttl_minutes = app.config["INGEST_EXPIRY_MINUTES"]
+        ttl_minutes = get_app_config("INGEST_EXPIRY_MINUTES")
         if not last_updated or last_updated < updated - datetime.timedelta(minutes=ttl_minutes):
             last_updated = updated - datetime.timedelta(minutes=ttl_minutes)
 
@@ -235,14 +236,14 @@ def _save_poll_token(self, channel, poll_token):
         """
         # get the provider in case it has been updated by another channel
         ingest_provider_service = superdesk.get_resource_service("ingest_providers")
-        provider = ingest_provider_service.find_one(req=None, _id=self.provider[superdesk.config.ID_FIELD])
+        provider = ingest_provider_service.find_one(req=None, _id=self.provider[ID_FIELD])
         provider_token = provider.get("tokens")
         if "poll_tokens" not in provider_token:
             provider_token["poll_tokens"] = {channel: poll_token}
         else:
             provider_token["poll_tokens"][channel] = poll_token
         upd_provider = {"tokens": provider_token}
-        ingest_provider_service.system_update(self.provider[superdesk.config.ID_FIELD], upd_provider, self.provider)
+        ingest_provider_service.system_update(self.provider[ID_FIELD], upd_provider, self.provider)
 
     def _get_poll_token(self, channel):
         """Get the poll token from provider config if it is available.
diff --git a/apps/io/search_ingest.py b/apps/io/search_ingest.py
index 2dfb34b95d..577d0ef14d 100644
--- a/apps/io/search_ingest.py
+++ b/apps/io/search_ingest.py
@@ -12,10 +12,10 @@
 import logging
 import superdesk
 
-from flask import json
 from flask_babel import _
-from eve.utils import config
 
+from superdesk.core import json
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.utc import utcnow
 from superdesk.errors import SuperdeskApiError, ProviderError
@@ -68,14 +68,14 @@ def create(self, docs, **kwargs):
             new_guids.append(dest_doc["guid"])
 
             if provider:
-                dest_doc["ingest_provider"] = str(provider[superdesk.config.ID_FIELD])
+                dest_doc["ingest_provider"] = str(provider[ID_FIELD])
 
             superdesk.get_resource_service(ARCHIVE).post([dest_doc])
-            insert_into_versions(dest_doc.get("_id"))
+            insert_into_versions(dest_doc.get(ID_FIELD))
 
         if new_guids:
             get_resource_service("search_providers").system_update(
-                provider.get(config.ID_FIELD), {"last_item_update": utcnow()}, provider
+                provider.get(ID_FIELD), {"last_item_update": utcnow()}, provider
             )
 
         return new_guids
@@ -86,7 +86,7 @@ def get(self, req, lookup):
             query = self._get_query(req)
             results = self.backend.find(self.source, query, None)
             for doc in results.docs:
-                doc["ingest_provider"] = str(provider[superdesk.config.ID_FIELD])
+                doc["ingest_provider"] = str(provider[ID_FIELD])
             return results
         else:
             raise ProviderNotFoundError(_("provider not found source={source}").format(source=self.source))
diff --git a/apps/item_lock/components/item_lock.py b/apps/item_lock/components/item_lock.py
index f41e8f0f63..7ebccb9123 100644
--- a/apps/item_lock/components/item_lock.py
+++ b/apps/item_lock/components/item_lock.py
@@ -8,17 +8,17 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import logging
-import superdesk
 
+from superdesk.resource_fields import ID_FIELD, VERSION, ETAG
+from superdesk.flask import g
+import superdesk
 from superdesk.metadata.item import ITEM_STATE, CONTENT_STATE, ITEM_TYPE, CONTENT_TYPE, PUBLISH_STATES
 from superdesk.errors import SuperdeskApiError
 from superdesk.notification import push_notification
 from superdesk.users.services import current_user_has_privilege
 from superdesk.utc import utcnow
 from superdesk.lock import lock, unlock
-from eve.utils import config
 from eve.versioning import resolve_document_version, insert_versioning_documents
 
 from apps.common.components.base_component import BaseComponent
@@ -40,12 +40,12 @@
 def push_unlock_notification(item, user_id, session_id):
     push_notification(
         "item:unlock",
-        item=str(item.get(config.ID_FIELD)),
-        item_version=str(item.get(config.VERSION)),
+        item=str(item.get(ID_FIELD)),
+        item_version=str(item.get(VERSION)),
         state=item.get(ITEM_STATE),
         user=str(user_id),
         lock_session=item.get(LOCK_SESSION) or str(session_id),
-        _etag=item.get(config.ETAG),
+        _etag=item.get(ETAG),
     )
 
 
@@ -75,7 +75,7 @@ def lock(self, item_filter, user_id, session_id, action):
         item = item_model.find_one(item_filter)
 
         # set the lock_id it per item
-        lock_id = "item_lock {}".format(item.get(config.ID_FIELD))
+        lock_id = "item_lock {}".format(item.get(ID_FIELD))
 
         if not item:
             raise SuperdeskApiError.notFoundError()
@@ -100,18 +100,18 @@ def lock(self, item_filter, user_id, session_id, action):
                     updates[TASK] = {"user": user_id}
 
                 # tasks service will update the user
-                superdesk.get_resource_service("tasks").assign_user(item[config.ID_FIELD], updates)
+                superdesk.get_resource_service("tasks").assign_user(item[ID_FIELD], updates)
 
                 item = item_model.find_one(item_filter)
                 self.app.on_item_locked(item, user_id)
                 push_notification(
                     "item:lock",
-                    item=str(item.get(config.ID_FIELD)),
-                    item_version=str(item.get(config.VERSION)),
+                    item=str(item.get(ID_FIELD)),
+                    item_version=str(item.get(VERSION)),
                     user=str(user_id),
                     lock_time=updates[LOCK_TIME],
                     lock_session=str(session_id),
-                    _etag=item.get(config.ETAG),
+                    _etag=item.get(ETAG),
                 )
             else:
                 raise SuperdeskApiError.forbiddenError(message=error_message)
@@ -140,7 +140,7 @@ def unlock(self, item_filter, user_id, session_id, etag):
 
             # delete the item if nothing is saved so far
             # version 0 created on lock item
-            if item.get(config.VERSION, 0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
+            if item.get(VERSION, 0) == 0 and item[ITEM_STATE] == CONTENT_STATE.DRAFT:
                 if item.get(ITEM_TYPE) == CONTENT_TYPE.COMPOSITE:
                     # if item is composite then update referenced items in package.
                     PackageService().update_groups({}, item)
@@ -152,8 +152,8 @@ def unlock(self, item_filter, user_id, session_id, etag):
                 set_unlock_updates(updates, True)
                 autosave = superdesk.get_resource_service("archive_autosave").find_one(req=None, _id=item["_id"])
                 if autosave and item[ITEM_STATE] not in PUBLISH_STATES:
-                    if not hasattr(flask.g, "user"):  # user is not set when session expires
-                        flask.g.user = superdesk.get_resource_service("users").find_one(req=None, _id=user_id)
+                    if not hasattr(g, "user"):  # user is not set when session expires
+                        g.user = superdesk.get_resource_service("users").find_one(req=None, _id=user_id)
                     autosave.update(updates)
                     resolve_document_version(autosave, "archive", "PATCH", item)
                     superdesk.get_resource_service("archive").patch(item["_id"], autosave)
diff --git a/apps/keywords/alchemy.py b/apps/keywords/alchemy.py
index d1eee39ffe..9ef337185a 100644
--- a/apps/keywords/alchemy.py
+++ b/apps/keywords/alchemy.py
@@ -11,7 +11,7 @@
 import urllib
 import requests
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk.errors import SuperdeskApiError
 from flask_babel import _
 
@@ -23,13 +23,16 @@ def __init__(self):
         self._http = requests.Session()
 
     def get_keywords(self, text):
-        if not app.config["KEYWORDS_KEY_API"]:
+        if not get_app_config("KEYWORDS_KEY_API"):
             raise SuperdeskApiError.notFoundError(_("AlchemyAPI key is not set"))
 
-        params = {"apikey": app.config["KEYWORDS_KEY_API"], "outputMode": "json"}
+        params = {"apikey": get_app_config("KEYWORDS_KEY_API"), "outputMode": "json"}
 
         url = (
-            app.config["KEYWORDS_BASE_URL"] + "/text/TextGetRankedNamedEntities" + "?" + urllib.parse.urlencode(params)
+            get_app_config("KEYWORDS_BASE_URL")
+            + "/text/TextGetRankedNamedEntities"
+            + "?"
+            + urllib.parse.urlencode(params)
         )
 
         values = {"text": text}
diff --git a/apps/ldap/commands.py b/apps/ldap/commands.py
index 2931866cd7..340bf01ba4 100644
--- a/apps/ldap/commands.py
+++ b/apps/ldap/commands.py
@@ -9,11 +9,11 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
-from flask import current_app as app
+
+from superdesk.core import get_app_config
 from superdesk.errors import SuperdeskApiError
 import superdesk
 from .ldap import ADAuth, add_default_values, get_user_query
-from flask_babel import _
 
 logger = logging.getLogger(__name__)
 
@@ -52,14 +52,13 @@ def run(self, ad_username, ad_password, username, admin="false"):
         user_type = "administrator" if admin is not None and admin.lower() == "true" else "user"
 
         # Authenticate and fetch profile from AD
-        settings = app.settings
         ad_auth = ADAuth(
-            settings["LDAP_SERVER"],
-            settings["LDAP_SERVER_PORT"],
-            settings["LDAP_BASE_FILTER"],
-            settings["LDAP_USER_FILTER"],
-            settings["LDAP_USER_ATTRIBUTES"],
-            settings["LDAP_FQDN"],
+            get_app_config("LDAP_SERVER"),
+            get_app_config("LDAP_SERVER_PORT"),
+            get_app_config("LDAP_BASE_FILTER"),
+            get_app_config("LDAP_USER_FILTER"),
+            get_app_config("LDAP_USER_ATTRIBUTES"),
+            get_app_config("LDAP_FQDN"),
         )
 
         user_data = ad_auth.authenticate_and_fetch_profile(ad_username, ad_password, username)
diff --git a/apps/ldap/ldap.py b/apps/ldap/ldap.py
index a4b7e12ca0..1c8ae947ca 100644
--- a/apps/ldap/ldap.py
+++ b/apps/ldap/ldap.py
@@ -12,12 +12,13 @@
 import re
 from ldap3 import Server, Connection, SUBTREE
 from ldap3.core.exceptions import LDAPException
+
+from superdesk.core import get_app_config
 from apps.auth.service import AuthService
 from superdesk.users.services import UsersService
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
 from superdesk.resource import Resource
-from flask import current_app as app
 import superdesk
 from apps.auth.errors import CredentialsAuthError
 from apps.auth import get_user
@@ -141,14 +142,13 @@ def authenticate(self, credentials):
         :param credentials: an object having "username" and "password" attributes
         :return: if success returns User object, otherwise throws Error
         """
-        settings = app.settings
         ad_auth = ADAuth(
-            settings["LDAP_SERVER"],
-            settings["LDAP_SERVER_PORT"],
-            settings["LDAP_BASE_FILTER"],
-            settings["LDAP_USER_FILTER"],
-            settings["LDAP_USER_ATTRIBUTES"],
-            settings["LDAP_FQDN"],
+            get_app_config("LDAP_SERVER"),
+            get_app_config("LDAP_SERVER_PORT"),
+            get_app_config("LDAP_BASE_FILTER"),
+            get_app_config("LDAP_USER_FILTER"),
+            get_app_config("LDAP_USER_ATTRIBUTES"),
+            get_app_config("LDAP_FQDN"),
         )
 
         username = credentials.get("username")
@@ -171,12 +171,12 @@ def authenticate(self, credentials):
         user = superdesk.get_resource_service("users").find_one(req=None, **query)
 
         if (
-            app.settings.get("LDAP_SET_DISPLAY_NAME", False)
+            get_app_config("LDAP_SET_DISPLAY_NAME", False)
             and "display_name" in user_data
-            and all(f in user_data for f in app.settings.get("LDAP_SET_DISPLAY_NAME_FIELDS", []))
+            and all(f in user_data for f in get_app_config("LDAP_SET_DISPLAY_NAME_FIELDS", []))
         ):
-            user_data["display_name"] = app.settings.get("LDAP_SET_DISPLAY_NAME_FORMAT", "").format(
-                *[user_data.get(f) for f in app.settings.get("LDAP_SET_DISPLAY_NAME_FIELDS", [])]
+            user_data["display_name"] = get_app_config("LDAP_SET_DISPLAY_NAME_FORMAT", "").format(
+                *[user_data.get(f) for f in get_app_config("LDAP_SET_DISPLAY_NAME_FIELDS", [])]
             )
 
         if not user:
diff --git a/apps/ldap/users_service.py b/apps/ldap/users_service.py
index 2e511fc18a..61bd983694 100644
--- a/apps/ldap/users_service.py
+++ b/apps/ldap/users_service.py
@@ -9,7 +9,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
-from eve.utils import config
+
+from superdesk.core import get_app_config
 from superdesk.users import UsersService, UsersResource, is_admin  # NOQA
 
 
@@ -38,7 +39,7 @@ def set_defaults(self, doc):
         :param dict doc: user
         """
         readonly = {}
-        user_attributes = config.LDAP_USER_ATTRIBUTES
+        user_attributes = get_app_config("LDAP_USER_ATTRIBUTES")
         for value in user_attributes.values():
             if value in self.readonly_fields:
                 readonly[value] = True
diff --git a/apps/legal_archive/commands.py b/apps/legal_archive/commands.py
index 45ecbfeeea..a10330a0b0 100644
--- a/apps/legal_archive/commands.py
+++ b/apps/legal_archive/commands.py
@@ -15,11 +15,13 @@
 
 import superdesk
 from copy import deepcopy
-from flask import current_app as app
 from eve.utils import ParsedRequest
 from eve.versioning import versioned_id_field
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD, VERSION, ETAG
 from superdesk.celery_app import celery
-from superdesk import get_resource_service, config
+from superdesk import get_resource_service
 from superdesk.celery_task_utils import get_lock_id
 from .resource import (
     LEGAL_ARCHIVE_NAME,
@@ -41,7 +43,7 @@
 
 def is_legal_archive_enabled():
     """Test if legal archive is enabled."""
-    return app.config["LEGAL_ARCHIVE"]
+    return get_app_config("LEGAL_ARCHIVE")
 
 
 class LegalArchiveImport:
@@ -71,7 +73,7 @@ def upsert_into_legal_archive(self, item_id):
 
             # setting default values in case they are missing other log message will fail.
             doc.setdefault("unique_name", "NO UNIQUE NAME")
-            doc.setdefault(config.VERSION, 1)
+            doc.setdefault(VERSION, 1)
             doc.setdefault("expiry", utcnow())
 
             if not doc.get(ITEM_STATE) in PUBLISH_STATES:
@@ -90,11 +92,11 @@ def upsert_into_legal_archive(self, item_id):
             legal_archive_history_service = get_resource_service(LEGAL_ARCHIVE_HISTORY_NAME)
 
             log_msg = self.log_msg_format.format(**legal_archive_doc)
-            version_id_field = versioned_id_field(app.config["DOMAIN"][ARCHIVE])
+            version_id_field = versioned_id_field(get_app_config("DOMAIN")[ARCHIVE])
             logger.info("Preparing Article to be inserted into Legal Archive " + log_msg)
 
             # Removing irrelevant properties
-            legal_archive_doc.pop(config.ETAG, None)
+            legal_archive_doc.pop(ETAG, None)
             legal_archive_doc.pop("lock_user", None)
             legal_archive_doc.pop("lock_session", None)
             legal_archive_doc.pop("lock_time", None)
@@ -103,16 +105,14 @@ def upsert_into_legal_archive(self, item_id):
             logger.info("Removed irrelevant properties from the article {}".format(log_msg))
 
             # Step 1
-            article_in_legal_archive = legal_archive_service.find_one(req=None, _id=legal_archive_doc[config.ID_FIELD])
+            article_in_legal_archive = legal_archive_service.find_one(req=None, _id=legal_archive_doc[ID_FIELD])
 
-            if article_in_legal_archive and article_in_legal_archive.get(config.VERSION, 0) > legal_archive_doc.get(
-                config.VERSION
-            ):
+            if article_in_legal_archive and article_in_legal_archive.get(VERSION, 0) > legal_archive_doc.get(VERSION):
                 logger.info(
                     "Item {} version: {} already in legal archive. Legal Archive document version {}".format(
-                        legal_archive_doc.get(config.ID_FIELD),
-                        legal_archive_doc.get(config.VERSION),
-                        article_in_legal_archive.get(config.VERSION),
+                        legal_archive_doc.get(ID_FIELD),
+                        legal_archive_doc.get(VERSION),
+                        article_in_legal_archive.get(VERSION),
                     )
                 )
                 self._set_moved_to_legal(doc)
@@ -126,12 +126,12 @@ def upsert_into_legal_archive(self, item_id):
             logger.info("Upserting Legal Archive Repo with article {}".format(log_msg))
 
             if article_in_legal_archive:
-                legal_archive_service.put(legal_archive_doc[config.ID_FIELD], legal_archive_doc)
+                legal_archive_service.put(legal_archive_doc[ID_FIELD], legal_archive_doc)
             else:
                 legal_archive_service.post([legal_archive_doc])
 
             # Step 4 - Get Versions and De-normalize and Inserting Legal Archive Versions
-            lookup = {version_id_field: legal_archive_doc[config.ID_FIELD]}
+            lookup = {version_id_field: legal_archive_doc[ID_FIELD]}
             versions = list(get_resource_service("archive_versions").get(req=None, lookup=lookup))
             legal_versions = list(legal_archive_versions_service.get(req=None, lookup=lookup))
 
@@ -140,14 +140,12 @@ def upsert_into_legal_archive(self, item_id):
                 version
                 for version in versions
                 if not any(
-                    legal_version
-                    for legal_version in legal_versions
-                    if version[config.VERSION] == legal_version[config.VERSION]
+                    legal_version for legal_version in legal_versions if version[VERSION] == legal_version[VERSION]
                 )
             ]
 
             # Step 5 - Get History and de-normalize and insert into Legal Archive History
-            lookup = {"item_id": legal_archive_doc[config.ID_FIELD]}
+            lookup = {"item_id": legal_archive_doc[ID_FIELD]}
             history_items = list(get_resource_service("archive_history").get(req=None, lookup=lookup))
             legal_history_items = list(legal_archive_history_service.get(req=None, lookup=lookup))
 
@@ -158,20 +156,20 @@ def upsert_into_legal_archive(self, item_id):
                 if not any(
                     legal_version
                     for legal_version in legal_history_items
-                    if history[config.ID_FIELD] == legal_version[config.ID_FIELD]
+                    if history[ID_FIELD] == legal_version[ID_FIELD]
                 )
             ]
 
             # This happens when user kills an article from Dusty Archive
             if (
                 article_in_legal_archive
-                and article_in_legal_archive[config.VERSION] < legal_archive_doc[config.VERSION]
+                and article_in_legal_archive[VERSION] < legal_archive_doc[VERSION]
                 and len(versions_to_insert) == 0
             ):
-                resource_def = app.config["DOMAIN"][ARCHIVE]
+                resource_def = get_app_config("DOMAIN")[ARCHIVE]
                 versioned_doc = deepcopy(legal_archive_doc)
-                versioned_doc[versioned_id_field(resource_def)] = legal_archive_doc[config.ID_FIELD]
-                versioned_doc[config.ID_FIELD] = ObjectId()
+                versioned_doc[versioned_id_field(resource_def)] = legal_archive_doc[ID_FIELD]
+                versioned_doc[ID_FIELD] = ObjectId()
                 versions_to_insert.append(versioned_doc)
 
             for version_doc in versions_to_insert:
@@ -180,11 +178,11 @@ def upsert_into_legal_archive(self, item_id):
                     self.log_msg_format.format(
                         _id=version_doc[version_id_field],
                         unique_name=version_doc.get("unique_name"),
-                        _current_version=version_doc[config.VERSION],
+                        _current_version=version_doc[VERSION],
                         expiry=version_doc.get("expiry"),
                     ),
                 )
-                version_doc.pop(config.ETAG, None)
+                version_doc.pop(ETAG, None)
 
             if versions_to_insert:
                 legal_archive_versions_service.post(versions_to_insert)
@@ -192,7 +190,7 @@ def upsert_into_legal_archive(self, item_id):
 
             for history_doc in history_to_insert:
                 self._denormalize_history(history_doc)
-                history_doc.pop(config.ETAG, None)
+                history_doc.pop(ETAG, None)
 
             if history_to_insert:
                 legal_archive_history_service.post(history_to_insert)
@@ -293,7 +291,7 @@ def _set_moved_to_legal(self, doc):
 
         :param dict doc: document
         """
-        get_resource_service("published").set_moved_to_legal(doc.get(config.ID_FIELD), doc.get(config.VERSION), True)
+        get_resource_service("published").set_moved_to_legal(doc.get(ID_FIELD), doc.get(VERSION), True)
 
     def import_legal_publish_queue(self, force_move=False, page_size=500):
         """Import legal publish queue.
@@ -322,15 +320,15 @@ def process_queue_items(self, queue_items, force_move=False):
         logger.info("Items to import {}.".format(len(queue_items)))
         logger.info("Get subscribers info for de-normalising queue items.")
         subscriber_ids = list({str(queue_item["subscriber_id"]) for queue_item in queue_items})
-        query = {"$and": [{config.ID_FIELD: {"$in": subscriber_ids}}]}
+        query = {"$and": [{ID_FIELD: {"$in": subscriber_ids}}]}
         subscribers = list(get_resource_service("subscribers").get(req=None, lookup=query))
-        subscribers = {str(subscriber[config.ID_FIELD]): subscriber for subscriber in subscribers}
+        subscribers = {str(subscriber[ID_FIELD]): subscriber for subscriber in subscribers}
 
         for queue_item in queue_items:
             try:
                 self._upsert_into_legal_archive_publish_queue(queue_item, subscribers, force_move)
             except Exception:
-                logger.exception("Failed to import publish queue item. {}".format(queue_item.get(config.ID_FIELD)))
+                logger.exception("Failed to import publish queue item. {}".format(queue_item.get(ID_FIELD)))
 
     def _upsert_into_legal_archive_publish_queue(self, queue_item, subscribers, force_move):
         """Upsert into legal publish queue.
@@ -351,7 +349,7 @@ def _upsert_into_legal_archive_publish_queue(self, queue_item, subscribers, forc
 
         logger.info("Processing queue item: {}".format(log_msg))
 
-        existing_queue_item = legal_publish_queue_service.find_one(req=None, _id=legal_queue_item.get(config.ID_FIELD))
+        existing_queue_item = legal_publish_queue_service.find_one(req=None, _id=legal_queue_item.get(ID_FIELD))
         if str(queue_item["subscriber_id"]) in subscribers:
             legal_queue_item["subscriber_id"] = subscribers[str(queue_item["subscriber_id"])]["name"]
             legal_queue_item["_subscriber_id"] = queue_item["subscriber_id"]
@@ -364,7 +362,7 @@ def _upsert_into_legal_archive_publish_queue(self, queue_item, subscribers, forc
             legal_publish_queue_service.post([legal_queue_item])
             logger.info("Inserted queue item: {}".format(log_msg))
         else:
-            legal_publish_queue_service.put(existing_queue_item.get(config.ID_FIELD), legal_queue_item)
+            legal_publish_queue_service.put(existing_queue_item.get(ID_FIELD), legal_queue_item)
             logger.info("Updated queue item: {}".format(log_msg))
 
         if (
@@ -375,9 +373,7 @@ def _upsert_into_legal_archive_publish_queue(self, queue_item, subscribers, forc
             updates["moved_to_legal"] = True
 
             try:
-                get_resource_service("publish_queue").system_update(
-                    queue_item.get(config.ID_FIELD), updates, queue_item
-                )
+                get_resource_service("publish_queue").system_update(queue_item.get(ID_FIELD), updates, queue_item)
                 logger.info("Queue item moved to legal. {}".format(log_msg))
             except Exception:
                 logger.exception("Failed to set moved to legal flag for queue item {}.".format(log_msg))
@@ -410,7 +406,7 @@ def get_publish_queue_items(self, page_size, expired_items=None):
         no_of_pages = 0
         if count:
             no_of_pages = len(range(0, count, page_size))
-            queue_id = cursor[0][config.ID_FIELD]
+            queue_id = cursor[0][ID_FIELD]
         logger.info("Number of items to move to legal archive publish queue: {}, pages={}".format(count, no_of_pages))
 
         for page in range(0, no_of_pages):
@@ -425,7 +421,7 @@ def get_publish_queue_items(self, page_size, expired_items=None):
             cursor = service.get(req=req, lookup=None)
             items = list(cursor)
             if len(items) > 0:
-                queue_id = items[len(items) - 1][config.ID_FIELD]
+                queue_id = items[len(items) - 1][ID_FIELD]
             logger.info(
                 "Fetched No. of Items: {} for page: {} "
                 "For import in to legal archive publish_queue.".format(len(items), (page + 1))
@@ -514,12 +510,12 @@ def run(self, page_size=None):
             expired_items = set()
             for items in self.get_expired_items(page_size):
                 for item in items:
-                    self._move_to_legal(item.get("item_id"), item.get(config.VERSION), expired_items)
+                    self._move_to_legal(item.get("item_id"), item.get(VERSION), expired_items)
 
             # get the invalid items from archive.
             for items in get_resource_service(ARCHIVE).get_expired_items(utcnow(), invalid_only=True):
                 for item in items:
-                    self._move_to_legal(item.get(config.ID_FIELD), item.get(config.VERSION), expired_items)
+                    self._move_to_legal(item.get(ID_FIELD), item.get(VERSION), expired_items)
 
             # if publish item is moved but publish_queue item is not.
             if len(expired_items):
diff --git a/apps/legal_archive/service.py b/apps/legal_archive/service.py
index 9571fe1670..4f6199a295 100644
--- a/apps/legal_archive/service.py
+++ b/apps/legal_archive/service.py
@@ -11,9 +11,11 @@
 import logging
 
 from eve.versioning import versioned_id_field
-from flask import g, current_app as app
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 from .resource import LEGAL_ARCHIVE_NAME
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, ITEMS
 from superdesk import Service, get_resource_privileges
 from superdesk.errors import SuperdeskApiError
 from superdesk.metadata.item import ITEM_TYPE, GUID_FIELD, CONTENT_TYPE
@@ -39,7 +41,7 @@ def on_create(self, docs):
         super().on_create(docs)
         for doc in docs:
             if ITEM_TYPE in doc:
-                doc.setdefault(config.ID_FIELD, doc.get(GUID_FIELD))
+                doc.setdefault(ID_FIELD, doc.get(GUID_FIELD))
                 if doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE:
                     self._change_location_of_items_in_package(doc)
             set_mongo_lang(doc)
@@ -94,10 +96,11 @@ def check_get_access_privilege(self):
         :raises: SuperdeskApiError.forbiddenError() if user is unauthorized to access the Legal Archive resources.
         """
 
-        if not hasattr(g, "user"):
+        user = get_current_app().get_current_user_dict()
+        if user is None:
             return
 
-        privileges = g.user.get("active_privileges", {})
+        privileges = user.get("active_privileges", {})
         resource_privileges = get_resource_privileges(self.datasource).get("GET", None)
         if privileges.get(resource_privileges, 0) == 0:
             raise SuperdeskApiError.forbiddenError()
@@ -131,7 +134,7 @@ def on_fetched(self, docs):
         Overriding this to enhance the published article with the one in archive collection
         """
 
-        self.enhance(docs[config.ITEMS])
+        self.enhance(docs[ITEMS])
 
     def on_fetched_item(self, doc):
         """
@@ -153,8 +156,8 @@ def create(self, docs, **kwargs):
         ids = []
         for doc in docs:
             doc_if_exists = None
-            if doc.get(config.ID_FIELD):
-                doc_if_exists = self.find_one(req=None, _id=doc.get(config.ID_FIELD))
+            if doc.get(ID_FIELD):
+                doc_if_exists = self.find_one(req=None, _id=doc.get(ID_FIELD))
             if doc_if_exists is None:
                 ids.extend(super().create([doc]))
 
@@ -171,7 +174,7 @@ def create(self, docs, **kwargs):
         for doc in docs:
             doc_if_exists = None
 
-            if config.ID_FIELD in doc:  # This happens when inserting docs from pre-populate command
+            if ID_FIELD in doc:  # This happens when inserting docs from pre-populate command
                 doc_if_exists = self.find_one(req=None, _id=doc["_id"])
 
             # This also happens when inserting docs from pre-populate command
@@ -188,18 +191,18 @@ def get(self, req, lookup):
         Version of an article in Legal Archive isn't maintained by Eve. Overriding this to fetch the version history.
         """
 
-        resource_def = app.config["DOMAIN"][LEGAL_ARCHIVE_NAME]
+        resource_def = get_app_config("DOMAIN")[LEGAL_ARCHIVE_NAME]
         id_field = versioned_id_field(resource_def)
 
-        if req and req.args and req.args.get(config.ID_FIELD):
+        if req and req.args and req.args.get(ID_FIELD):
             version_history = list(
-                super().get_from_mongo(req=ParsedRequest(), lookup={id_field: req.args.get(config.ID_FIELD)})
+                super().get_from_mongo(req=ParsedRequest(), lookup={id_field: req.args.get(ID_FIELD)})
             )
         else:
             version_history = list(super().get_from_mongo(req=req, lookup=lookup))
 
         for doc in version_history:
-            doc[config.ID_FIELD] = doc[id_field]
+            doc[ID_FIELD] = doc[id_field]
             self.enhance(doc)
 
         return ListCursor(version_history)
diff --git a/apps/links.py b/apps/links.py
index 5b58ad0964..c385278a94 100644
--- a/apps/links.py
+++ b/apps/links.py
@@ -1,6 +1,6 @@
 import superdesk
-
-from flask import request, current_app as app, json
+from superdesk.core import json, get_app_config
+from superdesk.flask import request
 from superdesk.resource import build_custom_hateoas
 
 from apps.archive.archive import ArchiveResource, ArchiveService
@@ -25,7 +25,7 @@ def elastic_filter(req=None):
         },
     }
 
-    LINKS_HOURS = app.config.get("LINKS_MAX_HOURS")
+    LINKS_HOURS = get_app_config("LINKS_MAX_HOURS")
     if LINKS_HOURS:
         query["bool"].update(
             {
diff --git a/apps/macros/macro_register.py b/apps/macros/macro_register.py
index b11549d0bb..d63a63de78 100644
--- a/apps/macros/macro_register.py
+++ b/apps/macros/macro_register.py
@@ -14,12 +14,12 @@
 import imp
 import os
 
-from flask import current_app
+from superdesk.core import get_current_app
 
 
 def load_macros(app=None):
     if not app:
-        app = current_app
+        app = get_current_app()
     module = app.config.get("MACROS_MODULE", "superdesk.macros")
     load_module(module)
 
@@ -41,7 +41,7 @@ def load_module(module):
 
     m = sys.modules[module]
     if getattr(m, "init_app", None):
-        m.init_app(current_app)
+        m.init_app(get_current_app())
 
     register_macros()
 
diff --git a/apps/marked_desks/service.py b/apps/marked_desks/service.py
index 8d44614977..f24b6ea3a5 100644
--- a/apps/marked_desks/service.py
+++ b/apps/marked_desks/service.py
@@ -9,13 +9,14 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import json
-from flask import current_app as app
+from eve.utils import ParsedRequest
+
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.services import BaseService
-from eve.utils import ParsedRequest
 from superdesk.notification import push_notification
 from apps.archive.common import get_user
-from eve.utils import config
 from superdesk.utc import utcnow
 from apps.archive.common import ITEM_MARK, ITEM_UNMARK
 
@@ -60,7 +61,7 @@ def create(self, docs, **kwargs):
                 user = get_user() or {}
                 new_mark = {}
                 new_mark["desk_id"] = doc["marked_desk"]
-                new_mark["user_marked"] = str(user.get(config.ID_FIELD, ""))
+                new_mark["user_marked"] = str(user.get(ID_FIELD, ""))
                 new_mark["date_marked"] = utcnow()
                 marked_desks.append(new_mark)
                 marked_desks_on = True
@@ -78,6 +79,7 @@ def create(self, docs, **kwargs):
                 "item:marked_desks", marked=int(marked_desks_on), item_id=item["_id"], mark_id=str(doc["marked_desk"])
             )
 
+            app = get_current_app().as_any()
             if marked_desks_on:
                 app.on_archive_item_updated({"desk_id": doc["marked_desk"]}, item, ITEM_MARK)
             else:
diff --git a/apps/packages/package_service.py b/apps/packages/package_service.py
index 052e5180e9..d1ed1ac2f2 100644
--- a/apps/packages/package_service.py
+++ b/apps/packages/package_service.py
@@ -10,10 +10,12 @@
 
 import logging
 from eve.versioning import resolve_document_version
-from flask import current_app as app
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, LAST_UPDATED
 import superdesk
 from collections import Counter
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 from superdesk.errors import SuperdeskApiError
 from superdesk import get_resource_service
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, EMBARGO
@@ -99,7 +101,7 @@ def on_create(self, docs):
 
         for doc in docs:
             if not doc.get("ingest_provider"):
-                doc["source"] = app.config.get("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
+                doc["source"] = get_app_config("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
 
             if "highlight" in doc:
                 copy_metadata_from_highlight_template(doc)
@@ -235,7 +237,7 @@ def update_link(self, package, assoc, delete=False):
         # skip root node
         if assoc.get(ID_REF):
             return
-        package_id = package[config.ID_FIELD]
+        package_id = package[ID_FIELD]
 
         item, item_id, endpoint = self.get_associated_item(assoc, not delete)
         if not item and delete:
@@ -253,7 +255,7 @@ def update_link(self, package, assoc, delete=False):
 
     def check_for_duplicates(self, package, associations):
         counter = Counter()
-        package_id = package[config.ID_FIELD]
+        package_id = package[ID_FIELD]
         for itemRef in [assoc[RESIDREF] for assoc in associations if assoc.get(RESIDREF)]:
             if itemRef == package_id:
                 message = _("Trying to self reference as an association.")
@@ -290,7 +292,7 @@ def get_packages(self, doc_id, not_package_id=None):
         query = {"$and": [{ITEM_TYPE: CONTENT_TYPE.COMPOSITE}, {"groups.refs.residRef": doc_id}]}
 
         if not_package_id:
-            query["$and"].append({config.ID_FIELD: {"$ne": not_package_id}})
+            query["$and"].append({ID_FIELD: {"$ne": not_package_id}})
 
         request = ParsedRequest()
         request.max_results = 100
@@ -399,14 +401,16 @@ def remove_refs_in_package(self, package, ref_id_to_remove, processed_packages=N
                 return self.remove_refs_in_package(sub_package, ref_id_to_remove)
 
         new_groups = self.remove_group_ref(package, ref_id_to_remove)
-        updates = {config.LAST_UPDATED: utcnow(), GROUPS: new_groups}
+        updates = {LAST_UPDATED: utcnow(), GROUPS: new_groups}
 
         resolve_document_version(updates, ARCHIVE, "PATCH", package)
-        get_resource_service(ARCHIVE).patch(package[config.ID_FIELD], updates)
+        get_resource_service(ARCHIVE).patch(package[ID_FIELD], updates)
+
+        app = get_current_app().as_any()
         app.on_archive_item_updated(updates, package, ITEM_UNLINK)
-        insert_into_versions(id_=package[config.ID_FIELD])
+        insert_into_versions(id_=package[ID_FIELD])
 
-        sub_package_ids.append(package[config.ID_FIELD])
+        sub_package_ids.append(package[ID_FIELD])
         return sub_package_ids
 
     def _get_associations(self, doc):
@@ -419,7 +423,7 @@ def remove_spiked_refs_from_package(self, doc_id, not_package_id=None):
 
         processed_packages = []
         for package in packages:
-            if str(package[config.ID_FIELD]) in processed_packages:
+            if str(package[ID_FIELD]) in processed_packages:
                 continue
 
             processed_packages.extend(self.remove_refs_in_package(package, doc_id, processed_packages))
diff --git a/apps/picture_crop/__init__.py b/apps/picture_crop/__init__.py
index 8a225fee8f..b1dcc46277 100644
--- a/apps/picture_crop/__init__.py
+++ b/apps/picture_crop/__init__.py
@@ -1,6 +1,5 @@
 import superdesk
-
-from flask import current_app as app, json
+from superdesk.core import json, get_current_app
 from superdesk.utils import get_random_string
 from superdesk.media.media_operations import crop_image, process_image, encode_metadata
 from apps.search_providers.proxy import PROXY_ENDPOINT
@@ -13,7 +12,7 @@ def get_file(rendition, item):
             return superdesk.get_resource_service(item["fetch_endpoint"]).fetch_rendition(rendition, item=item)
         return superdesk.get_resource_service(item["fetch_endpoint"]).fetch_rendition(rendition)
     else:
-        return app.media.fetch_rendition(rendition)
+        return get_current_app().media.fetch_rendition(rendition)
 
 
 def get_crop_size(crop, width=800, height=600):
@@ -61,6 +60,7 @@ class PictureCropService(superdesk.Service):
 
     def create(self, docs, **kwargs):
         ids = []
+        app = get_current_app()
         for doc in docs:
             item = doc.pop("item")
             crop = doc.pop("crop")
diff --git a/apps/picture_renditions.py b/apps/picture_renditions.py
index 2e8b96a153..453e08e357 100644
--- a/apps/picture_renditions.py
+++ b/apps/picture_renditions.py
@@ -1,5 +1,5 @@
 import superdesk
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.media.renditions import generate_renditions, get_renditions_spec
 from apps.picture_crop import get_file
 
@@ -12,6 +12,7 @@ class PictureRenditionsService(superdesk.Service):
     """
 
     def create(self, docs, **kwargs):
+        app = get_current_app()
         ids = []
         for doc in docs:
             item = doc.pop("item")
diff --git a/apps/preferences.py b/apps/preferences.py
index 0c140f1da5..7c8c117268 100644
--- a/apps/preferences.py
+++ b/apps/preferences.py
@@ -11,8 +11,8 @@
 import logging
 import superdesk
 
-from flask import request
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import request
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk import get_backend
@@ -390,14 +390,14 @@ def on_role_privileges_updated(self, role, role_users):
         if not role_users or not role:
             return
 
-        logger.info("On_Role_Privileges_Updated: Updating Users for Role:{}.".format(role.get(config.ID_FIELD)))
+        logger.info("On_Role_Privileges_Updated: Updating Users for Role:{}.".format(role.get(ID_FIELD)))
         for user in role_users:
             try:
-                super().update(user[config.ID_FIELD], {}, user)
+                super().update(user[ID_FIELD], {}, user)
             except Exception:
                 logger.warn(
                     "On_Role_Privileges_Updated:Failed to update user:{} with role:{}.".format(
-                        user.get(config.ID_FIELD), role.get(config.ID_FIELD)
+                        user.get(ID_FIELD), role.get(ID_FIELD)
                     ),
                     exc_info=True,
                 )
diff --git a/apps/prepopulate/app_initialize.py b/apps/prepopulate/app_initialize.py
index 2bbcef137c..468ac37f8a 100644
--- a/apps/prepopulate/app_initialize.py
+++ b/apps/prepopulate/app_initialize.py
@@ -9,8 +9,9 @@
 
 import superdesk
 import pymongo
-from eve.utils import config
-from flask import current_app as app
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ETAG
 from superdesk.commands.flush_elastic_index import FlushElasticIndex
 
 logger = logging.getLogger(__name__)
@@ -205,7 +206,7 @@ def get_filepath(filename, path=None):
         dirs = [path]
     else:
         dirs = [
-            app.config.get("INIT_DATA_PATH", INIT_DATA_PATH),
+            get_app_config("INIT_DATA_PATH", INIT_DATA_PATH),
             INIT_DATA_PATH,
         ]
 
@@ -275,14 +276,15 @@ def run(self, entity_name=None, path=None, sample_data=False, force=False, init_
         :param bool init_index_only: if True, it only initializes index only
         """
         logger.info("Starting data initialization")
-        logger.info("Config: %s", app.config["APP_ABSPATH"])
+        logger.info("Config: %s", get_app_config("APP_ABSPATH"))
 
         # create indexes in mongo
         # We can safely ignore duplicate key errors as this only affects performance
         # As we want the rest of this command to still execute
+        app = get_current_app()
         app.init_indexes(ignore_duplicate_keys=True)
 
-        rebuild_elastic_on_init_data_error = app.config.get("REBUILD_ELASTIC_ON_INIT_DATA_ERROR")
+        rebuild_elastic_on_init_data_error = get_app_config("REBUILD_ELASTIC_ON_INIT_DATA_ERROR")
 
         # put mapping to elastic
         try:
@@ -345,6 +347,8 @@ def import_file(self, entity_name, path, file_name, index_params, do_patch=False
         """
         logger.info("Process %r", entity_name)
         file_path = file_name and get_filepath(file_name, path)
+        app = get_current_app()
+
         if not file_path:
             pass
         elif not file_path.exists():
@@ -376,8 +380,8 @@ def import_file(self, entity_name, path, file_name, index_params, do_patch=False
 
                     if data:
                         for item in data:
-                            if not item.get(config.ETAG):
-                                item.setdefault(config.ETAG, "init")
+                            if not item.get(ETAG):
+                                item.setdefault(ETAG, "init")
                         service.post(data)
 
                     if existing_data and do_patch:
diff --git a/apps/prepopulate/app_prepopulate.py b/apps/prepopulate/app_prepopulate.py
index f1aa781fcd..67623c3d0e 100644
--- a/apps/prepopulate/app_prepopulate.py
+++ b/apps/prepopulate/app_prepopulate.py
@@ -10,16 +10,18 @@
 
 import os
 import json
-import flask
 import logging
 import superdesk
 import multiprocessing
 import werkzeug.exceptions
 
-from flask import current_app as app
 from eve.utils import date_to_str
 from eve.versioning import insert_versioning_documents
 from bson.objectid import ObjectId
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import VERSION
+from superdesk.flask import g
 from apps.archive.common import ITEM_OPERATION
 from superdesk import get_resource_service
 from superdesk.metadata.item import ITEM_STATE, CONTENT_STATE
@@ -48,8 +50,8 @@ def set_logged_user(username, password):
         user = {"username": username, "password": password}
         get_resource_service("auth_db").post([user])
         auth_token = get_resource_service("auth").find_one(username=username, req=None)
-    flask.g.user = get_resource_service("users").find_one(req=None, username=username)
-    flask.g.auth = auth_token
+    g.user = get_resource_service("users").find_one(req=None, username=username)
+    g.auth = auth_token
 
 
 def get_default_user():
@@ -76,6 +78,7 @@ def prepopulate_data(file_name, default_user=None, directory=None):
     users = {default_user["username"]: default_user["password"]}
     default_username = default_user["username"]
     file = os.path.join(directory, file_name)
+    app = get_current_app()
     with open(file, "rt", encoding="utf8") as app_prepopulation:
         json_data = json.load(app_prepopulation)
         for item in json_data:
@@ -114,8 +117,8 @@ def prepopulate_data(file_name, default_user=None, directory=None):
                 if id_name:
                     placeholders[id_name] = str(ids[0])
 
-            if app.config["VERSION"] in data:
-                number_of_versions_to_insert = data[app.config["VERSION"]]
+            if VERSION in data:
+                number_of_versions_to_insert = data[VERSION]
                 doc_versions = []
 
                 if data[ITEM_STATE] not in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED, CONTENT_STATE.KILLED]:
@@ -130,7 +133,7 @@ def prepopulate_data(file_name, default_user=None, directory=None):
                         published_version = data.copy()
                         published_version[ITEM_STATE] = CONTENT_STATE.PUBLISHED
                         published_version[ITEM_OPERATION] = "publish"
-                        published_version[app.config["VERSION"]] = number_of_versions_to_insert - 1
+                        published_version[VERSION] = number_of_versions_to_insert - 1
                         doc_versions.append(published_version)
 
                         number_of_versions_to_insert -= 2
@@ -143,7 +146,7 @@ def prepopulate_data(file_name, default_user=None, directory=None):
                         doc = data.copy()
                         doc[ITEM_STATE] = CONTENT_STATE.PROGRESS
                         doc.pop(ITEM_OPERATION, "")
-                        doc[app.config["VERSION"]] = number_of_versions_to_insert
+                        doc[VERSION] = number_of_versions_to_insert
                         doc_versions.append(doc)
 
                         number_of_versions_to_insert -= 1
@@ -167,6 +170,7 @@ class PrepopulateResource(Resource):
 
 class PrepopulateService(BaseService):
     def _create(self, docs):
+        app = get_current_app()
         for doc in docs:
             if doc.get("remove_first"):
                 clean_dbs(app, force=True)
@@ -189,7 +193,7 @@ def create(self, docs, **kwargs):
         with multiprocessing.Lock() as lock:
             with timer("prepopulate"):
                 self._create(docs)
-            if app.config.get("SUPERDESK_TESTING"):
+            if get_app_config("SUPERDESK_TESTING"):
                 for provider in ["paimg", "aapmm"]:
                     if provider not in allowed_search_providers:
                         register_search_provider(provider, provider)
diff --git a/apps/prepopulate/app_scaffold_data.py b/apps/prepopulate/app_scaffold_data.py
index fc6364d627..013d4b54ec 100644
--- a/apps/prepopulate/app_scaffold_data.py
+++ b/apps/prepopulate/app_scaffold_data.py
@@ -10,8 +10,9 @@
 
 import superdesk
 import logging
+
+from superdesk.resource_fields import ID_FIELD, VERSION
 from superdesk import get_resource_service
-from flask import current_app as app
 from apps.archive.archive import SOURCE as ARCHIVE
 from apps.archive.common import generate_unique_id_and_name, remove_unwanted, insert_into_versions
 from superdesk.metadata.item import GUID_TAG, FAMILY_ID, ITEM_STATE, CONTENT_STATE
@@ -68,11 +69,11 @@ def ingest_items_for(self, desk, no_of_stories, skip_index):
             for item in items:
                 dest_doc = dict(item)
                 new_id = generate_guid(type=GUID_TAG)
-                dest_doc[app.config["ID_FIELD"]] = new_id
+                dest_doc[ID_FIELD] = new_id
                 dest_doc["guid"] = new_id
                 generate_unique_id_and_name(dest_doc)
 
-                dest_doc[app.config["VERSION"]] = 1
+                dest_doc[VERSION] = 1
                 dest_doc[ITEM_STATE] = CONTENT_STATE.FETCHED
                 user_id = desk.get("members", [{"user": None}])[0].get("user")
                 dest_doc["original_creator"] = user_id
@@ -81,7 +82,7 @@ def ingest_items_for(self, desk, no_of_stories, skip_index):
                 from apps.tasks import send_to
 
                 send_to(dest_doc, desk_id=desk_id, stage_id=stage_id, user_id=user_id)
-                dest_doc[app.config["VERSION"]] = 1  # Above step increments the version and needs to reset
+                dest_doc[VERSION] = 1  # Above step increments the version and needs to reset
                 dest_doc[FAMILY_ID] = item["_id"]
 
                 remove_unwanted(dest_doc)
@@ -89,7 +90,7 @@ def ingest_items_for(self, desk, no_of_stories, skip_index):
 
             get_resource_service(ARCHIVE).post(archive_items)
             for item in archive_items:
-                insert_into_versions(id_=item[app.config["ID_FIELD"]])
+                insert_into_versions(id_=item[ID_FIELD])
 
 
 superdesk.command("app:scaffold_data", AppScaffoldDataCommand())
diff --git a/apps/products/service.py b/apps/products/service.py
index 2985d49a70..04f4242dcc 100644
--- a/apps/products/service.py
+++ b/apps/products/service.py
@@ -8,9 +8,9 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.services import CacheableService
-from eve.utils import config
 from superdesk.errors import SuperdeskApiError
 from superdesk.metadata.utils import ProductTypes
 from flask_babel import _
@@ -41,7 +41,7 @@ def _validate_product_type(self, updates, original):
         if updates.get("product_type", "both") != original.get("product_type", "both"):
             if updates.get("product_type") == ProductTypes.DIRECT.value:
                 names = get_resource_service("subscribers").get_subscriber_names(
-                    {"api_products": original.get(config.ID_FIELD)}
+                    {"api_products": original.get(ID_FIELD)}
                 )
                 if names:
                     raise SuperdeskApiError.badRequestError(
@@ -50,9 +50,7 @@ def _validate_product_type(self, updates, original):
                         )
                     )
             elif updates.get("product_type") == ProductTypes.API.value:
-                names = get_resource_service("subscribers").get_subscriber_names(
-                    {"products": original.get(config.ID_FIELD)}
-                )
+                names = get_resource_service("subscribers").get_subscriber_names({"products": original.get(ID_FIELD)})
                 if names:
                     raise SuperdeskApiError.badRequestError(
                         message=_("Product is used for direct publishing for the subscriber(s): {subscribers}").format(
diff --git a/apps/publish/content/common.py b/apps/publish/content/common.py
index 1139a619a9..72dcb48b7f 100644
--- a/apps/publish/content/common.py
+++ b/apps/publish/content/common.py
@@ -12,10 +12,11 @@
 import superdesk
 import superdesk.signals as signals
 
-from copy import copy
-from copy import deepcopy
-from flask import current_app as app
+from copy import copy, deepcopy
 
+from superdesk.core import get_app_config, get_current_app, json
+from superdesk.resource_fields import ID_FIELD, LAST_UPDATED, VERSION
+from superdesk.flask import request
 from superdesk import get_resource_service
 from apps.content import push_content_notification
 from apps.content_types.content_types import DEFAULT_SCHEMA
@@ -42,7 +43,6 @@
 from superdesk.validation import ValidationError
 from superdesk.media.image import get_metadata_from_item, write_metadata
 
-from eve.utils import config
 from eve.versioning import resolve_document_version
 
 from apps.archive.archive import ArchiveResource, SOURCE as ARCHIVE
@@ -75,7 +75,6 @@
 from apps.item_lock.components.item_lock import set_unlock_updates
 
 from flask_babel import _
-from flask import request, json
 
 
 logger = logging.getLogger(__name__)
@@ -147,7 +146,7 @@ def on_update(self, updates, original):
         self._set_updates(
             original,
             updates,
-            updates.get(config.LAST_UPDATED, utcnow()),
+            updates.get(LAST_UPDATED, utcnow()),
             preserve_state=original.get("state") in (CONTENT_STATE.SCHEDULED,) and "pubstatus" not in updates,
         )
         convert_task_attributes_to_objectId(updates)  # ???
@@ -157,7 +156,7 @@ def on_update(self, updates, original):
         update_refs(updates, original)
 
     def on_updated(self, updates, original):
-        original = super().find_one(req=None, _id=original[config.ID_FIELD])
+        original = super().find_one(req=None, _id=original[ID_FIELD])
         updates.update(original)
 
         if updates[ITEM_OPERATION] not in {ITEM_KILL, ITEM_TAKEDOWN} and original.get(ITEM_TYPE) in [
@@ -172,7 +171,7 @@ def on_updated(self, updates, original):
         CropService().update_media_references(updates, original, True)
         signals.item_published.send(self, item=original, after_scheduled=False)
 
-        packages = self.package_service.get_packages(original[config.ID_FIELD])
+        packages = self.package_service.get_packages(original[ID_FIELD])
         if packages and packages.count() > 0:
             archive_correct = get_resource_service("archive_correct")
             processed_packages = []
@@ -181,23 +180,23 @@ def on_updated(self, updates, original):
                 if (
                     package[ITEM_STATE] in [CONTENT_STATE.PUBLISHED, CONTENT_STATE.CORRECTED]
                     and package.get(PACKAGE_TYPE, "") == ""
-                    and str(package[config.ID_FIELD]) not in processed_packages
+                    and str(package[ID_FIELD]) not in processed_packages
                 ):
                     original_updates["groups"] = package["groups"]
 
                     if updates.get("headline"):
                         self.package_service.update_field_in_package(
-                            original_updates, original[config.ID_FIELD], "headline", updates.get("headline")
+                            original_updates, original[ID_FIELD], "headline", updates.get("headline")
                         )
 
                     if updates.get("slugline"):
                         self.package_service.update_field_in_package(
-                            original_updates, original[config.ID_FIELD], "slugline", updates.get("slugline")
+                            original_updates, original[ID_FIELD], "slugline", updates.get("slugline")
                         )
 
-                    archive_correct.patch(id=package[config.ID_FIELD], updates=original_updates)
-                    insert_into_versions(id_=package[config.ID_FIELD])
-                    processed_packages.append(package[config.ID_FIELD])
+                    archive_correct.patch(id=package[ID_FIELD], updates=original_updates)
+                    insert_into_versions(id_=package[ID_FIELD])
+                    processed_packages.append(package[ID_FIELD])
 
     def update(self, id, updates, original):
         """
@@ -229,7 +228,7 @@ def update(self, id, updates, original):
 
                 signals.item_publish.send(self, item=updated, updates=updates)
                 self._update_archive(original, updates, should_insert_into_versions=auto_publish)
-                self.update_published_collection(published_item_id=original[config.ID_FIELD], updated=updated)
+                self.update_published_collection(published_item_id=original[ID_FIELD], updated=updated)
 
             from apps.publish.enqueue import enqueue_published
 
@@ -240,7 +239,7 @@ def update(self, id, updates, original):
                 item=str(id),
                 unique_name=original["unique_name"],
                 desk=str(original.get("task", {}).get("desk", "")),
-                user=str(user.get(config.ID_FIELD, "")),
+                user=str(user.get(ID_FIELD, "")),
             )
 
             if updates.get("previous_marked_user") and not updates.get("marked_for_user"):
@@ -353,7 +352,7 @@ def _raise_if_unpublished_related_items(self, original):
             return
 
         if (
-            config.PUBLISH_ASSOCIATED_ITEMS
+            get_app_config("PUBLISH_ASSOCIATED_ITEMS")
             or not original.get(ASSOCIATIONS)
             or self.publish_type not in [ITEM_PUBLISH, ITEM_CORRECT]
         ):
@@ -421,7 +420,7 @@ def _process_publish_updates(self, original, updates):
             updates["source"] = (
                 desk["source"]
                 if desk and desk.get("source", "")
-                else app.settings["DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES"]
+                else get_app_config("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
             )
         updates["pubstatus"] = PUB_STATUS.CANCELED if self.publish_type == ITEM_KILL else PUB_STATUS.USABLE
         self._set_item_expiry(updates, original)
@@ -441,8 +440,8 @@ def _set_item_expiry(self, updates, original):
         elif EMBARGO in original or PUBLISH_SCHEDULE in original:
             offset = get_utc_schedule(original, PUBLISH_SCHEDULE) or get_utc_schedule(original, EMBARGO)
 
-        if app.settings.get("PUBLISHED_CONTENT_EXPIRY_MINUTES"):
-            updates["expiry"] = get_expiry_date(app.settings["PUBLISHED_CONTENT_EXPIRY_MINUTES"], offset=offset)
+        if get_app_config("PUBLISHED_CONTENT_EXPIRY_MINUTES"):
+            updates["expiry"] = get_expiry_date(get_app_config("PUBLISHED_CONTENT_EXPIRY_MINUTES"), offset=offset)
         else:
             updates["expiry"] = get_expiry(desk_id, stage_id, offset=offset)
 
@@ -490,18 +489,18 @@ def _publish_package_items(self, package, updates):
                         )
                     else:
                         # publish the item
-                        package_item[PUBLISHED_IN_PACKAGE] = package[config.ID_FIELD]
-                        archive_publish.patch(id=package_item.pop(config.ID_FIELD), updates=package_item)
+                        package_item[PUBLISHED_IN_PACKAGE] = package[ID_FIELD]
+                        archive_publish.patch(id=package_item.pop(ID_FIELD), updates=package_item)
 
                     insert_into_versions(id_=guid)
 
                 elif guid in added_items:
                     linked_in_packages = package_item.get(LINKED_IN_PACKAGES, [])
-                    if package[config.ID_FIELD] not in (lp.get(PACKAGE) for lp in linked_in_packages):
-                        linked_in_packages.append({PACKAGE: package[config.ID_FIELD]})
+                    if package[ID_FIELD] not in (lp.get(PACKAGE) for lp in linked_in_packages):
+                        linked_in_packages.append({PACKAGE: package[ID_FIELD]})
                         super().system_update(
                             guid,
-                            {LINKED_IN_PACKAGES: linked_in_packages, PUBLISHED_IN_PACKAGE: package[config.ID_FIELD]},
+                            {LINKED_IN_PACKAGES: linked_in_packages, PUBLISHED_IN_PACKAGE: package[ID_FIELD]},
                             package_item,
                         )
 
@@ -510,24 +509,24 @@ def _publish_package_items(self, package, updates):
                     linked_in_packages = [
                         linked
                         for linked in package_item.get(LINKED_IN_PACKAGES, [])
-                        if linked.get(PACKAGE) != package.get(config.ID_FIELD)
+                        if linked.get(PACKAGE) != package.get(ID_FIELD)
                     ]
                     super().system_update(guid, {LINKED_IN_PACKAGES: linked_in_packages}, package_item)
 
                 package_item = super().find_one(req=None, _id=guid)
 
                 self.package_service.update_field_in_package(
-                    updates, package_item[config.ID_FIELD], config.VERSION, package_item[config.VERSION]
+                    updates, package_item[ID_FIELD], VERSION, package_item[VERSION]
                 )
 
                 if package_item.get(ASSOCIATIONS):
                     self.package_service.update_field_in_package(
-                        updates, package_item[config.ID_FIELD], ASSOCIATIONS, package_item[ASSOCIATIONS]
+                        updates, package_item[ID_FIELD], ASSOCIATIONS, package_item[ASSOCIATIONS]
                     )
 
         updated = deepcopy(package)
         updated.update(updates)
-        self.update_published_collection(published_item_id=package[config.ID_FIELD], updated=updated)
+        self.update_published_collection(published_item_id=package[ID_FIELD], updated=updated)
 
     def update_published_collection(self, published_item_id, updated=None):
         """Updates the published collection with the published item.
@@ -564,14 +563,14 @@ def _set_updates(self, original, updates, last_updated, preserve_state=False):
         """
         if not preserve_state:
             self.set_state(original, updates)
-        updates.setdefault(config.LAST_UPDATED, last_updated)
+        updates.setdefault(LAST_UPDATED, last_updated)
 
-        if original[config.VERSION] == updates.get(config.VERSION, original[config.VERSION]):
+        if original[VERSION] == updates.get(VERSION, original[VERSION]):
             resolve_document_version(document=updates, resource=ARCHIVE, method="PATCH", latest_doc=original)
 
         user = get_user()
-        if user and user.get(config.ID_FIELD):
-            updates["version_creator"] = user[config.ID_FIELD]
+        if user and user.get(ID_FIELD):
+            updates["version_creator"] = user[ID_FIELD]
 
     def _update_archive(self, original, updates, versioned_doc=None, should_insert_into_versions=True):
         """Updates the articles into archive collection and inserts the latest into archive_versions.
@@ -581,16 +580,18 @@ def _update_archive(self, original, updates, versioned_doc=None, should_insert_i
         :param: versioned_doc: doc which can be inserted into archive_versions
         :param: should_insert_into_versions if True inserts the latest document into versions collection
         """
-        self.backend.update(self.datasource, original[config.ID_FIELD], updates, original)
+        self.backend.update(self.datasource, original[ID_FIELD], updates, original)
+
+        app = get_current_app().as_any()
         app.on_archive_item_updated(updates, original, updates[ITEM_OPERATION])
 
         if should_insert_into_versions:
             if versioned_doc is None:
-                insert_into_versions(id_=original[config.ID_FIELD])
+                insert_into_versions(id_=original[ID_FIELD])
             else:
                 insert_into_versions(doc=versioned_doc)
 
-        get_component(ItemAutosave).clear(original[config.ID_FIELD])
+        get_component(ItemAutosave).clear(original[ID_FIELD])
 
     def _get_changed_items(self, existing_items, updates):
         """Returns the added and removed items from existing_items.
@@ -637,8 +638,8 @@ def _validate_associated_items(self, original_item, updates=None, validation_err
 
         for item in items:
             orig = None
-            if isinstance(item, dict) and item.get(config.ID_FIELD):
-                orig = super().find_one(req=None, _id=item[config.ID_FIELD]) or {}
+            if isinstance(item, dict) and item.get(ID_FIELD):
+                orig = super().find_one(req=None, _id=item[ID_FIELD]) or {}
                 doc = copy(orig)
                 doc.update(item)
                 try:
@@ -689,7 +690,7 @@ def _validate_associated_items(self, original_item, updates=None, validation_err
                     ]
                     validation_errors.extend(pre_errors)
 
-            if config.PUBLISH_ASSOCIATED_ITEMS:
+            if get_app_config("PUBLISH_ASSOCIATED_ITEMS"):
                 # check the locks on the items
                 if doc.get("lock_user"):
                     if original_item["lock_user"] != doc["lock_user"]:
@@ -717,7 +718,7 @@ def _import_into_legal_archive(self, doc):
         """
 
         if doc.get(ITEM_STATE) != CONTENT_STATE.SCHEDULED:
-            kwargs = {"item_id": doc.get(config.ID_FIELD)}
+            kwargs = {"item_id": doc.get(ID_FIELD)}
             # countdown=3 is for elasticsearch to be refreshed with archive and published changes
             import_into_legal_archive.apply_async(countdown=3, kwargs=kwargs)  # @UndefinedVariable
 
@@ -735,14 +736,14 @@ def _refresh_associated_items(self, original, skip_related=False):
         """
         associations = original.get(ASSOCIATIONS) or {}
         for name, item in associations.items():
-            if isinstance(item, dict) and item.get(config.ID_FIELD) and (not skip_related or len(item.keys()) > 2):
+            if isinstance(item, dict) and item.get(ID_FIELD) and (not skip_related or len(item.keys()) > 2):
                 keys = [key for key in DEFAULT_SCHEMA.keys() if key not in PRESERVED_FIELDS]
 
-                if app.settings.get("COPY_METADATA_FROM_PARENT") and item.get(ITEM_TYPE) in MEDIA_TYPES:
+                if get_app_config("COPY_METADATA_FROM_PARENT") and item.get(ITEM_TYPE) in MEDIA_TYPES:
                     updates = original
                     keys = FIELDS_TO_COPY_FOR_ASSOCIATED_ITEM
                 else:
-                    updates = super().find_one(req=None, _id=item[config.ID_FIELD]) or {}
+                    updates = super().find_one(req=None, _id=item[ID_FIELD]) or {}
 
                 try:
                     is_db_item_bigger_ver = updates["_current_version"] > item["_current_version"]
@@ -751,7 +752,7 @@ def _refresh_associated_items(self, original, skip_related=False):
                 else:
                     # if copying from parent the don't keep the existing
                     # otherwise check the value is_db_item_bigger_ver
-                    keep_existing = not app.settings.get("COPY_METADATA_FROM_PARENT") and not is_db_item_bigger_ver
+                    keep_existing = not get_app_config("COPY_METADATA_FROM_PARENT") and not is_db_item_bigger_ver
                     update_item_data(item, updates, keys, keep_existing=keep_existing)
 
     def _fix_related_references(self, updated, updates):
@@ -792,8 +793,8 @@ def _publish_associated_items(self, original, updates=None):
         for associations_key, associated_item in associations.items():
             if associated_item is None:
                 continue
-            if isinstance(associated_item, dict) and associated_item.get(config.ID_FIELD):
-                if not config.PUBLISH_ASSOCIATED_ITEMS or not publish_service:
+            if isinstance(associated_item, dict) and associated_item.get(ID_FIELD):
+                if not get_app_config("PUBLISH_ASSOCIATED_ITEMS") or not publish_service:
                     if original.get(ASSOCIATIONS, {}).get(associations_key):
                         # Not allowed to publish
                         original[ASSOCIATIONS][associations_key]["state"] = self.published_state
@@ -810,7 +811,7 @@ def _publish_associated_items(self, original, updates=None):
 
                 if associated_item.get("state") == CONTENT_STATE.UNPUBLISHED:
                     # get the original associated item from archive
-                    orig_associated_item = archive_service.find_one(req=None, _id=associated_item[config.ID_FIELD])
+                    orig_associated_item = archive_service.find_one(req=None, _id=associated_item[ID_FIELD])
 
                     orig_associated_item["state"] = updates.get("state", self.published_state)
                     orig_associated_item["operation"] = self.publish_type
@@ -819,7 +820,7 @@ def _publish_associated_items(self, original, updates=None):
                     self._inherit_publish_schedule(original, updates, orig_associated_item)
 
                     get_resource_service("archive_publish").patch(
-                        id=orig_associated_item.pop(config.ID_FIELD), updates=orig_associated_item
+                        id=orig_associated_item.pop(ID_FIELD), updates=orig_associated_item
                     )
                     continue
 
@@ -828,7 +829,7 @@ def _publish_associated_items(self, original, updates=None):
                     remove_unwanted(associated_item)
 
                     # get the original associated item from archive
-                    orig_associated_item = archive_service.find_one(req=None, _id=associated_item[config.ID_FIELD])
+                    orig_associated_item = archive_service.find_one(req=None, _id=associated_item[ID_FIELD])
 
                     # check if the original associated item exists in archive
                     if not orig_associated_item:
@@ -859,7 +860,7 @@ def _publish_associated_items(self, original, updates=None):
 
                     associated_item_updates = associated_item.copy()
                     get_resource_service("archive_publish").patch(
-                        id=associated_item[config.ID_FIELD], updates=associated_item_updates
+                        id=associated_item[ID_FIELD], updates=associated_item_updates
                     )
                     sync_associated_item_changes(associated_item, associated_item_updates)
                     associated_item["state"] = updates.get("state", self.published_state)
@@ -880,14 +881,14 @@ def _publish_associated_items(self, original, updates=None):
                         associated_item.get("task", {}).pop("stage", None)
                         remove_unwanted(associated_item)
                         associated_item_updates = associated_item.copy()
-                        publish_service.patch(id=associated_item[config.ID_FIELD], updates=associated_item_updates)
+                        publish_service.patch(id=associated_item[ID_FIELD], updates=associated_item_updates)
                         sync_associated_item_changes(associated_item, associated_item_updates)
                         continue
 
                     if association_updates.get("state") not in PUBLISH_STATES:
                         # There's an update to the published associated item
                         remove_unwanted(association_updates)
-                        publish_service.patch(id=associated_item[config.ID_FIELD], updates=association_updates)
+                        publish_service.patch(id=associated_item[ID_FIELD], updates=association_updates)
 
             # When there is an associated item which is published, Inserts the latest version of that associated item into archive_versions.
             insert_into_versions(doc=associated_item)
@@ -898,11 +899,11 @@ def _mark_media_item_as_used(self, updates, original):
             return
 
         for item_name, item_obj in updates.get(ASSOCIATIONS).items():
-            if not item_obj or config.ID_FIELD not in item_obj:
+            if not item_obj or ID_FIELD not in item_obj:
                 continue
-            item_id = item_obj[config.ID_FIELD]
+            item_id = item_obj[ID_FIELD]
             media_item = self.find_one(req=None, _id=item_id)
-            if app.settings.get("COPY_METADATA_FROM_PARENT") and item_obj.get(ITEM_TYPE) in MEDIA_TYPES:
+            if get_app_config("COPY_METADATA_FROM_PARENT") and item_obj.get(ITEM_TYPE) in MEDIA_TYPES:
                 stored_item = (original.get(ASSOCIATIONS) or {}).get(item_name) or item_obj
             else:
                 stored_item = media_item
@@ -920,7 +921,7 @@ def _inherit_publish_schedule(self, original, updates, associated_item):
 
     def _update_picture_metadata(self, updates, original, updated):
         renditions = updated.get("renditions") or {}
-        mapping = app.config.get("PHOTO_METADATA_MAPPING")
+        mapping = get_app_config("PHOTO_METADATA_MAPPING")
         if not mapping or not renditions:
             return
         try:
@@ -930,6 +931,7 @@ def _update_picture_metadata(self, updates, original, updated):
         if not media_id:
             return
 
+        app = get_current_app()
         picture = app.media.get(media_id)
         binary = picture.read()
         metadata = get_metadata_from_item(updated, mapping)
diff --git a/apps/publish/content/correct.py b/apps/publish/content/correct.py
index 7c482a1994..f8a0052f03 100644
--- a/apps/publish/content/correct.py
+++ b/apps/publish/content/correct.py
@@ -8,6 +8,7 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from superdesk.core import get_current_app, get_app_config
 from superdesk import get_resource_service, editor_utils
 from superdesk.media.crop import CropService
 from superdesk.metadata.item import ITEM_STATE, EMBARGO, SCHEDULE_SETTINGS
@@ -20,7 +21,6 @@
 from .common import BasePublishService, BasePublishResource, ITEM_CORRECT
 from superdesk.emails import send_translation_changed
 from superdesk.activity import add_activity
-from flask import g, current_app as app
 
 
 def send_translation_notifications(original):
@@ -50,7 +50,8 @@ def send_translation_notifications(original):
     if len(recipients) == 0:
         return
 
-    username = g.user.get("display_name") or g.user.get("username")
+    user = get_current_app().get_current_user_dict() or {}
+    username = user.get("display_name") or user.get("username")
     send_translation_changed(username, changed_article, recipients)
 
 
@@ -80,7 +81,7 @@ def set_state(self, original, updates):
             super().set_state(original, updates)
 
     def change_being_corrected_to_published(self, updates, original):
-        if app.config.get("CORRECTIONS_WORKFLOW") and original.get("state") == "correction":
+        if get_app_config("CORRECTIONS_WORKFLOW") and original.get("state") == "correction":
             publish_service = get_resource_service("published")
             being_corrected_article = publish_service.find_one(
                 req=None, guid=original.get("guid"), state="being_corrected"
diff --git a/apps/publish/content/kill.py b/apps/publish/content/kill.py
index b72d357782..914d5dbc50 100644
--- a/apps/publish/content/kill.py
+++ b/apps/publish/content/kill.py
@@ -10,12 +10,13 @@
 
 import json
 import html
-from flask import render_template
 
 from eve.versioning import resolve_document_version
 from apps.templates.content_templates import render_content_template_by_name
 from .common import BasePublishService, BasePublishResource, ITEM_KILL
-from eve.utils import config
+
+from superdesk.resource_fields import ID_FIELD, LAST_UPDATED
+from superdesk.flask import render_template
 from superdesk.metadata.item import CONTENT_STATE, ITEM_STATE, PUB_STATUS, EMBARGO, SCHEDULE_SETTINGS, PUBLISH_SCHEDULE
 from superdesk import get_resource_service
 from superdesk.utc import utcnow
@@ -63,7 +64,7 @@ def on_update(self, updates, original):
         # check if we are trying to kill an item that is contained in package
         # and the package itself is not killed.
 
-        packages = self.package_service.get_packages(original[config.ID_FIELD])
+        packages = self.package_service.get_packages(original[ID_FIELD])
         if self.package_workflow == PACKAGE_WORKFLOW.RAISE:
             if packages and packages.count() > 0:
                 for package in packages:
@@ -145,9 +146,9 @@ def kill_item(self, updates, original):
         # resolve the document version
         resolve_document_version(document=updates_data, resource=ARCHIVE, method="PATCH", latest_doc=original)
         # kill the item
-        self.patch(original.get(config.ID_FIELD), updates_data)
+        self.patch(original.get(ID_FIELD), updates_data)
         # insert into versions
-        insert_into_versions(id_=original[config.ID_FIELD])
+        insert_into_versions(id_=original[ID_FIELD])
 
     def apply_kill_template(self, item):
         # apply the kill template
@@ -167,14 +168,14 @@ def apply_kill_override(self, item, updates):
         try:
             if item.get("_type") == "archive":
                 # attempt to find the published item as this will have an accurate time of publication
-                published_item = get_resource_service(PUBLISHED).get_last_published_version(item.get(config.ID_FIELD))
+                published_item = get_resource_service(PUBLISHED).get_last_published_version(item.get(ID_FIELD))
                 versioncreated = (
                     published_item.get("versioncreated")
                     if published_item
-                    else item.get("versioncreated", item.get(config.LAST_UPDATED))
+                    else item.get("versioncreated", item.get(LAST_UPDATED))
                 )
             else:
-                versioncreated = item.get("versioncreated", item.get(config.LAST_UPDATED))
+                versioncreated = item.get("versioncreated", item.get(LAST_UPDATED))
             desk_name = get_resource_service("desks").get_desk_name(item.get("task", {}).get("desk"))
             city = get_dateline_city(item.get("dateline"))
             kill_header = json.loads(
diff --git a/apps/publish/content/publish.py b/apps/publish/content/publish.py
index a6752d8d61..bd53a93a78 100644
--- a/apps/publish/content/publish.py
+++ b/apps/publish/content/publish.py
@@ -9,6 +9,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
+
+from superdesk.flask import request
 from superdesk.errors import SuperdeskApiError
 from superdesk.metadata.item import (
     CONTENT_TYPE,
@@ -27,7 +29,6 @@
 
 from .common import BasePublishService, BasePublishResource, ITEM_PUBLISH
 from flask_babel import _
-from flask import request
 
 logger = logging.getLogger(__name__)
 
diff --git a/apps/publish/content/published_package_items.py b/apps/publish/content/published_package_items.py
index f089990faf..ed8830f24d 100644
--- a/apps/publish/content/published_package_items.py
+++ b/apps/publish/content/published_package_items.py
@@ -8,13 +8,13 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from superdesk.resource_fields import ID_FIELD
 from apps.archive.common import ARCHIVE
 from superdesk import get_resource_service
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, PUBLISH_STATES, ITEM_STATE
 from superdesk.resource import Resource
 
 from apps.packages.package_service import PackageService, create_root_group, get_item_ref
-from eve.utils import config
 from superdesk.validation import ValidationError
 from superdesk.errors import SuperdeskApiError
 from superdesk.services import BaseService
@@ -61,14 +61,14 @@ def create(self, docs, **kwargs):
                     raise SuperdeskApiError.badRequestError(
                         _("Circular reference in item {item_id}").format(item_id=new_item["item_id"])
                     )
-                items[item[config.ID_FIELD]] = item
+                items[item[ID_FIELD]] = item
 
-            updates = {key: original[key] for key in [config.ID_FIELD, PACKAGE_TYPE, GROUPS] if key in original}
+            updates = {key: original[key] for key in [ID_FIELD, PACKAGE_TYPE, GROUPS] if key in original}
             create_root_group([updates])
             items_refs = []
             for new_item in doc["new_items"]:
                 items_refs.append(self._set_item_assoc(updates, new_item, items[new_item["item_id"]]))
-            get_resource_service(ARCHIVE).system_update(original[config.ID_FIELD], updates, original)
+            get_resource_service(ARCHIVE).system_update(original[ID_FIELD], updates, original)
             for item_ref in items_refs:
                 self.package_service.update_link(updates, item_ref)
 
@@ -76,7 +76,7 @@ def create(self, docs, **kwargs):
             if any(items_published):
                 get_resource_service("archive_correct").patch(id=doc["package_id"], updates=updates)
 
-            ids.append(original[config.ID_FIELD])
+            ids.append(original[ID_FIELD])
         return ids
 
     def _set_item_assoc(self, package, new_item, item_doc):
diff --git a/apps/publish/content/resend.py b/apps/publish/content/resend.py
index d42d6bab04..5bddfb9200 100644
--- a/apps/publish/content/resend.py
+++ b/apps/publish/content/resend.py
@@ -13,8 +13,11 @@
 from apps.archive.archive import ArchiveResource, SOURCE as ARCHIVE, remove_is_queued
 from apps.publish.content.utils import filter_digital
 from superdesk.metadata.utils import item_url
-from flask import request, current_app as app
-from superdesk import get_resource_service, Service, config, signals
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, VERSION
+from superdesk.flask import request
+from superdesk import get_resource_service, Service, signals
 from superdesk.errors import SuperdeskApiError
 from superdesk.metadata.item import CONTENT_TYPE, ITEM_TYPE, ITEM_STATE, CONTENT_STATE
 from superdesk.publish import SUBSCRIBER_TYPES
@@ -49,6 +52,8 @@ def create(self, docs, **kwargs):
         remove_is_queued(article)
         signals.item_resend.send(self, item=article)
         get_enqueue_service(article.get(ITEM_OPERATION)).resend(article, subscribers)
+
+        app = get_current_app().as_any()
         app.on_archive_item_updated({"subscribers": doc.get("subscribers")}, article, ITEM_RESEND)
         signals.item_resent.send(self, item=article)
         return [article_id]
@@ -57,7 +62,7 @@ def _validate_subscribers(self, subscriber_ids, article):
         if not subscriber_ids:
             raise SuperdeskApiError.badRequestError(message=_("No subscribers selected!"))
 
-        query = {"$and": [{config.ID_FIELD: {"$in": list(subscriber_ids)}}, {"is_active": True}]}
+        query = {"$and": [{ID_FIELD: {"$in": list(subscriber_ids)}}, {"is_active": True}]}
         subscribers = list(get_resource_service("subscribers").get(req=None, lookup=query))
 
         if len(subscribers) == 0:
@@ -73,7 +78,7 @@ def _validate_subscribers(self, subscriber_ids, article):
     def _validate_article(self, article_id, article_version):
         article = get_resource_service(ARCHIVE).find_one(req=None, _id=article_id)
 
-        if app.config.get("CORRECTIONS_WORKFLOW") and article.get(ITEM_STATE) == "correction":
+        if get_app_config("CORRECTIONS_WORKFLOW") and article.get(ITEM_STATE) == "correction":
             publish_service = get_resource_service("published")
             article = publish_service.find_one(req=None, guid=article.get("guid"), state="being_corrected")
 
@@ -93,9 +98,9 @@ def _validate_article(self, article_id, article_version):
                 message=_("Only published, corrected or killed stories can be resent!")
             )
 
-        if article[config.VERSION] != article_version:
+        if article[VERSION] != article_version:
             raise SuperdeskApiError.badRequestError(
-                message=_("Please use the newest version {version} to resend!").format(version=article[config.VERSION])
+                message=_("Please use the newest version {version} to resend!").format(version=article[VERSION])
             )
 
         if article.get("rewritten_by"):
diff --git a/apps/publish/content/tests.py b/apps/publish/content/tests.py
index e1c455e164..282d311bed 100644
--- a/apps/publish/content/tests.py
+++ b/apps/publish/content/tests.py
@@ -17,9 +17,10 @@
 from unittest.mock import MagicMock
 
 from bson.objectid import ObjectId
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 from eve.versioning import versioned_id_field
 
+from superdesk.resource_fields import ID_FIELD, VERSION
 from apps.archive.archive import SOURCE as ARCHIVE
 from apps.packages.package_service import PackageService
 from apps.publish.content.common import BasePublishService
@@ -160,7 +161,7 @@ def init_data(self):
                 "_id": "1",
                 ITEM_TYPE: CONTENT_TYPE.TEXT,
                 "last_version": 3,
-                config.VERSION: 4,
+                VERSION: 4,
                 "body_html": "Test body",
                 "anpa_category": [{"qcode": "A", "name": "Sport"}],
                 "urgency": 4,
@@ -183,7 +184,7 @@ def init_data(self):
                 "guid": "tag:localhost:2015:69b961ab-2816-4b8a-a974-xy4532fe33f9",
                 "_id": "2",
                 "last_version": 3,
-                config.VERSION: 4,
+                VERSION: 4,
                 "body_html": "Test body of the second article",
                 "slugline": "story slugline",
                 "urgency": 4,
@@ -208,7 +209,7 @@ def init_data(self):
                 "guid": "tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4fa",
                 "_id": "3",
                 "last_version": 3,
-                config.VERSION: 4,
+                VERSION: 4,
                 "body_html": "Test body",
                 "slugline": "story slugline",
                 "urgency": 4,
@@ -231,7 +232,7 @@ def init_data(self):
                 "guid": "8",
                 "_id": "8",
                 "last_version": 3,
-                config.VERSION: 4,
+                VERSION: 4,
                 "target_regions": [{"qcode": "NSW", "name": "New South Wales", "allow": True}],
                 "body_html": "Take-1 body",
                 "urgency": 4,
@@ -255,7 +256,7 @@ def init_data(self):
                 "_id": "9",
                 "urgency": 3,
                 "last_version": 3,
-                config.VERSION: 4,
+                VERSION: 4,
                 "headline": "creator",
                 "task": {"user": "1", "desk": "123456789ABCDEF123456789"},
                 ITEM_STATE: CONTENT_STATE.FETCHED,
@@ -264,7 +265,7 @@ def init_data(self):
                 "guid": "tag:localhost:2015:69b961ab-a7b402fed4fb",
                 "_id": "test_item_9",
                 "last_version": 3,
-                config.VERSION: 4,
+                VERSION: 4,
                 "body_html": "Student Crime. Police Missing.",
                 "urgency": 4,
                 "headline": "Police Missing",
@@ -285,7 +286,7 @@ def init_data(self):
             {
                 "guid": "tag:localhost:10:10:10:2015:69b961ab-2816-4b8a-a584-a7b402fed4fc",
                 "_id": "100",
-                config.VERSION: 3,
+                VERSION: 3,
                 "task": {"user": "1", "desk": "123456789ABCDEF123456789"},
                 ITEM_TYPE: CONTENT_TYPE.COMPOSITE,
                 "groups": [
@@ -344,7 +345,7 @@ def _init_article_versions(self):
                 "guid": "tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4f9",
                 version_id: "1",
                 ITEM_TYPE: CONTENT_TYPE.TEXT,
-                config.VERSION: 1,
+                VERSION: 1,
                 "urgency": 4,
                 "pubstatus": "usable",
                 "firstcreated": utcnow(),
@@ -360,7 +361,7 @@ def _init_article_versions(self):
                 "guid": "tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4f9",
                 version_id: "1",
                 ITEM_TYPE: CONTENT_TYPE.TEXT,
-                config.VERSION: 2,
+                VERSION: 2,
                 "urgency": 4,
                 "headline": "Two students missing",
                 "pubstatus": "usable",
@@ -377,7 +378,7 @@ def _init_article_versions(self):
                 "guid": "tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4f9",
                 version_id: "1",
                 ITEM_TYPE: CONTENT_TYPE.TEXT,
-                config.VERSION: 3,
+                VERSION: 3,
                 "urgency": 4,
                 "headline": "Two students missing",
                 "pubstatus": "usable",
@@ -395,7 +396,7 @@ def _init_article_versions(self):
                 "guid": "tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4f9",
                 version_id: "1",
                 ITEM_TYPE: CONTENT_TYPE.TEXT,
-                config.VERSION: 4,
+                VERSION: 4,
                 "body_html": "Test body",
                 "urgency": 4,
                 "headline": "Two students missing",
@@ -448,7 +449,7 @@ def test_publish(self):
         get_resource_service(ARCHIVE_PUBLISH).patch(id=doc["_id"], updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
         published_doc = get_resource_service(ARCHIVE).find_one(req=None, _id=doc["_id"])
         self.assertIsNotNone(published_doc)
-        self.assertEqual(published_doc[config.VERSION], doc[config.VERSION] + 1)
+        self.assertEqual(published_doc[VERSION], doc[VERSION] + 1)
         self.assertEqual(published_doc[ITEM_STATE], ArchivePublishService().published_state)
 
     def test_versions_across_collections_after_publish(self):
@@ -458,30 +459,28 @@ def test_versions_across_collections_after_publish(self):
         doc = self.articles[3]
         original = doc.copy()
 
-        published_version_number = original[config.VERSION] + 1
+        published_version_number = original[VERSION] + 1
         get_resource_service(ARCHIVE_PUBLISH).patch(
-            id=doc[config.ID_FIELD],
-            updates={ITEM_STATE: CONTENT_STATE.PUBLISHED, config.VERSION: published_version_number},
+            id=doc[ID_FIELD],
+            updates={ITEM_STATE: CONTENT_STATE.PUBLISHED, VERSION: published_version_number},
         )
 
-        article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[config.ID_FIELD])
+        article_in_production = get_resource_service(ARCHIVE).find_one(req=None, _id=original[ID_FIELD])
         self.assertIsNotNone(article_in_production)
         self.assertEqual(article_in_production[ITEM_STATE], CONTENT_STATE.PUBLISHED)
-        self.assertEqual(article_in_production[config.VERSION], published_version_number)
+        self.assertEqual(article_in_production[VERSION], published_version_number)
 
         enqueue_published()
 
-        lookup = {"item_id": original[config.ID_FIELD], "item_version": published_version_number}
+        lookup = {"item_id": original[ID_FIELD], "item_version": published_version_number}
         queue_items = list(get_resource_service(PUBLISH_QUEUE).get(req=None, lookup=lookup))
-        assert len(queue_items) > 0, "Transmission Details are empty for published item %s" % original[config.ID_FIELD]
+        assert len(queue_items) > 0, "Transmission Details are empty for published item %s" % original[ID_FIELD]
 
-        lookup = {"item_id": original[config.ID_FIELD], config.VERSION: published_version_number}
+        lookup = {"item_id": original[ID_FIELD], VERSION: published_version_number}
         request = ParsedRequest()
         request.args = {"aggregations": 0}
         items_in_published_collection = list(get_resource_service(PUBLISHED).get(req=request, lookup=lookup))
-        assert len(items_in_published_collection) > 0, (
-            "Item not found in published collection %s" % original[config.ID_FIELD]
-        )
+        assert len(items_in_published_collection) > 0, "Item not found in published collection %s" % original[ID_FIELD]
 
     def test_queue_transmission_for_item_scheduled_future(self):
         self._is_publish_queue_empty()
@@ -694,7 +693,7 @@ def test_is_targeted(self):
 
     def test_targeted_for_includes_digital_subscribers(self):
         updates = {"target_regions": [{"qcode": "NSW", "name": "New South Wales", "allow": True}]}
-        doc_id = self.articles[5][config.ID_FIELD]
+        doc_id = self.articles[5][ID_FIELD]
         get_resource_service(ARCHIVE).patch(id=doc_id, updates=updates)
 
         get_resource_service(ARCHIVE_PUBLISH).patch(id=doc_id, updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
@@ -720,12 +719,10 @@ def get_publish_items(item_id, last_version):
             request.args = {"source": json.dumps(query), "aggregations": 0}
             return self.app.data.find(PUBLISHED, req=request, lookup=None)[0]
 
-        get_resource_service(ARCHIVE).patch(id=self.articles[1][config.ID_FIELD], updates={"publish_schedule": None})
+        get_resource_service(ARCHIVE).patch(id=self.articles[1][ID_FIELD], updates={"publish_schedule": None})
 
-        doc = get_resource_service(ARCHIVE).find_one(req=None, _id=self.articles[1][config.ID_FIELD])
-        get_resource_service(ARCHIVE_PUBLISH).patch(
-            id=doc[config.ID_FIELD], updates={ITEM_STATE: CONTENT_STATE.PUBLISHED}
-        )
+        doc = get_resource_service(ARCHIVE).find_one(req=None, _id=self.articles[1][ID_FIELD])
+        get_resource_service(ARCHIVE_PUBLISH).patch(id=doc[ID_FIELD], updates={ITEM_STATE: CONTENT_STATE.PUBLISHED})
 
         enqueue_published()
 
@@ -735,12 +732,10 @@ def get_publish_items(item_id, last_version):
         request.args = {"aggregations": 0}
         published_items = self.app.data.find(PUBLISHED, request, None)[0]
         self.assertEqual(1, published_items.count())
-        published_doc = next((item for item in published_items if item.get("item_id") == doc[config.ID_FIELD]), None)
+        published_doc = next((item for item in published_items if item.get("item_id") == doc[ID_FIELD]), None)
         self.assertEqual(published_doc[LAST_PUBLISHED_VERSION], True)
 
-        get_resource_service(ARCHIVE_CORRECT).patch(
-            id=doc[config.ID_FIELD], updates={ITEM_STATE: CONTENT_STATE.CORRECTED}
-        )
+        get_resource_service(ARCHIVE_CORRECT).patch(id=doc[ID_FIELD], updates={ITEM_STATE: CONTENT_STATE.CORRECTED})
 
         enqueue_published()
 
diff --git a/apps/publish/enqueue/__init__.py b/apps/publish/enqueue/__init__.py
index 2f3b81a599..0c7d9dcf81 100644
--- a/apps/publish/enqueue/__init__.py
+++ b/apps/publish/enqueue/__init__.py
@@ -9,12 +9,11 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
-import elasticapm
+
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD, VERSION
 import superdesk
 import superdesk.signals as signals
-
-from flask import current_app as app
-
 from superdesk import get_resource_service
 from superdesk.celery_task_utils import get_lock_id
 from superdesk.lock import lock, unlock
@@ -26,7 +25,7 @@
 from apps.publish.enqueue.enqueue_published import EnqueuePublishedService
 from apps.publish.published_item import PUBLISH_STATE, QUEUE_STATE, PUBLISHED, ERROR_MESSAGE
 from bson.objectid import ObjectId
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 from eve.versioning import resolve_document_version
 from superdesk.celery_app import celery
 from superdesk.utc import utcnow
@@ -100,7 +99,7 @@ def enqueue_item(self, published_item):
         """
         Creates the corresponding entries in the publish queue for the given item
         """
-        published_item_id = ObjectId(published_item[config.ID_FIELD])
+        published_item_id = ObjectId(published_item[ID_FIELD])
         published_service = get_resource_service(PUBLISHED)
         archive_service = get_resource_service(ARCHIVE)
         published_update = {QUEUE_STATE: PUBLISH_STATE.IN_PROGRESS, "last_queue_event": utcnow()}
@@ -128,7 +127,7 @@ def enqueue_item(self, published_item):
                     document=item_updates,
                     resource=ARCHIVE,
                     method="PATCH",
-                    latest_doc={config.VERSION: published_item.get(config.VERSION, 1)},
+                    latest_doc={VERSION: published_item.get(VERSION, 1)},
                 )
 
                 # update the archive collection
@@ -136,7 +135,9 @@ def enqueue_item(self, published_item):
                 archive_service.system_update(published_item["item_id"], item_updates, archive_item)
                 # insert into version.
                 insert_into_versions(published_item["item_id"], doc=None)
+
                 # update archive history
+                app = get_current_app().as_any()
                 app.on_archive_item_updated(item_updates, archive_item, ITEM_PUBLISH)
                 # import to legal archive
                 import_into_legal_archive.apply_async(countdown=3, kwargs={"item_id": published_item["item_id"]})
@@ -149,7 +150,7 @@ def enqueue_item(self, published_item):
                     {
                         "versioncreated": versioncreated,
                         ITEM_STATE: CONTENT_STATE.PUBLISHED,
-                        config.VERSION: item_updates[config.VERSION],
+                        VERSION: item_updates[VERSION],
                     }
                 )
                 # send a notification to the clients
diff --git a/apps/publish/enqueue/enqueue_corrected.py b/apps/publish/enqueue/enqueue_corrected.py
index 5fd8b35fb2..1b10d77e2c 100644
--- a/apps/publish/enqueue/enqueue_corrected.py
+++ b/apps/publish/enqueue/enqueue_corrected.py
@@ -10,10 +10,10 @@
 
 import logging
 
+from superdesk.resource_fields import ID_FIELD
 from apps.publish.content.utils import filter_non_digital
 from superdesk import get_resource_service
 from superdesk.metadata.item import CONTENT_STATE
-from eve.utils import config
 from apps.publish.enqueue.enqueue_service import EnqueueService
 
 
@@ -53,7 +53,7 @@ def get_subscribers(self, doc, target_media_type):
             # Step 2
             active_subscribers = list(get_resource_service("subscribers").get_active())
             subscribers_yet_to_receive = [
-                a for a in active_subscribers if not any(a[config.ID_FIELD] == s[config.ID_FIELD] for s in subscribers)
+                a for a in active_subscribers if not any(a[ID_FIELD] == s[ID_FIELD] for s in subscribers)
             ]
 
             if len(subscribers_yet_to_receive) > 0:
diff --git a/apps/publish/enqueue/enqueue_killed.py b/apps/publish/enqueue/enqueue_killed.py
index 8aefa9ca92..e69da677ae 100644
--- a/apps/publish/enqueue/enqueue_killed.py
+++ b/apps/publish/enqueue/enqueue_killed.py
@@ -10,9 +10,8 @@
 
 import logging
 
-from eve.utils import config
-from flask import current_app as app
-
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
 from apps.archive.common import ITEM_OPERATION
 from apps.publish.content.kill import ITEM_KILL
 from superdesk import get_resource_service
@@ -46,7 +45,7 @@ def get_subscribers(self, doc, target_media_type):
         }
         subscribers, subscriber_codes, associations = self._get_subscribers_for_previously_sent_items(query)
 
-        if not subscribers and app.config.get("UNPUBLISH_TO_MATCHING_SUBSCRIBERS", False):
+        if not subscribers and get_app_config("UNPUBLISH_TO_MATCHING_SUBSCRIBERS", False):
             active_subscribers = get_resource_service("subscribers").get_active()
             subscribers, subscriber_codes = self.filter_subscribers(doc, active_subscribers, target_media_type)
 
@@ -64,12 +63,12 @@ def enqueue_archived_kill_item(self, item, transmission_details):
             for t in transmission_details
             if t.get("destination", {}).get("delivery_type") == "content_api"
         }
-        query = {"$and": [{config.ID_FIELD: {"$in": subscriber_ids}}]}
+        query = {"$and": [{ID_FIELD: {"$in": subscriber_ids}}]}
         subscribers = list(get_resource_service("subscribers").get(req=None, lookup=query))
 
         for subscriber in subscribers:
-            subscriber["api_enabled"] = subscriber.get(config.ID_FIELD) in api_subscribers
+            subscriber["api_enabled"] = subscriber.get(ID_FIELD) in api_subscribers
 
         self.queue_transmission(item, subscribers)
-        logger.info("Queued Transmission for article: {}".format(item[config.ID_FIELD]))
+        logger.info("Queued Transmission for article: {}".format(item[ID_FIELD]))
         self.publish_content_api(item, [subscriber for subscriber in subscribers if subscriber["api_enabled"]])
diff --git a/apps/publish/enqueue/enqueue_published.py b/apps/publish/enqueue/enqueue_published.py
index e63d087322..4eb15f519d 100644
--- a/apps/publish/enqueue/enqueue_published.py
+++ b/apps/publish/enqueue/enqueue_published.py
@@ -11,7 +11,7 @@
 import logging
 import elasticapm
 
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, CONTENT_STATE
 from apps.publish.enqueue.enqueue_service import EnqueueService
@@ -61,8 +61,8 @@ def get_subscribers(self, doc, target_media_type):
         subscribers, codes = self.filter_subscribers(doc, subscribers, target_media_type)
 
         if rewrite_subscribers:
-            subscribers_ids = set(s[config.ID_FIELD] for s in rewrite_subscribers)
-            subscribers = rewrite_subscribers + [s for s in subscribers if s[config.ID_FIELD] not in subscribers_ids]
+            subscribers_ids = set(s[ID_FIELD] for s in rewrite_subscribers)
+            subscribers = rewrite_subscribers + [s for s in subscribers if s[ID_FIELD] not in subscribers_ids]
 
         if rewrite_codes:
             # join the codes
diff --git a/apps/publish/enqueue/enqueue_service.py b/apps/publish/enqueue/enqueue_service.py
index 33d75bf730..1df1202ce7 100644
--- a/apps/publish/enqueue/enqueue_service.py
+++ b/apps/publish/enqueue/enqueue_service.py
@@ -15,23 +15,27 @@
 import content_api
 
 from bson import ObjectId
-from flask import current_app as app, g
+from copy import deepcopy
+from eve.utils import ParsedRequest
+from flask_babel import _
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, VERSION
+from superdesk.flask import g
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError, SuperdeskPublishError
 from superdesk.metadata.item import CONTENT_TYPE, ITEM_TYPE, ITEM_STATE, PUBLISH_SCHEDULE, ASSOCIATIONS, MEDIA_TYPES
 from superdesk.metadata.packages import GROUPS, ROOT_GROUP, GROUP_ID, REFS, RESIDREF
+from superdesk.notification import push_notification
 from superdesk.publish import SUBSCRIBER_TYPES
 from superdesk.publish.publish_queue import PUBLISHED_IN_PACKAGE
 from superdesk.publish.formatters import get_formatter
 from apps.publish.content.utils import filter_digital, filter_non_digital
 from apps.publish.content.common import BasePublishService
-from copy import deepcopy
-from eve.utils import config, ParsedRequest
-from apps.archive.common import get_utc_schedule
+from apps.archive.common import get_user, get_utc_schedule
 from apps.packages.package_service import PackageService
 from apps.publish.published_item import PUBLISH_STATE, QUEUE_STATE
 from apps.content_types import apply_schema
-from flask_babel import _
 
 logger = logging.getLogger(__name__)
 
@@ -124,7 +128,7 @@ def _publish_package_items(self, package):
                     )
 
                 subscribers, subscriber_codes, associations = self._get_subscribers_for_package_item(package_item)
-                package_item_id = package_item[config.ID_FIELD]
+                package_item_id = package_item[ID_FIELD]
                 self._extend_subscriber_items(
                     subscriber_items, subscribers, package_item, package_item_id, subscriber_codes
                 )
@@ -218,8 +222,9 @@ def publish(self, doc, target_media_type=None, content_type=None):
 
         1. Get the subscribers.
         2. Queue the content for subscribers
-        3. If not queued and not formatters then raise exception.
-        4. Publish the content to content api.
+        3. Sends notification if no formatter has found for any of the formats configured in Subscriber.
+        4. If not queued and not formatters then raise exception.
+        5. Publish the content to content api.
 
         :param dict doc: document to publish
         :param str target_media_type: Valid values are - Wire, Digital.
@@ -228,35 +233,33 @@ def publish(self, doc, target_media_type=None, content_type=None):
         :raises PublishQueueError.item_not_queued_error:
                 If the nothing is queued.
         """
+        sent = False
+
         # Step 1
-        subscribers, subscriber_codes, associations = self.get_subscribers(doc, target_media_type)
 
-        queued = False
+        subscribers, subscriber_codes, associations = self.get_subscribers(doc, target_media_type)
 
-        if not subscribers:
-            logger.info("No subscribers found for the item_id: %s", doc[config.ID_FIELD])
-        else:
-            # Step 2
-            queued = self.queue_transmission(
-                deepcopy(doc),
-                subscribers,
-                subscriber_codes,
-                associations,
-            )
+        # Step 2
+        no_formatters, queued = self.queue_transmission(
+            deepcopy(doc), subscribers, subscriber_codes, associations, sent
+        )
 
-            # Step 3
-            if not target_media_type and not queued:
-                level = logging.INFO
-                if app.config["PUBLISH_NOT_QUEUED_ERROR"] and not app.config.get("SUPERDESK_TESTING"):
-                    level = logging.ERROR
-                logger.log(
-                    level,
-                    "Nothing is saved to publish queue for story: {} for action: {}".format(
-                        doc[config.ID_FIELD], self.publish_type
-                    ),
-                )
+        # Step 3
+        self._push_formatter_notification(doc, no_formatters)
 
         # Step 4
+        if not target_media_type and not queued:
+            level = logging.INFO
+            if get_app_config("PUBLISH_NOT_QUEUED_ERROR") and not get_app_config("SUPERDESK_TESTING"):
+                level = logging.ERROR
+            logger.log(
+                level,
+                "Nothing is saved to publish queue for story: {} for action: {}".format(
+                    doc[ID_FIELD], self.publish_type
+                ),
+            )
+
+        # Step 5
         if not content_type:
             self.publish_content_api(doc, [s for s in subscribers if s.get("api_enabled")])
 
@@ -274,7 +277,22 @@ def publish_content_api(self, doc, subscribers):
                 get_resource_service("content_api").publish(doc, subscribers)
         except Exception:
             logger.exception(
-                "Failed to queue item to API for item: {} for action {}".format(doc[config.ID_FIELD], self.publish_type)
+                "Failed to queue item to API for item: {} for action {}".format(doc[ID_FIELD], self.publish_type)
+            )
+
+    def _push_formatter_notification(self, doc, no_formatters=None):
+        if no_formatters is None:
+            no_formatters = []
+
+        if len(no_formatters) > 0:
+            user = get_user()
+            push_notification(
+                "item:publish:wrong:format",
+                item=str(doc[ID_FIELD]),
+                unique_name=doc.get("unique_name"),
+                desk=str(doc.get("task", {}).get("desk", "")),
+                user=str(user.get(ID_FIELD, "")),
+                formats=no_formatters,
             )
 
     def _get_subscriber_codes(self, subscribers):
@@ -283,11 +301,11 @@ def _get_subscriber_codes(self, subscribers):
 
         for subscriber in subscribers:
             codes = self._get_codes(subscriber)
-            products = [p for p in all_products if p[config.ID_FIELD] in subscriber.get("products", [])]
+            products = [p for p in all_products if p[ID_FIELD] in subscriber.get("products", [])]
 
             for product in products:
                 codes.extend(self._get_codes(product))
-                subscriber_codes[subscriber[config.ID_FIELD]] = list(set(codes))
+                subscriber_codes[subscriber[ID_FIELD]] = list(set(codes))
 
         return subscriber_codes
 
@@ -308,7 +326,7 @@ def resend(self, doc, subscribers):
         for subscriber in digital_subscribers:
             subscriber["api_enabled"] = len(subscriber.get("api_products") or []) > 0
 
-        doc["item_id"] = doc[config.ID_FIELD]
+        doc["item_id"] = doc[ID_FIELD]
         associations = self._resend_associations_to_subscribers(doc, subscribers)
         if len(wire_subscribers) > 0:
             self._resend_to_subscribers(doc, wire_subscribers, subscriber_codes, associations)
@@ -343,24 +361,25 @@ def _resend_associations_to_subscribers(self, doc, subscribers):
             item["subscribers"] = []
 
             for s in subscribers:
-                item["subscribers"].append(s.get(config.ID_FIELD))
-                if not associations.get(s.get(config.ID_FIELD)):
-                    associations[s.get(config.ID_FIELD)] = []
+                item["subscribers"].append(s.get(ID_FIELD))
+                if not associations.get(s.get(ID_FIELD)):
+                    associations[s.get(ID_FIELD)] = []
 
-                associations[s.get(config.ID_FIELD)].append(item.get(config.ID_FIELD))
+                associations[s.get(ID_FIELD)].append(item.get(ID_FIELD))
         return associations
 
     def _resend_to_subscribers(self, doc, subscribers, subscriber_codes, associations=None):
         if associations is None:
             associations = {}
-        queued = self.queue_transmission(doc, subscribers, subscriber_codes, associations)
+        formatter_messages, queued = self.queue_transmission(doc, subscribers, subscriber_codes, associations)
+        self._push_formatter_notification(doc, formatter_messages)
         if not queued:
             logger.exception(
-                "Nothing is saved to publish queue for story: {} for action: {}".format(doc[config.ID_FIELD], "resend")
+                "Nothing is saved to publish queue for story: {} for action: {}".format(doc[ID_FIELD], "resend")
             )
 
     @elasticapm.capture_span()
-    def publish_package(self, package, target_subscribers) -> bool:
+    def publish_package(self, package, target_subscribers):
         """Publishes a given package to given subscribers.
 
         For each subscriber updates the package definition with the wanted_items for that subscriber
@@ -371,7 +390,7 @@ def publish_package(self, package, target_subscribers) -> bool:
         :param target_subscribers: List of subscriber and items-per-subscriber
         """
         all_items = self.package_service.get_residrefs(package)
-        queued = False
+        no_formatters, queued = [], False
         subscribers = []
         for items in target_subscribers.values():
             updated = deepcopy(package)
@@ -384,18 +403,19 @@ def publish_package(self, package, target_subscribers) -> bool:
                 if not still_items_left and self.publish_type != "correct":
                     # if nothing left in the package to be published and
                     # if not correcting then don't send the package
-                    return False
+                    return
             for key in wanted_items:
                 try:
                     self.package_service.replace_ref_in_package(updated, key, items["items"][key])
                 except KeyError:
                     continue
 
-            temp_queued = self.queue_transmission(
-                updated, [subscriber], {subscriber[config.ID_FIELD]: codes}, sent=True
+            formatters, temp_queued = self.queue_transmission(
+                updated, [subscriber], {subscriber[ID_FIELD]: codes}, sent=True
             )
 
             subscribers.append(subscriber)
+            no_formatters.extend(formatters)
             if temp_queued:
                 queued = temp_queued
 
@@ -415,7 +435,7 @@ def get_destinations(self, subscriber):
         return destinations
 
     @elasticapm.capture_span()
-    def queue_transmission(self, doc, subscribers, subscriber_codes=None, associations=None, sent=False) -> bool:
+    def queue_transmission(self, doc, subscribers, subscriber_codes=None, associations=None, sent=False):
         """Method formats and then queues the article for transmission to the passed subscribers.
 
         ::Important Note:: Format Type across Subscribers can repeat. But we can't have formatted item generated once
@@ -431,106 +451,112 @@ def queue_transmission(self, doc, subscribers, subscriber_codes=None, associatio
         if subscriber_codes is None:
             subscriber_codes = {}
 
-        if config.PUBLISH_ASSOCIATIONS_RESEND and not sent:
-            is_correction = doc.get("state") in ["corrected", "being_corrected"]
-            is_update = doc.get("rewrite_of")
-            is_new = not is_correction and not is_update
-
-            if config.PUBLISH_ASSOCIATIONS_RESEND == "new" and is_new:
-                self.resend_association_items(doc)
-            elif config.PUBLISH_ASSOCIATIONS_RESEND == "corrections":
-                self.resend_association_items(doc)
-            elif config.PUBLISH_ASSOCIATIONS_RESEND == "updates" and not is_correction:
-                self.resend_association_items(doc)
-
-        queued = False
-        filtered_document = self.filter_document(doc)
-        for subscriber in subscribers:
-            try:
-                if (
-                    doc[ITEM_TYPE] not in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]
-                    and subscriber.get("subscriber_type", "") == SUBSCRIBER_TYPES.WIRE
-                ):
-                    # wire subscribers can get only text and preformatted stories
-                    continue
+        try:
+            publish_associations_resend = get_app_config("PUBLISH_ASSOCIATIONS_RESEND")
+            if publish_associations_resend and not sent:
+                is_correction = doc.get("state") in ["corrected", "being_corrected"]
+                is_update = doc.get("rewrite_of")
+                is_new = not is_correction and not is_update
+
+                if publish_associations_resend == "new" and is_new:
+                    self.resend_association_items(doc)
+                elif publish_associations_resend == "corrections":
+                    self.resend_association_items(doc)
+                elif publish_associations_resend == "updates" and not is_correction:
+                    self.resend_association_items(doc)
+
+            queued = False
+            no_formatters = []
+            filtered_document = self.filter_document(doc)
+            app = get_current_app()
+            for subscriber in subscribers:
+                try:
+                    if (
+                        doc[ITEM_TYPE] not in [CONTENT_TYPE.TEXT, CONTENT_TYPE.PREFORMATTED]
+                        and subscriber.get("subscriber_type", "") == SUBSCRIBER_TYPES.WIRE
+                    ):
+                        # wire subscribers can get only text and preformatted stories
+                        continue
 
-                for destination in self.get_destinations(subscriber):
-                    embed_package_items = doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE and (
-                        destination.get("config") or {}
-                    ).get("packaged", False)
-                    if embed_package_items:
-                        doc = self._embed_package_items(doc)
+                    for destination in self.get_destinations(subscriber):
+                        embed_package_items = doc[ITEM_TYPE] == CONTENT_TYPE.COMPOSITE and (
+                            destination.get("config") or {}
+                        ).get("packaged", False)
+                        if embed_package_items:
+                            doc = self._embed_package_items(doc)
 
-                    if doc.get(PUBLISHED_IN_PACKAGE) and (destination.get("config") or {}).get("packaged", False):
-                        continue
+                        if doc.get(PUBLISHED_IN_PACKAGE) and (destination.get("config") or {}).get("packaged", False):
+                            continue
 
-                    # Step 2(a)
-                    formatter = get_formatter(destination["format"], doc)
+                        # Step 2(a)
+                        formatter = get_formatter(destination["format"], doc)
 
-                    if not formatter:
-                        logger.warning("Formatter not found for format: %s", destination["format"])
-                        continue
+                        if not formatter:  # if formatter not found then record it
+                            no_formatters.append(destination["format"])
+                            continue
 
-                    formatter.set_destination(destination, subscriber)
-                    formatted_docs = formatter.format(
-                        self.filter_document(doc) if embed_package_items else filtered_document.copy(),
-                        subscriber,
-                        subscriber_codes.get(subscriber[config.ID_FIELD]),
-                    )
+                        formatter.set_destination(destination, subscriber)
+                        formatted_docs = formatter.format(
+                            self.filter_document(doc) if embed_package_items else filtered_document.copy(),
+                            subscriber,
+                            subscriber_codes.get(subscriber[ID_FIELD]),
+                        )
 
-                    for idx, publish_data in enumerate(formatted_docs):
-                        if not isinstance(publish_data, dict):
-                            pub_seq_num, formatted_doc = publish_data
-                            formatted_docs[idx] = {
-                                "published_seq_num": pub_seq_num,
-                                "formatted_item": formatted_doc,
-                            }
-                        else:
-                            assert (
-                                "published_seq_num" in publish_data and "formatted_item" in publish_data
-                            ), "missing keys in publish_data"
-
-                    for publish_queue_item in formatted_docs:
-                        publish_queue_item["item_id"] = doc["item_id"]
-                        publish_queue_item["item_version"] = doc[config.VERSION]
-                        publish_queue_item["subscriber_id"] = subscriber[config.ID_FIELD]
-                        publish_queue_item["codes"] = subscriber_codes.get(subscriber[config.ID_FIELD])
-                        publish_queue_item["destination"] = destination
-                        # publish_schedule is just to indicate in the queue item is create via scheduled item
-                        publish_queue_item[PUBLISH_SCHEDULE] = get_utc_schedule(doc, PUBLISH_SCHEDULE) or None
-                        publish_queue_item["unique_name"] = doc.get("unique_name", None)
-                        publish_queue_item["content_type"] = doc.get("type", None)
-                        publish_queue_item["headline"] = doc.get("headline", None)
-                        publish_queue_item["publishing_action"] = self.published_state
-                        publish_queue_item["ingest_provider"] = (
-                            ObjectId(doc.get("ingest_provider")) if doc.get("ingest_provider") else None
+                        for idx, publish_data in enumerate(formatted_docs):
+                            if not isinstance(publish_data, dict):
+                                pub_seq_num, formatted_doc = publish_data
+                                formatted_docs[idx] = {
+                                    "published_seq_num": pub_seq_num,
+                                    "formatted_item": formatted_doc,
+                                }
+                            else:
+                                assert (
+                                    "published_seq_num" in publish_data and "formatted_item" in publish_data
+                                ), "missing keys in publish_data"
+
+                        for publish_queue_item in formatted_docs:
+                            publish_queue_item["item_id"] = doc["item_id"]
+                            publish_queue_item["item_version"] = doc[VERSION]
+                            publish_queue_item["subscriber_id"] = subscriber[ID_FIELD]
+                            publish_queue_item["codes"] = subscriber_codes.get(subscriber[ID_FIELD])
+                            publish_queue_item["destination"] = destination
+                            # publish_schedule is just to indicate in the queue item is create via scheduled item
+                            publish_queue_item[PUBLISH_SCHEDULE] = get_utc_schedule(doc, PUBLISH_SCHEDULE) or None
+                            publish_queue_item["unique_name"] = doc.get("unique_name", None)
+                            publish_queue_item["content_type"] = doc.get("type", None)
+                            publish_queue_item["headline"] = doc.get("headline", None)
+                            publish_queue_item["publishing_action"] = self.published_state
+                            publish_queue_item["ingest_provider"] = (
+                                ObjectId(doc.get("ingest_provider")) if doc.get("ingest_provider") else None
+                            )
+                            publish_queue_item["associated_items"] = associations.get(subscriber[ID_FIELD], [])
+                            publish_queue_item["priority"] = subscriber.get("priority")
+
+                            if doc.get(PUBLISHED_IN_PACKAGE):
+                                publish_queue_item[PUBLISHED_IN_PACKAGE] = doc[PUBLISHED_IN_PACKAGE]
+                            try:
+                                encoded_item = publish_queue_item.pop("encoded_item")
+                            except KeyError:
+                                pass
+                            else:
+                                binary = io.BytesIO(encoded_item)
+                                publish_queue_item["encoded_item_id"] = app.storage.put(binary)
+                            publish_queue_item.pop(ITEM_STATE, None)
+
+                            # content api delivery will be marked as SUCCESS in queue
+                            get_resource_service("publish_queue").post([publish_queue_item])
+                            queued = True
+
+                except Exception:
+                    logger.exception(
+                        "Failed to queue item for id {} with headline {} for subscriber {}.".format(
+                            doc.get(ID_FIELD), doc.get("headline"), subscriber.get("name")
                         )
-                        publish_queue_item["associated_items"] = associations.get(subscriber[config.ID_FIELD], [])
-                        publish_queue_item["priority"] = subscriber.get("priority")
-
-                        if doc.get(PUBLISHED_IN_PACKAGE):
-                            publish_queue_item[PUBLISHED_IN_PACKAGE] = doc[PUBLISHED_IN_PACKAGE]
-                        try:
-                            encoded_item = publish_queue_item.pop("encoded_item")
-                        except KeyError:
-                            pass
-                        else:
-                            binary = io.BytesIO(encoded_item)
-                            publish_queue_item["encoded_item_id"] = app.storage.put(binary)
-                        publish_queue_item.pop(ITEM_STATE, None)
-
-                        # content api delivery will be marked as SUCCESS in queue
-                        get_resource_service("publish_queue").post([publish_queue_item])
-                        queued = True
-
-            except Exception:
-                logger.exception(
-                    "Failed to queue item for id {} with headline {} for subscriber {}.".format(
-                        doc.get(config.ID_FIELD), doc.get("headline"), subscriber.get("name")
                     )
-                )
 
-        return queued
+            return no_formatters, queued
+        except Exception:
+            raise
 
     def get_unique_associations(self, associated_items):
         """This method is used for the removing duplicate associate items
@@ -578,14 +604,14 @@ def _embed_package_items(self, package):
                 if RESIDREF not in ref:
                     continue
                 package_item = get_resource_service("published").find_one(
-                    req=None, item_id=ref[RESIDREF], _current_version=ref[config.VERSION]
+                    req=None, item_id=ref[RESIDREF], _current_version=ref[VERSION]
                 )
                 if not package_item:
                     msg = _("Can not find package {package} published item {item}").format(
                         package=package["item_id"], item=ref["residRef"]
                     )
                     raise SuperdeskPublishError(500, msg)
-                package_item[config.ID_FIELD] = package_item["item_id"]
+                package_item[ID_FIELD] = package_item["item_id"]
                 ref["package_item"] = package_item
         return package
 
@@ -595,7 +621,7 @@ def _get_subscribers_for_package_item(self, package_item):
         :param package_item: item in a package
         :return list: List of subscribers
         """
-        query = {"$and": [{"item_id": package_item[config.ID_FIELD]}, {"publishing_action": package_item[ITEM_STATE]}]}
+        query = {"$and": [{"item_id": package_item[ID_FIELD]}, {"publishing_action": package_item[ITEM_STATE]}]}
 
         return self._get_subscribers_for_previously_sent_items(query)
 
@@ -629,7 +655,7 @@ def _get_subscribers_for_previously_sent_items(self, lookup):
             subscribers = [s.copy() for s in active_subscribers if s["_id"] in subscriber_ids]
 
             for s in subscribers:
-                s["api_enabled"] = subscriber_ids.get(s.get(config.ID_FIELD))
+                s["api_enabled"] = subscriber_ids.get(s.get(ID_FIELD))
 
         return subscribers, subscriber_codes, associations
 
@@ -643,7 +669,7 @@ def filter_subscribers(self, doc, subscribers, target_media_type):
         """
         filtered_subscribers = []
         subscriber_codes = {}
-        existing_products = {p[config.ID_FIELD]: p for p in get_resource_service("products").get_active()}
+        existing_products = {p[ID_FIELD]: p for p in get_resource_service("products").get_active()}
         global_filters = deepcopy(
             [gf["cf"] for gf in self.filters.get("content_filters", {}).values() if gf["cf"].get("is_global", True)]
         )
@@ -701,7 +727,7 @@ def filter_subscribers(self, doc, subscribers, target_media_type):
 
             # unify the list of codes by removing duplicates
             if subscriber_added:
-                subscriber_codes[subscriber[config.ID_FIELD]] = list(set(product_codes))
+                subscriber_codes[subscriber[ID_FIELD]] = list(set(product_codes))
 
         return filtered_subscribers, subscriber_codes
 
@@ -754,14 +780,14 @@ def _filter_subscribers_for_associations(self, subscribers, doc, target_media_ty
                 continue
 
             assoc_subscribers = set()
-            assoc_id = item.get(config.ID_FIELD)
+            assoc_id = item.get(ID_FIELD)
             filtered_subscribers, subscriber_codes = self.filter_subscribers(
                 item, deepcopy(subscribers), target_media_type
             )
 
             for subscriber in filtered_subscribers:
                 # for the validated subscribers
-                subscriber_id = subscriber.get(config.ID_FIELD)
+                subscriber_id = subscriber.get(ID_FIELD)
                 if not associations.get(subscriber_id):
                     associations[subscriber_id] = []
 
@@ -910,7 +936,7 @@ def conforms_subscriber_global_filter(self, subscriber, global_filters):
 
         gfs = subscriber.get("global_filters", {})
         for global_filter in global_filters:
-            if gfs.get(str(global_filter[config.ID_FIELD]), True):
+            if gfs.get(str(global_filter[ID_FIELD]), True):
                 # Global filter applies to this subscriber
                 if global_filter.get("does_match"):
                     return False
@@ -924,9 +950,9 @@ def _extend_subscriber_items(self, subscriber_items, subscribers, item, package_
         :param item: item that has been published
         :param package_item_id: package_item_id
         """
-        item_id = item[config.ID_FIELD]
+        item_id = item[ID_FIELD]
         for subscriber in subscribers:
-            sid = subscriber[config.ID_FIELD]
+            sid = subscriber[ID_FIELD]
             item_list = subscriber_items.get(sid, {}).get("items", {})
             item_list[item_id] = package_item_id
             subscriber_items[sid] = {
diff --git a/apps/publish/publish_content_tests.py b/apps/publish/publish_content_tests.py
index f519375fb0..84e8b12cf8 100644
--- a/apps/publish/publish_content_tests.py
+++ b/apps/publish/publish_content_tests.py
@@ -11,9 +11,9 @@
 from unittest import mock
 from datetime import timedelta
 
+from superdesk.resource_fields import ID_FIELD
 from apps.publish import init_app
 from apps.publish.enqueue import EnqueueContent
-from superdesk import config
 from superdesk.publish.publish_content import get_queue_items
 from superdesk.tests import TestCase
 from superdesk.utc import utcnow
@@ -113,7 +113,7 @@ def test_queue_items(self):
             self.app.data.insert("publish_queue", self.queue_items)
             items = get_queue_items()
             self.assertEqual(3, items.count())
-            ids = [item[config.ID_FIELD] for item in items]
+            ids = [item[ID_FIELD] for item in items]
             self.assertNotIn(4, ids)
 
     @mock.patch("apps.publish.enqueue.EnqueueContent.enqueue_item")
@@ -127,5 +127,5 @@ def test_get_enqueue_items(self):
         self.app.data.insert("published", self.published_items)
         items = EnqueueContent().get_published_items()
         self.assertEqual(2, len(items))
-        ids = [item[config.ID_FIELD] for item in items]
+        ids = [item[ID_FIELD] for item in items]
         self.assertNotIn(3, ids)
diff --git a/apps/publish/published_item.py b/apps/publish/published_item.py
index 361c52eef3..034f0ae360 100644
--- a/apps/publish/published_item.py
+++ b/apps/publish/published_item.py
@@ -11,9 +11,11 @@
 from collections import namedtuple
 import json
 import logging
-import flask
+
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD, ITEMS, DATE_CREATED, LAST_UPDATED, VERSION
+from superdesk.flask import request
 from superdesk import get_resource_service
-import superdesk
 from superdesk.errors import SuperdeskApiError
 from superdesk.metadata.item import not_analyzed, ITEM_STATE, PUBLISH_STATES
 from superdesk.metadata.utils import aggregations, get_elastic_highlight_query
@@ -22,8 +24,7 @@
 from superdesk.utc import utcnow
 
 from bson.objectid import ObjectId
-from eve.utils import ParsedRequest, config
-from flask import current_app as app, request
+from eve.utils import ParsedRequest
 
 from apps.archive.archive import SOURCE as ARCHIVE
 from apps.archive.common import handle_existing_data, item_schema
@@ -82,7 +83,7 @@ def get_content_filter(req=None):
 
     :return:
     """
-    user = getattr(flask.g, "user", None)
+    user = get_current_app().get_current_user_dict()
     if user:
         if "invisible_stages" in user:
             stages = user.get("invisible_stages")
@@ -107,7 +108,7 @@ class PublishedItemResource(Resource):
     }
 
     schema = item_schema(published_item_fields)
-    etag_ignore_fields = [config.ID_FIELD, "highlights", "item_id", LAST_PUBLISHED_VERSION, "moved_to_legal"]
+    etag_ignore_fields = [ID_FIELD, "highlights", "item_id", LAST_PUBLISHED_VERSION, "moved_to_legal"]
 
     privileges = {"POST": "publish_queue", "PATCH": "publish_queue"}
     item_methods = ["GET", "PATCH"]
@@ -132,7 +133,7 @@ def on_fetched(self, docs):
         Overriding this to enhance the published article with the one in archive collection
         """
 
-        self.enhance_with_archive_items(docs[config.ITEMS])
+        self.enhance_with_archive_items(docs[ITEMS])
 
     def on_fetched_item(self, doc):
         """
@@ -150,7 +151,7 @@ def on_create(self, docs):
 
         for doc in docs:
             self.raise_if_not_marked_for_publication(doc)
-            doc[config.LAST_UPDATED] = doc[config.DATE_CREATED] = utcnow()
+            doc[LAST_UPDATED] = doc[DATE_CREATED] = utcnow()
             self.set_defaults(doc)
 
     def on_update(self, updates, original):
@@ -175,10 +176,10 @@ def raise_if_not_marked_for_publication(self, doc):
             )
 
     def set_defaults(self, doc):
-        doc["item_id"] = doc[config.ID_FIELD]
+        doc["item_id"] = doc[ID_FIELD]
         doc["versioncreated"] = utcnow()
         doc["publish_sequence_no"] = get_resource_service("sequences").get_next_sequence_number(self.SEQ_KEY_NAME)
-        doc.pop(config.ID_FIELD, None)
+        doc.pop(ID_FIELD, None)
         doc.pop("lock_user", None)
         doc.pop("lock_time", None)
         doc.pop("lock_action", None)
@@ -190,24 +191,22 @@ def enhance_with_archive_items(self, items):
             archive_items = []
             archive_lookup = {}
             if ids:
-                query = {"$and": [{config.ID_FIELD: {"$in": ids}}]}
+                query = {"$and": [{ID_FIELD: {"$in": ids}}]}
                 archive_req = ParsedRequest()
                 archive_req.max_results = len(ids)
                 # can't access published from elastic due filter on the archive resource hence going to mongo
-                archive_items = list(
-                    superdesk.get_resource_service(ARCHIVE).get_from_mongo(req=archive_req, lookup=query)
-                )
+                archive_items = list(get_resource_service(ARCHIVE).get_from_mongo(req=archive_req, lookup=query))
 
                 for item in archive_items:
                     handle_existing_data(item)
-                    archive_lookup[item[config.ID_FIELD]] = item
+                    archive_lookup[item[ID_FIELD]] = item
 
             for item in items:
-                archive_item = archive_lookup.get(item.get("item_id"), {config.VERSION: item.get(config.VERSION, 1)})
+                archive_item = archive_lookup.get(item.get("item_id"), {VERSION: item.get(VERSION, 1)})
 
                 updates = {
-                    config.ID_FIELD: item.get("item_id"),
-                    "item_id": item.get(config.ID_FIELD),
+                    ID_FIELD: item.get("item_id"),
+                    "item_id": item.get(ID_FIELD),
                     "lock_user": archive_item.get("lock_user", None),
                     "lock_time": archive_item.get("lock_time", None),
                     "lock_action": archive_item.get("lock_action", None),
@@ -216,7 +215,7 @@ def enhance_with_archive_items(self, items):
                 }
 
                 if request and request.args.get("published_id") == "1":
-                    updates.pop(config.ID_FIELD)
+                    updates.pop(ID_FIELD)
                     updates.pop("item_id")
 
                 item.update(updates)
@@ -228,7 +227,7 @@ def on_delete(self, doc):
         Overriding to avoid other services from invoking this method accidentally.
         """
 
-        if app.testing:
+        if get_current_app().testing:
             super().on_delete(doc)
         else:
             raise NotImplementedError(
@@ -241,7 +240,7 @@ def delete_action(self, lookup=None):
         Overriding to avoid other services from invoking this method accidentally.
         """
 
-        if app.testing:
+        if get_current_app().testing:
             super().delete_action(lookup)
         else:
             raise NotImplementedError(
@@ -254,7 +253,7 @@ def on_deleted(self, doc):
         Overriding to avoid other services from invoking this method accidentally.
         """
 
-        if app.testing:
+        if get_current_app().testing:
             super().on_deleted(doc)
         else:
             raise NotImplementedError(
@@ -332,10 +331,10 @@ def update_published_items(self, _id, field, state):
         items = self.get_other_published_items(_id)
         for item in items:
             try:
-                super().system_update(ObjectId(item[config.ID_FIELD]), {field: state}, item)
+                super().system_update(ObjectId(item[ID_FIELD]), {field: state}, item)
             except Exception:
                 # This part is used in unit testing
-                super().system_update(item[config.ID_FIELD], {field: state}, item)
+                super().system_update(item[ID_FIELD], {field: state}, item)
 
     def delete_by_article_id(self, _id):
         """Removes the article from the published collection.
@@ -372,8 +371,8 @@ def set_moved_to_legal(self, item_id, version, status):
 
         for item in items:
             try:
-                if item.get(config.VERSION) <= version and not item.get("moved_to_legal", False):
-                    super().system_update(ObjectId(item.get(config.ID_FIELD)), {"moved_to_legal": status}, item)
+                if item.get(VERSION) <= version and not item.get("moved_to_legal", False):
+                    super().system_update(ObjectId(item.get(ID_FIELD)), {"moved_to_legal": status}, item)
             except Exception:
                 logger.exception(
                     "Failed to set the moved_to_legal flag " "for item {} and version {}".format(item_id, version)
diff --git a/apps/rules/routing_rules.py b/apps/rules/routing_rules.py
index 65d47140a5..4f6b26aaa4 100644
--- a/apps/rules/routing_rules.py
+++ b/apps/rules/routing_rules.py
@@ -16,10 +16,11 @@
 
 from enum import Enum
 from datetime import datetime, timedelta
+
+from superdesk.resource_fields import ID_FIELD
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.errors import SuperdeskApiError
-from eve.utils import config
 from superdesk.utc import set_time
 from flask_babel import _
 
@@ -183,7 +184,7 @@ def on_delete(self, doc):
         Will throw BadRequestError if any of the pre-conditions fail.
         """
 
-        if self.backend.find_one("ingest_providers", req=None, routing_scheme=doc[config.ID_FIELD]):
+        if self.backend.find_one("ingest_providers", req=None, routing_scheme=doc[ID_FIELD]):
             raise SuperdeskApiError.forbiddenError(_("Routing scheme is applied to channel(s). It cannot be deleted."))
 
     def apply_routing_scheme(self, ingest_item, provider, routing_scheme):
diff --git a/apps/rules/rule_handlers.py b/apps/rules/rule_handlers.py
index c029604406..e257f585f6 100644
--- a/apps/rules/rule_handlers.py
+++ b/apps/rules/rule_handlers.py
@@ -11,9 +11,10 @@
 from typing import Dict, Any
 import logging
 
-from eve.utils import config
 from flask_babel import lazy_gettext, LazyString
 
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service, Resource, Service
 from superdesk.metadata.item import CONTENT_STATE, ITEM_TYPE, CONTENT_TYPE, MEDIA_TYPES
 from superdesk.utils import ListCursor
@@ -131,7 +132,7 @@ def apply_rule(self, rule, ingest_item, routing_scheme):
                 stage_id = ingest_item["task"]["stage"]
             else:
                 stage_id = desk["incoming_stage"]
-            self.__fetch(ingest_item, [{"desk": desk[config.ID_FIELD], "stage": stage_id}], rule)
+            self.__fetch(ingest_item, [{"desk": desk[ID_FIELD], "stage": stage_id}], rule)
             fetch_actions = [
                 f for f in rule.get("actions", {}).get("fetch", []) if f.get("desk") != ingest_item["task"]["desk"]
             ]
@@ -155,7 +156,7 @@ def __fetch(self, ingest_item, destinations, rule):
                 item_id = get_resource_service("fetch").fetch(
                     [
                         {
-                            config.ID_FIELD: ingest_item[config.ID_FIELD],
+                            ID_FIELD: ingest_item[ID_FIELD],
                             "desk": str(destination.get("desk")),
                             "stage": str(destination.get("stage")),
                             "state": CONTENT_STATE.ROUTED,
@@ -212,7 +213,7 @@ def __get_target(self, destination):
 
     def _set_default_values(self, archive_item):
         """Assigns the default values to the item that about to be auto published"""
-        default_categories = self._get_categories(config.DEFAULT_CATEGORY_QCODES_FOR_AUTO_PUBLISHED_ARTICLES)
+        default_categories = self._get_categories(get_app_config("DEFAULT_CATEGORY_QCODES_FOR_AUTO_PUBLISHED_ARTICLES"))
         default_values = self._assign_default_values(archive_item, default_categories)
         get_resource_service("archive").patch(archive_item["_id"], default_values)
 
diff --git a/apps/rundowns/export.py b/apps/rundowns/export.py
index 22f0e16bf3..7866aee4fd 100644
--- a/apps/rundowns/export.py
+++ b/apps/rundowns/export.py
@@ -1,10 +1,11 @@
 import superdesk
 
 from bson import ObjectId
-from flask import Blueprint, url_for, current_app as app, abort
 from typing import List
 from werkzeug.utils import secure_filename
 
+from superdesk.core import get_current_app, get_app_config
+from superdesk.flask import Blueprint, url_for, abort
 from superdesk.utils import ListCursor, jwt_encode, jwt_decode
 
 from . import privileges, rundowns, rundown_items, formatters, shows
@@ -29,6 +30,8 @@ def export(token):
     assert formatter, {"formatter": 1}
     items = rundown_items.items_service.get_rundown_items(rundown)
     output, mimetype, filename = formatter.export(show, rundown, items)
+
+    app = get_current_app()
     response = app.response_class(output, mimetype=mimetype)
     response.headers["Content-Disposition"] = f'attachment; filename="{secure_filename(filename)}"'
     if app.testing:
@@ -73,7 +76,7 @@ def set_link(self, doc):
             "rundowns_export.export",
             token=self.get_token(doc),
             _external=True,
-            _scheme=app.config["PREFERRED_URL_SCHEME"],
+            _scheme=get_app_config("PREFERRED_URL_SCHEME"),
         )
         return doc["rundown"]
 
diff --git a/apps/rundowns/formatters/html.py b/apps/rundowns/formatters/html.py
index d5fc6c4037..3e9077920e 100644
--- a/apps/rundowns/formatters/html.py
+++ b/apps/rundowns/formatters/html.py
@@ -1,9 +1,8 @@
 import base64
 
-from flask import render_template
-
 from . import BaseFormatter
 
+from superdesk.flask import render_template
 from superdesk.text_utils import get_text
 
 
diff --git a/apps/rundowns/rundowns.py b/apps/rundowns/rundowns.py
index cb9158c573..e52efedebf 100644
--- a/apps/rundowns/rundowns.py
+++ b/apps/rundowns/rundowns.py
@@ -2,9 +2,9 @@
 import datetime
 import superdesk
 
-from flask import json
 from typing import Optional
 
+from superdesk.core import json
 from superdesk.metadata.item import metadata_schema
 from werkzeug.datastructures import ImmutableMultiDict
 
diff --git a/apps/rundowns/tasks.py b/apps/rundowns/tasks.py
index c5f8ff13a9..2caf17fdc8 100644
--- a/apps/rundowns/tasks.py
+++ b/apps/rundowns/tasks.py
@@ -1,8 +1,9 @@
+from typing import cast
 import pytz
 import logging
-
-from flask import current_app as app
 from datetime import datetime, tzinfo
+
+from superdesk.core import get_app_config
 from superdesk.celery_app import celery
 from superdesk.utc import utc_to_local, utcnow
 from superdesk.lock import lock, unlock
@@ -22,7 +23,7 @@ def create_scheduled_rundowns() -> None:
     logger.info("Starting to create scheduled rundowns")
     try:
         now = utcnow()
-        tz = pytz.timezone(app.config["RUNDOWNS_TIMEZONE"])
+        tz = pytz.timezone(cast(str, get_app_config("RUNDOWNS_TIMEZONE")))
         create_scheduled(now, tz)
     finally:
         unlock(lock_id)
diff --git a/apps/rundowns/utils.py b/apps/rundowns/utils.py
index b851029f20..de5ce01433 100644
--- a/apps/rundowns/utils.py
+++ b/apps/rundowns/utils.py
@@ -1,10 +1,10 @@
+from typing import Optional, cast
+
 import logging
 import datetime
 import dateutil.rrule as rrule
 
-from typing import Optional
-from flask import current_app as app
-
+from superdesk.core import get_app_config
 from superdesk import get_resource_service
 from superdesk.utc import utcnow, utc_to_local, local_to_utc
 
@@ -37,14 +37,14 @@ def combine_date_time(
 
 
 def to_utc(date: datetime.datetime) -> datetime.datetime:
-    local = local_to_utc(app.config["RUNDOWNS_TIMEZONE"], date)
+    local = local_to_utc(get_app_config("RUNDOWNS_TIMEZONE"), date)
     assert local is not None
     return local
 
 
 def get_start_datetime(time: datetime.time, date: Optional[datetime.date]) -> datetime.datetime:
     now = utcnow()
-    local_now = utc_to_local(app.config["RUNDOWNS_TIMEZONE"], now)
+    local_now = utc_to_local(get_app_config("RUNDOWNS_TIMEZONE"), now)
     if date is None or date < local_now.date():
         date = local_now.date()
     return combine_date_time(date, time, local_now.tzinfo)
@@ -84,7 +84,7 @@ def set_autocreate_schedule(updates, local_date: Optional[datetime.datetime], te
     create_before = (
         datetime.timedelta(seconds=template["autocreate_before_seconds"])
         if template.get("autocreate_before_seconds")
-        else (datetime.timedelta(hours=app.config["RUNDOWNS_SCHEDULE_HOURS"]))
+        else (datetime.timedelta(hours=cast(int, get_app_config("RUNDOWNS_SCHEDULE_HOURS"))))
     )
 
     updates["scheduled_on"] = to_utc(local_date)
diff --git a/apps/saved_searches/__init__.py b/apps/saved_searches/__init__.py
index a42edc5388..5243df2aa6 100644
--- a/apps/saved_searches/__init__.py
+++ b/apps/saved_searches/__init__.py
@@ -28,7 +28,9 @@
 import pytz
 from croniter import croniter
 from datetime import datetime
-from flask import render_template, current_app as app
+
+from superdesk.core import get_app_config, get_current_app
+from superdesk.flask import render_template
 from superdesk import emails
 import json
 
@@ -84,7 +86,7 @@ def get_next_date(scheduling, base=None):
     :return datetime: date of next schedule
     """
     if base is None:
-        tz = pytz.timezone(app.config["DEFAULT_TIMEZONE"])
+        tz = pytz.timezone(get_app_config("DEFAULT_TIMEZONE"))
         base = datetime.now(tz=tz)
     cron_iter = croniter(scheduling, base)
     return cron_iter.get_next(datetime)
@@ -99,13 +101,13 @@ def send_report_email(user_id, search, docs):
     users_service = get_resource_service("users")
     user_data = next(users_service.find({"_id": user_id}))
     recipients = [user_data["email"]]
-    admins = app.config["ADMINS"]
+    admins = get_app_config("ADMINS")
     subject = "Saved searches report"
     context = {
-        "app_name": app.config["APPLICATION_NAME"],
+        "app_name": get_app_config("APPLICATION_NAME"),
         "search": search,
         "docs": docs,
-        "client_url": app.config["CLIENT_URL"].rstrip("/"),
+        "client_url": get_app_config("CLIENT_URL").rstrip("/"),
     }
     text_body = render_template("saved_searches_report.txt", **context)
     html_body = render_template("saved_searches_report.html", **context)
@@ -119,7 +121,7 @@ def publish_report(user_id, search_data):
     search_filter = json.loads(search_data["filter"])
     query = es_utils.filter2query(search_filter, user_id=user_id)
     repos = es_utils.filter2repos(search_filter) or es_utils.REPOS.copy()
-    docs = list(app.data.elastic.search(query, repos))
+    docs = list(get_current_app().data.elastic.search(query, repos))
     send_report_email(user_id, search_data, docs)
 
 
@@ -154,7 +156,7 @@ def report():
     try:
         saved_searches = get_resource_service("saved_searches")
         subscribed_searches = saved_searches.find({"subscribers": {"$exists": 1}})
-        tz = pytz.timezone(app.config["DEFAULT_TIMEZONE"])
+        tz = pytz.timezone(get_app_config("DEFAULT_TIMEZONE"))
         now = datetime.now(tz=tz)
         for search in subscribed_searches:
             do_update = False
diff --git a/apps/search/__init__.py b/apps/search/__init__.py
index c9a09ed38b..9de98e52e6 100644
--- a/apps/search/__init__.py
+++ b/apps/search/__init__.py
@@ -11,9 +11,10 @@
 import superdesk
 
 from copy import deepcopy
-from flask import current_app as app, json, g
 from eve_elastic.elastic import set_filters
 
+from superdesk.core import json, get_current_app, get_app_config
+from superdesk.resource_fields import ITEMS
 from superdesk import get_resource_service
 from superdesk.metadata.item import CONTENT_STATE, ITEM_STATE, get_schema
 from superdesk.metadata.utils import aggregations as common_aggregations, item_url, _set_highlight_query
@@ -34,7 +35,7 @@ class SearchService(superdesk.Service):
 
     @property
     def elastic(self):
-        return app.data.elastic
+        return get_current_app().data.elastic
 
     def __init__(self, datasource, backend):
         super().__init__(datasource=datasource, backend=backend)
@@ -111,6 +112,7 @@ def _get_query(self, req):
         except KeyError:
             pass
 
+        app = get_current_app()
         if app.data.elastic.should_aggregate(req):
             source["aggs"] = self.aggregations
 
@@ -120,11 +122,12 @@ def _get_query(self, req):
         return source
 
     def _enhance_query_string(self, query_string):
-        query_string.setdefault("analyze_wildcard", app.config["ELASTIC_QUERY_STRING_ANALYZE_WILDCARD"])
-        query_string.setdefault("type", app.config["ELASTIC_QUERY_STRING_TYPE"])
+        query_string.setdefault("analyze_wildcard", get_app_config("ELASTIC_QUERY_STRING_ANALYZE_WILDCARD"))
+        query_string.setdefault("type", get_app_config("ELASTIC_QUERY_STRING_TYPE"))
 
     def _get_projected_fields(self, req):
         """Get elastic projected fields."""
+        app = get_current_app()
         if app.data.elastic.should_project(req):
             return app.data.elastic.get_projected_fields(req)
 
@@ -149,7 +152,7 @@ def get_stages_to_exclude(self):
         """
         Returns the list of the current users invisible stages
         """
-        user = g.get("user", {})
+        user = get_current_app().get_current_user_dict() or {}
         if "invisible_stages" in user:
             stages = user.get("invisible_stages")
         else:
@@ -169,8 +172,8 @@ def get(self, req, lookup):
         filters = self._get_filters(types, excluded_stages)
 
         # if the system has a setting value for the maximum search depth then apply the filter
-        if not app.settings["MAX_SEARCH_DEPTH"] == -1:
-            query["terminate_after"] = app.settings["MAX_SEARCH_DEPTH"]
+        if not get_app_config("MAX_SEARCH_DEPTH") == -1:
+            query["terminate_after"] = get_app_config("MAX_SEARCH_DEPTH")
 
         if filters:
             set_filters(query, filters)
@@ -182,7 +185,8 @@ def get(self, req, lookup):
         docs = self.elastic.search(query, types, params)
 
         for resource in types:
-            response = {app.config["ITEMS"]: [doc for doc in docs if doc["_type"] == resource]}
+            response = {ITEMS: [doc for doc in docs if doc["_type"] == resource]}
+            app = get_current_app().as_any()
             getattr(app, "on_fetched_resource")(resource, response)
             getattr(app, "on_fetched_resource_%s" % resource)(response)
 
@@ -215,7 +219,7 @@ def on_fetched(self, doc):
         :type doc: dict
         """
 
-        docs = doc[app.config["ITEMS"]]
+        docs = doc[ITEMS]
         for item in docs:
             build_custom_hateoas({"self": {"title": item["_type"], "href": "/{}/{{_id}}".format(item["_type"])}}, item)
 
@@ -243,4 +247,4 @@ def init_app(app) -> None:
     SearchResource("search", app=app, service=search_service)
 
     # Set the start of week config for use in both server and client
-    app.client_config["start_of_week"] = app.config.get("START_OF_WEEK") or 0
+    app.client_config["start_of_week"] = get_app_config("START_OF_WEEK") or 0
diff --git a/apps/search_providers/proxy.py b/apps/search_providers/proxy.py
index 084d34999a..c44ccb9cb7 100644
--- a/apps/search_providers/proxy.py
+++ b/apps/search_providers/proxy.py
@@ -1,8 +1,9 @@
 import bson
 import bson.errors
-import superdesk
 
-from flask import abort, request, json
+import superdesk
+from superdesk.core import json
+from superdesk.flask import abort, request
 from superdesk.utc import utcnow
 from superdesk.utils import ListCursor
 from apps.search_providers.registry import registered_search_providers
diff --git a/apps/search_providers/service.py b/apps/search_providers/service.py
index 91eea9ec93..f535ac347c 100644
--- a/apps/search_providers/service.py
+++ b/apps/search_providers/service.py
@@ -11,8 +11,8 @@
 import logging
 
 from flask_babel import _
-from eve.utils import config
 
+from superdesk.resource_fields import ID_FIELD
 from apps.search_providers import allowed_search_providers
 from superdesk.errors import SuperdeskApiError
 from superdesk.services import BaseService
@@ -56,7 +56,7 @@ def on_created(self, docs):
         for doc in docs:
             if doc.get("is_default"):
                 self.find_and_modify(
-                    query={"$and": [{"_id": {"$ne": doc[config.ID_FIELD]}}, {"is_default": True}]},
+                    query={"$and": [{"_id": {"$ne": doc[ID_FIELD]}}, {"is_default": True}]},
                     update={"$set": {"is_default": False}},
                     upsert=False,
                 )
@@ -64,7 +64,7 @@ def on_created(self, docs):
     def on_updated(self, updates, original):
         if updates.get("is_default"):
             self.find_and_modify(
-                query={"$and": [{"_id": {"$ne": original[config.ID_FIELD]}}, {"is_default": True}]},
+                query={"$and": [{"_id": {"$ne": original[ID_FIELD]}}, {"is_default": True}]},
                 update={"$set": {"is_default": False}},
                 upsert=False,
             )
diff --git a/apps/stages.py b/apps/stages.py
index 36acdd84e2..938a72ea40 100644
--- a/apps/stages.py
+++ b/apps/stages.py
@@ -9,8 +9,7 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import superdesk
-
-from superdesk import config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.notification import push_notification
 from superdesk.resource import Resource
 from superdesk.services import BaseService
@@ -101,7 +100,7 @@ def on_created(self, docs):
                 push_notification(
                     self.notification_key,
                     created=1,
-                    stage_id=str(doc.get(config.ID_FIELD)),
+                    stage_id=str(doc.get(ID_FIELD)),
                     desk_id=str(doc.get("desk")),
                     is_visible=doc.get("is_visible", True),
                 )
@@ -136,7 +135,7 @@ def on_delete(self, doc):
             if desk_id and superdesk.get_resource_service("desks").find_one(req=None, _id=desk_id):
                 raise SuperdeskApiError.preconditionFailedError(message=_("Cannot delete a Incoming Stage."))
 
-        archive_versions_query = {"task.stage": str(doc[config.ID_FIELD])}
+        archive_versions_query = {"task.stage": str(doc[ID_FIELD])}
         items = superdesk.get_resource_service("archive_versions").get(req=None, lookup=archive_versions_query)
         if items and items.count():
             raise SuperdeskApiError.preconditionFailedError(
@@ -144,7 +143,7 @@ def on_delete(self, doc):
             )
 
         # check if the stage is referred to in a ingest routing rule
-        rules = self._stage_in_rule(doc[config.ID_FIELD])
+        rules = self._stage_in_rule(doc[ID_FIELD])
         if rules.count() > 0:
             rule_names = ", ".join(rule.get("name") for rule in rules)
             raise SuperdeskApiError.preconditionFailedError(
@@ -153,7 +152,7 @@ def on_delete(self, doc):
 
     def on_deleted(self, doc):
         push_notification(
-            self.notification_key, deleted=1, stage_id=str(doc.get(config.ID_FIELD)), desk_id=str(doc.get("desk"))
+            self.notification_key, deleted=1, stage_id=str(doc.get(ID_FIELD)), desk_id=str(doc.get("desk"))
         )
 
     def on_update(self, updates, original):
@@ -183,7 +182,7 @@ def on_updated(self, updates, original):
             push_notification(
                 "stage_visibility_updated",
                 updated=1,
-                stage_id=str(original[config.ID_FIELD]),
+                stage_id=str(original[ID_FIELD]),
                 desk_id=str(original["desk"]),
                 is_visible=updates.get("is_visible", original.get("is_visible", True)),
             )
@@ -192,7 +191,7 @@ def on_updated(self, updates, original):
             push_notification(
                 self.notification_key,
                 updated=1,
-                stage_id=str(original.get(config.ID_FIELD)),
+                stage_id=str(original.get(ID_FIELD)),
                 desk_id=str(original.get("desk")),
             )
 
diff --git a/apps/system_message/service.py b/apps/system_message/service.py
index ed4057ac8c..e2e8812bc7 100644
--- a/apps/system_message/service.py
+++ b/apps/system_message/service.py
@@ -9,10 +9,9 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 
+from superdesk.resource_fields import ID_FIELD
 from superdesk.services import Service
 from superdesk.notification import push_notification
-
-from eve.utils import config
 from apps.auth import get_user_id
 
 
@@ -27,7 +26,7 @@ def on_created(self, docs):
         :param docs:
         :return:
         """
-        push_notification("system_message:created", _id=[doc.get(config.ID_FIELD) for doc in docs])
+        push_notification("system_message:created", _id=[doc.get(ID_FIELD) for doc in docs])
 
     def on_update(self, updates, original):
         updates["user_id"] = get_user_id()
@@ -39,7 +38,7 @@ def on_updated(self, updates, original):
         :param original:
         :return:
         """
-        push_notification("system_message:updated", _id=[original.get(config.ID_FIELD)])
+        push_notification("system_message:updated", _id=[original.get(ID_FIELD)])
 
     def on_deleted(self, doc):
         """
@@ -47,4 +46,4 @@ def on_deleted(self, doc):
         :param doc:
         :return:
         """
-        push_notification("system_message:deleted", _id=[doc.get(config.ID_FIELD)])
+        push_notification("system_message:deleted", _id=[doc.get(ID_FIELD)])
diff --git a/apps/tasks.py b/apps/tasks.py
index 9a4a76fef3..dd4036bbb1 100644
--- a/apps/tasks.py
+++ b/apps/tasks.py
@@ -17,6 +17,7 @@
 from eve.utils import ParsedRequest
 from eve.versioning import resolve_document_version
 
+from superdesk.resource_fields import ID_FIELD
 from superdesk.users.services import current_user_has_privilege
 from superdesk.resource import Resource
 from superdesk.errors import StopDuplication, SuperdeskApiError, InvalidStateTransitionError
@@ -25,7 +26,7 @@
 from superdesk.metadata.utils import item_url
 from superdesk.services import BaseService
 from superdesk.metadata.item import metadata_schema, ITEM_STATE, CONTENT_STATE, ITEM_TYPE
-from superdesk import get_resource_service, config
+from superdesk import get_resource_service
 from superdesk.activity import add_activity, ACTIVITY_CREATE, ACTIVITY_UPDATE
 from superdesk.workflow import is_workflow_state_transition_valid
 from apps.archive.common import (
@@ -163,7 +164,7 @@ def apply_onstage_rule(doc, _id):
     :param _id:
     :return:
     """
-    doc[config.ID_FIELD] = _id
+    doc[ID_FIELD] = _id
     stage = get_resource_service("stages").find_one(req=None, _id=doc.get("task", {}).get("stage"))
     if stage:
         apply_stage_rule(doc, None, stage, "onstage")
diff --git a/apps/templates/content_templates.py b/apps/templates/content_templates.py
index 8179de092f..cd1a85ddab 100644
--- a/apps/templates/content_templates.py
+++ b/apps/templates/content_templates.py
@@ -13,10 +13,13 @@
 import logging
 import jinja2.exceptions
 
-from flask import g, render_template_string, current_app as app
 from copy import deepcopy
+
+from superdesk.core import get_current_app, get_app_config
+from superdesk.resource_fields import ID_FIELD, DATE_CREATED, LAST_UPDATED, ETAG, VERSION, ITEMS
+from superdesk.flask import render_template_string
 from superdesk.services import BaseService
-from superdesk import Resource, Service, config, get_resource_service
+from superdesk import Resource, Service, get_resource_service
 from superdesk.utils import SuperdeskBaseEnum, plaintext_filter
 from superdesk.resource import build_custom_hateoas
 from superdesk.utc import utcnow, local_to_utc, utc_to_local
@@ -46,11 +49,11 @@
 KILL_TEMPLATE_NOT_REQUIRED_FIELDS = ["schedule", "dateline", "template_desks", "schedule_desk", "schedule_stage"]
 PLAINTEXT_FIELDS = {"headline"}
 TEMPLATE_DATA_IGNORE_FIELDS = {  # fields to be ignored when creating item from template
-    config.ID_FIELD,
-    config.LAST_UPDATED,
-    config.DATE_CREATED,
-    config.ETAG,
-    config.VERSION,
+    ID_FIELD,
+    LAST_UPDATED,
+    DATE_CREATED,
+    ETAG,
+    VERSION,
     "task",
     "firstcreated",
     "versioncreated",
@@ -127,7 +130,7 @@ def push_template_notification(docs, event="template:update"):
         if doc.get("template_desks"):
             template_desks.update([str(template) for template in doc.get("template_desks")])
 
-    push_notification(event, user=str(user.get(config.ID_FIELD, "")), desks=list(template_desks))
+    push_notification(event, user=str(user.get(ID_FIELD, "")), desks=list(template_desks))
 
 
 class ContentTemplatesResource(Resource):
@@ -194,7 +197,7 @@ class ContentTemplatesResource(Resource):
 
 class ContentTemplatesService(BaseService):
     def get(self, req, lookup):
-        active_user = g.get("user", {})
+        active_user = get_current_app().get_current_user_dict() or {}
         privileges = active_user.get("active_privileges", {})
         if not lookup:
             lookup = {}
@@ -230,7 +233,7 @@ def on_create(self, docs):
             if doc.get("template_type") == TemplateType.KILL.value:
                 self._validate_kill_template(doc)
             if get_user():
-                doc.setdefault("user", get_user()[config.ID_FIELD])
+                doc.setdefault("user", get_user()[ID_FIELD])
             self._validate_template_desks(doc)
 
     def on_created(self, docs):
@@ -265,7 +268,7 @@ def on_updated(self, updates, original):
         push_template_notification([updates, original])
 
     def on_fetched(self, docs):
-        self.enhance_items(docs[config.ITEMS])
+        self.enhance_items(docs[ITEMS])
 
     def on_fetched_item(self, doc):
         self.enhance_items([doc])
@@ -319,7 +322,7 @@ def update_template_profile(self, updates, profile_id, templates=None):
         for template in templates:
             data, processed = self._reset_fields(template, updates)
             if processed:
-                self.patch(template.get(config.ID_FIELD), {"data": data})
+                self.patch(template.get(ID_FIELD), {"data": data})
 
     def _reset_fields(self, template, profile_data):
         """
@@ -421,7 +424,7 @@ def _process_kill_template(self, doc):
                 doc[key] = None
 
     def _validate_privileges(self, doc, action=None):
-        active_user = g.get("user")
+        active_user = get_current_app().get_current_user_dict() or {}
         user = doc.get("user")
         privileges = active_user.get("active_privileges", {}) if active_user else {}
         if (active_user and active_user.get("user_type")) == "administrator":
@@ -430,7 +433,7 @@ def _validate_privileges(self, doc, action=None):
             active_user
             and user
             and not doc.get("is_public")
-            and active_user.get(config.ID_FIELD) != doc.get("user")
+            and active_user.get(ID_FIELD) != doc.get("user")
             and not privileges.get("personal_template")
         ):
             raise SuperdeskApiError.badRequestError(
@@ -487,7 +490,7 @@ def create(self, docs, **kwargs):
 
         docs[0] = item
         build_custom_hateoas(CUSTOM_HATEOAS, docs[0])
-        return [docs[0].get(config.ID_FIELD)]
+        return [docs[0].get(ID_FIELD)]
 
 
 def render_content_template_by_name(item, template_name):
@@ -597,7 +600,7 @@ def set_template_timestamps(template, now):
         "next_run": get_next_run(template.get("schedule"), now),
     }
     service = superdesk.get_resource_service("content_templates")
-    service.update(template[config.ID_FIELD], updates, template)
+    service.update(template[ID_FIELD], updates, template)
 
 
 def get_item_from_template(template):
@@ -636,7 +639,7 @@ def filter_plaintext_fields(item):
 
 
 def apply_null_override_for_kill(item):
-    for key in app.config["KILL_TEMPLATE_NULL_FIELDS"]:
+    for key in get_app_config("KILL_TEMPLATE_NULL_FIELDS"):
         if key in item:
             item[key] = None
 
@@ -657,11 +660,11 @@ def create_scheduled_content(now=None):
         for template in templates:
             set_template_timestamps(template, now)
             item = get_item_from_template(template)
-            item[config.VERSION] = 1
+            item[VERSION] = 1
             production.post([item])
             insert_into_versions(doc=item)
             try:
-                apply_onstage_rule(item, item.get(config.ID_FIELD))
+                apply_onstage_rule(item, item.get(ID_FIELD))
             except Exception as ex:  # noqa
                 logger.exception("Failed to apply on stage rule while scheduling template.")
             items.append(item)
@@ -686,7 +689,7 @@ def create_template_for_profile(items):
                 {
                     "template_name": profile.get("label"),
                     "is_public": True,
-                    "data": {"profile": str(profile.get(config.ID_FIELD))},
+                    "data": {"profile": str(profile.get(ID_FIELD))},
                 }
             )
     if templates:
@@ -699,8 +702,8 @@ def remove_profile_from_templates(item):
     :param item: deleted content profile
     """
     templates = list(
-        superdesk.get_resource_service(CONTENT_TEMPLATE_RESOURCE).get_templates_by_profile_id(item.get(config.ID_FIELD))
+        superdesk.get_resource_service(CONTENT_TEMPLATE_RESOURCE).get_templates_by_profile_id(item.get(ID_FIELD))
     )
     for template in templates:
         template.get("data", {}).pop("profile", None)
-        superdesk.get_resource_service(CONTENT_TEMPLATE_RESOURCE).patch(template[config.ID_FIELD], template)
+        superdesk.get_resource_service(CONTENT_TEMPLATE_RESOURCE).patch(template[ID_FIELD], template)
diff --git a/apps/templates/filters.py b/apps/templates/filters.py
index 35ac7c9817..73db2d2769 100644
--- a/apps/templates/filters.py
+++ b/apps/templates/filters.py
@@ -9,12 +9,13 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
-from superdesk.utc import get_date, timezone
-from superdesk import config
-from superdesk.etree import parse_html
 from lxml import etree
 from datetime import timedelta
 
+from superdesk.core import get_app_config
+from superdesk.utc import get_date, timezone
+from superdesk.etree import parse_html
+
 logger = logging.getLogger(__name__)
 
 
@@ -29,7 +30,7 @@ def format_datetime_filter(date_or_string, timezone_string=None, date_format=Non
     try:
         date_time = get_date(date_or_string)
 
-        timezone_string = timezone_string if timezone_string else config.DEFAULT_TIMEZONE
+        timezone_string = timezone_string if timezone_string else get_app_config("DEFAULT_TIMEZONE")
         tz = timezone(timezone_string)
         if tz:
             date_time = date_time.astimezone(tz)
diff --git a/apps/validate/validate.py b/apps/validate/validate.py
index c8b847e241..076ed0b30a 100644
--- a/apps/validate/validate.py
+++ b/apps/validate/validate.py
@@ -13,8 +13,9 @@
 from lxml import html
 from copy import deepcopy
 from datetime import datetime
-from flask import current_app as app, json
 from eve.io.mongo import Validator
+
+from superdesk.core import get_app_config
 from superdesk.metadata.item import ITEM_TYPE
 from superdesk.logging import logger
 from superdesk.text_utils import get_text
@@ -279,7 +280,7 @@ def _validate_validate_characters(self, validate, field, value):
         if value:
             value = str(html.fromstring(value).text_content())
 
-        disallowed_characters = app.config.get("DISALLOWED_CHARACTERS")
+        disallowed_characters = get_app_config("DISALLOWED_CHARACTERS")
 
         if validate and disallowed_characters and value:
             invalid_chars = [char for char in disallowed_characters if char in value]
@@ -370,13 +371,13 @@ def _get_validators(self, doc):
         profile_id = doc["validate"].get("profile") or item_type
 
         # use content profile if exists
-        if profile_id and (app.config["AUTO_PUBLISH_CONTENT_PROFILE"] or doc["act"] != "auto_publish"):
+        if profile_id and (get_app_config("AUTO_PUBLISH_CONTENT_PROFILE") or doc["act"] != "auto_publish"):
             content_type = superdesk.get_resource_service("content_types").find_one(req=None, _id=profile_id)
             if content_type:
                 return self._get_profile_schema(content_type.get("schema", {}), doc)
 
         # use custom schema like profile schema
-        custom_schema = app.config.get("SCHEMA", {}).get(doc[ITEM_TYPE])
+        custom_schema = get_app_config("SCHEMA", {}).get(doc[ITEM_TYPE])
         if custom_schema:
             return self._get_profile_schema(custom_schema, doc)
 
@@ -497,7 +498,7 @@ def _process_media_metadata(self, doc, schema):
         :return:
         """
         if doc.get("associations"):
-            schema.setdefault("associations", {})["media_metadata"] = app.config.get(
+            schema.setdefault("associations", {})["media_metadata"] = get_app_config(
                 "VALIDATE_MEDIA_METADATA_ON_PUBLISH", True
             )
 
diff --git a/apps/video_edit/__init__.py b/apps/video_edit/__init__.py
index cb6471fbad..9439539879 100644
--- a/apps/video_edit/__init__.py
+++ b/apps/video_edit/__init__.py
@@ -1,8 +1,8 @@
 from flask_babel import _
 
 import superdesk
+from superdesk.resource_fields import ID_FIELD
 from apps.archive.common import ARCHIVE
-from superdesk import config
 from superdesk.errors import SuperdeskApiError
 from superdesk.media.video_editor import VideoEditorWrapper
 from superdesk.metadata.utils import item_url
@@ -22,7 +22,7 @@ def create(self, docs, **kwargs):
         ids = []
         for doc in docs:
             item = doc.get("item")
-            item_id = item[config.ID_FIELD]
+            item_id = item[ID_FIELD]
             renditions = item["renditions"]
             video_id = renditions["original"].get("video_editor_id")
             if not video_id:
@@ -75,7 +75,7 @@ def find_one(self, req, **lookup):
             response = self.video_editor.create_timeline_thumbnails(
                 video_id, req.args.get("amount", TIMELINE_THUMBNAILS_AMOUNT)
             )
-            return {config.ID_FIELD: video_id, **response}
+            return {ID_FIELD: video_id, **response}
         res["project"] = self.video_editor.find_one(video_id)
         return res
 
@@ -126,7 +126,7 @@ class VideoEditResource(superdesk.Resource):
             "required": False,
             "empty": True,
             "schema": {
-                config.ID_FIELD: {
+                ID_FIELD: {
                     "type": "string",
                     "required": True,
                 },
diff --git a/apps/vidible/vidible.py b/apps/vidible/vidible.py
index 5ca7751081..054f7bb130 100644
--- a/apps/vidible/vidible.py
+++ b/apps/vidible/vidible.py
@@ -13,8 +13,8 @@
 import requests
 
 from eve.render import send_response
-from flask import Blueprint
 
+from superdesk.flask import Blueprint
 from superdesk.utc import utcnow
 
 
diff --git a/apps/workspace/workspace.py b/apps/workspace/workspace.py
index f313966cb9..f32e150c31 100644
--- a/apps/workspace/workspace.py
+++ b/apps/workspace/workspace.py
@@ -9,7 +9,7 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import superdesk
-from flask import request
+from superdesk.flask import request
 
 
 class WorkspaceResource(superdesk.Resource):
diff --git a/content_api/__init__.py b/content_api/__init__.py
index e332adced7..290c8e62be 100644
--- a/content_api/__init__.py
+++ b/content_api/__init__.py
@@ -8,7 +8,7 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from flask import current_app
+from superdesk.core import get_app_config
 from .commands import RemoveExpiredItems  # noqa
 
 
@@ -21,4 +21,4 @@ def is_enabled():
 
     It can be turned off via ``CONTENTAPI_ENABLED`` setting.
     """
-    return current_app.config.get("CONTENTAPI_ENABLED", True)
+    return get_app_config("CONTENTAPI_ENABLED", True)
diff --git a/content_api/api_audit/service.py b/content_api/api_audit/service.py
index e31fa5318d..9ea2f48d30 100644
--- a/content_api/api_audit/service.py
+++ b/content_api/api_audit/service.py
@@ -9,8 +9,9 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 from superdesk.services import BaseService
-from flask import g, request, current_app as app
-from eve.utils import config
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import g, request
 
 
 class ApiAuditService(BaseService):
@@ -35,12 +36,12 @@ def audit_items(self, items):
     def _audit_docs(self, docs):
         if not len(docs):
             return
-        if not app.config.get("CONTENTAPI_AUDIT", True):
+        if not get_app_config("CONTENTAPI_AUDIT", True):
             return
         subscriber = getattr(g, "user", None)
         # in behave testing we get user (dict)
         if isinstance(subscriber, dict):
-            subscriber = subscriber.get(config.ID_FIELD)
+            subscriber = subscriber.get(ID_FIELD)
         audit_docs = [
             {
                 "type": item.get("type", ""),
diff --git a/content_api/app/__init__.py b/content_api/app/__init__.py
index 3c2e854a31..04a211089c 100644
--- a/content_api/app/__init__.py
+++ b/content_api/app/__init__.py
@@ -19,11 +19,11 @@
 """
 
 import os
-import flask
 import importlib
 
 from eve.io.mongo.mongo import MongoJSONEncoder
 
+from superdesk.flask import Config
 from content_api.tokens import SubscriberTokenAuth
 from superdesk.datalayer import SuperdeskDataLayer
 from superdesk.factory.elastic_apm import setup_apm
@@ -40,7 +40,7 @@ def get_app(config=None):
         from `settings.py`
     :return: a new SuperdeskEve app instance
     """
-    app_config = flask.Config(".")
+    app_config = Config(".")
 
     # get content api default conf
     app_config.from_object("content_api.app.settings")
diff --git a/content_api/assets/__init__.py b/content_api/assets/__init__.py
index beb62ff04e..d451d42ba4 100644
--- a/content_api/assets/__init__.py
+++ b/content_api/assets/__init__.py
@@ -12,17 +12,19 @@
 import superdesk
 import bson.errors
 
-from flask import request, current_app as app
+from superdesk.core import get_current_app
+from superdesk.flask import request, Blueprint
 from content_api.errors import FileNotFoundError
 from superdesk import get_resource_service
 from superdesk.upload import upload_url as _upload_url
 from superdesk.storage.superdesk_file import generate_response_for_file
 
-bp = superdesk.Blueprint("assets", __name__)
+bp = Blueprint("assets", __name__)
 
 
 @bp.route("/assets/<path:media_id>", methods=["GET"])
 def get_media_streamed(media_id):
+    app = get_current_app()
     if not app.auth.authorized([], "assets", "GET"):
         return app.auth.authenticate()
     try:
diff --git a/content_api/assets/service.py b/content_api/assets/service.py
index b359e0b1f1..c1bae4faf2 100644
--- a/content_api/assets/service.py
+++ b/content_api/assets/service.py
@@ -9,8 +9,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 from bson import ObjectId
-from flask import current_app as app
 
+from superdesk.core import get_current_app
 from superdesk.errors import SuperdeskApiError
 from superdesk.media.media_operations import (
     download_file_from_encoded_str,
@@ -42,6 +42,7 @@ def on_create(self, docs):
 
     def store_file(self, doc, content, filename, content_type):
         # retrieve file name and metadata from file
+        app = get_current_app()
         file_name, content_type, metadata = process_file_from_stream(content, content_type=content_type)
         try:
             content.seek(0)
diff --git a/content_api/commands/remove_expired_items.py b/content_api/commands/remove_expired_items.py
index 41c46d7576..f729356089 100644
--- a/content_api/commands/remove_expired_items.py
+++ b/content_api/commands/remove_expired_items.py
@@ -12,7 +12,7 @@
 import superdesk
 from datetime import timedelta
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk import get_resource_service
 from superdesk.celery_task_utils import get_lock_id
 from superdesk.lock import lock, unlock
@@ -42,8 +42,8 @@ class RemoveExpiredItems(superdesk.Command):
     def run(self, expiry_days=None):
         if expiry_days:
             self.expiry_days = int(expiry_days)
-        elif app.config.get("CONTENT_API_EXPIRY_DAYS"):
-            self.expiry_days = app.config["CONTENT_API_EXPIRY_DAYS"]
+        elif get_app_config("CONTENT_API_EXPIRY_DAYS"):
+            self.expiry_days = get_app_config("CONTENT_API_EXPIRY_DAYS")
 
         if self.expiry_days == 0:
             logger.info("Expiry days is set to 0, therefor no items will be removed.")
diff --git a/content_api/items/service.py b/content_api/items/service.py
index 3ba3c4a8af..22cc7aac6f 100644
--- a/content_api/items/service.py
+++ b/content_api/items/service.py
@@ -19,10 +19,10 @@
 from superdesk.utc import utcnow
 from urllib.parse import urljoin, urlparse, quote
 
-from flask import current_app as app, g
-from flask import request
 from werkzeug.datastructures import MultiDict
 
+from superdesk.core import get_current_app, get_app_config
+from superdesk.flask import request, g
 from content_api.app.settings import ELASTIC_DATE_FORMAT
 from content_api.errors import BadParameterValueError, UnexpectedParameterError
 from content_api.items.resource import ItemsResource
@@ -207,10 +207,10 @@ def get_expired_items(self, expiry_datetime=None, expiry_days=None, max_results=
             expiry_datetime = utcnow()
 
         if expiry_days is None:
-            expiry_days = app.config["CONTENT_API_EXPIRY_DAYS"]
+            expiry_days = get_app_config("CONTENT_API_EXPIRY_DAYS")
 
         if max_results is None:
-            max_results = app.config["MAX_EXPIRY_QUERY_LIMIT"]
+            max_results = get_app_config("MAX_EXPIRY_QUERY_LIMIT")
 
         last_id = None
         expire_at = date_to_str(expiry_datetime - timedelta(days=expiry_days))
@@ -274,6 +274,8 @@ def _process_fetched_object(self, document, audit=True):
 
     def _process_item_renditions(self, item):
         hrefs = {}
+        app = get_current_app()
+
         if item.get("renditions"):
             renditions = {}
             for k, v in item["renditions"].items():
@@ -318,7 +320,7 @@ def _get_uri(self, document):
             endpoint_name = "items"
 
         resource_url = "{api_url}/{endpoint}/".format(
-            api_url=app.config["CONTENTAPI_URL"], endpoint=app.config["URLS"][endpoint_name]
+            api_url=get_app_config("CONTENTAPI_URL"), endpoint=get_app_config("URLS")[endpoint_name]
         )
 
         return urljoin(resource_url, quote(document.get("_id", document.get("guid"))))
diff --git a/content_api/items_versions/service.py b/content_api/items_versions/service.py
index 718781a16f..b6cab34dd3 100644
--- a/content_api/items_versions/service.py
+++ b/content_api/items_versions/service.py
@@ -7,23 +7,24 @@
 # For the full copyright and license information, please see the
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
-from eve.utils import config
 from eve.versioning import versioned_id_field
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.services import BaseService
-from flask import current_app as app
 from superdesk.utils import ListCursor
 
 
 class ItemsVersionsService(BaseService):
     def get(self, req, lookup):
-        resource_def = app.config["DOMAIN"]["items"]
+        resource_def = get_app_config("DOMAIN")["items"]
         id_field = versioned_id_field(resource_def)
 
         lookup = {"$and": [lookup, {"pubstatus": {"$ne": "canceled"}}]}
         version_history = list(super().get_from_mongo(req=req, lookup=lookup))
 
         for doc in version_history:
-            doc[config.ID_FIELD] = doc[id_field]
+            doc[ID_FIELD] = doc[id_field]
 
         return ListCursor(version_history)
 
diff --git a/content_api/publish/service.py b/content_api/publish/service.py
index 66579ce7c4..95dba261a3 100644
--- a/content_api/publish/service.py
+++ b/content_api/publish/service.py
@@ -12,8 +12,8 @@
 from typing import Dict
 
 from copy import copy
-from eve.utils import config
 
+from superdesk.resource_fields import ID_FIELD, VERSION
 from superdesk.utc import utcnow
 from superdesk.services import BaseService
 from superdesk.publish.formatters.ninjs_newsroom_formatter import NewsroomNinjsFormatter
@@ -51,10 +51,10 @@ def publish(self, item, subscribers=None):
             now = utcnow()
             doc.setdefault("firstcreated", now)
             doc.setdefault("versioncreated", now)
-            doc.setdefault(config.VERSION, item.get(config.VERSION, 1))
+            doc.setdefault(VERSION, item.get(VERSION, 1))
             for _, assoc in doc.get(ASSOCIATIONS, {}).items():
                 if assoc:
-                    assoc.setdefault("subscribers", [str(subscriber[config.ID_FIELD]) for subscriber in subscribers])
+                    assoc.setdefault("subscribers", [str(subscriber[ID_FIELD]) for subscriber in subscribers])
             doc["subscribers"] = [str(sub["_id"]) for sub in subscribers]
             doc["original_id"] = doc["guid"]
             if "evolvedfrom" in doc:
@@ -89,7 +89,7 @@ def _create_doc(self, doc, **kwargs):
         """Create a new item or update existing."""
         item = copy(doc)
         item.setdefault("_id", item.get("guid"))
-        _id = item[config.ID_FIELD] = item.pop("guid")
+        _id = item[ID_FIELD] = item.pop("guid")
 
         # merging the existing and new subscribers
         original = self.find_one(req=None, _id=_id)
@@ -147,7 +147,7 @@ def _filter_item(self, item):
 
         for fc in filters:
             if filter_service.does_match(fc, item):
-                logger.info("API Filter block {} matched for item {}.".format(fc, item.get(config.ID_FIELD)))
+                logger.info("API Filter block {} matched for item {}.".format(fc, item.get(ID_FIELD)))
                 return True
 
         return False
@@ -180,7 +180,7 @@ def _process_associations(self, updates, original):
             if original:
                 original_assoc = (original.get("associations") or {}).get(assoc)
 
-                if original_assoc and original_assoc.get(config.ID_FIELD) == update_assoc.get(config.ID_FIELD):
+                if original_assoc and original_assoc.get(ID_FIELD) == update_assoc.get(ID_FIELD):
                     update_assoc["subscribers"] = list(
                         set(original_assoc.get("subscribers") or []) | set(update_assoc.get("subscribers") or [])
                     )
diff --git a/content_api/search/service.py b/content_api/search/service.py
index ef073602cd..8d39348e9b 100644
--- a/content_api/search/service.py
+++ b/content_api/search/service.py
@@ -9,8 +9,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 from content_api.items.service import ItemsService
-from flask import g
 
+from superdesk.flask import g
 from superdesk import get_resource_privileges
 from superdesk.errors import SuperdeskApiError
 
diff --git a/content_api/tests/items_service_test.py b/content_api/tests/items_service_test.py
index 6ab9e6a1e9..b7f12bd958 100644
--- a/content_api/tests/items_service_test.py
+++ b/content_api/tests/items_service_test.py
@@ -11,10 +11,12 @@
 import json
 from datetime import date, timedelta
 from eve.utils import ParsedRequest
-from flask import Flask
+
 from unittest import mock
 from unittest.mock import MagicMock
 from werkzeug.datastructures import MultiDict
+
+from superdesk.flask import Flask
 from superdesk import resources
 from content_api.tests import ApiTestCase
 from superdesk.tests import TestCase
diff --git a/content_api/tests/packages_service_test.py b/content_api/tests/packages_service_test.py
index 0a8647356e..a32f70110d 100644
--- a/content_api/tests/packages_service_test.py
+++ b/content_api/tests/packages_service_test.py
@@ -8,9 +8,9 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from flask import Flask
 from unittest import mock
 
+from superdesk.flask import Flask
 from content_api.tests import ApiTestCase
 
 
diff --git a/content_api/tests/search_service_test.py b/content_api/tests/search_service_test.py
index d6f7bf43f4..ebd713e4ba 100644
--- a/content_api/tests/search_service_test.py
+++ b/content_api/tests/search_service_test.py
@@ -10,8 +10,9 @@
 
 from unittest import mock
 from unittest.mock import MagicMock
-from flask import Flask
 from werkzeug.datastructures import MultiDict
+
+from superdesk.flask import Flask
 from content_api.tests import ApiTestCase
 
 
diff --git a/content_api/tokens/__init__.py b/content_api/tokens/__init__.py
index 766810f8c3..15bbeb370e 100644
--- a/content_api/tokens/__init__.py
+++ b/content_api/tokens/__init__.py
@@ -10,8 +10,10 @@
 
 import superdesk
 
-from flask import current_app as app, g
 from eve.auth import TokenAuth
+
+from superdesk.core import get_current_app
+from superdesk.flask import g
 from superdesk.utc import utcnow
 from superdesk.publish.subscriber_token import SubscriberTokenResource, SubscriberTokenService
 
@@ -30,6 +32,7 @@ class AuthSubscriberTokenResource(SubscriberTokenResource):
 class SubscriberTokenAuth(TokenAuth):
     def check_auth(self, token, allowed_roles, resource, method):
         """Try to find auth token and if valid put subscriber id into ``g.user``."""
+        app = get_current_app()
         data = app.data.mongo.find_one(TOKEN_RESOURCE, req=None, _id=token)
         if not data:
             return False
diff --git a/content_api/users/service.py b/content_api/users/service.py
index 3d1edad90b..ea8d7f29c7 100644
--- a/content_api/users/service.py
+++ b/content_api/users/service.py
@@ -11,8 +11,8 @@
 import logging
 
 import bcrypt
-from flask import current_app as app
 
+from superdesk.core import get_app_config
 from superdesk.services import BaseService
 from superdesk.utils import is_hashed, get_hash
 
@@ -39,7 +39,7 @@ def on_update(self, updates, original):
             updates["password"] = self._get_password_hash(updates["password"])
 
     def _get_password_hash(self, password):
-        return get_hash(password, app.config.get("BCRYPT_GENSALT_WORK_FACTOR", 12))
+        return get_hash(password, get_app_config("BCRYPT_GENSALT_WORK_FACTOR", 12))
 
     def password_match(self, password, hashed_password):
         """Return true if the given password matches the hashed password
diff --git a/docs/settings.rst b/docs/settings.rst
index 3e7f085065..00753c030e 100644
--- a/docs/settings.rst
+++ b/docs/settings.rst
@@ -6,11 +6,11 @@
 Configuration
 =============
 
-We use ``flask.app.config``, so to use it do::
+We use ``superdesk.core.get_app_config``, so to use it do::
 
-    from flask import current_app as app
+    from superdesk.core import get_app_config
 
-    print(app.config['SERVER_DOMAIN'])
+    print(get_app_config("SERVER_DOMAIN"))
 
 Configuration is combination of default settings module and settings module
 in `application repo <https://github.com/superdesk/superdesk/blob/master/server/settings.py>`_.
diff --git a/features/steps/highlights_steps.py b/features/steps/highlights_steps.py
index 912513d4e9..4bb52c62f0 100644
--- a/features/steps/highlights_steps.py
+++ b/features/steps/highlights_steps.py
@@ -1,5 +1,5 @@
-from flask import json
 from behave import given, when, then  # @UnresolvedImport
+from superdesk.core import json
 from superdesk import get_resource_service
 from superdesk.tests import get_prefixed_url
 from superdesk.utc import utcnow
diff --git a/features/steps/steps_users.py b/features/steps/steps_users.py
index 4a99c3c0d6..2a58318422 100644
--- a/features/steps/steps_users.py
+++ b/features/steps/steps_users.py
@@ -1,5 +1,5 @@
 from behave import then
-from flask import json
+from superdesk.core import json
 
 
 @then("we get users")
diff --git a/prod_api/app/__init__.py b/prod_api/app/__init__.py
index cab85c3f56..eca71c7590 100644
--- a/prod_api/app/__init__.py
+++ b/prod_api/app/__init__.py
@@ -17,12 +17,11 @@
 """
 
 import os
-import flask
 import importlib
 
-from eve import Eve
 from eve.io.mongo.mongo import MongoJSONEncoder
 
+from superdesk.flask import Config
 from superdesk.datalayer import SuperdeskDataLayer
 from superdesk.factory.elastic_apm import setup_apm
 from superdesk.validator import SuperdeskValidator
@@ -41,7 +40,7 @@ def get_app(config=None):
     :return: a new SuperdeskEve app instance
     """
 
-    app_config = flask.Config(".")
+    app_config = Config(".")
 
     # default config
     app_config.from_object("prod_api.app.settings")
diff --git a/prod_api/assets/__init__.py b/prod_api/assets/__init__.py
index effc0cf37f..10718ccc5c 100644
--- a/prod_api/assets/__init__.py
+++ b/prod_api/assets/__init__.py
@@ -9,11 +9,12 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import superdesk
+from superdesk.core import get_app_config
+from superdesk.flask import Blueprint
 from superdesk.upload import get_upload_as_data_uri
-from flask import current_app as app
 
 
-bp = superdesk.Blueprint("assets", __name__)
+bp = Blueprint("assets", __name__)
 
 
 @bp.route("/assets/<path:media_id>", methods=["GET"])
@@ -22,7 +23,7 @@ def prod_get_upload_as_data_uri(media_id):
 
 
 def upload_url(media_id, view=prod_get_upload_as_data_uri):
-    return "{}/{}".format(app.config.get("MEDIA_PREFIX").rstrip("/"), media_id)
+    return "{}/{}".format(get_app_config("MEDIA_PREFIX").rstrip("/"), media_id)
 
 
 def init_app(app) -> None:
diff --git a/prod_api/auth.py b/prod_api/auth.py
index 3eb14c2a9b..98b011ff48 100644
--- a/prod_api/auth.py
+++ b/prod_api/auth.py
@@ -1,10 +1,10 @@
 from time import time
 from authlib.jose import jwt
 from authlib.jose.errors import BadSignatureError, ExpiredTokenError, DecodeError
-from flask import abort, make_response, jsonify
-from flask import current_app as app
 from eve.auth import TokenAuth
 
+from superdesk.core import get_app_config
+from superdesk.flask import abort, make_response, jsonify
 from superdesk import get_resource_privileges
 
 
@@ -24,12 +24,12 @@ def check_auth(self, token, allowed_roles, resource, method):
         :param method: HTTP method being executed (POST, GET, etc.)
         """
 
-        if not app.config.get("AUTH_SERVER_SHARED_SECRET"):
+        if not get_app_config("AUTH_SERVER_SHARED_SECRET"):
             return False
 
         # decode jwt
         try:
-            decoded_jwt = jwt.decode(token, key=app.config.get("AUTH_SERVER_SHARED_SECRET"))
+            decoded_jwt = jwt.decode(token, key=get_app_config("AUTH_SERVER_SHARED_SECRET"))
             decoded_jwt.validate_exp(now=time(), leeway=0)
         except (BadSignatureError, ExpiredTokenError, DecodeError):
             return False
diff --git a/prod_api/conftest.py b/prod_api/conftest.py
index 35e06657d6..975f762807 100644
--- a/prod_api/conftest.py
+++ b/prod_api/conftest.py
@@ -3,9 +3,9 @@
 import pytest
 from pathlib import Path
 from bson import ObjectId
-from flask import url_for
 from requests.auth import _basic_auth_str
 
+from superdesk.flask import url_for
 from superdesk.tests import get_mongo_uri, setup, clean_dbs
 from superdesk.factory import get_app as get_sd_app
 from superdesk.auth_server.clients import RegisterClient
diff --git a/prod_api/service.py b/prod_api/service.py
index 2bf6d775f6..2ec5e1861d 100644
--- a/prod_api/service.py
+++ b/prod_api/service.py
@@ -11,7 +11,7 @@
 import operator
 from functools import reduce
 import superdesk
-from flask import current_app as app
+from superdesk.core import get_current_app
 
 
 class ProdApiService(superdesk.Service):
@@ -77,6 +77,7 @@ def _process_item_renditions(self, doc):
         """
 
         def _process(item):
+            app = get_current_app()
             for _k, v in item.get("renditions", {}).items():
                 if v and "media" in v:
                     media = v.pop("media")
diff --git a/prod_api/tests/test_auth.py b/prod_api/tests/test_auth.py
index 2feb06cc1d..bd1bfc3481 100644
--- a/prod_api/tests/test_auth.py
+++ b/prod_api/tests/test_auth.py
@@ -2,8 +2,8 @@
 import time
 import pytest
 from requests.auth import _basic_auth_str
-from flask import url_for
 
+from superdesk.flask import url_for
 from superdesk.auth_server.scopes import Scope
 
 from ..conftest import get_test_prodapi_app
diff --git a/prod_api/tests/test_contacts.py b/prod_api/tests/test_contacts.py
index 2bdcd4dd08..8c9399aec6 100644
--- a/prod_api/tests/test_contacts.py
+++ b/prod_api/tests/test_contacts.py
@@ -1,6 +1,6 @@
 import json
-from flask import url_for
 
+from superdesk.flask import url_for
 from superdesk import get_resource_service
 
 
diff --git a/prod_api/tests/test_desks.py b/prod_api/tests/test_desks.py
index 1b9bbcad87..b28636d07a 100644
--- a/prod_api/tests/test_desks.py
+++ b/prod_api/tests/test_desks.py
@@ -1,6 +1,6 @@
 import json
-from flask import url_for
 
+from superdesk.flask import url_for
 from superdesk import get_resource_service
 
 
diff --git a/prod_api/tests/test_items.py b/prod_api/tests/test_items.py
index a093c14394..7526b9b577 100644
--- a/prod_api/tests/test_items.py
+++ b/prod_api/tests/test_items.py
@@ -1,6 +1,6 @@
 import json
-from flask import url_for
 
+from superdesk.flask import url_for
 from superdesk import get_resource_service
 from apps.archive.common import ARCHIVE
 
diff --git a/prod_api/tests/test_users.py b/prod_api/tests/test_users.py
index 225253a24d..3947e90539 100644
--- a/prod_api/tests/test_users.py
+++ b/prod_api/tests/test_users.py
@@ -1,6 +1,6 @@
 import json
-from flask import url_for
 
+from superdesk.flask import url_for
 from superdesk import get_resource_service
 
 
diff --git a/superdesk/__init__.py b/superdesk/__init__.py
index f1e0e26f0a..83da19707f 100644
--- a/superdesk/__init__.py
+++ b/superdesk/__init__.py
@@ -16,15 +16,13 @@
 import logging as logging_lib
 
 from typing import Any, Dict, List, NamedTuple, Optional
-from flask import Flask, abort, json, Blueprint, current_app
-from flask.cli import ScriptInfo, with_appcontext
 from flask_babel.speaklater import LazyString
 
 # from flask_script import Command as BaseCommand, Option
-from eve.utils import config  # noqa
 from eve.methods.common import document_link  # noqa
 from werkzeug.exceptions import HTTPException
 
+from .core import get_app_config, json
 from .eve_backend import EveBackend
 from .datalayer import SuperdeskDataLayer  # noqa
 from .services import BaseService as Service  # noqa
@@ -132,10 +130,11 @@ def get_headers(self, environ=None):
 
     todo(petr): put in in custom flask error handler instead
     """
+
     return [
         ("Content-Type", "text/html"),
-        ("Access-Control-Allow-Origin", current_app.config["CLIENT_URL"]),
-        ("Access-Control-Allow-Headers", ",".join(current_app.config["X_HEADERS"])),
+        ("Access-Control-Allow-Origin", get_app_config("CLIENT_URL")),
+        ("Access-Control-Allow-Headers", ",".join(get_app_config("X_HEADERS"))),
         ("Access-Control-Allow-Credentials", "true"),
         ("Access-Control-Allow-Methods", "*"),
     ]
diff --git a/superdesk/activity.py b/superdesk/activity.py
index e0b1166952..5034ba1d7f 100644
--- a/superdesk/activity.py
+++ b/superdesk/activity.py
@@ -12,11 +12,11 @@
 import logging
 
 from bson.objectid import ObjectId
-from flask import g
 from flask_babel import _, lazy_gettext
 
 import superdesk
 from superdesk import get_resource_service
+from superdesk.core import get_current_app
 from superdesk.emails import send_activity_emails
 from superdesk.errors import SuperdeskApiError, add_notifier
 from superdesk.notification import push_notification
@@ -97,7 +97,7 @@ def get(self, req, lookup):
         """Filter out personal activity on personal items if inquired by another user."""
         if req is None:
             req = ParsedRequest()
-        user = getattr(g, "user", None)
+        user = get_current_app().get_current_user_dict()
         if not user:
             raise SuperdeskApiError.notFoundError("Can not determine user")
         where_cond = {}
@@ -120,7 +120,7 @@ def on_update(self, updates, original):
         :param original:
         :return:
         """
-        user = getattr(g, "user", None)
+        user = get_current_app().get_current_user_dict()
         if not user:
             raise SuperdeskApiError.notFoundError("Can not determine user")
         user_id = user.get("_id")
@@ -194,7 +194,7 @@ def add_activity(
 
     activity = {"name": activity_name, "message": msg, "data": data, "resource": resource}
 
-    user = getattr(g, "user", None)
+    user = get_current_app().get_current_user_dict()
     if user:
         activity["user"] = user.get("_id")
         activity["user_name"] = user.get("display_name", user.get("username"))
@@ -248,7 +248,7 @@ def notify_and_add_activity(
         recipients = get_recipients(user_list, activity_name, preference_notification_name)
 
         if activity_name != ACTIVITY_ERROR:
-            current_user = getattr(g, "user", None)
+            current_user = get_current_app().get_current_user_dict()
             activity = {
                 "name": activity_name,
                 "message": current_user.get("display_name") + " " + msg if current_user else msg,
diff --git a/superdesk/allowed_values.py b/superdesk/allowed_values.py
index 811d303856..83d9630893 100644
--- a/superdesk/allowed_values.py
+++ b/superdesk/allowed_values.py
@@ -10,8 +10,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import superdesk
+from superdesk.core import get_app_config
 from superdesk.utils import ListCursor
-from flask import current_app as app
 
 
 class AllowedValuesResource(superdesk.Resource):
@@ -29,7 +29,7 @@ class AllowedValuesResource(superdesk.Resource):
 class AllowedValuesService(superdesk.Service):
     def get(self, req, lookup):
         allowed = []
-        for resource, config in app.config.get("DOMAIN", {}).items():
+        for resource, config in get_app_config("DOMAIN", {}).items():
             for field, field_config in config.get("schema", {}).items():
                 if field_config.get("allowed"):
                     allowed.append(
diff --git a/superdesk/attachments.py b/superdesk/attachments.py
index 736c9e209d..495304e300 100644
--- a/superdesk/attachments.py
+++ b/superdesk/attachments.py
@@ -3,8 +3,9 @@
 import superdesk
 from superdesk.logging import logger
 
-from flask import current_app, request
 from werkzeug.utils import secure_filename
+from superdesk.core import get_current_app
+from superdesk.flask import request
 from apps.auth import get_user_id
 
 
@@ -38,6 +39,7 @@ class AttachmentsResource(superdesk.Resource):
 
 class AttachmentsService(superdesk.Service):
     def on_create(self, docs):
+        current_app = get_current_app()
         for doc in docs:
             doc["user"] = get_user_id()
 
@@ -53,7 +55,7 @@ def on_create(self, docs):
                 doc.setdefault("length", getattr(media, "length"))
 
     def on_deleted(self, doc):
-        current_app.media.delete(doc["media"], RESOURCE)
+        get_current_app().media.delete(doc["media"], RESOURCE)
 
 
 def is_attachment_public(attachment):
@@ -90,7 +92,7 @@ def get_attachment_public_url(attachment: Dict[str, Any]) -> Optional[str]:
 
         return None
 
-    return current_app.media.url_for_external(attachment["media"], RESOURCE)
+    return get_current_app().media.url_for_external(attachment["media"], RESOURCE)
 
 
 def init_app(app) -> None:
diff --git a/superdesk/audit/audit.py b/superdesk/audit/audit.py
index 72c3e41d16..816e323f59 100644
--- a/superdesk/audit/audit.py
+++ b/superdesk/audit/audit.py
@@ -9,7 +9,7 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import logging
-from flask import g
+from superdesk.flask import g
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 
diff --git a/superdesk/audit/commands.py b/superdesk/audit/commands.py
index 4545acc3bb..a549305bbd 100644
--- a/superdesk/audit/commands.py
+++ b/superdesk/audit/commands.py
@@ -8,15 +8,17 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import superdesk
 import logging
 import datetime
-from superdesk.utc import utcnow
-from eve.utils import date_to_str, ParsedRequest, config
+from eve.utils import ParsedRequest
 from copy import deepcopy
 from bson import ObjectId
 from time import time
 
+import superdesk
+from superdesk.core import get_app_config
+from superdesk.utc import utcnow
+
 logger = logging.getLogger(__name__)
 
 
@@ -68,10 +70,10 @@ def run(self, expiry=None):
         if expiry is not None:
             self.expiry = utcnow() - datetime.timedelta(minutes=int(expiry))
         else:
-            if config.AUDIT_EXPIRY_MINUTES == 0:
+            if get_app_config("AUDIT_EXPIRY_MINUTES") == 0:
                 logger.info("Audit purge is not enabled")
                 return
-            self.expiry = utcnow() - datetime.timedelta(minutes=config.AUDIT_EXPIRY_MINUTES)
+            self.expiry = utcnow() - datetime.timedelta(minutes=get_app_config("AUDIT_EXPIRY_MINUTES"))
         logger.info("Starting audit purge for items older than {}".format(self.expiry))
         # self.purge_orphaned_item_audits()
         self.purge_old_entries()
diff --git a/superdesk/auth/__init__.py b/superdesk/auth/__init__.py
index a0e09131de..5745be43e5 100644
--- a/superdesk/auth/__init__.py
+++ b/superdesk/auth/__init__.py
@@ -21,8 +21,8 @@
 import logging
 import superdesk
 
-from flask import render_template, current_app as app
-
+from superdesk.core import get_app_config
+from superdesk.flask import render_template
 from apps.auth.auth import AuthResource
 from apps.auth.service import AuthService
 from superdesk.validation import ValidationError
@@ -69,7 +69,7 @@ def auth_user(email, userdata=None):
             superdesk.get_resource_service("users").update_external_user(data[0]["user"], userdata)
         return render_template(AUTHORIZED_TEMPLATE, data=data[0])
     except ValueError:
-        if not app.config["USER_EXTERNAL_CREATE"] or not userdata:
+        if not get_app_config("USER_EXTERNAL_CREATE") or not userdata:
             return render_template(AUTHORIZED_TEMPLATE, data={"error": 404})
 
     # create new user using userdata
diff --git a/superdesk/auth/decorator.py b/superdesk/auth/decorator.py
index 1b46abe55d..8a5ffe14a7 100644
--- a/superdesk/auth/decorator.py
+++ b/superdesk/auth/decorator.py
@@ -1,7 +1,8 @@
 from typing import Optional
 from functools import wraps
-from flask import request
-from flask import current_app as app
+
+from superdesk.core import get_current_app
+from superdesk.flask import request
 
 
 def blueprint_auth(resource: Optional[str] = None):
@@ -12,7 +13,7 @@ def blueprint_auth(resource: Optional[str] = None):
     def fdec(f):
         @wraps(f)
         def decorated(*args, **kwargs):
-            auth = app.auth
+            auth = get_current_app().auth
             if not auth.authorized([], resource or "_blueprint", request.method):
                 return auth.authenticate()
             return f(*args, **kwargs)
diff --git a/superdesk/auth/oauth.py b/superdesk/auth/oauth.py
index b33f0d201f..41302ade6e 100644
--- a/superdesk/auth/oauth.py
+++ b/superdesk/auth/oauth.py
@@ -29,12 +29,13 @@
 from typing import Optional, List, Tuple
 from bson import ObjectId
 import superdesk
-from flask import url_for, render_template, current_app as app
 from flask_babel import lazy_gettext as l_
-from eve.utils import config
 from authlib.integrations.flask_client import OAuth
 from authlib.integrations.requests_client import OAuth2Session
 from authlib.oauth2.rfc6749.wrappers import OAuth2Token
+
+from superdesk.resource_fields import ID_FIELD
+from superdesk.flask import url_for, render_template, Blueprint
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.errors import SuperdeskApiError
@@ -44,7 +45,7 @@
 
 logger = logging.getLogger(__name__)
 
-bp = superdesk.Blueprint("oauth", __name__)
+bp = Blueprint("oauth", __name__)
 oauth: Optional[OAuth] = None
 TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
 REVOKE_ENDPOINT = "https://oauth2.googleapis.com/revoke"
@@ -200,7 +201,7 @@ def google_authorized():
                 logger.warning(f"No provider is corresponding to the id used with the token {token_id!r}")
             else:
                 ingest_providers_service.update(
-                    provider[config.ID_FIELD], updates={"config.email": user["email"]}, original=provider
+                    provider[ID_FIELD], updates={"config.email": user["email"]}, original=provider
                 )
 
             return render_template(AUTHORIZED_TEMPLATE, data={})
@@ -252,9 +253,9 @@ def revoke_google_token(token_id: ObjectId) -> None:
         raise SuperdeskApiError.proxyError(
             f"Can't revoke token {token_id} (HTTP status {resp.status_code}): {resp.text}"
         )
-    oauth2_token_service.delete({config.ID_FIELD: token_id})
+    oauth2_token_service.delete({ID_FIELD: token_id})
     ingest_providers_service = superdesk.get_resource_service("ingest_providers")
     provider = ingest_providers_service.find_one(req=None, _id=token_id)
     if provider is not None:
-        ingest_providers_service.update(provider[config.ID_FIELD], updates={"config.email": None}, original=provider)
+        ingest_providers_service.update(provider[ID_FIELD], updates={"config.email": None}, original=provider)
     logger.info(f"OAUTH token {token_id!r} has been revoked")
diff --git a/superdesk/auth/saml.py b/superdesk/auth/saml.py
index 37c3a87757..9b12334a33 100644
--- a/superdesk/auth/saml.py
+++ b/superdesk/auth/saml.py
@@ -36,7 +36,8 @@
 
 from urllib.parse import urlparse
 
-from flask import current_app as app, request, redirect, make_response, session, jsonify, json
+from superdesk.core import get_app_config
+from superdesk.flask import request, redirect, make_response, session, jsonify, Blueprint
 from superdesk.auth import auth_user
 
 try:
@@ -51,7 +52,7 @@
 SESSION_SESSION_ID = "samlSessionIndex"
 SESSION_USERDATA_KEY = "samlUserdata"
 
-bp = superdesk.Blueprint("saml", __name__)
+bp = Blueprint("saml", __name__)
 logger = logging.getLogger(__name__)
 
 
@@ -65,15 +66,16 @@ def init_app(app) -> None:
 
 
 def init_saml_auth(req):
-    auth = OneLogin_Saml2_Auth(req, custom_base_path=app.config["SAML_PATH"])
+    auth = OneLogin_Saml2_Auth(req, custom_base_path=get_app_config("SAML_PATH"))
     return auth
 
 
 def prepare_flask_request(request):
     url_data = urlparse(request.url)
     scheme = request.scheme
-    if app.config.get("SERVER_URL"):
-        scheme = urlparse(app.config["SERVER_URL"]).scheme or request.scheme
+    server_url = get_app_config("SERVER_URL")
+    if server_url:
+        scheme = urlparse(server_url).scheme or request.scheme
     return {
         "https": "on" if scheme == "https" else "off",
         "http_host": request.host,
diff --git a/superdesk/auth_server/oauth2.py b/superdesk/auth_server/oauth2.py
index e747eebed7..6bc97b198e 100644
--- a/superdesk/auth_server/oauth2.py
+++ b/superdesk/auth_server/oauth2.py
@@ -11,7 +11,7 @@
 from authlib.integrations.flask_oauth2 import AuthorizationServer
 from authlib.oauth2.rfc6749 import grants
 from authlib.jose import jwt
-import superdesk
+from superdesk.flask import Blueprint
 from .models import query_client, save_token
 
 
@@ -23,7 +23,7 @@
 )
 
 
-bp = superdesk.Blueprint("auth_server", __name__)
+bp = Blueprint("auth_server", __name__)
 
 TOKEN_ENDPOINT = "/auth_server/token"
 shared_secret = None
diff --git a/superdesk/backend_meta/backend_meta.py b/superdesk/backend_meta/backend_meta.py
index 69d42e2d84..57c72b3064 100644
--- a/superdesk/backend_meta/backend_meta.py
+++ b/superdesk/backend_meta/backend_meta.py
@@ -6,15 +6,16 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from typing import Optional, Dict, cast
 
 import os.path
 import re
 import json
 from pathlib import Path
-from typing import Optional, Dict
+
+from superdesk.core import get_app_config
 from superdesk.resource import Resource
 from superdesk.services import BaseService
-from superdesk import config
 
 try:
     import settings  # type: ignore
@@ -65,11 +66,8 @@ def get_commit_href(package: str, revision: str) -> str:
 
         if config.REPO_OVERRIDE is set, it will be used
         """
-        try:
-            repo_override = config.REPO_OVERRIDE
-        except AttributeError:
-            # config may not be initialised (during tests or beginning of the session)
-            repo_override = {}
+
+        repo_override = cast(Dict[str, str], get_app_config("REPO_OVERRIDE", {}))
         return GITHUB_COMMIT_HREF.format(package=repo_override.get(package, package), revision=revision)
 
     @classmethod
diff --git a/superdesk/cache.py b/superdesk/cache.py
index 2c1f8c4a02..523951a4af 100644
--- a/superdesk/cache.py
+++ b/superdesk/cache.py
@@ -6,7 +6,6 @@
 
 from urllib.parse import urlparse
 
-from flask import current_app
 from superdesk import json_utils
 from superdesk.logging import logger
 
@@ -74,6 +73,9 @@ def init_app(self, app):
 
     @property
     def _backend(self):
+        from superdesk.core import get_current_app
+
+        current_app = get_current_app().as_any()
         if not current_app:
             raise RuntimeError("You can only use cache within app context.")
         self.init_app(current_app)
diff --git a/superdesk/celery_app.py b/superdesk/celery_app.py
index 5696bda9ca..9f8d5ce2b3 100644
--- a/superdesk/celery_app.py
+++ b/superdesk/celery_app.py
@@ -23,7 +23,7 @@
 from kombu.serialization import register
 from eve.io.mongo import MongoJSONEncoder
 from eve.utils import str_to_date
-from flask import json, current_app as app
+from superdesk.core import json, get_current_app, get_app_config
 from superdesk.errors import SuperdeskError
 from superdesk.logging import logger
 
@@ -48,13 +48,13 @@ def try_cast(v):
 
 
 def dumps(o):
-    with superdesk.app.app_context():
+    with get_current_app().app_context():
         return MongoJSONEncoder().encode(o)
 
 
 def loads(s):
     o = json.loads(s)
-    with superdesk.app.app_context():
+    with get_current_app().app_context():
         return serialize(o)
 
 
@@ -86,14 +86,14 @@ class AppContextTask(TaskBase):  # type: ignore
     )
 
     def __call__(self, *args, **kwargs):
-        with superdesk.app.app_context():
+        with get_current_app().app_context():
             try:
                 return super().__call__(*args, **kwargs)
             except self.app_errors as e:
                 handle_exception(e)
 
     def on_failure(self, exc, task_id, args, kwargs, einfo):
-        with superdesk.app.app_context():
+        with get_current_app().app_context():
             handle_exception(exc)
 
 
@@ -138,7 +138,7 @@ def __get_redis(app_ctx):
 
 def update_key(key, flag=False, db=None):
     if db is None:
-        db = app.redis
+        db = get_current_app().redis
 
     if flag:
         crt_value = db.incr(key)
@@ -154,7 +154,7 @@ def update_key(key, flag=False, db=None):
 
 
 def _update_subtask_progress(task_id, current=None, total=None, done=None):
-    redis_db = redis.from_url(app.config["REDIS_URL"])
+    redis_db = redis.from_url(get_app_config("REDIS_URL"))
     try:
         current_key = "current_%s" % task_id
         total_key = "total_%s" % task_id
diff --git a/superdesk/commands/clean_images.py b/superdesk/commands/clean_images.py
index 5dce5df137..2b877bda86 100644
--- a/superdesk/commands/clean_images.py
+++ b/superdesk/commands/clean_images.py
@@ -11,7 +11,7 @@
 
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app
 from superdesk.metadata.item import ASSOCIATIONS
 
 
@@ -47,7 +47,7 @@ def run(self):
         upload_items = superdesk.get_resource_service("upload").get_from_mongo(req=None, lookup={})
         self.__add_existing_files(used_images, upload_items)
 
-        if app.config.get("LEGAL_ARCHIVE"):
+        if get_app_config("LEGAL_ARCHIVE"):
             legal_archive_items = superdesk.get_resource_service("legal_archive").get_from_mongo(None, query)
             self.__add_existing_files(used_images, legal_archive_items)
 
@@ -57,8 +57,7 @@ def run(self):
             self.__add_existing_files(used_images, legal_archive_version_items)
 
         print("Number of used files: ", len(used_images))
-
-        app.media.remove_unreferenced_files(used_images)
+        get_current_app().media.remove_unreferenced_files(used_images)
 
     def __add_existing_files(self, used_images, items):
         for item in items:
diff --git a/superdesk/commands/data_manipulation.py b/superdesk/commands/data_manipulation.py
index 194c0f6d05..fefb34466a 100644
--- a/superdesk/commands/data_manipulation.py
+++ b/superdesk/commands/data_manipulation.py
@@ -20,7 +20,6 @@
 import pymongo.database
 
 from multiprocessing import Process, Lock
-from flask import current_app as app
 import multiprocessing.synchronize
 from contextlib import contextmanager
 from datetime import datetime
@@ -31,6 +30,7 @@
 from bson.json_util import dumps, loads
 from pymongo.errors import OperationFailure
 import superdesk
+from superdesk.core import get_current_app
 from superdesk.timer import timer
 from superdesk.resource import Resource
 from superdesk.services import BaseService
@@ -175,7 +175,7 @@ def parse_dump_file(
     :return: metadata
     """
     if db is None:
-        db = app.data.pymongo().db
+        db = get_current_app().data.pymongo().db
     # we use a state machine to parse JSON progressively, and avoid memory issue for huge databases
     if single_file:
         collection_name = None
@@ -473,7 +473,7 @@ def run(self, dump_path: Union[Path, str], keep_existing: bool = False, no_flush
         if keep_existing is False:
             for db in get_dbs():
                 db.client.drop_database(db)
-            app.init_indexes()
+            get_current_app().init_indexes()
         if archive_path.is_file():
             self.restore_file(archive_path)
         elif archive_path.is_dir():
@@ -577,7 +577,7 @@ def run(
         dest_dir_p.mkdir(parents=True, exist_ok=True)
         dest_path = (dest_dir_p / name).with_suffix(".json.bz2")
         applied_updates = list(data_updates.get_applied_updates())
-        pymongo = app.data.pymongo()
+        pymongo = get_current_app().data.pymongo()
         db = pymongo.db
         version = tuple(int(v) for v in pymongo.cx.server_info()["version"].split("."))
         if version < (4, 0):
@@ -658,7 +658,7 @@ class StorageRestoreRecord(superdesk.Command):
 
     def run(self, record_file: Union[Path, str], force_db_reset: bool = False, skip_base_dump: bool = False) -> None:
         file_path = get_dest_path(record_file, dump=False)
-        db = app.data.pymongo().db
+        db = get_current_app().data.pymongo().db
         with open_dump(file_path) as f:
             record_data = loads(f.read())
             metadata = record_data["metadata"]
@@ -975,4 +975,5 @@ def create(self, docs, **kwargs):
 
 
 def get_dbs():
+    app = get_current_app()
     return [app.data.pymongo(prefix=prefix).db for prefix in [None, "ARCHIVED", "LEGAL_ARCHIVE", "CONTENTAPI_MONGO"]]
diff --git a/superdesk/commands/data_updates.py b/superdesk/commands/data_updates.py
index edd1983338..3bd08d38b6 100644
--- a/superdesk/commands/data_updates.py
+++ b/superdesk/commands/data_updates.py
@@ -11,7 +11,7 @@
 
 from string import Template
 from types import ModuleType
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app
 from superdesk.services import BaseService
 import superdesk
 import getpass
@@ -59,9 +59,9 @@ def backwards(self, mongodb_collection, mongodb_database):
 def get_dirs(only_relative_folder=False):
     dirs = []
     try:
-        dirs.append(app.config.get("DATA_UPDATES_PATH", DEFAULT_DATA_UPDATE_DIR_NAME))
-        if app.config.get("APPS_DATA_UPDATES_PATHS"):
-            dirs.extend(app.config["APPS_DATA_UPDATES_PATHS"])
+        dirs.append(get_app_config("DATA_UPDATES_PATH", DEFAULT_DATA_UPDATE_DIR_NAME))
+        if get_app_config("APPS_DATA_UPDATES_PATHS"):
+            dirs.extend(get_app_config("APPS_DATA_UPDATES_PATHS"))
     except RuntimeError:
         # working outside of application context
         pass
@@ -322,6 +322,7 @@ def run(self, resource_name, global_update=False):
 class BaseDataUpdate:
     def apply(self, direction):
         assert direction in ["forwards", "backwards"]
+        app = get_current_app()
         collection = app.data.get_mongo_collection(self.resource)
         db = app.data.driver.db
         getattr(self, direction)(collection, db)
diff --git a/superdesk/commands/delete_archived_document.py b/superdesk/commands/delete_archived_document.py
index 3885c22a1a..1603b8b3fe 100644
--- a/superdesk/commands/delete_archived_document.py
+++ b/superdesk/commands/delete_archived_document.py
@@ -8,9 +8,11 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import superdesk
 import json
-from eve.utils import ParsedRequest, config
+from eve.utils import ParsedRequest
+
+import superdesk
+from superdesk.resource_fields import ID_FIELD
 
 
 class DeleteArchivedDocumentCommand(superdesk.Command):
@@ -63,14 +65,14 @@ def delete(self, items):
 
         archived_service = superdesk.get_resource_service("archived")
         for item in items:
-            articles_to_kill = archived_service.find_articles_to_kill({"_id": item[config.ID_FIELD]}, False)
+            articles_to_kill = archived_service.find_articles_to_kill({"_id": item[ID_FIELD]}, False)
 
             if not articles_to_kill:
                 continue
 
             for article in articles_to_kill:
-                archived_service.command_delete({"_id": article[config.ID_FIELD]})
-                print("Deleted item {} ".format(article[config.ID_FIELD]))
+                archived_service.command_delete({"_id": article[ID_FIELD]})
+                print("Deleted item {} ".format(article[ID_FIELD]))
 
     def run(self, ids):
         if ids and len(ids) > 0:
diff --git a/superdesk/commands/flush_elastic_index.py b/superdesk/commands/flush_elastic_index.py
index ae1d0ceb4e..3a06d7cb2e 100644
--- a/superdesk/commands/flush_elastic_index.py
+++ b/superdesk/commands/flush_elastic_index.py
@@ -9,8 +9,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 from elasticsearch import exceptions as es_exceptions
-from flask import current_app as app
 from eve_elastic import get_es
+from superdesk.core import get_app_config, get_current_app
 import superdesk
 from content_api import ELASTIC_PREFIX as CAPI_ELASTIC_PREFIX
 
@@ -45,12 +45,12 @@ def run(self, sd_index, capi_index):
         if not (sd_index or capi_index):
             raise SystemExit("You must specify at least one elastic index to flush. " "Options: `--sd`, `--capi`")
 
-        self._es = get_es(app.config["ELASTICSEARCH_URL"])
+        self._es = get_es(get_app_config("ELASTICSEARCH_URL"))
 
         if sd_index:
-            self._delete_elastic(app.config["ELASTICSEARCH_INDEX"])
+            self._delete_elastic(get_app_config("ELASTICSEARCH_INDEX"))
         if capi_index:
-            self._delete_elastic(app.config["CONTENTAPI_ELASTICSEARCH_INDEX"])
+            self._delete_elastic(get_app_config("CONTENTAPI_ELASTICSEARCH_INDEX"))
 
         self._index_from_mongo(sd_index, capi_index)
 
@@ -64,6 +64,7 @@ def _delete_elastic(self, index_prefix):
         indices = list(self._es.indices.get_alias("{}_*".format(index_prefix)).keys())
         print(f"Configured indices with prefix '{index_prefix}': " + ", ".join(indices))
 
+        app = get_current_app()
         for es_resource in app.data.get_elastic_resources():
             alias = app.data.elastic._resource_index(es_resource)
             print(f"- Attempting to delete alias {alias}")
@@ -92,6 +93,7 @@ def _index_from_mongo(self, sd_index, capi_index):
         :param bool capi_index:nFlag to index content api elastic index.
         """
         # get all es resources
+        app = get_current_app()
         app.data.init_elastic(app)
         resources = app.data.get_elastic_resources()
 
diff --git a/superdesk/commands/index_from_mongo.py b/superdesk/commands/index_from_mongo.py
index 5a331178a4..82ead6492d 100644
--- a/superdesk/commands/index_from_mongo.py
+++ b/superdesk/commands/index_from_mongo.py
@@ -12,12 +12,11 @@
 import pymongo
 import superdesk
 
-from flask import current_app as app
+from superdesk.resource_fields import ID_FIELD
 from superdesk.errors import BulkIndexError
-from superdesk import config
 from bson.objectid import ObjectId
 
-from superdesk.core.app import get_current_async_app
+from superdesk.core import get_current_async_app
 
 
 class IndexFromMongo(superdesk.Command):
@@ -48,10 +47,12 @@ def run(self, collection_name, all_collections, page_size, last_id, string_id):
         if not collection_name and not all_collections:
             raise SystemExit("Specify --all to index from all collections")
         elif all_collections:
+            async_app = get_current_async_app()
+            app = async_app.wsgi
             app.data.init_elastic(app)
             resources = app.data.get_elastic_resources()
             resources_processed = []
-            for resource_config in get_current_async_app().resources.get_all_configs():
+            for resource_config in async_app.resources.get_all_configs():
                 if resource_config.elastic is None:
                     continue
                 self.copy_resource(resource_config.name, page_size)
@@ -68,7 +69,7 @@ def run(self, collection_name, all_collections, page_size, last_id, string_id):
 
     @classmethod
     def copy_resource(cls, resource, page_size, last_id=None, string_id=False):
-        new_app = get_current_async_app()
+        async_app = get_current_async_app()
         for items in cls.get_mongo_items(resource, page_size, last_id, string_id):
             print("{} Inserting {} items".format(time.strftime("%X %x %Z"), len(items)))
             s = time.time()
@@ -77,8 +78,9 @@ def copy_resource(cls, resource, page_size, last_id=None, string_id=False):
             for i in range(1, 4):
                 try:
                     try:
-                        success, failed = new_app.elastic.get_client(resource).bulk_insert(items)
+                        success, failed = async_app.elastic.get_client(resource).bulk_insert(items)
                     except KeyError:
+                        app = async_app.wsgi
                         success, failed = app.data._search_backend(resource).bulk_insert(resource, items)
                 except Exception as ex:
                     print("Exception thrown on insert to elastic {}", ex)
@@ -105,13 +107,15 @@ def get_mongo_items(cls, mongo_collection_name, page_size, last_id, string_id):
         """
         bucket_size = int(page_size) if page_size else cls.default_page_size
         print("Indexing data from mongo/{} to elastic/{}".format(mongo_collection_name, mongo_collection_name))
+        async_app = get_current_async_app()
 
         try:
-            db = get_current_async_app().mongo.get_collection(mongo_collection_name)
+            db = async_app.mongo.get_collection(mongo_collection_name)
         except KeyError:
+            app = async_app.wsgi
             db = app.data.get_mongo_collection(mongo_collection_name)
 
-        args = {"limit": bucket_size, "sort": [(config.ID_FIELD, pymongo.ASCENDING)]}
+        args = {"limit": bucket_size, "sort": [(ID_FIELD, pymongo.ASCENDING)]}
 
         while True:
             if last_id:
@@ -120,14 +124,14 @@ def get_mongo_items(cls, mongo_collection_name, page_size, last_id, string_id):
                         last_id = ObjectId(last_id)
                     except Exception:
                         pass
-                args.update({"filter": {config.ID_FIELD: {"$gt": last_id}}})
+                args.update({"filter": {ID_FIELD: {"$gt": last_id}}})
 
             cursor = db.find(**args)
             items = list(cursor)
             if not len(items):
                 print("Last id", mongo_collection_name, last_id)
                 break
-            last_id = items[-1][config.ID_FIELD]
+            last_id = items[-1][ID_FIELD]
             yield items
 
 
diff --git a/superdesk/commands/rebuild_elastic_index.py b/superdesk/commands/rebuild_elastic_index.py
index d9be0c46f7..e41805b49f 100644
--- a/superdesk/commands/rebuild_elastic_index.py
+++ b/superdesk/commands/rebuild_elastic_index.py
@@ -11,8 +11,7 @@
 
 import superdesk
 
-from flask import current_app as app
-from superdesk.core.app import get_current_async_app
+from superdesk.core import get_current_async_app
 
 
 class RebuildElasticIndex(superdesk.Command):
@@ -37,6 +36,8 @@ class RebuildElasticIndex(superdesk.Command):
 
     def run(self, resource_name=None, requests_per_second=1000):
         # if no index name is passed then use the configured one
+        async_app = get_current_async_app()
+        app = async_app.wsgi
         resources = list(app.data.elastic._get_elastic_resources().keys())
         if resource_name and resource_name in resources:
             resources = [resource_name]
@@ -44,11 +45,10 @@ def run(self, resource_name=None, requests_per_second=1000):
             raise ValueError("Resource {} is not configured".format(resource_name))
 
         resources_processed = []
-        new_app = get_current_async_app()
-        for config in new_app.resources.get_all_configs():
+        for config in async_app.resources.get_all_configs():
             if config.elastic is None:
                 continue
-            new_app.elastic.reindex(config.name, requests_per_second=requests_per_second)
+            async_app.elastic.reindex(config.name, requests_per_second=requests_per_second)
             resources_processed.append(config.name)
             print(f"Index {config.name} rebuilt successfully")
 
diff --git a/superdesk/commands/remove_exported_files.py b/superdesk/commands/remove_exported_files.py
index 00c92b3958..466b6761ab 100644
--- a/superdesk/commands/remove_exported_files.py
+++ b/superdesk/commands/remove_exported_files.py
@@ -12,7 +12,7 @@
 import superdesk
 from datetime import timedelta
 
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.celery_task_utils import get_lock_id
 from superdesk.lock import lock, unlock
 from superdesk.utc import utcnow
@@ -37,6 +37,7 @@ class RemoveExportedFiles(superdesk.Command):
     # option_list = [superdesk.Option("--expire-hours", "-e", dest="expire_hours", required=False, type=int)]
 
     def run(self, expire_hours=None):
+        app = get_current_app()
         if expire_hours:
             self.expire_hours = expire_hours
         elif "TEMP_FILE_EXPIRY_HOURS" in app.config:
@@ -61,11 +62,12 @@ def run(self, expire_hours=None):
 
     def _remove_exported_files(self, expire_at):
         logger.info("{} Beginning to remove exported files from storage".format(self.log_msg))
+        app = get_current_app()
         for file_id in self._get_file_ids(expire_at):
             app.media.delete(file_id)
 
     def _get_file_ids(self, expire_at):
-        files = app.media.find(folder="temp", upload_date={"$lte": expire_at})
+        files = get_current_app().media.find(folder="temp", upload_date={"$lte": expire_at})
         return [file["_id"] for file in files]
 
 
diff --git a/superdesk/commands/schema.py b/superdesk/commands/schema.py
index 5f09fbeaee..97aeaf5022 100644
--- a/superdesk/commands/schema.py
+++ b/superdesk/commands/schema.py
@@ -12,7 +12,7 @@
 
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app
 from superdesk.lock import lock, unlock
 from superdesk.commands.rebuild_elastic_index import RebuildElasticIndex
 
@@ -22,7 +22,7 @@
 
 def _get_version_db():
     """Get db used for storing version information."""
-    return app.data.mongo.pymongo().db["superdesk"]
+    return get_current_app().data.mongo.pymongo().db["superdesk"]
 
 
 def get_schema_version():
@@ -70,7 +70,7 @@ def run(self):
 
         try:
             app_schema_version = get_schema_version()
-            superdesk_schema_version = app.config.get("SCHEMA_VERSION", superdesk.SCHEMA_VERSION)
+            superdesk_schema_version = get_app_config("SCHEMA_VERSION", superdesk.SCHEMA_VERSION)
             if app_schema_version < superdesk_schema_version:
                 print("Updating schema from version {} to {}.".format(app_schema_version, superdesk_schema_version))
                 update_schema()
diff --git a/superdesk/core/__init__.py b/superdesk/core/__init__.py
index e913f04944..001066c8af 100644
--- a/superdesk/core/__init__.py
+++ b/superdesk/core/__init__.py
@@ -7,3 +7,14 @@
 # For the full copyright and license information, please see the
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
+
+from flask import json
+from .app import get_app_config, get_current_app, get_current_async_app
+
+
+__all__ = [
+    "get_current_app",
+    "get_current_async_app",
+    "json",
+    "get_app_config",
+]
diff --git a/superdesk/core/app.py b/superdesk/core/app.py
index 438b14cd26..1bb293c50a 100644
--- a/superdesk/core/app.py
+++ b/superdesk/core/app.py
@@ -8,12 +8,26 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from typing import Dict, List, Optional
+from typing import Dict, List, Optional, Any, cast
 import importlib
 
 from .web import WSGIApp
 
 
+def get_app_config(key: str, default: Optional[Any] = None) -> Optional[Any]:
+    from flask import current_app
+
+    try:
+        return current_app.config.get(key, default)
+    except RuntimeError:
+        pass
+
+    if _global_app is None:
+        raise RuntimeError("Superdesk app is not running")
+
+    return _global_app.wsgi.config.get(key, default)
+
+
 class SuperdeskAsyncApp:
     _running: bool
     _imported_modules: Dict[str, "Module"]
@@ -36,6 +50,7 @@ def __init__(self, wsgi: WSGIApp):
         self.mongo = MongoResources(self)
         self.elastic = ElasticResources(self)
         self.resources = Resources(self)
+        self._store_app()
 
     @property
     def running(self) -> bool:
@@ -156,14 +171,23 @@ def _remove_app(self):
         _global_app = None
 
 
+def get_current_app() -> WSGIApp:
+    """Retrieve the current WSGI app instance"""
+
+    from flask import current_app
+
+    return cast(WSGIApp, current_app)
+
+
 def get_current_async_app() -> SuperdeskAsyncApp:
     """Retrieve the current app instance"""
 
     from flask import current_app
 
     try:
-        if current_app.async_app is not None:
-            return current_app.async_app
+        async_app = getattr(current_app, "async_app", None)
+        if async_app is not None:
+            return async_app
     except RuntimeError:
         # Flask context not available
         pass
diff --git a/superdesk/core/resources/service.py b/superdesk/core/resources/service.py
index a3099aed98..0fbf686426 100644
--- a/superdesk/core/resources/service.py
+++ b/superdesk/core/resources/service.py
@@ -30,8 +30,6 @@
 
 from superdesk.errors import SuperdeskApiError
 from superdesk.utc import utcnow
-from superdesk.metadata.item import GUID_NEWSML
-from superdesk.metadata.utils import generate_guid
 
 from ..app import SuperdeskAsyncApp, get_current_async_app
 from .fields import ObjectId as ObjectIdField
@@ -75,6 +73,9 @@ def id_uses_objectid(self) -> bool:
         return resource_uses_objectid_for_id(self.config.data_class)
 
     def generate_id(self) -> str | ObjectId:
+        from superdesk.metadata.item import GUID_NEWSML
+        from superdesk.metadata.utils import generate_guid
+
         return ObjectIdField() if self.id_uses_objectid() else generate_guid(type=GUID_NEWSML)
 
     @property
diff --git a/superdesk/core/resources/validators.py b/superdesk/core/resources/validators.py
index 9bb963c545..c9726c69ab 100644
--- a/superdesk/core/resources/validators.py
+++ b/superdesk/core/resources/validators.py
@@ -122,7 +122,7 @@ async def validate_resource_exists(item: ResourceModel, item_id: DataRelationVal
             if convert_to_objectid:
                 item_id = ObjectId(item_id)
 
-            from superdesk.core.app import get_current_async_app
+            from superdesk.core import get_current_async_app
 
             app = get_current_async_app()
             try:
@@ -157,7 +157,7 @@ async def validate_unique_value_in_resource(item: ResourceModel, name: UniqueVal
         if name is None:
             return
 
-        from superdesk.core.app import get_current_async_app
+        from superdesk.core import get_current_async_app
 
         app = get_current_async_app()
         resource_config = app.resources.get_config(resource_name)
@@ -181,7 +181,7 @@ async def validate_iunique_value_in_resource(item: ResourceModel, name: UniqueVa
         if name is None:
             return
 
-        from superdesk.core.app import get_current_async_app
+        from superdesk.core import get_current_async_app
 
         app = get_current_async_app()
         resource_config = app.resources.get_config(resource_name)
diff --git a/superdesk/core/web/types.py b/superdesk/core/web/types.py
index 16c0d4300b..32b4af72e5 100644
--- a/superdesk/core/web/types.py
+++ b/superdesk/core/web/types.py
@@ -283,6 +283,20 @@ def convert_to_endpoint(func: EndpointFunction):
     return convert_to_endpoint
 
 
+class NotificationClientProtocol(Protocol):
+    open: bool
+    messages: Sequence[str]
+
+    def close(self) -> None:
+        ...
+
+    def send(self, message: str) -> None:
+        ...
+
+    def reset(self) -> None:
+        ...
+
+
 class WSGIApp(Protocol):
     """Protocol for defining functionality from a WSGI application (such as Eve/Flask)
 
@@ -294,5 +308,68 @@ class WSGIApp(Protocol):
     #: Config for the application
     config: Dict[str, Any]
 
+    #: Config for the front-end application
+    client_config: Dict[str, Any]
+
+    testing: Optional[bool]
+
+    #: Interface to upload/download/query media
+    media: Any
+
+    mail: Any
+
+    data: Any
+
+    storage: Any
+
+    auth: Any
+
+    subjects: Any
+
+    notification_client: NotificationClientProtocol
+
+    locators: Any
+
+    celery: Any
+
+    redis: Any
+
+    jinja_loader: Any
+
+    jinja_env: Any
+
+    extensions: Dict[str, Any]
+
     def register_endpoint(self, endpoint: Endpoint | EndpointGroup):
         ...
+
+    def register_resource(self, name: str, settings: Dict[str, Any]):
+        ...
+
+    def upload_url(self, media_id: str) -> str:
+        ...
+
+    def download_url(self, media_id: str) -> str:
+        ...
+
+    # TODO: provide a proper return type here (a context manager)
+    def app_context(self):
+        ...
+
+    def get_current_user_dict(self) -> Optional[Dict[str, Any]]:
+        ...
+
+    def response_class(self, *args, **kwargs) -> Any:
+        ...
+
+    def validator(self, *args, **kwargs) -> Any:
+        ...
+
+    def init_indexes(self, ignore_duplicate_keys: bool = False) -> None:
+        ...
+
+    def as_any(self) -> Any:
+        ...
+
+    # TODO: Change how we use events on the app
+    # def on_role_privileges_updated(self, role: Any, role_users: Any) -> None: ...
diff --git a/superdesk/data_updates/00001_20160722-111630_users.py b/superdesk/data_updates/00001_20160722-111630_users.py
index 9aa874dfa0..03e50306d8 100644
--- a/superdesk/data_updates/00001_20160722-111630_users.py
+++ b/superdesk/data_updates/00001_20160722-111630_users.py
@@ -9,7 +9,7 @@
 
 from superdesk.commands.data_updates import BaseDataUpdate
 from superdesk import get_resource_service
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -22,8 +22,8 @@ class DataUpdate(BaseDataUpdate):
 
     def forwards(self, mongodb_collection, mongodb_database):
         for user in mongodb_collection.find({}):
-            stages = get_resource_service(self.resource).get_invisible_stages_ids(user.get(config.ID_FIELD))
-            print(mongodb_collection.update({"_id": user.get(config.ID_FIELD)}, {"$set": {"invisible_stages": stages}}))
+            stages = get_resource_service(self.resource).get_invisible_stages_ids(user.get(ID_FIELD))
+            print(mongodb_collection.update({"_id": user.get(ID_FIELD)}, {"$set": {"invisible_stages": stages}}))
 
     def backwards(self, mongodb_collection, mongodb_database):
         print(mongodb_collection.update({}, {"$unset": {"invisible_stages": []}}, upsert=False, multi=True))
diff --git a/superdesk/data_updates/00003_20170814-114652_audit.py b/superdesk/data_updates/00003_20170814-114652_audit.py
index 5f02193651..dcd207cf89 100644
--- a/superdesk/data_updates/00003_20170814-114652_audit.py
+++ b/superdesk/data_updates/00003_20170814-114652_audit.py
@@ -7,11 +7,12 @@
 # Author  : superdesk
 # Creation: 2017-08-14 11:47
 
+from superdesk.core import get_current_app
 from superdesk.commands.data_updates import BaseDataUpdate
 from superdesk import get_resource_service
-from eve.utils import config, app
 from superdesk.factory.app import create_index
 from superdesk.audit.commands import PurgeAudit
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -20,10 +21,10 @@ class DataUpdate(BaseDataUpdate):
     def forwards(self, mongodb_collection, mongodb_database):
         for audit in mongodb_collection.find({"resource": {"$in": PurgeAudit.item_resources}}):
             audit_id = get_resource_service(self.resource)._extract_doc_id(audit.get("extra"))
-            print(mongodb_collection.update({"_id": audit.get(config.ID_FIELD)}, {"$set": {"audit_id": audit_id}}))
+            print(mongodb_collection.update({"_id": audit.get(ID_FIELD)}, {"$set": {"audit_id": audit_id}}))
         try:
             create_index(
-                app=app,
+                app=get_current_app(),
                 resource=self.resource,
                 name="audit_id",
                 list_of_keys=[("audit_id", 1)],
diff --git a/superdesk/data_updates/00006_20171124-195408_content_types.py b/superdesk/data_updates/00006_20171124-195408_content_types.py
index bd5ba2ec76..85c1ac5da5 100644
--- a/superdesk/data_updates/00006_20171124-195408_content_types.py
+++ b/superdesk/data_updates/00006_20171124-195408_content_types.py
@@ -9,8 +9,7 @@
 
 from copy import deepcopy
 from superdesk.commands.data_updates import BaseDataUpdate
-
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -51,5 +50,5 @@ def _process_content_type(self, mongodb_collection, replace_values):
             if original_editor != content_type["editor"]:
                 print("update editor in content type", content_type["label"])
                 mongodb_collection.update(
-                    {"_id": content_type.get(config.ID_FIELD)}, {"$set": {"editor": content_type["editor"]}}
+                    {"_id": content_type.get(ID_FIELD)}, {"$set": {"editor": content_type["editor"]}}
                 )
diff --git a/superdesk/data_updates/00009_20180425-010702_vocabularies.py b/superdesk/data_updates/00009_20180425-010702_vocabularies.py
index 92816e441a..a0dd906fbc 100644
--- a/superdesk/data_updates/00009_20180425-010702_vocabularies.py
+++ b/superdesk/data_updates/00009_20180425-010702_vocabularies.py
@@ -7,7 +7,7 @@
 # Author  : mugur
 # Creation: 2018-04-25 01:07
 
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.commands.data_updates import BaseDataUpdate
 
 
@@ -22,7 +22,7 @@ def forwards(self, mongodb_collection, mongodb_database):
                 for field in self.update_fields:
                     if field in vocabulary["schema"] and isinstance(vocabulary["schema"], dict):
                         schema[field]["required"] = True
-                mongodb_collection.update({"_id": vocabulary.get(config.ID_FIELD)}, {"$set": {"schema": schema}})
+                mongodb_collection.update({"_id": vocabulary.get(ID_FIELD)}, {"$set": {"schema": schema}})
 
     def backwards(self, mongodb_collection, mongodb_database):
         pass
diff --git a/superdesk/data_updates/00012_20180605-151019_vocabularies.py b/superdesk/data_updates/00012_20180605-151019_vocabularies.py
index d49c14d977..ebd7476d71 100644
--- a/superdesk/data_updates/00012_20180605-151019_vocabularies.py
+++ b/superdesk/data_updates/00012_20180605-151019_vocabularies.py
@@ -7,7 +7,7 @@
 # Author  : mugur
 # Creation: 2018-06-05 15:10
 
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.commands.data_updates import BaseDataUpdate
 
 
@@ -20,7 +20,7 @@ def forwards(self, mongodb_collection, mongodb_database):
             qcode = schema.get("qcode", {})
             qcode["type"] = "integer"
             schema["qcode"] = qcode
-            print(mongodb_collection.update({"_id": vocabulary.get(config.ID_FIELD)}, {"$set": {"schema": schema}}))
+            print(mongodb_collection.update({"_id": vocabulary.get(ID_FIELD)}, {"$set": {"schema": schema}}))
 
     def backwards(self, mongodb_collection, mongodb_database):
         pass
diff --git a/superdesk/data_updates/00016_20181227-160331_archive.py b/superdesk/data_updates/00016_20181227-160331_archive.py
index 3e10c6bb92..f72670a4e3 100644
--- a/superdesk/data_updates/00016_20181227-160331_archive.py
+++ b/superdesk/data_updates/00016_20181227-160331_archive.py
@@ -7,8 +7,8 @@
 # Author  : Gyan
 # Creation: 2018-12-27 16:03
 
+from superdesk.resource_fields import ID_FIELD
 from superdesk.commands.data_updates import BaseDataUpdate
-from eve.utils import config
 
 
 # This script replaces the whole json of related item with respective _id only
@@ -25,7 +25,7 @@ def forwards(self, mongodb_collection, mongodb_database):
             for item_name, item_obj in item["associations"].items():
                 if item_obj and related_content:
                     if item_name.split("--")[0] in [content["_id"] for content in related_content]:
-                        related_item_id = item_obj[config.ID_FIELD]
+                        related_item_id = item_obj[ID_FIELD]
 
                         updates = {"$set": {}}
                         updates["$set"]["associations." + item_name] = {"_id": related_item_id}
diff --git a/superdesk/data_updates/00023_20200513-180314_content_types.py b/superdesk/data_updates/00023_20200513-180314_content_types.py
index 9ad5cd1784..018b9a1ac7 100644
--- a/superdesk/data_updates/00023_20200513-180314_content_types.py
+++ b/superdesk/data_updates/00023_20200513-180314_content_types.py
@@ -9,7 +9,7 @@
 
 from copy import deepcopy
 from superdesk.commands.data_updates import BaseDataUpdate
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -47,7 +47,7 @@ def forwards(self, mongodb_collection, mongodb_database):
             if original_schema != content_type["schema"]:
                 print("update schema in content type", content_type["label"])
                 mongodb_collection.update(
-                    {"_id": content_type.get(config.ID_FIELD)}, {"$set": {"schema": content_type["schema"]}}
+                    {"_id": content_type.get(ID_FIELD)}, {"$set": {"schema": content_type["schema"]}}
                 )
 
     def backwards(self, mongodb_collection, mongodb_database):
diff --git a/superdesk/data_updates/00024_20200909-142600_vocabularies.py b/superdesk/data_updates/00024_20200909-142600_vocabularies.py
index 590ce62116..f5e589da4a 100644
--- a/superdesk/data_updates/00024_20200909-142600_vocabularies.py
+++ b/superdesk/data_updates/00024_20200909-142600_vocabularies.py
@@ -8,7 +8,7 @@
 # Creation: 2020-09-09 14:08
 
 from superdesk.commands.data_updates import BaseDataUpdate
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -17,9 +17,7 @@ class DataUpdate(BaseDataUpdate):
     def forwards(self, mongodb_collection, mongodb_database):
         for vocabulary in mongodb_collection.find({"_id": "usageterms"}):
             if "schema_field" not in vocabulary:
-                mongodb_collection.update(
-                    {"_id": vocabulary.get(config.ID_FIELD)}, {"$set": {"schema_field": "usageterms"}}
-                )
+                mongodb_collection.update({"_id": vocabulary.get(ID_FIELD)}, {"$set": {"schema_field": "usageterms"}})
 
     def backwards(self, mongodb_collection, mongodb_database):
         pass
diff --git a/superdesk/data_updates/00028_20210211-020113_contacts.py b/superdesk/data_updates/00028_20210211-020113_contacts.py
index a6c27b6bd8..7803f4b989 100644
--- a/superdesk/data_updates/00028_20210211-020113_contacts.py
+++ b/superdesk/data_updates/00028_20210211-020113_contacts.py
@@ -9,7 +9,7 @@
 
 from superdesk import get_resource_service
 from superdesk.commands.data_updates import BaseDataUpdate
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -22,7 +22,7 @@ def forwards(self, mongodb_collection, mongodb_database):
                 country = [t for t in countries.get("items") if t.get("name") == document["country"]]
                 if country:
                     mongodb_collection.update(
-                        {"_id": document.get(config.ID_FIELD)},
+                        {"_id": document.get(ID_FIELD)},
                         {
                             "$set": {
                                 "country": {
diff --git a/superdesk/data_updates/00029_20210305-132352_contacts.py b/superdesk/data_updates/00029_20210305-132352_contacts.py
index 28d1492e51..87b5d981e7 100644
--- a/superdesk/data_updates/00029_20210305-132352_contacts.py
+++ b/superdesk/data_updates/00029_20210305-132352_contacts.py
@@ -9,7 +9,7 @@
 
 from superdesk import get_resource_service
 from superdesk.commands.data_updates import BaseDataUpdate
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -36,7 +36,7 @@ def forwards(self, mongodb_collection, mongodb_database):
                         }
                     }
                 mongodb_collection.update(
-                    {"_id": document.get(config.ID_FIELD)},
+                    {"_id": document.get(ID_FIELD)},
                     {"$set": contact_state},
                 )
 
diff --git a/superdesk/data_updates/00030_20231127-142300_content_types.py b/superdesk/data_updates/00030_20231127-142300_content_types.py
index 307adef53b..29532f9261 100644
--- a/superdesk/data_updates/00030_20231127-142300_content_types.py
+++ b/superdesk/data_updates/00030_20231127-142300_content_types.py
@@ -8,7 +8,7 @@
 # Creation: 2023-11-27 14:23
 
 from superdesk.commands.data_updates import BaseDataUpdate
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 
 
 class DataUpdate(BaseDataUpdate):
@@ -22,9 +22,7 @@ def forwards(self, mongodb_collection, mongodb_database):
                     if properties and "sdWidth" not in properties:
                         properties["sdWidth"] = "full"
 
-                mongodb_collection.update(
-                    {"_id": profile.get(config.ID_FIELD)}, {"$set": {"editor": profile["editor"]}}
-                )
+                mongodb_collection.update({"_id": profile.get(ID_FIELD)}, {"$set": {"editor": profile["editor"]}})
                 print(f"Content Profile {profile['_id']} updated successfully")
             except Exception as e:
                 print(f"Error updating Content Profile {profile['_id']}: {str(e)}")
diff --git a/superdesk/datalayer.py b/superdesk/datalayer.py
index 9e4e39ba5a..06f3b2e4c1 100644
--- a/superdesk/datalayer.py
+++ b/superdesk/datalayer.py
@@ -12,14 +12,12 @@
 
 from eve.io.base import DataLayer
 from eve.io.mongo import Mongo
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
 from eve_elastic import Elastic, InvalidSearchString  # noqa
-from flask import current_app
+from superdesk.core import get_current_async_app, get_app_config
 from superdesk.lock import lock, unlock
 from superdesk.json_utils import SuperdeskJSONEncoder
 
-from superdesk.core.app import get_current_async_app
-
 
 class SuperdeskDataLayer(DataLayer):
     """Superdesk Data Layer.
@@ -114,15 +112,15 @@ def is_empty(self, resource):
         return self._backend(resource).is_empty(resource)
 
     def _search_backend(self, resource):
-        if resource.endswith(current_app.config["VERSIONS"]):
+        if resource.endswith(get_app_config("VERSIONS")):
             return
         datasource = self.datasource(resource)
-        backend = config.SOURCES.get(datasource[0], {}).get("search_backend", None)
+        backend = get_app_config("SOURCES", {}).get(datasource[0], {}).get("search_backend", None)
         return getattr(self, backend) if backend is not None else None
 
     def _backend(self, resource):
         datasource = self.datasource(resource)
-        backend = config.SOURCES.get(datasource[0], {"backend": "mongo"}).get("backend", "mongo")
+        backend = get_app_config("SOURCES", {}).get(datasource[0], {"backend": "mongo"}).get("backend", "mongo")
         return getattr(self, backend)
 
     def get_mongo_collection(self, resource):
@@ -131,7 +129,7 @@ def get_mongo_collection(self, resource):
     def get_elastic_resources(self):
         """Get set of available elastic resources."""
         resources = set()
-        for resource in config.SOURCES:
+        for resource in get_app_config("SOURCES", {}):
             datasource = self.datasource(resource)[0]
             if self._search_backend(datasource):
                 resources.add(datasource)
diff --git a/superdesk/download.py b/superdesk/download.py
index f52a207ee9..f8578498f4 100644
--- a/superdesk/download.py
+++ b/superdesk/download.py
@@ -16,9 +16,10 @@
 from werkzeug.wsgi import wrap_file
 from .resource import Resource
 from .services import BaseService
-from flask import url_for, request, current_app as app
+from superdesk.core import get_current_app
+from superdesk.flask import request, Blueprint
 
-bp = superdesk.Blueprint("download_raw", __name__)
+bp = Blueprint("download_raw", __name__)
 logger = logging.getLogger(__name__)
 
 
@@ -27,6 +28,7 @@
 @blueprint_auth()
 def download_file(id, folder=None):
     filename = "{}/{}".format(folder, id) if folder else id
+    app = get_current_app()
 
     file = app.media.get(filename, "download")
     if file:
@@ -39,14 +41,14 @@ def download_file(id, folder=None):
     raise SuperdeskApiError.notFoundError("File not found on media storage.")
 
 
-def download_url(media_id):
-    prefered_url_scheme = app.config.get("PREFERRED_URL_SCHEME", "http")
-    return url_for("download_raw.download_file", id=media_id, _external=True, _scheme=prefered_url_scheme)
+# def download_url(media_id):
+#     preferred_url_scheme = app.config.get("PREFERRED_URL_SCHEME", "http")
+#     return url_for("download_raw.download_file", id=media_id, _external=True, _scheme=preferred_url_scheme)
 
 
 def init_app(app) -> None:
     endpoint_name = "download"
-    app.download_url = download_url
+    # app.download_url = download_url
     superdesk.blueprint(bp, app)
     service = BaseService(endpoint_name, backend=superdesk.get_backend())
     DownloadResource(endpoint_name, app=app, service=service)
diff --git a/superdesk/editor_utils.py b/superdesk/editor_utils.py
index 841e28bad0..e9ccd59322 100644
--- a/superdesk/editor_utils.py
+++ b/superdesk/editor_utils.py
@@ -21,7 +21,7 @@
 from textwrap import dedent
 from collections.abc import MutableSequence
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 
 from draftjs_exporter.html import HTML
 from draftjs_exporter.constants import ENTITY_TYPES, INLINE_STYLES, BLOCK_TYPES
@@ -430,7 +430,7 @@ def render_link(self, props):
         return DOM.create_element("a", attribs, props["children"])
 
     def render_embed(self, props):
-        embed_pre_process = app.config.get("EMBED_PRE_PROCESS")
+        embed_pre_process = get_app_config("EMBED_PRE_PROCESS")
         if embed_pre_process:
             for callback in embed_pre_process:
                 callback(props["data"])
diff --git a/superdesk/emails/__init__.py b/superdesk/emails/__init__.py
index 1c18a9c850..34bdf9cd09 100644
--- a/superdesk/emails/__init__.py
+++ b/superdesk/emails/__init__.py
@@ -18,7 +18,8 @@
 from bson.json_util import dumps
 from flask_mail import Message
 from superdesk.celery_app import celery
-from flask import current_app as app, render_template, render_template_string
+from superdesk.core import get_current_app, get_app_config
+from superdesk.flask import render_template, render_template_string
 from superdesk import get_resource_service
 
 logger = logging.getLogger(__name__)
@@ -66,8 +67,8 @@ def send_email(self, subject, sender, recipients, text_body, html_body, cc=None,
             html=html_body,
             attachments=attachments,
         )
-        if app.config.get("E2E") is not True:
-            return app.mail.send(msg)
+        if get_app_config("E2E") is not True:
+            return get_current_app().mail.send(msg)
     except OSError:
         logger.exception("can not send email %s", subject)
     finally:
@@ -77,9 +78,9 @@ def send_email(self, subject, sender, recipients, text_body, html_body, cc=None,
 def send_activate_account_email(doc, activate_ttl):
     user = get_resource_service("users").find_one(req=None, _id=doc["user"])
     first_name = user.get("first_name")
-    app_name = app.config["APPLICATION_NAME"]
-    admins = app.config["ADMINS"]
-    client_url = app.config["CLIENT_URL"].rstrip("/")
+    app_name = get_app_config("APPLICATION_NAME")
+    admins = get_app_config("ADMINS")
+    client_url = get_app_config("CLIENT_URL").rstrip("/")
     url = "{}/#/reset-password?token={}".format(client_url, doc["token"])
     hours = activate_ttl * 24
     subject = render_template("account_created_subject.txt", app_name=app_name)
@@ -107,8 +108,8 @@ def send_activate_account_email(doc, activate_ttl):
 
 
 def send_user_status_changed_email(recipients, status):
-    admins = app.config["ADMINS"]
-    app_name = app.config["APPLICATION_NAME"]
+    admins = get_app_config("ADMINS")
+    app_name = get_app_config("APPLICATION_NAME")
     subject = render_template("account_status_changed_subject.txt", app_name=app_name, status=status)
     text_body = render_template("account_status_changed.txt", app_name=app_name, status=status)
     html_body = render_template("account_status_changed.html", app_name=app_name, status=status)
@@ -116,8 +117,8 @@ def send_user_status_changed_email(recipients, status):
 
 
 def send_user_type_changed_email(recipients):
-    admins = app.config["ADMINS"]
-    app_name = app.config["APPLICATION_NAME"]
+    admins = get_app_config("ADMINS")
+    app_name = get_app_config("APPLICATION_NAME")
     subject = render_template("account_type_changed_subject.txt", app_name=app_name)
     text_body = render_template("account_type_changed.txt", app_name=app_name)
     html_body = render_template("account_type_changed.html", app_name=app_name)
@@ -125,9 +126,9 @@ def send_user_type_changed_email(recipients):
 
 
 def send_reset_password_email(doc, token_ttl):
-    admins = app.config["ADMINS"]
-    client_url = app.config["CLIENT_URL"].rstrip("/")
-    app_name = app.config["APPLICATION_NAME"]
+    admins = get_app_config("ADMINS")
+    client_url = get_app_config("CLIENT_URL").rstrip("/")
+    app_name = get_app_config("APPLICATION_NAME")
     url = "{}/#/reset-password?token={}".format(client_url, doc["token"])
     hours = token_ttl * 24
     subject = render_template("reset_password_subject.txt")
@@ -140,8 +141,8 @@ def send_reset_password_email(doc, token_ttl):
 
 def send_user_mentioned_email(recipients, user_name, doc, url):
     logging.info("sending mention email to: %s", recipients)
-    admins = app.config["ADMINS"]
-    app_name = app.config["APPLICATION_NAME"]
+    admins = get_app_config("ADMINS")
+    app_name = get_app_config("APPLICATION_NAME")
     subject = render_template("user_mention_subject.txt", username=user_name)
     text_body = render_template("user_mention.txt", text=doc["text"], username=user_name, link=url, app_name=app_name)
     html_body = render_template("user_mention.html", text=doc["text"], username=user_name, link=url, app_name=app_name)
@@ -150,7 +151,7 @@ def send_user_mentioned_email(recipients, user_name, doc, url):
 
 def get_activity_digest(value):
     h = hashlib.sha1()
-    json_encoder = app.data.json_encoder_class()
+    json_encoder = get_current_app().data.json_encoder_class()
     h.update(dumps(value, sort_keys=True, default=json_encoder.default).encode("utf-8"))
     return h.hexdigest()
 
@@ -160,15 +161,15 @@ def send_activity_emails(activity, recipients):
     message_id = get_activity_digest(activity)
     # there is no resource for email timestamps registered,
     # so use users resoure to get pymongo db
-    email_timestamps = app.data.mongo.pymongo("users").db[EMAIL_TIMESTAMP_RESOURCE]
+    email_timestamps = get_current_app().data.mongo.pymongo("users").db[EMAIL_TIMESTAMP_RESOURCE]
     last_message_info = email_timestamps.find_one(message_id)
-    resend_interval = app.config.get("EMAIL_NOTIFICATION_RESEND", 24)
+    resend_interval = get_app_config("EMAIL_NOTIFICATION_RESEND", 24)
 
     if last_message_info and last_message_info["_created"] + timedelta(hours=resend_interval) > now:
         return
 
-    admins = app.config["ADMINS"]
-    app_name = app.config["APPLICATION_NAME"]
+    admins = get_app_config("ADMINS")
+    app_name = get_app_config("APPLICATION_NAME")
     link = activity.get("data", {}).get("link", None)
 
     notification = render_template_string(activity.get("message"), **activity.get("data"))
@@ -181,8 +182,8 @@ def send_activity_emails(activity, recipients):
 
 
 def send_article_killed_email(article, recipients, transmitted_at):
-    admins = app.config["ADMINS"]
-    app_name = app.config["APPLICATION_NAME"]
+    admins = get_app_config("ADMINS")
+    app_name = get_app_config("APPLICATION_NAME")
     place = next(iter(article.get("place") or []), "")
     if place:
         place = place.get("qcode", "")
@@ -198,9 +199,9 @@ def send_article_killed_email(article, recipients, transmitted_at):
 
 
 def send_translation_changed(username, article, recipients):
-    admins = app.config["ADMINS"]
-    app_name = app.config["APPLICATION_NAME"]
-    client_url = app.config.get("CLIENT_URL", "").rstrip("/")
+    admins = get_app_config("ADMINS")
+    app_name = get_app_config("APPLICATION_NAME")
+    client_url = get_app_config("CLIENT_URL", "").rstrip("/")
 
     link = "{}/#/workspace?item={}&action=edit".format(client_url, article["guid"])
     headline = article.get("headline", link)
diff --git a/superdesk/errors.py b/superdesk/errors.py
index 713828f556..b1575ed8d8 100644
--- a/superdesk/errors.py
+++ b/superdesk/errors.py
@@ -10,13 +10,13 @@
 
 import logging
 
-from flask import current_app as app
 from cerberus import DocumentError
 from eve.endpoints import send_response
 from werkzeug.exceptions import HTTPException
 from elasticsearch.exceptions import ConnectionTimeout  # noqa
 
 from superdesk.utils import save_error_data
+from superdesk.resource_fields import STATUS, STATUS_ERR, ISSUES
 
 
 logger = logging.getLogger(__name__)
@@ -68,7 +68,9 @@ def log_exception(message, extra=None, data=None):
 
 def notifications_enabled():
     """Test if notifications are enabled in config."""
-    return app.config.get("ERROR_NOTIFICATIONS", True)
+    from superdesk.core import get_app_config
+
+    return get_app_config("ERROR_NOTIFICATIONS", True)
 
 
 class SuperdeskError(DocumentError):
@@ -127,10 +129,10 @@ def __init__(self, message=None, status_code=None, payload=None, exception=None)
     def to_dict(self):
         """Create dict for json response."""
         rv = {}
-        rv[app.config["STATUS"]] = app.config["STATUS_ERR"]
+        rv[STATUS] = STATUS_ERR
         rv["_message"] = self.message or ""
         if hasattr(self, "payload"):
-            rv[app.config["ISSUES"]] = self.payload
+            rv[ISSUES] = self.payload
         return rv
 
     def __str__(self):
@@ -764,13 +766,14 @@ def __init__(self, errors, fields, message=None):
         Exception.__init__(self)
         self.errors = errors
         self.fields = fields
+
         try:
             self.response = send_response(
                 None,
                 (
                     {
-                        app.config["STATUS"]: app.config["STATUS_ERR"],
-                        app.config["ISSUES"]: {
+                        STATUS: STATUS_ERR,
+                        ISSUES: {
                             "validator exception": str([self.errors]),  # BC
                             "fields": self.fields,
                         },
diff --git a/superdesk/es_utils.py b/superdesk/es_utils.py
index e20d04c1bc..64056ace1d 100644
--- a/superdesk/es_utils.py
+++ b/superdesk/es_utils.py
@@ -14,7 +14,7 @@
 import json
 import pytz
 from datetime import datetime
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app
 
 logger = logging.getLogger(__name__)
 
@@ -51,9 +51,10 @@ def get_index(repos=None):
     """Get index id for all repos."""
     if repos is None:
         repos = REPOS
+    app = get_current_app()
     indexes = {app.data.elastic.index}
     for repo in repos:
-        indexes.add(app.config["ELASTICSEARCH_INDEXES"].get(repo, app.data.elastic.index))
+        indexes.add(get_app_config("ELASTICSEARCH_INDEXES").get(repo, app.data.elastic.index))
     return ",".join(indexes)
 
 
@@ -85,7 +86,7 @@ def filter2query(filter_, user_id=None):
     post_filter_must_not = []
 
     # controlled vocabularies can be overriden in settings
-    search_cvs = app.config.get("search_cvs", SEARCH_CVS)
+    search_cvs = get_app_config("search_cvs", SEARCH_CVS)
 
     for cv in search_cvs:
         if cv["id"] in search_query and cv["field"] != cv["id"]:
@@ -203,7 +204,7 @@ def filter2query(filter_, user_id=None):
         post_filter.append({"terms": {"credit": [v["value"] for v in values]}})
 
     # date filters
-    tz = pytz.timezone(app.config["DEFAULT_TIMEZONE"])
+    tz = pytz.timezone(get_app_config("DEFAULT_TIMEZONE"))
     range_ = {}
     to_delete = []
     for field in DATE_FIELDS:
@@ -267,7 +268,7 @@ def filter2query(filter_, user_id=None):
         query["post_filter"] = {"bool": {"must": post_filter, "must_not": post_filter_must_not}}
 
     query["sort"] = {"versioncreated": "desc"}
-    query.setdefault("size", app.config["ELASTIC_DEFAULT_SIZE"])
+    query.setdefault("size", get_app_config("ELASTIC_DEFAULT_SIZE"))
 
     search_query.pop("repo", None)
 
diff --git a/superdesk/etree.py b/superdesk/etree.py
index e16458e656..617eec5b96 100644
--- a/superdesk/etree.py
+++ b/superdesk/etree.py
@@ -11,7 +11,6 @@
 from lxml import etree  # noqa
 from lxml.etree import ParseError  # noqa
 from lxml import html
-from superdesk import config
 
 
 # from https://developer.mozilla.org/en-US/docs/Web/HTML/Block-level_elements
@@ -169,12 +168,14 @@ def clean_html(elem):
     :param etree._Element elem: element to clean (will be converted to HtmlElement if it is not already one
     :return html.HtmlElement: cleaned element
     """
+    from superdesk.core import get_app_config
+
     if not isinstance(elem, html.HtmlElement):
         elem = html.fromstring(etree.tostring(elem, encoding="unicode"))
     safe_attrs = set(html.defs.safe_attrs)
     safe_attrs.remove("class")
     cleaner = html.clean.Cleaner(
-        allow_tags=config.HTML_TAGS_WHITELIST, remove_unknown_tags=False, safe_attrs=safe_attrs
+        allow_tags=get_app_config("HTML_TAGS_WHITELIST"), remove_unknown_tags=False, safe_attrs=safe_attrs
     )
     return cleaner.clean_html(elem)
 
diff --git a/superdesk/eve_backend.py b/superdesk/eve_backend.py
index 955213a3f8..79c49fc55e 100644
--- a/superdesk/eve_backend.py
+++ b/superdesk/eve_backend.py
@@ -16,13 +16,15 @@
 from typing_extensions import Literal
 from pymongo.cursor import Cursor as MongoCursor
 from pymongo.collation import Collation
-from flask import current_app as app, json
-from eve.utils import document_etag, config, ParsedRequest
+from eve.utils import document_etag, ParsedRequest
 from eve.io.mongo import MongoJSONEncoder
 from superdesk.utc import utcnow
 from superdesk.logging import logger, item_msg
 from eve.methods.common import resolve_document_etag
 from elasticsearch.exceptions import RequestError, NotFoundError
+
+from superdesk.core import json, get_app_config, get_current_app
+from superdesk.resource_fields import ID_FIELD, ETAG, LAST_UPDATED, DATE_CREATED
 from superdesk.errors import SuperdeskApiError
 from superdesk.notification import push_notification as _push_notification
 from superdesk.cache import cache
@@ -31,9 +33,9 @@
 
 SYSTEM_KEYS = set(
     [
-        "_etag",
-        "_updated",
-        "_created",
+        ETAG,
+        LAST_UPDATED,
+        DATE_CREATED,
     ]
 )
 
@@ -70,7 +72,7 @@ def find_one(self, endpoint_name, req, **lookup):
         backend = self._backend(endpoint_name)
         item = backend.find_one(endpoint_name, req=req, **lookup)
         search_backend = self._lookup_backend(endpoint_name, fallback=True)
-        if search_backend and app.config.get("BACKEND_FIND_ONE_SEARCH_TEST", False):
+        if search_backend and get_app_config("BACKEND_FIND_ONE_SEARCH_TEST", False):
             # set the parent for the parent child in elastic search
             self._set_parent(endpoint_name, item, lookup)
             item_search = search_backend.find_one(endpoint_name, req=req, **lookup)
@@ -138,9 +140,9 @@ def get(self, endpoint_name, req, lookup, **kwargs):
             req.if_modified_since = None
             cursor, count = backend.find(endpoint_name, req, lookup, perform_count=False)
 
-        source_config = app.config["DOMAIN"][endpoint_name]
+        source_config = get_app_config("DOMAIN")[endpoint_name]
         if is_mongo and source_config.get("collation"):
-            cursor.collation(Collation(locale=app.config.get("MONGO_LOCALE", "en_US")))
+            cursor.collation(Collation(locale=get_app_config("MONGO_LOCALE", "en_US")))
 
         self._cursor_hook(cursor=cursor, endpoint_name=endpoint_name, req=req, lookup=lookup)
         return cursor
@@ -200,8 +202,8 @@ def create_in_mongo(self, endpoint_name, docs, **kwargs):
         """
         for doc in docs:
             self.set_default_dates(doc)
-            if not doc.get(config.ETAG):
-                doc[config.ETAG] = document_etag(doc)
+            if not doc.get(ETAG):
+                doc[ETAG] = document_etag(doc)
 
         backend = self._backend(endpoint_name)
         ids = backend.insert(endpoint_name, docs)
@@ -226,13 +228,13 @@ def update(self, endpoint_name, id, updates, original):
         :param original: original document
         """
         # change etag on update so following request will refetch it
-        updates.setdefault(config.LAST_UPDATED, utcnow())
-        if config.ETAG not in updates:
+        updates.setdefault(LAST_UPDATED, utcnow())
+        if ETAG not in updates:
             updated = original.copy()
             updated.update(updates)
             resolve_document_etag(updated, endpoint_name)
-            if config.IF_MATCH:
-                updates[config.ETAG] = updated[config.ETAG]
+            if get_app_config("IF_MATCH"):
+                updates[ETAG] = updated[ETAG]
         return self._change_request(endpoint_name, id, updates, original)
 
     def system_update(self, endpoint_name, id, updates, original, change_request=False, push_notification=True):
@@ -248,9 +250,9 @@ def system_update(self, endpoint_name, id, updates, original, change_request=Fal
         :param push_notification: if False it won't send resource: notifications for update
         """
         if not change_request:
-            updates.setdefault(config.LAST_UPDATED, utcnow())
+            updates.setdefault(LAST_UPDATED, utcnow())
         updated = original.copy()
-        updated.pop(config.ETAG, None)  # make sure we update
+        updated.pop(ETAG, None)  # make sure we update
         return self._change_request(
             endpoint_name, id, updates, updated, change_request=change_request, push_notification=push_notification
         )
@@ -329,12 +331,12 @@ def update_in_mongo(self, endpoint_name, id, updates, original):
         :param updates: updates to item to be saved
         :param original: current version of the item
         """
-        updates.setdefault(config.LAST_UPDATED, utcnow())
-        if config.ETAG not in updates:
+        updates.setdefault(LAST_UPDATED, utcnow())
+        if ETAG not in updates:
             updated = original.copy()
             updated.update(updates)
             resolve_document_etag(updated, endpoint_name)
-            updates[config.ETAG] = updated[config.ETAG]
+            updates[ETAG] = updated[ETAG]
         backend = self._backend(endpoint_name)
         res = backend.update(endpoint_name, id, updates, original)
         return res if res is not None else updates
@@ -381,7 +383,7 @@ def delete_docs(self, endpoint_name, docs):
         """Delete using list of documents."""
         backend = self._backend(endpoint_name)
         search_backend = self._lookup_backend(endpoint_name)
-        ids = [doc[config.ID_FIELD] for doc in docs]
+        ids = [doc[ID_FIELD] for doc in docs]
         removed_ids = ids
         logger.info("total documents to be removed {}".format(len(ids)))
         if search_backend and ids:
@@ -390,15 +392,15 @@ def delete_docs(self, endpoint_name, docs):
             for doc in docs:
                 try:
                     self.remove_from_search(endpoint_name, doc)
-                    removed_ids.append(doc[config.ID_FIELD])
+                    removed_ids.append(doc[ID_FIELD])
                 except NotFoundError:
-                    logger.warning("item missing from elastic _id=%s" % (doc[config.ID_FIELD],))
-                    removed_ids.append(doc[config.ID_FIELD])
+                    logger.warning("item missing from elastic _id=%s" % (doc[ID_FIELD],))
+                    removed_ids.append(doc[ID_FIELD])
                 except Exception:
-                    logger.exception("item can not be removed from elastic _id=%s" % (doc[config.ID_FIELD],))
+                    logger.exception("item can not be removed from elastic _id=%s" % (doc[ID_FIELD],))
         if len(removed_ids):
             for chunk in get_list_chunks(removed_ids):
-                backend.remove(endpoint_name, {config.ID_FIELD: {"$in": chunk}})
+                backend.remove(endpoint_name, {ID_FIELD: {"$in": chunk}})
             logger.info("Removed %d documents from %s.", len(removed_ids), endpoint_name)
             for doc in docs:
                 self._push_resource_notification("deleted", endpoint_name, _id=str(doc["_id"]))
@@ -413,7 +415,7 @@ def delete_ids_from_mongo(self, endpoint_name, ids):
         :return:
         """
 
-        self.delete_from_mongo(endpoint_name, {config.ID_FIELD: {"$in": ids}})
+        self.delete_from_mongo(endpoint_name, {ID_FIELD: {"$in": ids}})
         return ids
 
     def delete_from_mongo(self, endpoint_name: str, lookup: Dict[str, Any]):
@@ -438,18 +440,20 @@ def remove_from_search(self, endpoint_name, doc):
         :param endpoint_name
         :param dict doc: Document to delete
         """
-        search_backend = app.data._search_backend(endpoint_name)
+
+        search_backend = get_current_app().data._search_backend(endpoint_name)
         search_backend.remove(
-            endpoint_name, {"_id": doc.get(config.ID_FIELD)}, search_backend.get_parent_id(endpoint_name, doc)
+            endpoint_name, {"_id": doc.get(ID_FIELD)}, search_backend.get_parent_id(endpoint_name, doc)
         )
 
     def _datasource(self, endpoint_name):
-        return app.data.datasource(endpoint_name)[0]
+        return get_current_app().data.datasource(endpoint_name)[0]
 
     def _backend(self, endpoint_name):
-        return app.data._backend(endpoint_name)
+        return get_current_app().data._backend(endpoint_name)
 
     def _lookup_backend(self, endpoint_name, fallback=False):
+        app = get_current_app()
         backend = app.data._search_backend(endpoint_name)
         if backend is None and fallback:
             backend = app.data._backend(endpoint_name)
@@ -458,8 +462,8 @@ def _lookup_backend(self, endpoint_name, fallback=False):
     def set_default_dates(self, doc):
         """Helper to populate ``_created`` and ``_updated`` timestamps."""
         now = utcnow()
-        doc.setdefault(config.DATE_CREATED, now)
-        doc.setdefault(config.LAST_UPDATED, now)
+        doc.setdefault(DATE_CREATED, now)
+        doc.setdefault(LAST_UPDATED, now)
 
     def _set_parent(self, endpoint_name, doc, lookup):
         """Set the parent id for parent child document in elastic"""
@@ -507,7 +511,7 @@ def _cursor_hook(self, cursor, endpoint_name, req, lookup):
 
     def notify_on_change(self, endpoint_name):
         """Test if we should push notifications for given resource."""
-        source_config = app.config["DOMAIN"][endpoint_name]
+        source_config = get_app_config("DOMAIN")[endpoint_name]
         return source_config["notifications"] is True
 
     def _push_resource_notification(self, action: Literal["created", "updated", "deleted"], endpoint_name, **kwargs):
diff --git a/superdesk/factory/app.py b/superdesk/factory/app.py
index 48934272f2..b511990928 100644
--- a/superdesk/factory/app.py
+++ b/superdesk/factory/app.py
@@ -13,7 +13,6 @@
 
 import os
 import eve
-import flask
 from werkzeug.exceptions import NotFound
 import jinja2
 import importlib
@@ -26,10 +25,10 @@
 from eve.io.media import MediaStorage
 from eve.render import send_response
 from flask_babel import Babel
-from flask import g, json
 from babel import parse_locale
 from pymongo.errors import DuplicateKeyError
 
+from superdesk.flask import g, url_for, Config, Request as FlaskRequest, abort, Blueprint, request as flask_request
 from superdesk.celery_app import init_celery
 from superdesk.datalayer import SuperdeskDataLayer  # noqa
 from superdesk.errors import SuperdeskError, SuperdeskApiError, DocumentError
@@ -50,9 +49,9 @@
 
 class HttpFlaskRequest(Request):
     endpoint: Endpoint
-    request: flask.Request
+    request: FlaskRequest
 
-    def __init__(self, endpoint: Endpoint, request: flask.Request):
+    def __init__(self, endpoint: Endpoint, request: FlaskRequest):
         self.endpoint = endpoint
         self.request = request
 
@@ -77,7 +76,7 @@ async def get_data(self) -> Union[bytes, str]:
         return self.request.get_data()
 
     async def abort(self, code: int, *args: Any, **kwargs: Any) -> NoReturn:
-        flask.abort(code, *args, **kwargs)
+        abort(code, *args, **kwargs)
 
 
 def set_error_handlers(app):
@@ -121,6 +120,9 @@ class SuperdeskEve(eve.Eve):
     _endpoints: List[Endpoint]
     _endpoint_groups: List[EndpointGroup]
 
+    media: Any
+    data: Any
+
     def __init__(self, **kwargs):
         self.async_app = SuperdeskAsyncApp(self)
         self.json_provider_class = SuperdeskFlaskJSONProvider
@@ -189,7 +191,7 @@ def update_resource_schema(resource):
 
     def register_endpoint(self, endpoint: Endpoint | EndpointGroup):
         if isinstance(endpoint, EndpointGroup):
-            blueprint = flask.Blueprint(endpoint.name, endpoint.import_name)
+            blueprint = Blueprint(endpoint.name, endpoint.import_name)
             for sub_endpoint in endpoint.endpoints:
                 blueprint.add_url_rule(
                     (
@@ -217,7 +219,6 @@ def register_endpoint(self, endpoint: Endpoint | EndpointGroup):
 
     async def _process_async_endpoint(self, **kwargs):
         # Get Endpoint instance
-        from flask import request as flask_request
 
         endpoint_name = flask_request.endpoint
 
@@ -255,6 +256,16 @@ async def _process_async_endpoint(self, **kwargs):
 
         return response.body, response.status_code, response.headers
 
+    def get_current_user_dict(self) -> Optional[Dict[str, Any]]:
+        return getattr(g, "user", None)
+
+    def download_url(self, media_id: str) -> str:
+        preferred_url_scheme = self.config.get("PREFERRED_URL_SCHEME", "http")
+        return url_for("download_raw.download_file", id=media_id, _external=True, _scheme=preferred_url_scheme)
+
+    def as_any(self) -> Any:
+        return self
+
 
 def get_media_storage_class(app_config: Dict[str, Any], use_provider_config: bool = True) -> Type[MediaStorage]:
     if use_provider_config and app_config.get("MEDIA_STORAGE_PROVIDER"):
@@ -280,7 +291,7 @@ def get_app(config=None, media_storage=None, config_object=None, init_elastic=No
     """
 
     abs_path = SUPERDESK_PATH
-    app_config = flask.Config(abs_path)
+    app_config = Config(abs_path)
     app_config.from_object("superdesk.default_settings")
     app_config.setdefault("APP_ABSPATH", abs_path)
     app_config.setdefault("DOMAIN", {})
@@ -316,6 +327,7 @@ def get_app(config=None, media_storage=None, config_object=None, init_elastic=No
     }
 
     superdesk.app = app
+    app.async_app.start()
 
     custom_loader = jinja2.ChoiceLoader(
         [
@@ -386,7 +398,6 @@ def install_app(module_name):
         app.jinja_env.filters[name] = jinja_filter
 
     configure_logging(app.config["LOG_CONFIG_FILE"])
-    app.async_app.start()
 
     return app
 
diff --git a/superdesk/factory/elastic_apm.py b/superdesk/factory/elastic_apm.py
index 92118b9ad4..02b3fd4562 100644
--- a/superdesk/factory/elastic_apm.py
+++ b/superdesk/factory/elastic_apm.py
@@ -1,11 +1,11 @@
 import re
-import flask
 
 from typing import Literal
 from elasticapm.contrib.flask import ElasticAPM
+from superdesk.flask import Flask
 
 
-def setup_apm(app: flask.Flask, service="Core API") -> None:
+def setup_apm(app: Flask, service="Core API") -> None:
     if getattr(app, "apm", None) is None and app.config.get("APM_SERVER_URL") and app.config.get("APM_SECRET_TOKEN"):
         app.config["ELASTIC_APM"] = {
             "DEBUG": app.debug,
@@ -21,7 +21,7 @@ def setup_apm(app: flask.Flask, service="Core API") -> None:
         app.apm = ElasticAPM(app)
 
 
-def get_environment(app: flask.Flask) -> Literal["testing", "staging", "production"]:
+def get_environment(app: Flask) -> Literal["testing", "staging", "production"]:
     if app.config.get("CLIENT_URL"):
         if "localhost" in app.config["CLIENT_URL"] or app.debug:
             return "testing"
diff --git a/superdesk/factory/manager.py b/superdesk/factory/manager.py
index 3f13e4b16d..cdb641e2b1 100644
--- a/superdesk/factory/manager.py
+++ b/superdesk/factory/manager.py
@@ -1,4 +1,4 @@
-from flask import Flask
+from superdesk.flask import Flask
 import superdesk
 
 
diff --git a/superdesk/filemeta.py b/superdesk/filemeta.py
index c7ae2fec5b..3d2bf91ee8 100644
--- a/superdesk/filemeta.py
+++ b/superdesk/filemeta.py
@@ -1,4 +1,4 @@
-from flask import json
+from superdesk.core import json
 
 
 def set_filemeta(item, metadata):
diff --git a/superdesk/flask.py b/superdesk/flask.py
new file mode 100644
index 0000000000..83321a5b7b
--- /dev/null
+++ b/superdesk/flask.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8; -*-
+#
+# This file is part of Superdesk.
+#
+# Copyright 2024 Sourcefabric z.u. and contributors.
+#
+# For the full copyright and license information, please see the
+# AUTHORS and LICENSE files distributed with this source code, or
+# at https://www.sourcefabric.org/superdesk/license
+
+# Temporary shim re-exporting Flask/Quart names for legacy code.
+# Remove once the migration to the new superdesk.core code is complete.
+
+from flask import (
+    Blueprint,
+    Config,
+    Flask,
+    Request,
+    Response,
+    abort,
+    g,
+    jsonify,
+    make_response,
+    redirect,
+    render_template,
+    render_template_string,
+    request,
+    send_file,
+    session,
+    url_for,
+)
+from flask.json.provider import DefaultJSONProvider
+
+__all__ = [
+    "Blueprint",
+    "Config",
+    "DefaultJSONProvider",
+    "Flask",
+    "Request",
+    "Response",
+    "abort",
+    "g",
+    "jsonify",
+    "make_response",
+    "redirect",
+    "render_template",
+    "render_template_string",
+    "request",
+    "send_file",
+    "session",
+    "url_for",
+]
diff --git a/superdesk/ftp.py b/superdesk/ftp.py
index f9af859d36..f0a8d97f54 100644
--- a/superdesk/ftp.py
+++ b/superdesk/ftp.py
@@ -2,7 +2,7 @@
 import ftplib
 
 from contextlib import contextmanager
-from flask import current_app as app
+from superdesk.core import get_app_config
 
 from superdesk.errors import IngestFtpError
 
@@ -17,7 +17,7 @@ def ftp_connect(config):
     """
     if config.get("use_ftps", False):
         try:
-            ftp = ftplib.FTP_TLS(config.get("host"), timeout=app.config.get("FTP_TIMEOUT", 300))
+            ftp = ftplib.FTP_TLS(config.get("host"), timeout=get_app_config("FTP_TIMEOUT", 300))
         except socket.gaierror as e:
             raise IngestFtpError.ftpHostError(exception=e)
 
@@ -28,7 +28,7 @@ def ftp_connect(config):
             raise IngestFtpError.ftpAuthError(exception=ae)
     else:
         try:
-            ftp = ftplib.FTP(config.get("host"), timeout=app.config.get("FTP_TIMEOUT", 300))
+            ftp = ftplib.FTP(config.get("host"), timeout=get_app_config("FTP_TIMEOUT", 300))
         except socket.gaierror as e:
             raise IngestFtpError.ftpHostError(exception=e)
 
diff --git a/superdesk/geonames.py b/superdesk/geonames.py
index 87361197a4..2f55b775bc 100644
--- a/superdesk/geonames.py
+++ b/superdesk/geonames.py
@@ -5,7 +5,7 @@
 from urllib.parse import urljoin
 from urllib3.util.retry import Retry
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 
 session = requests.Session()
 retries = Retry(total=3, backoff_factor=0.1)
@@ -59,14 +59,14 @@ def format_geoname_item(item):
 def geonames_request(service, service_params):
     params = [
         ("type", "json"),
-        ("username", app.config.get("GEONAMES_USERNAME", "")),
+        ("username", get_app_config("GEONAMES_USERNAME", "")),
     ]
 
-    if app.config.get("GEONAMES_TOKEN"):
-        params.append(("token", app.config["GEONAMES_TOKEN"]))
+    if get_app_config("GEONAMES_TOKEN"):
+        params.append(("token", get_app_config("GEONAMES_TOKEN")))
 
     params.extend(service_params)
-    url = urljoin(app.config["GEONAMES_URL"], service)
+    url = urljoin(get_app_config("GEONAMES_URL"), service)
     res = session.get(url, params=params, timeout=10)
     if res.status_code != 200:
         res.raise_for_status()
diff --git a/superdesk/http_proxy.py b/superdesk/http_proxy.py
index cc38749a2f..5fffc7a9dd 100644
--- a/superdesk/http_proxy.py
+++ b/superdesk/http_proxy.py
@@ -1,7 +1,9 @@
-from typing import List, Optional, Dict, Any
+from typing import List, Optional, Dict, Any, cast
 import requests
-from flask import Flask, request, current_app, Response as FlaskResponse, make_response
+
 from superdesk import __version__ as superdesk_version
+from superdesk.core import get_app_config, get_current_app
+from superdesk.flask import request, Response as FlaskResponse, make_response, Flask
 from superdesk.utils import get_cors_headers
 
 
@@ -27,7 +29,7 @@ class HTTPProxy:
 
     Example:
     ::
-        from flask import Flask
+        from superdesk.flask import Flask
         from superdesk.http_proxy import HTTPProxy, register_http_proxy
 
         def init_app(app: Flask) -> None:
@@ -75,10 +77,10 @@ def __init__(
         self.use_cors = use_cors
         self.session = requests.Session()
 
-    def get_internal_url(self, app: Optional[Flask] = None) -> str:
+    def get_internal_url(self) -> str:
         """Returns the base URL route used when registering the proxy with Flask"""
 
-        url_prefix = ((app or current_app).config["URL_PREFIX"]).lstrip("/")
+        url_prefix = cast(str, get_app_config("URL_PREFIX")).lstrip("/")
         return f"/{url_prefix}/{self.internal_url}"
 
     def process_request(self, path: str) -> FlaskResponse:
@@ -88,6 +90,8 @@ def process_request(self, path: str) -> FlaskResponse:
         response = make_response() if request.method == "OPTIONS" else self.send_proxy_request()
         if self.use_cors:
             # Ignore following type check, as ``typing--Werkzeug=1.0.9`` is missing stub for ``update`` method
+            response.headers.set("Access-Control-Allow-Origin", "*")
+
             response.headers.update(get_cors_headers(",".join(self.http_methods)))  # type: ignore
         return response
 
@@ -95,6 +99,8 @@ def authenticate(self):
         """If auth is enabled, make sure the current session is authenticated"""
 
         # Use ``_blueprint`` for resource name for auth purposes (copied from the ``blueprint_auth`` decorator)
+        current_app = get_current_app()
+
         if self.auth and not current_app.auth.authorized([], "_blueprint", request.method):
             # Calling ``auth.authenticate`` raises a ``SuperdeskApiError.unauthorizedError()`` exception
             current_app.auth.authenticate()
@@ -126,7 +132,7 @@ def get_proxy_request_kwargs(self) -> Dict[str, Any]:
             data=request.get_data(),
             allow_redirects=True,
             stream=True,
-            timeout=current_app.config["HTTP_PROXY_TIMEOUT"],
+            timeout=get_app_config("HTTP_PROXY_TIMEOUT"),
         )
 
     def construct_response(self, result: requests.Response) -> FlaskResponse:
@@ -146,7 +152,7 @@ def construct_response(self, result: requests.Response) -> FlaskResponse:
         ]
         headers = [(k, v) for k, v in result.raw.headers.items() if k.lower() not in excluded_headers]
 
-        return current_app.response_class(
+        return get_current_app().response_class(
             result.iter_content(),
             result.status_code,
             headers,
@@ -157,7 +163,7 @@ def construct_response(self, result: requests.Response) -> FlaskResponse:
 def register_http_proxy(app: Flask, proxy: HTTPProxy):
     """Register a HTTPProxy instance by adding URL rules to Flask"""
 
-    internal_url = proxy.get_internal_url(app)
+    internal_url = proxy.get_internal_url()
     app.add_url_rule(
         internal_url, proxy.endpoint_name, proxy.process_request, defaults={"path": ""}, methods=proxy.http_methods
     )
diff --git a/superdesk/io/commands/add_provider.py b/superdesk/io/commands/add_provider.py
index b301dd50f7..f4e8a75339 100644
--- a/superdesk/io/commands/add_provider.py
+++ b/superdesk/io/commands/add_provider.py
@@ -9,7 +9,7 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import superdesk
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app, json
 from superdesk import get_resource_service
 from superdesk.errors import ProviderError
 
@@ -35,10 +35,11 @@ class AddProvider(superdesk.Command):
     def run(self, provider):
         try:
             data = {}
-            data = superdesk.json.loads(provider)
-            data.setdefault("content_expiry", app.config["INGEST_EXPIRY_MINUTES"])
+            data = json.loads(provider)
+            data.setdefault("content_expiry", get_app_config("INGEST_EXPIRY_MINUTES"))
 
-            validator = app.validator(app.config["DOMAIN"]["ingest_providers"]["schema"], "ingest_providers")
+            app = get_current_app()
+            validator = app.validator(get_app_config("DOMAIN")["ingest_providers"]["schema"], "ingest_providers")
             validation = validator.validate(data)
 
             if validation:
diff --git a/superdesk/io/commands/remove_expired_content.py b/superdesk/io/commands/remove_expired_content.py
index 881bfe22dd..44d5895570 100644
--- a/superdesk/io/commands/remove_expired_content.py
+++ b/superdesk/io/commands/remove_expired_content.py
@@ -9,7 +9,7 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import superdesk
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.logging import logger
 from superdesk.utc import utcnow
 from superdesk.notification import push_notification
@@ -86,6 +86,7 @@ def remove_expired_data(provider):
         ingest_service.delete({"_id": {"$in": ids}})
         push_expired_notification(ids)
 
+    app = get_current_app()
     for file_id in file_ids:
         logger.info("Deleting file: %s" % file_id)
         app.media.delete(file_id)
diff --git a/superdesk/io/commands/update_ingest.py b/superdesk/io/commands/update_ingest.py
index 449c16b07f..3a81c9de43 100644
--- a/superdesk/io/commands/update_ingest.py
+++ b/superdesk/io/commands/update_ingest.py
@@ -13,9 +13,10 @@
 import logging
 from datetime import timedelta, timezone, datetime
 import pytz
-from flask import current_app as app
 from werkzeug.exceptions import HTTPException
 
+from superdesk.core import get_app_config, get_current_app
+from superdesk.resource_fields import ID_FIELD
 import superdesk
 from superdesk.activity import ACTIVITY_EVENT, notify_and_add_activity
 from superdesk.celery_app import celery
@@ -130,7 +131,7 @@ def filter_expired_items(provider, items):
                 del provider["content_expiry"]
                 content_expiry = None
 
-        delta = timedelta(minutes=content_expiry or app.config["INGEST_EXPIRY_MINUTES"])
+        delta = timedelta(minutes=content_expiry or get_app_config("INGEST_EXPIRY_MINUTES"))
         filtered_items = [
             item
             for item in items
@@ -221,7 +222,7 @@ def get_is_idle(provider):
 
 
 def get_task_id(provider):
-    return "update-ingest-{0}-{1}".format(provider.get("name"), provider.get(superdesk.config.ID_FIELD))
+    return "update-ingest-{0}-{1}".format(provider.get("name"), provider.get(ID_FIELD))
 
 
 def has_system_renditions(item):
@@ -290,7 +291,7 @@ def update_provider(provider, rule_set=None, routing_scheme=None, sync=False):
     :param routing_scheme: Routing Scheme if one is associated with Ingest Provider.
     :param sync: Running in sync mode from cli.
     """
-    lock_name = get_lock_id("ingest", provider["name"], provider[superdesk.config.ID_FIELD])
+    lock_name = get_lock_id("ingest", provider["name"], provider[ID_FIELD])
 
     if not lock(lock_name, expire=UPDATE_TTL + 10):
         if sync:
@@ -311,7 +312,7 @@ def update_provider(provider, rule_set=None, routing_scheme=None, sync=False):
         while True:
             try:
                 if not touch(lock_name, expire=UPDATE_TTL):
-                    logger.warning("lock expired while updating provider %s", provider[superdesk.config.ID_FIELD])
+                    logger.warning("lock expired while updating provider %s", provider[ID_FIELD])
                     return
                 items = generator.send(failed)
                 failed = ingest_items(items, provider, feeding_service, rule_set, routing_scheme)
@@ -326,8 +327,8 @@ def update_provider(provider, rule_set=None, routing_scheme=None, sync=False):
         # Some Feeding Services update the collection and by this time the _etag might have been changed.
         # So it's necessary to fetch it once again. Otherwise, OriginalChangedError is raised.
         ingest_provider_service = superdesk.get_resource_service("ingest_providers")
-        provider = ingest_provider_service.find_one(req=None, _id=provider[superdesk.config.ID_FIELD])
-        ingest_provider_service.system_update(provider[superdesk.config.ID_FIELD], update, provider)
+        provider = ingest_provider_service.find_one(req=None, _id=provider[ID_FIELD])
+        ingest_provider_service.system_update(provider[ID_FIELD], update, provider)
 
         if LAST_ITEM_UPDATE not in update and get_is_idle(provider):
             admins = superdesk.get_resource_service("users").get_users_by_user_type("administrator")
@@ -340,10 +341,10 @@ def update_provider(provider, rule_set=None, routing_scheme=None, sync=False):
                 last=provider[LAST_ITEM_UPDATE].replace(tzinfo=timezone.utc).astimezone(tz=None).strftime("%c"),
             )
 
-        logger.info("Provider {0} updated".format(provider[superdesk.config.ID_FIELD]))
+        logger.info("Provider {0} updated".format(provider[ID_FIELD]))
 
         if LAST_ITEM_UPDATE in update:  # Only push a notification if there has been an update
-            push_notification("ingest:update", provider_id=str(provider[superdesk.config.ID_FIELD]))
+            push_notification("ingest:update", provider_id=str(provider[ID_FIELD]))
     except Exception as e:
         logger.error("Failed to ingest file: {error}".format(error=e))
         raise IngestFileError(3000, e, provider)
@@ -536,7 +537,7 @@ def ingest_items(items, provider, feeding_service, rule_set=None, routing_scheme
                 ref.setdefault("renditions", itemRendition)
             ref[GUID_FIELD] = ref["residRef"]
             if items_dict.get(ref["residRef"]):
-                ref["residRef"] = items_dict.get(ref["residRef"], {}).get(superdesk.config.ID_FIELD)
+                ref["residRef"] = items_dict.get(ref["residRef"], {}).get(ID_FIELD)
         if item[GUID_FIELD] in failed_items:
             continue
         ingested, ids = ingest_item(item, provider, feeding_service, rule_set, routing_scheme)
@@ -551,6 +552,7 @@ def ingest_items(items, provider, feeding_service, rule_set=None, routing_scheme
         ingest_collection = feeding_service.service if hasattr(feeding_service, "service") else "ingest"
     ingest_service = superdesk.get_resource_service(ingest_collection)
     updated_items = ingest_service.find({"_id": {"$in": created_ids}}, max_results=len(created_ids))
+    app = get_current_app()
     app.data._search_backend(ingest_collection).bulk_insert(ingest_collection, list(updated_items))
     if failed_items:
         logger.error("Failed to ingest the following items: %s", failed_items)
@@ -577,8 +579,8 @@ def ingest_item(item, provider, feeding_service, rule_set=None, routing_scheme=N
         old_item = ingest_service.find_one(guid=item[GUID_FIELD], req=None)
 
         if not old_item:
-            item.setdefault(superdesk.config.ID_FIELD, generate_guid(type=GUID_NEWSML))
-            item[FAMILY_ID] = item[superdesk.config.ID_FIELD]
+            item.setdefault(ID_FIELD, generate_guid(type=GUID_NEWSML))
+            item[FAMILY_ID] = item[ID_FIELD]
         elif provider.get("disable_item_updates", False):
             logger.warning(
                 f"Resource '{ingest_collection}' "
@@ -590,7 +592,7 @@ def ingest_item(item, provider, feeding_service, rule_set=None, routing_scheme=N
             logger.info(f"Resource '{ingest_collection}' " f"item '{item[GUID_FIELD]}' should not be updated")
             return False, []
 
-        item["ingest_provider"] = str(provider[superdesk.config.ID_FIELD])
+        item["ingest_provider"] = str(provider[ID_FIELD])
         item.setdefault("source", provider.get("source", ""))
         item.setdefault("uri", item[GUID_FIELD])  # keep it as original guid
 
@@ -610,7 +612,7 @@ def ingest_item(item, provider, feeding_service, rule_set=None, routing_scheme=N
             process_anpa_category(item, provider)
 
         if "subject" in item:
-            if not app.config.get("INGEST_SKIP_IPTC_CODES", False):
+            if not get_app_config("INGEST_SKIP_IPTC_CODES", False):
                 # FIXME: temporary fix for SDNTB-344, need to be removed once SDESK-439 is implemented
                 process_iptc_codes(item, provider)
             if "anpa_category" not in item:
@@ -686,7 +688,7 @@ def ingest_item(item, provider, feeding_service, rule_set=None, routing_scheme=N
             new_version = _is_new_version(item, old_item)
             updates = deepcopy(item)
             if new_version:
-                ingest_service.patch_in_mongo(old_item[superdesk.config.ID_FIELD], updates, old_item)
+                ingest_service.patch_in_mongo(old_item[ID_FIELD], updates, old_item)
                 item.update(old_item)
                 item.update(updates)
                 items_ids.append(item["_id"])
@@ -702,7 +704,7 @@ def ingest_item(item, provider, feeding_service, rule_set=None, routing_scheme=N
                 raise e
 
         if routing_scheme and new_version:
-            routed = ingest_service.find_one(_id=item[superdesk.config.ID_FIELD], req=None)
+            routed = ingest_service.find_one(_id=item[ID_FIELD], req=None)
             superdesk.get_resource_service("routing_schemes").apply_routing_scheme(routed, provider, routing_scheme)
 
     except Exception as ex:
@@ -770,7 +772,8 @@ def set_expiry(item, provider, parent_expiry=None):
         expiry_offset = item["dates"]["end"]
 
     item.setdefault(
-        "expiry", get_expiry_date(provider.get("content_expiry") or app.config["INGEST_EXPIRY_MINUTES"], expiry_offset)
+        "expiry",
+        get_expiry_date(provider.get("content_expiry") or get_app_config("INGEST_EXPIRY_MINUTES"), expiry_offset),
     )
 
 
diff --git a/superdesk/io/feed_parsers/__init__.py b/superdesk/io/feed_parsers/__init__.py
index f3c9a29f2f..30062c4614 100644
--- a/superdesk/io/feed_parsers/__init__.py
+++ b/superdesk/io/feed_parsers/__init__.py
@@ -12,7 +12,7 @@
 
 from superdesk.etree import etree as sd_etree
 from superdesk.errors import SkipValue
-from flask import current_app as app
+from superdesk.core import get_current_app, get_app_config
 from superdesk.metadata.item import Priority
 from collections import OrderedDict
 import inspect
@@ -72,7 +72,7 @@ def set_dateline(self, item, city=None, text=None):
         item.setdefault("dateline", {})
 
         if city:
-            cities = app.locators.find_cities()
+            cities = get_current_app().locators.find_cities()
             located = [c for c in cities if c["city"] == city]
             item["dateline"]["located"] = (
                 located[0] if len(located) > 0 else {"city_code": city, "city": city, "tz": "UTC", "dateline": "city"}
@@ -184,7 +184,7 @@ def _generate_mapping(self, setting_param_name):
             class_mapping = {}
 
         if setting_param_name is not None:
-            settings_mapping = getattr(superdesk.config, setting_param_name)
+            settings_mapping = get_app_config(setting_param_name)
             if settings_mapping is None:
                 logging.info("No mapping found in settings for NITF parser, using default one")
                 settings_mapping = {}
diff --git a/superdesk/io/feed_parsers/ana_mpe_newsml.py b/superdesk/io/feed_parsers/ana_mpe_newsml.py
index ebe30d209d..c140844f48 100644
--- a/superdesk/io/feed_parsers/ana_mpe_newsml.py
+++ b/superdesk/io/feed_parsers/ana_mpe_newsml.py
@@ -12,11 +12,11 @@
 import pytz
 from superdesk.etree import etree
 import html
+from superdesk.core import get_current_app
 from superdesk.io.feed_parsers.newsml_1_2 import NewsMLOneFeedParser
 from superdesk.io.registry import register_feed_parser
 from superdesk.errors import ParserError
 from dateutil.parser import parse as date_parser
-from flask import current_app as app
 from apps.archive.common import format_dateline_to_locmmmddsrc
 
 
@@ -103,7 +103,7 @@ def parse(self, xml, provider=None):
             # Normalise the country code
             country = "GR" if country == "GRC" else country
 
-            cities = app.locators.find_cities()
+            cities = get_current_app().locators.find_cities()
             located = [c for c in cities if c["city"] == city and c["country_code"] == country]
             if len(located) == 1:
                 item["dateline"]["located"] = located[0]
diff --git a/superdesk/io/feed_parsers/ap_anpa.py b/superdesk/io/feed_parsers/ap_anpa.py
index 1b70f3600f..fdf2eb9fab 100644
--- a/superdesk/io/feed_parsers/ap_anpa.py
+++ b/superdesk/io/feed_parsers/ap_anpa.py
@@ -11,7 +11,7 @@
 from .anpa import ANPAFeedParser
 from superdesk.io.registry import register_feed_parser
 from superdesk.io.iptc import subject_codes
-from flask import current_app as app
+from superdesk.core import get_current_app
 from apps.archive.common import format_dateline_to_locmmmddsrc
 from superdesk.utc import get_date
 from superdesk import get_resource_service
@@ -128,7 +128,7 @@ def ap_derive_dateline(self, item):
                         city = city.split(",")[0]
                         if any(char.isdigit() for char in city):
                             return
-                        cities = app.locators.find_cities()
+                        cities = get_current_app().locators.find_cities()
                         located = [c for c in cities if c["city"].lower() == city.lower()]
                         item.setdefault("dateline", {})
                         item["dateline"]["located"] = (
diff --git a/superdesk/io/feed_parsers/ap_media.py b/superdesk/io/feed_parsers/ap_media.py
index 71cbdfbcb0..56e88d8ee1 100644
--- a/superdesk/io/feed_parsers/ap_media.py
+++ b/superdesk/io/feed_parsers/ap_media.py
@@ -11,6 +11,7 @@
 import logging
 import datetime
 
+from superdesk.core import get_current_app, get_app_config
 from superdesk.utc import utc
 from superdesk.io.registry import register_feed_parser
 from superdesk.io.feed_parsers import FeedParser
@@ -18,7 +19,6 @@
 from superdesk.metadata.item import ITEM_URGENCY, ITEM_PRIORITY, Priority
 from apps.archive.common import format_dateline_to_locmmmddsrc
 from superdesk.utc import get_date
-from flask import current_app as app
 from superdesk import get_resource_service
 
 
@@ -189,7 +189,7 @@ def parse(self, s_json, provider=None):
             item["original_source"] = ",".join([n.get("name") for n in in_item.get("infosource", [])])
 
         if in_item.get("datelinelocation"):
-            cities = app.locators.find_cities()
+            cities = get_current_app().locators.find_cities()
             # Try to find a single matching city either by city and country or city country and state
             located = [
                 c
@@ -271,7 +271,7 @@ def parse(self, s_json, provider=None):
         return item
 
     def _parse_associations(self, associations, item, provider=None):
-        related_id = getattr(self, "RELATED_ID", app.config.get("INGEST_AP_RELATED_ID"))
+        related_id = getattr(self, "RELATED_ID", get_app_config("INGEST_AP_RELATED_ID"))
         if related_id:
             item["associations"] = {}
             for key, raw in associations.items():
diff --git a/superdesk/io/feed_parsers/efe_nitf.py b/superdesk/io/feed_parsers/efe_nitf.py
index 8247706466..f8f8711c20 100644
--- a/superdesk/io/feed_parsers/efe_nitf.py
+++ b/superdesk/io/feed_parsers/efe_nitf.py
@@ -9,11 +9,11 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from superdesk.core import get_current_app
 from superdesk.io.feed_parsers.nitf import NITFFeedParser
 from superdesk.io.registry import register_feed_parser
 from superdesk.metadata.item import FORMAT
 from superdesk.io.iptc import subject_codes
-from flask import current_app as app
 from apps.archive.common import format_dateline_to_locmmmddsrc
 from superdesk.utc import get_date
 import logging
@@ -90,7 +90,7 @@ def derive_dateline(self, item):
         """
         try:
             if len(item.get("place", [])) == 1:
-                cities = app.locators.find_cities()
+                cities = get_current_app().locators.find_cities()
                 city = item.get("place", "")[0].get("name", "")
                 if city:
                     located = [c for c in cities if c["city"].lower() == city.lower()]
diff --git a/superdesk/io/feed_parsers/image_iptc.py b/superdesk/io/feed_parsers/image_iptc.py
index 3fa6425c60..0e500ea6ec 100644
--- a/superdesk/io/feed_parsers/image_iptc.py
+++ b/superdesk/io/feed_parsers/image_iptc.py
@@ -8,25 +8,27 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from datetime import datetime
+import mimetypes
+import logging
+import os.path
 import arrow
+
+from superdesk.core import get_current_app
+from superdesk.resource_fields import VERSION, ITEM_TYPE
 from superdesk.io.feed_parsers import FileFeedParser
 from superdesk.io.registry import register_feed_parser
 from superdesk.errors import ParserError
 from superdesk.media.media_operations import process_file_from_stream
 from superdesk.media.image import get_meta_iptc
 from superdesk.media.iim_codes import TAG
-from superdesk.metadata.item import GUID_TAG, ITEM_TYPE, CONTENT_TYPE
+from superdesk.metadata.item import GUID_TAG, CONTENT_TYPE
 from superdesk.metadata import utils
 from superdesk.media.renditions import generate_renditions, get_renditions_spec
 from superdesk.upload import url_for_media
 from superdesk.utc import utcnow
 from superdesk import filemeta
-from flask import current_app as app
-from eve.utils import config
-from datetime import datetime
-import mimetypes
-import logging
-import os.path
+
 
 logger = logging.getLogger(__name__)
 
@@ -73,7 +75,7 @@ def parse_item(self, image_path):
         item = {
             "guid": guid,
             "uri": guid,
-            config.VERSION: 1,
+            VERSION: 1,
             ITEM_TYPE: CONTENT_TYPE.PICTURE,
             "mimetype": content_type,
             "versioncreated": utcnow(),
@@ -81,6 +83,7 @@ def parse_item(self, image_path):
         with open(image_path, "rb") as f:
             _, content_type, file_metadata = process_file_from_stream(f, content_type=content_type)
             f.seek(0)
+            app = get_current_app()
             file_id = app.media.put(f, filename=filename, content_type=content_type, metadata=file_metadata)
             filemeta.set_filemeta(item, file_metadata)
             f.seek(0)
diff --git a/superdesk/io/feed_parsers/newsml_2_0.py b/superdesk/io/feed_parsers/newsml_2_0.py
index 9dcba71a85..0d7f25abc0 100644
--- a/superdesk/io/feed_parsers/newsml_2_0.py
+++ b/superdesk/io/feed_parsers/newsml_2_0.py
@@ -13,7 +13,7 @@
 import datetime
 import logging
 
-from flask import current_app as app
+from superdesk.core import get_current_app, get_app_config
 from superdesk import etree as sd_etree, get_resource_service
 from superdesk.errors import ParserError
 from superdesk.io.registry import register_feed_parser
@@ -78,7 +78,7 @@ def parse(self, xml, provider=None):
     def parse_item(self, tree):
         # config is not accessible during __init__, so we check it here
         if self.__class__.missing_voc is None:
-            self.__class__.missing_voc = app.config.get("QCODE_MISSING_VOC", "continue")
+            self.__class__.missing_voc = get_app_config("QCODE_MISSING_VOC", "continue")
             if self.__class__.missing_voc not in ("reject", "create", "continue"):
                 logger.warning(
                     'Bad QCODE_MISSING_VOC value ({value}) using default ("continue")'.format(value=self.missing_voc)
@@ -220,6 +220,7 @@ def parse_meta_item_text(key, dest=None, elemTree=None):
     def parse_content_subject(self, tree, item):
         """Parse subj type subjects into subject list."""
         item["subject"] = []
+        app = get_current_app()
         for subject_elt in tree.findall(self.qname("subject")):
             qcode_parts = subject_elt.get("qcode", "").split(":")
             if len(qcode_parts) == 2 and qcode_parts[0] in self.SUBJ_QCODE_PREFIXES:
diff --git a/superdesk/io/feed_parsers/rfc822.py b/superdesk/io/feed_parsers/rfc822.py
index baaea1c6f6..0062d527ca 100644
--- a/superdesk/io/feed_parsers/rfc822.py
+++ b/superdesk/io/feed_parsers/rfc822.py
@@ -15,11 +15,11 @@
 import re
 from email.header import decode_header
 
-import eve
-from flask import current_app as app, json
 from pytz import timezone
 
 import superdesk
+from superdesk.resource_fields import ID_FIELD
+from superdesk.core import json, get_current_app, get_app_config
 from superdesk import get_resource_service
 from superdesk.errors import IngestEmailError
 from superdesk.io.registry import register_feed_parser
@@ -52,9 +52,6 @@ class EMailRFC822FeedParser(EmailFeedParser):
 
     label = "EMail RFC822 Parser"
 
-    def __init__(self):
-        self.parser_app = app
-
     def can_parse(self, email_message):
         for response_part in email_message:
             if isinstance(response_part, tuple):
@@ -83,6 +80,7 @@ def parse(self, data, provider=None):
 
             html_body = None
             text_body = None
+            app = get_current_app()
 
             for response_part in data:
                 if isinstance(response_part, tuple):
@@ -94,7 +92,7 @@ def parse(self, data, provider=None):
                         if email_regex.findall(field_from):
                             email_address = email_regex.findall(field_from)[0]
                             user = get_resource_service("users").get_user_by_email(email_address)
-                            item["original_creator"] = user[eve.utils.config.ID_FIELD]
+                            item["original_creator"] = user[ID_FIELD]
                     except UserNotRegisteredException:
                         pass
                     item["guid"] = msg["Message-ID"]
@@ -157,7 +155,7 @@ def parse(self, data, provider=None):
                             if content_type == "image/gif" or content_type == "image/png":
                                 continue
                             content.seek(0)
-                            image_id = self.parser_app.media.put(
+                            image_id = app.media.put(
                                 content, filename=fileName, content_type=content_type, metadata=metadata
                             )
 
@@ -312,6 +310,7 @@ def _parse_formatted_email(self, data, provider):
             item = dict()
             item[ITEM_TYPE] = CONTENT_TYPE.TEXT
             item["versioncreated"] = utcnow()
+            app = get_current_app()
             for response_part in data:
                 if isinstance(response_part, tuple):
                     msg = email.message_from_bytes(response_part[1])
@@ -346,7 +345,7 @@ def _parse_formatted_email(self, data, provider):
                             item["slugline"] = mail_item.get("Slugline", "")
                             item["body_html"] = "<p>" + mail_item.get("Body", "").replace("\n", "</p><p>") + "</p>"
 
-                            default_source = app.config.get("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
+                            default_source = get_app_config("DEFAULT_SOURCE_VALUE_FOR_MANUAL_ARTICLES")
                             city = mail_item.get("Dateline", "")
                             cities = app.locators.find_cities()
                             located = [c for c in cities if c["city"].lower() == city.lower()]
diff --git a/superdesk/io/feed_parsers/scoop_newsml_2_0.py b/superdesk/io/feed_parsers/scoop_newsml_2_0.py
index b8e667051b..4842224f47 100644
--- a/superdesk/io/feed_parsers/scoop_newsml_2_0.py
+++ b/superdesk/io/feed_parsers/scoop_newsml_2_0.py
@@ -11,13 +11,13 @@
 import pytz
 import datetime
 import superdesk
+from superdesk.core import get_current_app
 from .newsml_2_0 import NewsMLTwoFeedParser
 from superdesk.io.registry import register_feed_parser
 from superdesk.errors import ParserError
 from superdesk.metadata.item import ITEM_TYPE
 from superdesk.io.iptc import subject_codes
 from superdesk.text_utils import get_word_count
-from flask import current_app as app
 from dateutil.parser import parse as date_parser
 from superdesk.etree import parse_html, to_string
 
@@ -43,6 +43,7 @@ def parse(self, xml, provider=None):
         self.root = xml
         items = []
         try:
+            app = get_current_app()
             for item_set in xml.findall(self.qname("itemSet")):
                 for item_tree in item_set:
                     # Ignore the packageItem, it has no guid
diff --git a/superdesk/io/feeding_services/__init__.py b/superdesk/io/feeding_services/__init__.py
index 0b1052459f..b16a0d7466 100644
--- a/superdesk/io/feeding_services/__init__.py
+++ b/superdesk/io/feeding_services/__init__.py
@@ -13,8 +13,9 @@
 from abc import ABCMeta, abstractmethod
 from datetime import timedelta, datetime
 from pytz import utc
-from flask import current_app as app
-import superdesk
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError, SuperdeskIngestError
 from superdesk.io.registry import registered_feed_parsers, restricted_feeding_service_parsers
@@ -196,9 +197,7 @@ def close_provider(self, provider, error, force=False):
                 },
             }
 
-            get_resource_service("ingest_providers").system_update(
-                provider[superdesk.config.ID_FIELD], updates, provider
-            )
+            get_resource_service("ingest_providers").system_update(provider[ID_FIELD], updates, provider)
 
     def add_timestamps(self, item):
         warnings.warn("deprecated, use localize_timestamps", DeprecationWarning)
@@ -218,7 +217,7 @@ def is_latest_content(self, last_updated, provider_last_updated=None):
         if not provider_last_updated:
             provider_last_updated = utcnow() - timedelta(days=7)
 
-        return provider_last_updated - timedelta(minutes=app.config[OLD_CONTENT_MINUTES]) < last_updated
+        return provider_last_updated - timedelta(minutes=get_app_config(OLD_CONTENT_MINUTES)) < last_updated
 
     def is_old_content(self, last_updated):
         """Test if file is old so it wouldn't probably work in is_latest_content next time.
@@ -227,7 +226,7 @@ def is_old_content(self, last_updated):
 
         :param last_updated: file last updated datetime
         """
-        return last_updated < utcnow() - timedelta(minutes=app.config[OLD_CONTENT_MINUTES])
+        return last_updated < utcnow() - timedelta(minutes=get_app_config(OLD_CONTENT_MINUTES))
 
     def log_item_error(self, err, item, provider):
         """TODO: put item into provider error basket."""
diff --git a/superdesk/io/feeding_services/ap_media.py b/superdesk/io/feeding_services/ap_media.py
index 9163bb57fa..425062b74e 100644
--- a/superdesk/io/feeding_services/ap_media.py
+++ b/superdesk/io/feeding_services/ap_media.py
@@ -16,9 +16,9 @@
 from datetime import timedelta, datetime
 
 from lxml import etree
-from flask import current_app as app
 
 import superdesk
+from superdesk.core import get_app_config
 from superdesk.io.registry import register_feeding_service
 from superdesk.io.feeding_services.http_base_service import HTTPFeedingServiceBase
 from superdesk.errors import IngestApiError
@@ -223,7 +223,7 @@ def api_get(self, url):
     def get_request_kwargs(self) -> Dict[str, Any]:
         request_kwargs = dict(
             timeout=self.HTTP_TIMEOUT,
-            verify=app.config.get("AP_MEDIA_API_VERIFY_SSL", True),
+            verify=get_app_config("AP_MEDIA_API_VERIFY_SSL", True),
             allow_redirects=True,
         )
         try:
diff --git a/superdesk/io/feeding_services/email.py b/superdesk/io/feeding_services/email.py
index c54e5e8a83..0263593e8e 100644
--- a/superdesk/io/feeding_services/email.py
+++ b/superdesk/io/feeding_services/email.py
@@ -12,8 +12,9 @@
 import socket
 import imaplib
 
-from flask import current_app as app
 from flask_babel import lazy_gettext as l_
+
+from superdesk.core import get_app_config
 from superdesk.errors import IngestEmailError
 from superdesk.io.registry import register_feeding_service, register_feeding_service_parser
 from superdesk.io.feeding_services import FeedingService
@@ -79,7 +80,7 @@ def authenticate(self, provider: dict, config: dict) -> imaplib.IMAP4_SSL:
         server = config.get("server", "")
         port = int(config.get("port", 993))
         try:
-            socket.setdefaulttimeout(app.config.get("EMAIL_TIMEOUT", 10))
+            socket.setdefaulttimeout(get_app_config("EMAIL_TIMEOUT", 10))
             imap = imaplib.IMAP4_SSL(host=server, port=port)
         except (socket.gaierror, OSError) as e:
             raise IngestEmailError.emailHostError(exception=e, provider=provider)
diff --git a/superdesk/io/feeding_services/file_service.py b/superdesk/io/feeding_services/file_service.py
index b7e0c8c4d0..b2de062d2b 100644
--- a/superdesk/io/feeding_services/file_service.py
+++ b/superdesk/io/feeding_services/file_service.py
@@ -13,7 +13,8 @@
 import shutil
 from datetime import datetime
 from lxml import etree
-from flask import current_app as app
+
+from superdesk.core import get_current_app
 from superdesk.errors import IngestFileError, ParserError, ProviderError
 from superdesk.io.registry import register_feeding_service
 from superdesk.io.feed_parsers import XMLFeedParser
@@ -62,6 +63,7 @@ def _test(self, provider):
 
     def _update(self, provider, update):
         # check if deprecated FILE_INGEST_OLD_CONTENT_MINUTES setting is still used
+        app = get_current_app()
         if "FILE_INGEST_OLD_CONTENT_MINUTES" in app.config:
             deprecated_cont_min = app.config["FILE_INGEST_OLD_CONTENT_MINUTES"]
             cont_min = app.config[OLD_CONTENT_MINUTES]
diff --git a/superdesk/io/feeding_services/ftp.py b/superdesk/io/feeding_services/ftp.py
index 7872812a35..26fe5d2fcc 100644
--- a/superdesk/io/feeding_services/ftp.py
+++ b/superdesk/io/feeding_services/ftp.py
@@ -18,7 +18,7 @@
 from datetime import datetime
 from urllib.parse import urlparse
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk.io.registry import register_feeding_service
 from superdesk.io.feed_parsers import XMLFeedParser
 from superdesk.utc import utc, utcnow
@@ -291,7 +291,7 @@ def _update(self, provider, update):
         config = provider.get("config", {})
         do_move = config.get("move", False)
         last_processed_file_modify = provider.get("private", {}).get("last_processed_file_modify")
-        limit = app.config.get("FTP_INGEST_FILES_LIST_LIMIT", 100)
+        limit = get_app_config("FTP_INGEST_FILES_LIST_LIMIT", 100)
         registered_parser = self.get_feed_parser(provider)
         allowed_ext = getattr(registered_parser, "ALLOWED_EXT", self.ALLOWED_EXT_DEFAULT)
         if config.get(ALLOWED_EXTENSIONS_CONFIG):
diff --git a/superdesk/io/feeding_services/http_service.py b/superdesk/io/feeding_services/http_service.py
index fdcbbfcf39..5bc46e4a4c 100644
--- a/superdesk/io/feeding_services/http_service.py
+++ b/superdesk/io/feeding_services/http_service.py
@@ -14,8 +14,7 @@
 import arrow
 import requests
 
-from eve.utils import config
-
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.errors import IngestApiError
 from superdesk.io.feeding_services import FeedingService
@@ -54,7 +53,7 @@ def _generate_token_and_update_provider(self, provider):
         """
         token = {"auth_token": self._generate_auth_token(provider), "created": utcnow()}
         get_resource_service("ingest_providers").system_update(
-            provider[config.ID_FIELD], updates={"tokens": token}, original=provider
+            provider[ID_FIELD], updates={"tokens": token}, original=provider
         )
         provider["tokens"] = token
         return token["auth_token"]
diff --git a/superdesk/io/format_document_for_preview.py b/superdesk/io/format_document_for_preview.py
index a54179fd34..5bf3e01432 100644
--- a/superdesk/io/format_document_for_preview.py
+++ b/superdesk/io/format_document_for_preview.py
@@ -1,4 +1,5 @@
-from flask import Blueprint, request, Response, current_app as app
+from superdesk.core import get_app_config
+from superdesk.flask import Blueprint, request, Response
 import superdesk
 from superdesk import get_resource_service
 from superdesk.publish.formatters import get_formatter
@@ -30,9 +31,9 @@ def format_document():
     formatted_docs = formatter.format(article=apply_schema(doc), subscriber=subscriber, codes=None)
 
     headers = {
-        "Access-Control-Allow-Origin": app.config["CLIENT_URL"],
+        "Access-Control-Allow-Origin": get_app_config("CLIENT_URL"),
         "Access-Control-Allow-Methods": "GET",
-        "Access-Control-Allow-Headers": ",".join(app.config["X_HEADERS"]),
+        "Access-Control-Allow-Headers": ",".join(get_app_config("X_HEADERS")),
         "Access-Control-Allow-Credentials": "true",
         "Cache-Control": "no-cache, no-store, must-revalidate",
     }
diff --git a/superdesk/io/ingest.py b/superdesk/io/ingest.py
index 0a9b7eed09..16d31816f0 100644
--- a/superdesk/io/ingest.py
+++ b/superdesk/io/ingest.py
@@ -8,16 +8,15 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import superdesk
-
+from superdesk.resource_fields import ID_FIELD
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.metadata.item import metadata_schema
 from superdesk.metadata.utils import extra_response_fields, item_url, aggregations, get_elastic_highlight_query
 from eve.methods.common import resolve_document_etag
 from superdesk import get_resource_service
-from eve.utils import config
-from flask import current_app as app
+
+from superdesk.core import get_app_config, get_current_app
 from apps.auth import get_user
 from superdesk.notification import push_notification
 from superdesk.privilege import GLOBAL_SEARCH_PRIVILEGE
@@ -62,8 +61,8 @@ def set_ingest_provider_sequence(self, item, provider):
         :param provider: ingest_provider object, used to build the key name of sequence
         """
         sequence_number = get_resource_service("sequences").get_next_sequence_number(
-            key_name="ingest_providers_{_id}".format(_id=provider[config.ID_FIELD]),
-            max_seq_number=app.config["MAX_VALUE_OF_INGEST_SEQUENCE"],
+            key_name="ingest_providers_{_id}".format(_id=provider[ID_FIELD]),
+            max_seq_number=get_app_config("MAX_VALUE_OF_INGEST_SEQUENCE"),
         )
         item["ingest_provider_sequence"] = str(sequence_number)
 
@@ -76,6 +75,7 @@ def on_deleted(self, docs):
             if not doc.get("archived") and rend.get("media")
         ]
 
+        app = get_current_app()
         for file_id in file_ids:
             app.media.delete(file_id)
 
@@ -92,7 +92,7 @@ def on_deleted(self, docs):
 
         user = get_user(required=True)
         if docs:
-            push_notification("item:deleted", item=str(docs[0].get(config.ID_FIELD)), user=str(user))
+            push_notification("item:deleted", item=str(docs[0].get(ID_FIELD)), user=str(user))
 
     def should_update(self, old_item, new_item, provider):
         return True
diff --git a/superdesk/io/ingest_provider_model.py b/superdesk/io/ingest_provider_model.py
index 93055dd956..180fecebcf 100644
--- a/superdesk/io/ingest_provider_model.py
+++ b/superdesk/io/ingest_provider_model.py
@@ -10,9 +10,9 @@
 
 import logging
 
-from eve.utils import config
-from flask import g, current_app as app
-
+from superdesk.resource_fields import ID_FIELD
+from superdesk.core import get_app_config
+from superdesk.flask import g
 import superdesk
 from superdesk import get_resource_service
 from superdesk.activity import (
@@ -74,7 +74,7 @@ def __init__(self, endpoint_name, app, service, endpoint_schema=None):
             "content_types": {"type": "list", "default": tuple(CONTENT_TYPE), "allowed": tuple(CONTENT_TYPE)},
             "allow_remove_ingested": {"type": "boolean", "default": False},
             "disable_item_updates": {"type": "boolean", "default": False},
-            "content_expiry": {"type": "integer", "default": app.config["INGEST_EXPIRY_MINUTES"]},
+            "content_expiry": {"type": "integer", "default": get_app_config("INGEST_EXPIRY_MINUTES")},
             "config": {
                 "type": "dict",
                 "schema": {},
@@ -183,7 +183,7 @@ def on_create(self, docs):
         for doc in docs:
             content_expiry = doc.get("content_expiry", 0)
             if content_expiry == 0:
-                doc["content_expiry"] = app.config["INGEST_EXPIRY_MINUTES"]
+                doc["content_expiry"] = get_app_config("INGEST_EXPIRY_MINUTES")
                 self._set_provider_status(doc, doc.get("last_closed", {}).get("message", ""))
             elif content_expiry < 0:
                 doc["content_expiry"] = None
@@ -210,7 +210,7 @@ def on_update(self, updates, original):
             content_expiry = None
         else:
             if content_expiry == 0:
-                content_expiry = app.config["INGEST_EXPIRY_MINUTES"]
+                content_expiry = get_app_config("INGEST_EXPIRY_MINUTES")
             elif content_expiry < 0:
                 content_expiry = None
             updates["content_expiry"] = content_expiry
@@ -281,12 +281,10 @@ def on_deleted(self, doc):
             item=None,
             user_list=self.user_service.get_users_by_user_type("administrator"),
             name=doc.get("name"),
-            provider_id=doc.get(config.ID_FIELD),
-        )
-        push_notification("ingest_provider:delete", provider_id=str(doc.get(config.ID_FIELD)))
-        get_resource_service("sequences").delete(
-            lookup={"key": "ingest_providers_{_id}".format(_id=doc[config.ID_FIELD])}
+            provider_id=doc.get(ID_FIELD),
         )
+        push_notification("ingest_provider:delete", provider_id=str(doc.get(ID_FIELD)))
+        get_resource_service("sequences").delete(lookup={"key": "ingest_providers_{_id}".format(_id=doc[ID_FIELD])})
         logger.info("Deleted Ingest Channel. Data:{}".format(doc))
 
     def _test_config(self, updates, original=None):
diff --git a/superdesk/io/iptc.py b/superdesk/io/iptc.py
index 8b9d813792..834fb014fe 100644
--- a/superdesk/io/iptc.py
+++ b/superdesk/io/iptc.py
@@ -11,7 +11,7 @@
 """IPTC module"""
 
 import os
-from superdesk import json
+from superdesk.core import json
 from datetime import datetime
 
 
diff --git a/superdesk/io/subjectcodes.py b/superdesk/io/subjectcodes.py
index 70f9b23045..d3da0c095a 100644
--- a/superdesk/io/subjectcodes.py
+++ b/superdesk/io/subjectcodes.py
@@ -11,9 +11,11 @@
 
 import re
 import superdesk
-from flask import Blueprint, current_app as app
 from datetime import datetime
 from eve.render import send_response
+
+from superdesk.core import get_current_app
+from superdesk.flask import Blueprint
 from superdesk.auth.decorator import blueprint_auth
 
 
@@ -75,12 +77,12 @@ def get_parent_subjectcode(code):
 def render_subjectcodes():
     items = get_subjectcodeitems()
     response_data = {"_items": items, "_meta": {"total": len(items)}}
-    return send_response(None, (response_data, app.subjects.last_modified, None, 200))
+    return send_response(None, (response_data, get_current_app().subjects.last_modified, None, 200))
 
 
 def get_subjectcodeitems():
     """Get subjects for current app."""
-    return app.subjects.get_items()
+    return get_current_app().subjects.get_items()
 
 
 def init_app(app) -> None:
diff --git a/superdesk/io/webhooks/__init__.py b/superdesk/io/webhooks/__init__.py
index 8beb452681..a606083b09 100644
--- a/superdesk/io/webhooks/__init__.py
+++ b/superdesk/io/webhooks/__init__.py
@@ -11,9 +11,9 @@
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.io.commands import update_ingest
-from flask import request, abort
 import os
 import superdesk
+from superdesk.flask import request, abort
 import logging
 
 logger = logging.getLogger(__name__)
diff --git a/superdesk/json_utils.py b/superdesk/json_utils.py
index 46463d20a4..b570230377 100644
--- a/superdesk/json_utils.py
+++ b/superdesk/json_utils.py
@@ -1,15 +1,15 @@
-from typing import Optional
 import arrow
 
 from arrow.parser import ParserError
-from flask import Flask, json
 from bson import ObjectId
 from bson.errors import InvalidId
 from eve.utils import str_to_date
 from eve.io.mongo import MongoJSONEncoder
 from eve_elastic import ElasticJSONSerializer
 from flask_babel import LazyString
-from flask.json.provider import DefaultJSONProvider
+
+from superdesk.core import json
+from superdesk.flask import Flask, DefaultJSONProvider
 
 
 class SuperdeskJSONEncoder(MongoJSONEncoder, ElasticJSONSerializer):
diff --git a/superdesk/locales.py b/superdesk/locales.py
index fe8eb93e17..2273e32d7b 100644
--- a/superdesk/locales.py
+++ b/superdesk/locales.py
@@ -1,18 +1,19 @@
 import pytz
-import flask
 import babel.dates as dates
 
+from superdesk.core import get_app_config
+from superdesk.flask import Blueprint, request
 from superdesk.auth.decorator import blueprint_auth
 from apps.auth import get_user
 from eve.render import send_response
 
 
-bp = flask.Blueprint("locales", __name__)
+bp = Blueprint("locales", __name__)
 
 
 def get_timezones():
     user = get_user()
-    lang = user.get("language", flask.current_app.config.get("DEFAULT_LANGUAGE", "en")).replace("-", "_")
+    lang = user.get("language", get_app_config("DEFAULT_LANGUAGE", "en")).replace("-", "_")
     return [
         {
             "id": tz,
@@ -27,7 +28,7 @@ def get_timezones():
 @blueprint_auth("locales")
 def locales_view():
     resp = None
-    if flask.request.method == "GET":
+    if request.method == "GET":
         resp = {"timezones": get_timezones()}
     return send_response(None, (resp, None, None, 200))
 
diff --git a/superdesk/locators/locators.py b/superdesk/locators/locators.py
index e6f1ff4534..d9efc654ad 100644
--- a/superdesk/locators/locators.py
+++ b/superdesk/locators/locators.py
@@ -10,7 +10,9 @@
 
 from operator import itemgetter
 from eve.render import send_response
-from flask import Blueprint, current_app as app
+
+from superdesk.core import get_current_app
+from superdesk.flask import Blueprint
 from superdesk.utc import utcnow
 from superdesk.auth.decorator import blueprint_auth
 
@@ -123,7 +125,7 @@ def get_cities(country_code=None, state_code=None):
         Returns HTTP Response with body {'_items': cities, '_meta': {'total': City Count}}.
     """
 
-    cities = app.locators.find_cities(country_code=country_code, state_code=state_code)
+    cities = get_current_app().locators.find_cities(country_code=country_code, state_code=state_code)
 
     if cities and len(cities):
         response_data = {"_items": cities, "_meta": {"total": len(cities)}}
diff --git a/superdesk/lock.py b/superdesk/lock.py
index df055d5ad7..83ce8e8254 100644
--- a/superdesk/lock.py
+++ b/superdesk/lock.py
@@ -3,22 +3,15 @@
 import socket
 import logging
 
-from datetime import datetime
 from pymongo import MongoClient
+from werkzeug.local import LocalProxy
 
 from superdesk.core.mongo import get_mongo_client_config
-from superdesk.mongolock import MongoLock, MongoLockException
-from werkzeug.local import LocalProxy
-from flask import current_app as app
-from superdesk.logging import logger
+from superdesk.core import get_current_app
 from superdesk.utc import utcnow
+from superdesk.mongolock import MongoLock, MongoLockException
 
 
-_lock_resource_settings = {
-    "internal_resource": True,
-    "versioning": False,
-}
-
 logger = logging.getLogger(__name__)
 
 
@@ -50,8 +43,7 @@ def _try_get_lock(self, key, owner, expire):
 
 def _get_lock():
     """Get mongolock instance using app mongodb."""
-
-    client_config, dbname = get_mongo_client_config(app.config)
+    client_config, dbname = get_mongo_client_config(get_current_app().config)
     client = MongoClient(**client_config)
     collection = client.get_database(dbname).get_collection("_lock")
     return SuperdeskMongoLock(collection=collection)
diff --git a/superdesk/macros/assign_status.py b/superdesk/macros/assign_status.py
index 3a56d0b85f..00a80bc13b 100644
--- a/superdesk/macros/assign_status.py
+++ b/superdesk/macros/assign_status.py
@@ -8,7 +8,7 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from eve.utils import config
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from planning.common import ASSIGNMENT_WORKFLOW_STATE
 from copy import deepcopy
@@ -25,7 +25,7 @@ def update_on_assign_id(item, **kwargs):
         if assignment["assigned_to"]["state"] == ASSIGNMENT_WORKFLOW_STATE.ASSIGNED:
             updates = {"assigned_to": deepcopy(assignment.get("assigned_to"))}
             updates["assigned_to"]["state"] = ASSIGNMENT_WORKFLOW_STATE.IN_PROGRESS
-            assignments_service.patch(assignment[config.ID_FIELD], updates)
+            assignments_service.patch(assignment[ID_FIELD], updates)
     return item
 
 
diff --git a/superdesk/macros/internal_destination_auto_publish.py b/superdesk/macros/internal_destination_auto_publish.py
index 3f4296314a..fef0768f4a 100644
--- a/superdesk/macros/internal_destination_auto_publish.py
+++ b/superdesk/macros/internal_destination_auto_publish.py
@@ -10,7 +10,9 @@
 
 import json
 from copy import deepcopy
-from eve.utils import config, ParsedRequest
+from eve.utils import ParsedRequest
+
+from superdesk.resource_fields import ID_FIELD
 from superdesk import get_resource_service
 from superdesk.errors import StopDuplication, InvalidStateTransitionError
 from superdesk.metadata.item import (
@@ -46,7 +48,7 @@ def internal_destination_auto_publish(item, **kwargs):
     # if any macro is doing publishing then we need the duplicate item that was published earlier
     req = ParsedRequest()
     req.where = json.dumps(
-        {"$and": [{PROCESSED_FROM: item.get(config.ID_FIELD)}, {"task.desk": str(item.get("task").get("desk"))}]}
+        {"$and": [{PROCESSED_FROM: item.get(ID_FIELD)}, {"task.desk": str(item.get("task").get("desk"))}]}
     )
     req.max_results = 1
     overwrite_item = next((archive_service.get_from_mongo(req=req, lookup=None)), None)
@@ -56,7 +58,7 @@ def internal_destination_auto_publish(item, **kwargs):
     if item.get(ITEM_STATE) == CONTENT_STATE.PUBLISHED or not overwrite_item:
         new_id = archive_service.duplicate_content(item, state="routed", extra_fields=extra_fields)
         updates[ITEM_STATE] = item.get(ITEM_STATE)
-        updates[PROCESSED_FROM] = item[config.ID_FIELD]
+        updates[PROCESSED_FROM] = item[ID_FIELD]
 
         get_resource_service("archive_publish").patch(id=new_id, updates=updates)
     else:
@@ -75,7 +77,7 @@ def internal_destination_auto_publish(item, **kwargs):
                 }
             )
 
-            archive_action_service.patch(id=overwrite_item[config.ID_FIELD], updates=updates)
+            archive_action_service.patch(id=overwrite_item[ID_FIELD], updates=updates)
 
     # raise stop duplication on successful completion so that
     # internal destination superdesk.internal_destination.handle_item_published
diff --git a/superdesk/macros/set_default_template_metadata.py b/superdesk/macros/set_default_template_metadata.py
index f340a8759f..cef293bf41 100644
--- a/superdesk/macros/set_default_template_metadata.py
+++ b/superdesk/macros/set_default_template_metadata.py
@@ -1,6 +1,7 @@
 import logging
-from flask import current_app as app
+
 from flask_babel import lazy_gettext
+from superdesk.core import get_app_config
 from superdesk import get_resource_service
 
 logger = logging.getLogger(__name__)
@@ -43,8 +44,8 @@ def get_default_content_template(item, **kwargs):
 
 
 def set_default_template_metadata(item, **kwargs):
-    fields_to_exclude = app.config.get("DEFAULT_TEMPLATE_METADATA_MACRO_EXCLUDE", [])
-    fields_to_override = app.config.get("DEFAULT_TEMPLATE_METADATA_MACRO_OVERRIDE", [])
+    fields_to_exclude = get_app_config("DEFAULT_TEMPLATE_METADATA_MACRO_EXCLUDE", [])
+    fields_to_override = get_app_config("DEFAULT_TEMPLATE_METADATA_MACRO_OVERRIDE", [])
 
     """Replace some metadata from default content template"""
 
diff --git a/superdesk/macros/validate_for_publish.py b/superdesk/macros/validate_for_publish.py
index ad1f193a40..1560ac7c02 100644
--- a/superdesk/macros/validate_for_publish.py
+++ b/superdesk/macros/validate_for_publish.py
@@ -8,7 +8,8 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from superdesk import get_resource_service, config
+from superdesk import get_resource_service
+from superdesk.resource_fields import ID_FIELD
 from superdesk.validation import ValidationError
 from apps.publish.content.common import ITEM_PUBLISH
 from flask_babel import lazy_gettext
@@ -16,7 +17,7 @@
 
 
 def validate_for_publish(item, **kwargs):
-    doc = get_resource_service("archive").find_one(req=None, _id=item[config.ID_FIELD])
+    doc = get_resource_service("archive").find_one(req=None, _id=item[ID_FIELD])
     validate_item = {"act": ITEM_PUBLISH, "type": doc["type"], "validate": doc}
     validation_errors = get_resource_service("validate").validate(validate_item)
     if validation_errors:
diff --git a/superdesk/media/crop.py b/superdesk/media/crop.py
index 3ce8b419cd..51adf3bedd 100644
--- a/superdesk/media/crop.py
+++ b/superdesk/media/crop.py
@@ -11,11 +11,12 @@
 
 from eve.utils import ParsedRequest
 
-import superdesk
 import logging
 from copy import deepcopy
-from flask import current_app as app
-from superdesk import get_resource_service, config
+
+from superdesk.core import get_current_app
+from superdesk.resource_fields import ID_FIELD
+from superdesk import get_resource_service
 from superdesk.errors import SuperdeskApiError
 from superdesk.media.media_operations import crop_image, process_file_from_stream
 from superdesk.upload import url_for_media
@@ -168,7 +169,7 @@ def create_crop(self, original_image, crop_name, crop_data):
         :raises SuperdeskApiError.badRequestError
         :return dict: rendition
         """
-        original_file = app.media.fetch_rendition(original_image)
+        original_file = get_current_app().media.fetch_rendition(original_image)
         if not original_file:
             raise SuperdeskApiError.badRequestError("Original file couldn't be found")
         try:
@@ -202,6 +203,7 @@ def _save_cropped_image(self, file_stream, original, doc):
         :raises SuperdeskApiError.internalError
         """
         crop = {}
+        app = get_current_app()
         try:
             file_name, content_type, metadata = process_file_from_stream(
                 file_stream, content_type=original.get("mimetype")
@@ -231,7 +233,7 @@ def _delete_crop_file(self, file_id):
         :param Object_id file_id: Object_Id of the file.
         """
         try:
-            app.media.delete(file_id)
+            get_current_app().media.delete(file_id)
         except Exception:
             logger.exception("Crop File cannot be deleted. File_Id {}".format(file_id))
 
@@ -336,7 +338,7 @@ def update_media_references(self, updates, original, published=False):
         :param dict original: Original item
         :param boolean published: True if publishing the item else False
         """
-        item_id = original.get(config.ID_FIELD)
+        item_id = original.get(ID_FIELD)
         references = {}
         if updates.get("renditions", original.get("renditions", {})):
             references = {item_id: updates.get("renditions", original.get("renditions", {}))}
@@ -347,7 +349,7 @@ def update_media_references(self, updates, original, published=False):
                 return
 
             references = {
-                assoc.get(config.ID_FIELD): assoc.get("renditions")
+                assoc.get(ID_FIELD): assoc.get("renditions")
                 for assoc in associations.values()
                 if assoc and assoc.get("renditions")
             }
@@ -387,7 +389,7 @@ def update_media_references(self, updates, original, published=False):
         refs = list(get_resource_service("media_references").get(req=req, lookup=None))
         for ref in refs:
             try:
-                get_resource_service("media_references").patch(ref.get(config.ID_FIELD), updates={"published": True})
+                get_resource_service("media_references").patch(ref.get(ID_FIELD), updates={"published": True})
             except Exception:
                 logger.exception(
                     "Failed to update media "
diff --git a/superdesk/media/image.py b/superdesk/media/image.py
index 4d9d9756f9..2041f5b269 100644
--- a/superdesk/media/image.py
+++ b/superdesk/media/image.py
@@ -18,8 +18,8 @@
 from PIL import Image, ExifTags
 from PIL import IptcImagePlugin
 from PIL.TiffImagePlugin import IFDRational
-from flask import json
 
+from superdesk.core import json
 from superdesk.types import Item
 from .iim_codes import TAG, iim_codes
 
diff --git a/superdesk/media/media_editor.py b/superdesk/media/media_editor.py
index b742128971..77eb5c5408 100644
--- a/superdesk/media/media_editor.py
+++ b/superdesk/media/media_editor.py
@@ -9,15 +9,16 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 
+from superdesk.resource_fields import ID_FIELD
 from superdesk.resource import Resource, not_analyzed
 from superdesk.services import BaseService
 from superdesk.media.renditions import generate_renditions, get_renditions_spec
 from superdesk import get_resource_service
 from superdesk import errors
-from flask import current_app
+from superdesk.core import get_current_app
+
 from PIL import Image, ImageEnhance
 from io import BytesIO
-from eve.utils import config
 import os.path
 import uuid
 import logging
@@ -89,6 +90,7 @@ def transform(self, im, operation):
 
     def create(self, docs):
         """Apply transformation requested in 'edit'"""
+        app = get_current_app()
         ids = []
         archive = get_resource_service("archive")
         for doc in docs:
@@ -100,7 +102,7 @@ def create(self, docs):
                 except KeyError:
                     raise errors.SuperdeskApiError.badRequestError("either item or item_id must be specified")
             else:
-                item_id = item[config.ID_FIELD]
+                item_id = item[ID_FIELD]
 
             if item is None and item_id:
                 item = next(archive.find({"_id": item_id}))
@@ -109,7 +111,7 @@ def create(self, docs):
             # now we retrieve and load current original media
             rendition = item["renditions"]["original"]
             media_id = rendition["media"]
-            media = current_app.media.get(media_id)
+            media = app.media.get(media_id)
             out = im = Image.open(media)
 
             # we apply all requested operations on original media
@@ -133,12 +135,12 @@ def create(self, docs):
             filename = str(uuid.uuid4()) + ext
 
             # and save transformed media in database
-            media_id = current_app.media.put(buf, filename=filename, content_type=content_type)
+            media_id = app.media.put(buf, filename=filename, content_type=content_type)
 
             # now we recreate other renditions based on transformed original media
             buf.seek(0)
             renditions = generate_renditions(
-                buf, media_id, [], "image", content_type, get_renditions_spec(), current_app.media.url_for_media
+                buf, media_id, [], "image", content_type, get_renditions_spec(), app.media.url_for_media
             )
 
             ids.append(item_id)
diff --git a/superdesk/media/media_operations.py b/superdesk/media/media_operations.py
index 9c5ad43604..63d472dd2e 100644
--- a/superdesk/media/media_operations.py
+++ b/superdesk/media/media_operations.py
@@ -22,11 +22,12 @@
 from bson import ObjectId
 from io import BytesIO
 from PIL import Image, ImageEnhance
-from flask import json, url_for
 from .image import get_meta
 from .video import get_meta as video_meta
+
+from superdesk.core import json, get_app_config
+from superdesk.flask import url_for
 from superdesk.errors import SuperdeskApiError
-from flask import current_app as app
 from mimetypes import guess_extension
 from superdesk import __version__ as superdesk_version
 
@@ -227,11 +228,11 @@ def get_watermark(image):
     :return: watermarked image
     """
     image = image.copy()
-    if not app.config.get("WATERMARK_IMAGE"):
+    if not get_app_config("WATERMARK_IMAGE"):
         return image
     if image.mode != "RGBA":
         image = image.convert("RGBA")
-    path = os.path.join(app.config["ABS_PATH"], app.config["WATERMARK_IMAGE"])
+    path = os.path.join(get_app_config("ABS_PATH"), get_app_config("WATERMARK_IMAGE"))
     if not os.path.isfile(path):
         logger.warning("No water mark file found at : {}".format(path))
         return image
diff --git a/superdesk/media/renditions.py b/superdesk/media/renditions.py
index 1bb878af26..fa69c8b57f 100644
--- a/superdesk/media/renditions.py
+++ b/superdesk/media/renditions.py
@@ -14,14 +14,14 @@
 from io import BytesIO
 import logging
 from copy import deepcopy
-from flask import current_app as app
+
+from superdesk.core import get_current_app, get_app_config
 from .media_operations import process_file_from_stream
 from .media_operations import crop_image
 from .media_operations import download_file_from_url
 from .media_operations import process_file
 from .media_operations import guess_media_extension
 from .image import fix_orientation
-from eve.utils import config
 from superdesk import get_resource_service
 from superdesk.filemeta import set_filemeta
 import os
@@ -55,6 +55,7 @@ def generate_renditions(
                            files in "temp" folder will be removed after 24 hours
     :return: dict of renditions
     """
+    app = get_current_app()
     rend = {"href": app.media.url_for_media(media_id, content_type), "media": media_id, "mimetype": content_type}
     renditions = {"original": rend}
 
@@ -74,7 +75,7 @@ def generate_renditions(
         if rendition not in rendition_config:
             continue
         if not can_generate_custom_crop_from_original(width, height, crop):
-            if rendition in config.RENDITIONS["picture"]:
+            if rendition in get_app_config("RENDITIONS", {})["picture"]:
                 logger.info(
                     'image is too small for rendition "{rendition}", but it is an internal one, '
                     "so we keep it".format(rendition=rendition)
@@ -98,7 +99,9 @@ def generate_renditions(
         # reset
         base.seek(0)
         # create the rendition (can be based on ratio or pixels)
-        if rsize.get("width") and rsize.get("height") and rendition not in config.RENDITIONS["picture"]:  # custom crop
+        if (
+            rsize.get("width") and rsize.get("height") and rendition not in get_app_config("RENDITIONS", {})["picture"]
+        ):  # custom crop
             resized, width, height, cropping_data = _crop_image_center(base, ext, rsize["width"], rsize["height"])
             # we need crop data for original size image
             cropping_data = _get_center_crop(original, rsize["width"], rsize["height"])
@@ -171,7 +174,7 @@ def delete_file_on_error(doc, file_id):
     # Don't delete the file if we are on the import from storage flow
     if doc.get("_import", None):
         return
-    app.media.delete(file_id)
+    get_current_app().media.delete(file_id)
 
 
 def _crop_image(content, format, ratio):
@@ -332,7 +335,7 @@ def get_renditions_spec(without_internal_renditions=False, no_custom_crops=False
     rendition_spec = {}
     # renditions required by superdesk
     if not without_internal_renditions:
-        rendition_spec = deepcopy(config.RENDITIONS["picture"])
+        rendition_spec = deepcopy(get_app_config("RENDITIONS", {})["picture"])
 
     if not no_custom_crops:
         # load custom renditions sizes
@@ -373,6 +376,7 @@ def update_renditions(
     :return: item with renditions
     """
     inserted = []
+    app = get_current_app()
     try:
         # If there is an existing set of renditions we keep those
         if old_item:
@@ -421,6 +425,7 @@ def transfer_renditions(
     """
     if not renditions:
         return
+    app = get_current_app()
     for rend in iter(renditions.values()):
         if rend.get("media"):
             local = app.media.get(rend["media"])
diff --git a/superdesk/media/video.py b/superdesk/media/video.py
index 238728527a..3ce06e8e66 100644
--- a/superdesk/media/video.py
+++ b/superdesk/media/video.py
@@ -11,8 +11,8 @@
 from hachoir.stream import InputIOStream
 from hachoir.parser import guessParser
 from hachoir.metadata import extractMetadata
-from flask import json
 import logging
+from superdesk.core import json
 
 
 logger = logging.getLogger(__name__)
diff --git a/superdesk/media/video_editor.py b/superdesk/media/video_editor.py
index 5a21b9ec7c..92718cef72 100644
--- a/superdesk/media/video_editor.py
+++ b/superdesk/media/video_editor.py
@@ -3,8 +3,8 @@
 
 import requests
 from bson import json_util
-from flask import current_app as app
 
+from superdesk.core import get_app_config
 from superdesk.errors import SuperdeskApiError
 
 logger = logging.getLogger(__name__)
@@ -20,7 +20,7 @@ def __init__(self):
         self.session.headers.update({"User-Agent": "superdesk"})
 
     def get_base_url(self):
-        return app.config.get("VIDEO_SERVER_URL") + "/projects/"
+        return get_app_config("VIDEO_SERVER_URL") + "/projects/"
 
     def create(self, file):
         return self._post(file)
diff --git a/superdesk/metadata/item.py b/superdesk/metadata/item.py
index 3713404fe3..4647bb54d5 100644
--- a/superdesk/metadata/item.py
+++ b/superdesk/metadata/item.py
@@ -11,6 +11,7 @@
 from typing import NamedTuple
 from copy import deepcopy
 
+from superdesk.resource_fields import ID_FIELD
 from superdesk.resource import (
     Resource,
     not_analyzed,
@@ -21,7 +22,6 @@
     string_with_analyzer,
 )
 from .packages import LINKED_IN_PACKAGES, PACKAGE
-from eve.utils import config
 from superdesk.utils import SuperdeskBaseEnum
 
 GUID_TAG = "tag"
@@ -163,7 +163,7 @@ class Formats(NamedTuple):
 }
 
 metadata_schema = {
-    config.ID_FIELD: {"type": "string", "unique": True},
+    ID_FIELD: {"type": "string", "unique": True},
     #: Identifiers
     "guid": {"type": "string", "unique": True, "mapping": not_analyzed},
     "uri": {
diff --git a/superdesk/metadata/utils.py b/superdesk/metadata/utils.py
index 6f4f93bfb1..718147ca3d 100644
--- a/superdesk/metadata/utils.py
+++ b/superdesk/metadata/utils.py
@@ -13,9 +13,9 @@
 from uuid import uuid4
 from bson import ObjectId
 from urllib.parse import urlparse
-from flask import current_app as app
 from contextlib import contextmanager
 
+from superdesk.core import get_app_config
 from superdesk.utils import SuperdeskBaseEnum
 from .item import GUID_TAG, GUID_NEWSML, GUID_FIELD, ITEM_TYPE, CONTENT_TYPE
 
@@ -98,8 +98,8 @@ def get_elastic_highlight_query(query_string):
 def _set_highlight_query(source):
     query_string = source.get("query", {}).get("filtered", {}).get("query", {}).get("query_string")
     if query_string:
-        query_string.setdefault("analyze_wildcard", app.config["ELASTIC_QUERY_STRING_ANALYZE_WILDCARD"])
-        query_string.setdefault("type", app.config["ELASTIC_QUERY_STRING_TYPE"])
+        query_string.setdefault("analyze_wildcard", get_app_config("ELASTIC_QUERY_STRING_ANALYZE_WILDCARD"))
+        query_string.setdefault("type", get_app_config("ELASTIC_QUERY_STRING_TYPE"))
         highlight_query = get_elastic_highlight_query(query_string)
         if highlight_query:
             source["highlight"] = highlight_query
@@ -116,16 +116,16 @@ def generate_guid(**hints):
     if not hints.get("id"):
         hints["id"] = str(uuid4())
 
-    if app.config.get("GENERATE_SHORT_GUID", False):
+    if get_app_config("GENERATE_SHORT_GUID", False):
         return hints["id"]
 
     t = datetime.today()
 
     if hints["type"].lower() == GUID_TAG:
-        return tag_guid_format % {"domain": app.config["URN_DOMAIN"], "year": t.year, "identifier": hints["id"]}
+        return tag_guid_format % {"domain": get_app_config("URN_DOMAIN"), "year": t.year, "identifier": hints["id"]}
     elif hints["type"].lower() == GUID_NEWSML:
         return newsml_guid_format % {
-            "domain": app.config["URN_DOMAIN"],
+            "domain": get_app_config("URN_DOMAIN"),
             "timestamp": t.isoformat(),
             "identifier": hints["id"],
         }
@@ -133,7 +133,7 @@ def generate_guid(**hints):
 
 
 def generate_urn(resource_name: str, resource_id: Union[ObjectId, str]) -> str:
-    domain = app.config["URN_DOMAIN"]
+    domain = get_app_config("URN_DOMAIN")
     return f"urn:{domain}:{resource_name}:{resource_id}"
 
 
diff --git a/superdesk/notification.py b/superdesk/notification.py
index b819213f21..049365e77b 100644
--- a/superdesk/notification.py
+++ b/superdesk/notification.py
@@ -16,7 +16,7 @@
 import json
 
 from datetime import datetime
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.utils import json_serialize_datetime_objectId
 from superdesk.websockets_comms import SocketMessageProducer
 from superdesk.types import WebsocketMessageData, WebsocketMessageFilterConditions
@@ -60,6 +60,7 @@ def push_notification(name, filters: Optional[WebsocketMessageFilterConditions]
     :param filters: filter out websocket consumers by certain conditions
     """
     logger.debug("pushing event {0} ({1})".format(name, json.dumps(kwargs, default=json_serialize_datetime_objectId)))
+    app = get_current_app()
 
     if not getattr(app, "notification_client", None):
         # not initialized - ignore
diff --git a/superdesk/places/places_autocomplete.py b/superdesk/places/places_autocomplete.py
index 19b6984220..89961c75b7 100644
--- a/superdesk/places/places_autocomplete.py
+++ b/superdesk/places/places_autocomplete.py
@@ -1,6 +1,6 @@
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk.utils import ListCursor
 from superdesk.geonames import geonames_request, format_geoname_item
 
@@ -37,13 +37,13 @@ def get(self, req, lookup):
         params = [
             ("name", req.args.get("name")),
             ("lang", req.args.get("lang", "en").split("-")[0]),
-            ("style", req.args.get("style", app.config["GEONAMES_SEARCH_STYLE"])),
+            ("style", req.args.get("style", get_app_config("GEONAMES_SEARCH_STYLE"))),
         ]
 
         if req.args.get("featureClass"):
             params.append(("featureClass", req.args.get("featureClass")))
         else:
-            for feature_class in app.config["GEONAMES_FEATURE_CLASSES"]:
+            for feature_class in get_app_config("GEONAMES_FEATURE_CLASSES"):
                 params.append(("featureClass", feature_class.upper()))
 
         json_data = geonames_request("search", params)
@@ -55,7 +55,7 @@ def get_place(self, geoname_id, language="en"):
         params = [
             ("geonameId", geoname_id),
             ("lang", language),
-            ("style", app.config.get("GEONAMES_SEARCH_STYLE", "full")),
+            ("style", get_app_config("GEONAMES_SEARCH_STYLE", "full")),
         ]
 
         json_data = geonames_request("getJSON", params)
diff --git a/superdesk/profiling/__init__.py b/superdesk/profiling/__init__.py
index e24cb70d04..31c48a4186 100644
--- a/superdesk/profiling/__init__.py
+++ b/superdesk/profiling/__init__.py
@@ -16,9 +16,7 @@
 from flask_babel import lazy_gettext
 from superdesk import get_resource_service
 import superdesk
-
-from flask import current_app as app
-
+from superdesk.core import get_app_config
 from superdesk.profiling.resource import ProfilingResource
 from superdesk.profiling.service import ProfilingService, profile
 
@@ -46,11 +44,11 @@ def __init__(self, name):
         self.name = name
 
     def __enter__(self):
-        if app.config.get("ENABLE_PROFILING"):
+        if get_app_config("ENABLE_PROFILING"):
             profile.enable()
 
     def __exit__(self, exc_type, exc_value, traceback):
-        if app.config.get("ENABLE_PROFILING"):
+        if get_app_config("ENABLE_PROFILING"):
             profile.disable()
             dump_stats(profile, self.name)
 
diff --git a/superdesk/profiling/service.py b/superdesk/profiling/service.py
index d181f5bc93..3f0b12f332 100644
--- a/superdesk/profiling/service.py
+++ b/superdesk/profiling/service.py
@@ -10,11 +10,11 @@
 
 import cProfile
 import logging
+
+from superdesk.resource_fields import ID_FIELD
 from superdesk.errors import SuperdeskApiError
 from superdesk.services import BaseService
 
-from eve.utils import config
-
 
 logger = logging.getLogger(__name__)
 
@@ -49,7 +49,7 @@ class ProfilingService(BaseService):
 
     def on_create(self, docs):
         for doc in docs:
-            doc[config.ID_FIELD] = doc["name"]
+            doc[ID_FIELD] = doc["name"]
 
     def delete(self, lookup):
         """
diff --git a/superdesk/publish/formatters/email_formatter.py b/superdesk/publish/formatters/email_formatter.py
index a6981a0f07..62f05f5b5c 100644
--- a/superdesk/publish/formatters/email_formatter.py
+++ b/superdesk/publish/formatters/email_formatter.py
@@ -13,8 +13,8 @@
 
 from superdesk.publish.formatters import Formatter
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, FORMAT, FORMATS
-from flask import render_template
 from copy import deepcopy
+from superdesk.flask import render_template
 from superdesk.errors import FormatterError
 from superdesk import etree as sd_etree
 from superdesk.editor_utils import remove_all_embeds
diff --git a/superdesk/publish/formatters/newsml_1_2_formatter.py b/superdesk/publish/formatters/newsml_1_2_formatter.py
index 5fe61604d5..c0bdc878a4 100644
--- a/superdesk/publish/formatters/newsml_1_2_formatter.py
+++ b/superdesk/publish/formatters/newsml_1_2_formatter.py
@@ -12,7 +12,8 @@
 import logging
 from lxml import etree
 from lxml.etree import SubElement
-from eve.utils import config
+
+from superdesk.resource_fields import ID_FIELD, VERSION
 from superdesk.publish.formatters import Formatter
 import superdesk
 from superdesk.errors import FormatterError
@@ -20,7 +21,7 @@
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, EMBARGO, ITEM_STATE, CONTENT_STATE, GUID_FIELD
 from superdesk.metadata.packages import GROUP_ID, REFS, RESIDREF, ROLE, ROOT_GROUP
 from superdesk.utc import utcnow
-from flask import current_app as app
+from superdesk.core import get_app_config
 from apps.archive.common import get_utc_schedule
 from superdesk.filemeta import get_filemeta
 
@@ -100,12 +101,12 @@ def _format_identification(self, article, news_item):
         identification = SubElement(news_item, "Identification")
         news_identifier = SubElement(identification, "NewsIdentifier")
         date_id = article.get("firstcreated").strftime("%Y%m%d")
-        SubElement(news_identifier, "ProviderId").text = app.config["NEWSML_PROVIDER_ID"]
+        SubElement(news_identifier, "ProviderId").text = get_app_config("NEWSML_PROVIDER_ID")
         SubElement(news_identifier, "DateId").text = date_id
         SubElement(news_identifier, "NewsItemId").text = article[GUID_FIELD]
-        SubElement(news_identifier, "RevisionId", attrib=revision).text = str(article.get(config.VERSION, ""))
+        SubElement(news_identifier, "RevisionId", attrib=revision).text = str(article.get(VERSION, ""))
         SubElement(news_identifier, "PublicIdentifier").text = self._generate_public_identifier(
-            article[config.ID_FIELD], article.get(config.VERSION, ""), revision.get("Update", "")
+            article[ID_FIELD], article.get(VERSION, ""), revision.get("Update", "")
         )
         SubElement(identification, "DateLabel").text = self.now.strftime("%A %d %B %Y")
 
@@ -129,7 +130,7 @@ def _process_revision(self, article):
         """
         revision = {"PreviousRevision": "0", "Update": "N"}
         if article.get(ITEM_STATE) in {CONTENT_STATE.CORRECTED, CONTENT_STATE.KILLED, CONTENT_STATE.RECALLED}:
-            revision["PreviousRevision"] = str(article.get(config.VERSION) - 1)
+            revision["PreviousRevision"] = str(article.get(VERSION) - 1)
         return revision
 
     def _format_news_management(self, article, news_item):
@@ -407,7 +408,7 @@ def _format_package(self, article, main_news_component):
                 if RESIDREF in ref:
                     revision = self._process_revision({})
                     item_ref = self._generate_public_identifier(
-                        ref.get(RESIDREF), ref.get(config.VERSION), revision.get("Update", "")
+                        ref.get(RESIDREF), ref.get(VERSION), revision.get("Update", "")
                     )
                     SubElement(sub_news_component, "NewsItemRef", attrib={"NewsItem": item_ref})
 
diff --git a/superdesk/publish/formatters/newsml_g2_formatter.py b/superdesk/publish/formatters/newsml_g2_formatter.py
index 1ed5159eb2..26688b777a 100644
--- a/superdesk/publish/formatters/newsml_g2_formatter.py
+++ b/superdesk/publish/formatters/newsml_g2_formatter.py
@@ -14,8 +14,9 @@
 
 from lxml import etree
 from lxml.etree import SubElement
-from flask import current_app as app
 
+from superdesk.core import get_app_config
+from superdesk.resource_fields import VERSION
 from superdesk import text_utils
 from superdesk.publish.formatters import Formatter
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, EMBARGO, FORMATS, FORMAT
@@ -32,7 +33,7 @@
 
 
 def get_newsml_provider_id():
-    return app.config.get("NEWSML_PROVIDER_ID")
+    return get_app_config("NEWSML_PROVIDER_ID")
 
 
 def _get_cv_qcode(item):
@@ -147,7 +148,7 @@ def _format_item_set(self, article, item_set, item_type):
                 "standard": "NewsML-G2",
                 "standardversion": "2.18",
                 "guid": article["guid"],
-                "version": str(article[superdesk.config.VERSION]),
+                "version": str(article[VERSION]),
                 XML_LANG: self._get_lang(article),
                 "conformance": "power",
             },
@@ -595,11 +596,11 @@ def _get_translated_name(self, subject, article):
             return translations["name"][lang], lang
         except KeyError:
             pass
-        return subject.get("name", ""), app.config["DEFAULT_LANGUAGE"]
+        return subject.get("name", ""), get_app_config("DEFAULT_LANGUAGE")
 
     def _format_translated_name(self, dest, subject, article):
         name, lang = self._get_translated_name(subject, article)
         SubElement(dest, "name", attrib={XML_LANG: lang}).text = name
 
     def _get_lang(self, article):
-        return article.get("language", app.config["DEFAULT_LANGUAGE"])
+        return article.get("language", get_app_config("DEFAULT_LANGUAGE"))
diff --git a/superdesk/publish/formatters/ninjs_formatter.py b/superdesk/publish/formatters/ninjs_formatter.py
index 190c93c046..99e1ffd873 100644
--- a/superdesk/publish/formatters/ninjs_formatter.py
+++ b/superdesk/publish/formatters/ninjs_formatter.py
@@ -35,8 +35,9 @@
 import logging
 
 from typing import List, Literal, Sequence, Tuple, TypedDict
-from flask import current_app as app
-from eve.utils import config
+
+from superdesk.core import get_app_config
+from superdesk.resource_fields import VERSION
 from superdesk.publish.formatters import Formatter
 from superdesk.errors import FormatterError
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, EMBARGO, GUID_FIELD, ASSOCIATIONS
@@ -169,7 +170,7 @@ class NINJSFormatter(Formatter):
     def __init__(self):
         self.can_preview = True
         self.can_export = True
-        self.internal_renditions = app.config.get("NINJS_COMMON_RENDITIONS", []) + ["original"]
+        self.internal_renditions = get_app_config("NINJS_COMMON_RENDITIONS", []) + ["original"]
 
     def format(self, article, subscriber, codes=None):
         try:
@@ -183,7 +184,7 @@ def format(self, article, subscriber, codes=None):
     def _transform_to_ninjs(self, article, subscriber, recursive=True):
         ninjs = {
             "guid": article.get(GUID_FIELD, article.get("uri")),
-            "version": str(article.get(config.VERSION, 1)),
+            "version": str(article.get(VERSION, 1)),
             "type": self._get_type(article),
         }
 
@@ -503,7 +504,7 @@ def get_label(item):
             if item.get("scheme") == "geonames":
                 places.append(self._format_geonames(item))
             else:
-                if config.NINJS_PLACE_EXTENDED:
+                if get_app_config("NINJS_PLACE_EXTENDED"):
                     place = {}
                     for key in item.keys():
                         if item.get(key):
@@ -523,7 +524,7 @@ def get_label(item):
 
     def _format_geonames(self, place):
         fields = ["scheme", "code", "name"]
-        if app.config.get("NINJS_PLACE_EXTENDED"):
+        if get_app_config("NINJS_PLACE_EXTENDED"):
             fields.extend(
                 [
                     "state",
@@ -533,7 +534,7 @@ def _format_geonames(self, place):
                 ]
             )
         geo = {k: v for k, v in place.items() if k in fields}
-        if app.config.get("NINJS_PLACE_EXTENDED") and place.get("location"):
+        if get_app_config("NINJS_PLACE_EXTENDED") and place.get("location"):
             geo["geometry_point"] = {
                 "type": "Point",
                 "coordinates": [place["location"].get("lat"), place["location"].get("lon")],
diff --git a/superdesk/publish/formatters/ninjs_ftp_formatter.py b/superdesk/publish/formatters/ninjs_ftp_formatter.py
index 256f367357..3538523b7e 100644
--- a/superdesk/publish/formatters/ninjs_ftp_formatter.py
+++ b/superdesk/publish/formatters/ninjs_ftp_formatter.py
@@ -10,7 +10,7 @@
 
 
 from .ninjs_formatter import NINJSFormatter
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk.media.renditions import get_rendition_file_name
 from superdesk import get_resource_service
 from superdesk.editor_utils import get_content_state_fields, Editor3Content, DraftJSHTMLExporter
@@ -72,7 +72,7 @@ def apply_product_filtering_to_associations(self, article, subscriber):
         :param subscriber:
         :return:
         """
-        if not app.config["EMBED_PRODUCT_FILTERING"]:
+        if not get_app_config("EMBED_PRODUCT_FILTERING"):
             return
 
         remove_keys = []
diff --git a/superdesk/publish/formatters/ninjs_newsroom_formatter.py b/superdesk/publish/formatters/ninjs_newsroom_formatter.py
index 9ba1afca5a..b62e3ad49f 100644
--- a/superdesk/publish/formatters/ninjs_newsroom_formatter.py
+++ b/superdesk/publish/formatters/ninjs_newsroom_formatter.py
@@ -9,8 +9,8 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 
-import flask
 import superdesk
+from superdesk.flask import g
 import elasticapm
 
 from .ninjs_formatter import NINJSFormatter
@@ -34,14 +34,14 @@ def _format_products(self, article):
         :return:
         """
         cache_id = "article-products-{_id}".format(_id=article.get("_id") or article.get("guid"))
-        if not hasattr(flask.g, cache_id):
+        if not hasattr(g, cache_id):
             matches = superdesk.get_resource_service("product_tests").test_products(article)
             setattr(
-                flask.g,
+                g,
                 cache_id,
                 [{"code": p["product_id"], "name": p.get("name")} for p in matches if p.get("matched", False)],
             )
-        return getattr(flask.g, cache_id)
+        return getattr(g, cache_id)
 
     @elasticapm.capture_span()
     def _transform_to_ninjs(self, article, subscriber, recursive=True):
diff --git a/superdesk/publish/formatters/nitf_formatter.py b/superdesk/publish/formatters/nitf_formatter.py
index 99fc770181..f42cbc9a7e 100644
--- a/superdesk/publish/formatters/nitf_formatter.py
+++ b/superdesk/publish/formatters/nitf_formatter.py
@@ -11,7 +11,7 @@
 import superdesk
 from lxml import etree as etree
 from lxml.etree import SubElement
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk.publish.formatters import Formatter
 from superdesk.errors import FormatterError
 from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, EMBARGO, FORMAT, FORMATS
@@ -151,7 +151,7 @@ def format(self, article, subscriber, codes=None):
             raise FormatterError.nitfFormatterError(ex, subscriber)
 
     def get_nitf(self, article, destination, pub_seq_num):
-        if app.config.get("NITF_INCLUDE_SCHEMA", False):
+        if get_app_config("NITF_INCLUDE_SCHEMA", False):
             self._message_attrib.update(self._debug_message_extra)
         nitf = etree.Element("nitf", attrib=self._message_attrib)
         head = SubElement(nitf, "head")
diff --git a/superdesk/publish/publish_content.py b/superdesk/publish/publish_content.py
index 0f0f10e35c..2f6b16442f 100644
--- a/superdesk/publish/publish_content.py
+++ b/superdesk/publish/publish_content.py
@@ -13,8 +13,10 @@
 import superdesk.publish
 
 from datetime import timedelta
-from eve.utils import config, ParsedRequest
-from flask import current_app as app
+from eve.utils import ParsedRequest
+
+from superdesk.resource_fields import ID_FIELD, LAST_UPDATED
+from superdesk.core import get_app_config, get_current_app
 from superdesk import get_resource_service
 from superdesk.celery_task_utils import get_lock_id
 from superdesk.errors import PublishHTTPPushClientError
@@ -94,6 +96,7 @@ def _get_queue_lookup(retries=False, priority=None):
 
 def get_queue_subscribers(retries=False, priority=None):
     lookup = _get_queue_lookup(retries, priority)
+    app = get_current_app()
     return app.data.mongo.pymongo(resource=PUBLISH_QUEUE).db[PUBLISH_QUEUE].distinct("subscriber_id", lookup)
 
 
@@ -102,14 +105,14 @@ def get_queue_items(retries=False, subscriber_id=None, priority=None):
     if subscriber_id:
         lookup["$and"].append({"subscriber_id": subscriber_id})
     request = ParsedRequest()
-    request.max_results = app.config.get("MAX_TRANSMIT_QUERY_LIMIT", 100)  # limit per subscriber now
+    request.max_results = get_app_config("MAX_TRANSMIT_QUERY_LIMIT", 100)  # limit per subscriber now
     request.sort = '[("_created", 1), ("published_seq_num", 1)]'
     return get_resource_service(PUBLISH_QUEUE).get_from_mongo(req=request, lookup=lookup)
 
 
 def _get_queue(priority=None):
-    if priority and app.config.get("HIGH_PRIORITY_QUEUE_ENABLED"):
-        return app.config["HIGH_PRIORITY_QUEUE"]
+    if priority and get_app_config("HIGH_PRIORITY_QUEUE_ENABLED"):
+        return get_app_config("HIGH_PRIORITY_QUEUE")
 
 
 @celery.task(soft_time_limit=600, expires=10)
@@ -124,7 +127,7 @@ def transmit_subscriber_items(subscriber, retries=False, priority=None):
     try:
         queue_items = get_queue_items(retries, subscriber, priority)
         for queue_item in queue_items:
-            args = [queue_item[config.ID_FIELD]]
+            args = [queue_item[ID_FIELD]]
             kwargs = {"is_async": is_async}
             if is_async:
                 transmit_item.apply_async(
@@ -154,7 +157,7 @@ def transmit_item(queue_item_id, is_async=False):
         if queue_item.get("state") not in [QueueState.PENDING.value, QueueState.RETRYING.value]:
             logger.info(
                 "Transmit State is not pending/retrying for queue item: {}. It is in {}".format(
-                    queue_item.get(config.ID_FIELD), queue_item.get("state")
+                    queue_item.get(ID_FIELD), queue_item.get("state")
                 )
             )
             return
@@ -166,7 +169,7 @@ def transmit_item(queue_item_id, is_async=False):
 
         # update the status of the item to in-progress
         queue_update = {"state": "in-progress", "transmit_started_at": utcnow()}
-        publish_queue_service.patch(queue_item.get(config.ID_FIELD), queue_update)
+        publish_queue_service.patch(queue_item.get(ID_FIELD), queue_update)
         logger.info("Transmitting queue item {}".format(log_msg))
 
         destination = queue_item["destination"]
@@ -181,12 +184,12 @@ def transmit_item(queue_item_id, is_async=False):
     except Exception as e:
         logger.exception("Failed to transmit queue item {}".format(log_msg))
 
-        max_retry_attempt = app.config.get("MAX_TRANSMIT_RETRY_ATTEMPT")
-        retry_attempt_delay = app.config.get("TRANSMIT_RETRY_ATTEMPT_DELAY_MINUTES")
+        max_retry_attempt = get_app_config("MAX_TRANSMIT_RETRY_ATTEMPT")
+        retry_attempt_delay = get_app_config("TRANSMIT_RETRY_ATTEMPT_DELAY_MINUTES")
         try:
             orig_item = publish_queue_service.find_one(req=None, _id=queue_item["_id"])
             timeout = 2 ** min(6, orig_item.get("retry_attempt", retry_attempt_delay))
-            updates = {config.LAST_UPDATED: utcnow()}
+            updates = {LAST_UPDATED: utcnow()}
 
             if orig_item.get("retry_attempt", 0) < max_retry_attempt and not isinstance(e, PublishHTTPPushClientError):
                 updates["retry_attempt"] = orig_item.get("retry_attempt", 0) + 1
@@ -196,7 +199,7 @@ def transmit_item(queue_item_id, is_async=False):
                 # all retry attempts exhausted marking the item as failed.
                 updates["state"] = QueueState.FAILED.value
 
-            publish_queue_service.system_update(orig_item.get(config.ID_FIELD), updates, orig_item)
+            publish_queue_service.system_update(orig_item.get(ID_FIELD), updates, orig_item)
         except Exception:
             logger.error("Failed to set the state for failed publish queue item {}.".format(queue_item["_id"]))
 
diff --git a/superdesk/publish/publish_queue.py b/superdesk/publish/publish_queue.py
index b88b8ad9b1..e550b6dfe9 100644
--- a/superdesk/publish/publish_queue.py
+++ b/superdesk/publish/publish_queue.py
@@ -11,12 +11,12 @@
 import logging
 from typing import Dict, Any
 
+from superdesk.core import get_current_app
 from superdesk import get_resource_service
 from superdesk.notification import push_notification
 from superdesk.resource import Resource
 from superdesk.services import BaseService
 from superdesk.utils import SuperdeskBaseEnum
-from flask import current_app as app
 
 logger = logging.getLogger(__name__)
 
@@ -132,7 +132,7 @@ def delete(self, lookup):
             except KeyError:
                 pass
             else:
-                app.storage.delete(encoded_item_id)
+                get_current_app().storage.delete(encoded_item_id)
         return super().delete(lookup)
 
     def delete_by_article_id(self, _id):
diff --git a/superdesk/publish/publish_service.py b/superdesk/publish/publish_service.py
index dc9a44d019..68f7296f53 100644
--- a/superdesk/publish/publish_service.py
+++ b/superdesk/publish/publish_service.py
@@ -12,9 +12,11 @@
 import superdesk
 
 from bson import ObjectId
-from flask import current_app as app
 from typing import Any, Dict, Optional
-from superdesk import get_resource_service, config
+
+from superdesk.resource_fields import ID_FIELD
+from superdesk.core import get_current_app
+from superdesk import get_resource_service
 from superdesk.utc import utcnow
 from superdesk.errors import SubscriberError, SuperdeskPublishError, PublishQueueError
 
@@ -56,7 +58,7 @@ def transmit(self, queue_item):
                 # we fill encoded_item using "formatted_item" and "item_encoding"
                 if "encoded_item_id" in queue_item:
                     encoded_item_id = queue_item["encoded_item_id"]
-                    queue_item["encoded_item"] = app.storage.get(encoded_item_id).read()
+                    queue_item["encoded_item"] = get_current_app().storage.get(encoded_item_id).read()
                 else:
                     encoding = queue_item.get("item_encoding", "utf-8")
                     queue_item["encoded_item"] = queue_item["formatted_item"].encode(encoding, errors="replace")
@@ -87,7 +89,7 @@ def close_transmitter(self, subscriber, error):
                 },
             }
 
-            get_resource_service("subscribers").system_update(subscriber[config.ID_FIELD], update, subscriber)
+            get_resource_service("subscribers").system_update(subscriber[ID_FIELD], update, subscriber)
 
     def update_item_status(self, queue_item, status, error=None):
         try:
diff --git a/superdesk/publish/subscribers.py b/superdesk/publish/subscribers.py
index 54e9e7b6c1..f50457beff 100644
--- a/superdesk/publish/subscribers.py
+++ b/superdesk/publish/subscribers.py
@@ -13,9 +13,10 @@
 
 from copy import deepcopy
 
-from flask import current_app as app
+from superdesk.resource_fields import ID_FIELD
+from superdesk.core import get_app_config
 from superdesk import get_resource_service
-from eve.utils import ParsedRequest, config
+from eve.utils import ParsedRequest
 from superdesk.utils import ListCursor, get_dict_hash
 from superdesk.resource import Resource, build_custom_hateoas
 from superdesk.services import CacheableService
@@ -125,7 +126,7 @@ def on_create(self, docs):
             self._validate_products_destinations(doc)
 
     def on_created(self, docs):
-        push_notification("subscriber:create", _id=[doc.get(config.ID_FIELD) for doc in docs])
+        push_notification("subscriber:create", _id=[doc.get(ID_FIELD) for doc in docs])
 
     def on_update(self, updates, original):
         self._validate_seq_num_settings(updates)
@@ -148,12 +149,10 @@ def keep_destinations_secrets(self, updates, original):
                         update_destination["config"].setdefault(field, value)
 
     def on_updated(self, updates, original):
-        push_notification("subscriber:update", _id=[original.get(config.ID_FIELD)])
+        push_notification("subscriber:update", _id=[original.get(ID_FIELD)])
 
     def on_deleted(self, doc):
-        get_resource_service("sequences").delete(
-            lookup={"key": "ingest_providers_{_id}".format(_id=doc[config.ID_FIELD])}
-        )
+        get_resource_service("sequences").delete(lookup={"key": "ingest_providers_{_id}".format(_id=doc[ID_FIELD])})
 
     def is_async(self, subscriber_id):
         subscriber = self.find_one(req=None, _id=subscriber_id)
@@ -236,7 +235,7 @@ def _validate_products_destinations(self, subscriber):
             )
 
         if subscriber.get("products"):
-            lookup = {config.ID_FIELD: {"$in": subscriber.get("products")}, "product_type": ProductTypes.API.value}
+            lookup = {ID_FIELD: {"$in": subscriber.get("products")}, "product_type": ProductTypes.API.value}
             products = get_resource_service("products").get_product_names(lookup)
             if products:
                 raise SuperdeskApiError.badRequestError(
@@ -244,7 +243,7 @@ def _validate_products_destinations(self, subscriber):
                 )
         if subscriber.get("api_products"):
             lookup = {
-                config.ID_FIELD: {"$in": subscriber.get("api_products")},
+                ID_FIELD: {"$in": subscriber.get("api_products")},
                 "product_type": ProductTypes.DIRECT.value,
             }
             products = get_resource_service("products").get_product_names(lookup)
@@ -275,7 +274,7 @@ def _validate_seq_num_settings(self, subscriber):
 
         if subscriber.get("sequence_num_settings"):
             min = subscriber.get("sequence_num_settings").get("min", 1)
-            max = subscriber.get("sequence_num_settings").get("max", app.config["MAX_VALUE_OF_PUBLISH_SEQUENCE"])
+            max = subscriber.get("sequence_num_settings").get("max", get_app_config("MAX_VALUE_OF_PUBLISH_SEQUENCE"))
 
             if min <= 0:
                 raise SuperdeskApiError.badRequestError(
@@ -301,13 +300,13 @@ def generate_sequence_number(self, subscriber):
 
         assert subscriber is not None, "Subscriber can't be null"
         min_seq_number = 1
-        max_seq_number = app.config["MAX_VALUE_OF_PUBLISH_SEQUENCE"]
+        max_seq_number = get_app_config("MAX_VALUE_OF_PUBLISH_SEQUENCE")
         if subscriber.get("sequence_num_settings"):
             min_seq_number = subscriber["sequence_num_settings"]["min"]
             max_seq_number = subscriber["sequence_num_settings"]["max"]
 
         return get_resource_service("sequences").get_next_sequence_number(
-            key_name="subscribers_{_id})".format(_id=subscriber[config.ID_FIELD]),
+            key_name="subscribers_{_id})".format(_id=subscriber[ID_FIELD]),
             max_seq_number=max_seq_number,
             min_seq_number=min_seq_number,
         )
diff --git a/superdesk/publish/transmitters/email.py b/superdesk/publish/transmitters/email.py
index 48e51749f6..4fbd8de483 100644
--- a/superdesk/publish/transmitters/email.py
+++ b/superdesk/publish/transmitters/email.py
@@ -10,7 +10,7 @@
 
 import json
 
-from flask import current_app as app
+from superdesk.core import get_app_config, get_current_app
 from superdesk.emails import send_email
 from superdesk.publish import register_transmitter
 from superdesk.publish.publish_service import PublishService
@@ -48,7 +48,7 @@ def _transmit(self, queue_item, subscriber):
             except Exception:
                 item = {}
 
-            admins = app.config["ADMINS"]
+            admins = get_app_config("ADMINS")
             recipients = [r.strip() for r in config.get("recipients", "").split(";") if r.strip()]
             bcc = [r.strip() for r in config.get("recipients_bcc", "").split(";") if r.strip()]
             if not recipients and not bcc:
@@ -67,7 +67,7 @@ def _transmit(self, queue_item, subscriber):
                 rendition = config.get("media_rendition", "")
                 media_item = item.get("renditions", {}).get(rendition)
                 if media_item and rendition:
-                    media = app.media.get(media_item["media"], resource="upload")
+                    media = get_current_app().media.get(media_item["media"], resource="upload")
                     im = Image.open(media)
                     if config.get("watermark", False):
                         im = get_watermark(im)
diff --git a/superdesk/publish/transmitters/ftp.py b/superdesk/publish/transmitters/ftp.py
index 1a5490814f..29b5f6a4db 100644
--- a/superdesk/publish/transmitters/ftp.py
+++ b/superdesk/publish/transmitters/ftp.py
@@ -13,8 +13,8 @@
 import superdesk
 from urllib.parse import urlparse
 from io import BytesIO
-from flask import current_app as app
 
+from superdesk.core import get_current_app
 from superdesk.ftp import ftp_connect
 from superdesk.publish import register_transmitter, registered_transmitter_file_providers
 from superdesk.publish.publish_service import get_publish_service, PublishService
@@ -98,6 +98,7 @@ def _copy_published_media_files(self, item, ftp):
         remote_items = []
         ftp.retrlines("LIST", remote_items.append)
 
+        app = get_current_app()
         for media_id, rendition in media.items():
             if not self._media_exists(rendition, remote_items):
                 binary = app.media.get(media_id, resource=rendition.get("resource", "upload"))
diff --git a/superdesk/publish/transmitters/http_push.py b/superdesk/publish/transmitters/http_push.py
index 58e8e46748..2f5ef3f846 100644
--- a/superdesk/publish/transmitters/http_push.py
+++ b/superdesk/publish/transmitters/http_push.py
@@ -13,7 +13,7 @@
 import logging
 import requests
 
-from flask import current_app as app
+from superdesk.core import get_current_app, get_app_config
 from superdesk.publish import register_transmitter, registered_transmitter_file_providers
 
 from superdesk.errors import PublishHTTPPushError, PublishHTTPPushServerError, PublishHTTPPushClientError
@@ -120,6 +120,7 @@ def _copy_published_media_files(self, item, destination):
         for get_files in registered_transmitter_file_providers:
             media.update(get_files(self.NAME, item))
 
+        app = get_current_app()
         for media_id, rendition in media.items():
             if not self._media_exists(media_id, destination):
                 binary = app.media.get(media_id, resource=rendition.get("resource", "upload"))
@@ -168,7 +169,7 @@ def _media_exists(self, media_id, destination):
         return response.status_code == requests.codes.ok  # @UndefinedVariable
 
     def _get_timeout(self):
-        return app.config.get("HTTP_PUSH_TIMEOUT", (5, 30))
+        return get_app_config("HTTP_PUSH_TIMEOUT", (5, 30))
 
     def _get_headers(self, data, destination, current_headers):
         secret_token = self._get_secret_token(destination)
diff --git a/superdesk/publish/transmitters/imatrics.py b/superdesk/publish/transmitters/imatrics.py
index ce92d54aae..c3463ab06b 100644
--- a/superdesk/publish/transmitters/imatrics.py
+++ b/superdesk/publish/transmitters/imatrics.py
@@ -1,5 +1,4 @@
-from flask import current_app, json
-
+from superdesk.core import json, get_current_app
 from superdesk.publish import register_transmitter
 from superdesk.publish.publish_service import PublishService
 from superdesk.text_checkers.ai.imatrics import IMatrics
@@ -7,7 +6,7 @@
 
 class IMatricsTransmitter(PublishService):
     def _transmit(self, queue_item, subscriber):
-        imatrics = IMatrics(current_app)
+        imatrics = IMatrics(get_current_app())
         item = json.loads(queue_item["formatted_item"])
         imatrics.publish(item)
 
diff --git a/superdesk/publish/transmitters/odbc.py b/superdesk/publish/transmitters/odbc.py
index efef9c34a6..6f0ffa06d8 100644
--- a/superdesk/publish/transmitters/odbc.py
+++ b/superdesk/publish/transmitters/odbc.py
@@ -10,7 +10,7 @@
 
 import json
 
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk.publish import register_transmitter
 from superdesk.publish.publish_service import PublishService
 from superdesk.errors import PublishODBCError
@@ -43,7 +43,7 @@ def _transmit(self, queue_item, subscriber):
         Configuration must have connection string and the name of a stored procedure.
         """
 
-        if not app.config["ODBC_PUBLISH"] or not pyodbc_available:
+        if not get_app_config("ODBC_PUBLISH") or not pyodbc_available:
             raise PublishODBCError()
 
         config = queue_item.get("destination", {}).get("config", {})
diff --git a/superdesk/resource.py b/superdesk/resource.py
index 9839c56c98..13cfdde461 100644
--- a/superdesk/resource.py
+++ b/superdesk/resource.py
@@ -15,8 +15,8 @@
 from typing_extensions import Literal
 
 import superdesk
+from superdesk.resource_fields import LINKS
 
-from eve.utils import config
 from .services import Service
 
 from . import resource_locking
@@ -44,10 +44,10 @@
 
 def build_custom_hateoas(hateoas, doc, **values):
     values.update(doc)
-    links = doc.get(config.LINKS)
+    links = doc.get(LINKS)
     if not links:
         links = {}
-        doc[config.LINKS] = links
+        doc[LINKS] = links
 
     for link_name in hateoas.keys():
         link = hateoas[link_name]
diff --git a/superdesk/resource_fields.py b/superdesk/resource_fields.py
new file mode 100644
index 0000000000..001c5594e0
--- /dev/null
+++ b/superdesk/resource_fields.py
@@ -0,0 +1,26 @@
+# -*- coding: utf-8; -*-
+#
+# This file is part of Superdesk.
+#
+# Copyright 2024 Sourcefabric z.u. and contributors.
+#
+# For the full copyright and license information, please see the
+# AUTHORS and LICENSE files distributed with this source code, or
+# at https://www.sourcefabric.org/superdesk/license
+
+ID_FIELD = "_id"
+ITEM_TYPE = "type"
+ITEM_STATE = "state"
+
+STATUS_OK = "OK"
+STATUS_ERR = "ERR"
+LAST_UPDATED = "_updated"
+DATE_CREATED = "_created"
+ISSUES = "_issues"
+STATUS = "_status"
+ERROR = "_error"
+ITEMS = "_items"
+LINKS = "_links"
+ETAG = "_etag"
+VERSION = "_current_version"
+META = "_meta"
diff --git a/superdesk/resource_locking.py b/superdesk/resource_locking.py
index cd4360f4f3..e0e6bcc29d 100644
--- a/superdesk/resource_locking.py
+++ b/superdesk/resource_locking.py
@@ -1,9 +1,9 @@
 import enum
-import flask
 import datetime
 
 from flask_babel import _
 
+from superdesk.flask import abort
 from superdesk.utc import utcnow
 
 
@@ -32,7 +32,7 @@ def on_update(updates: dict, original: dict):
         if updates.get("_lock_action") == LockActions.FORCE_LOCK.value:
             pass  # force locking, might need specific permissions eventually
         elif auth["_id"] != original.get("_lock_session"):
-            flask.abort(412, description=_("Resource is locked."))
+            abort(412, description=_("Resource is locked."))
 
     # lock
     if updates.get("_lock_action") in LOCKED_ACTIONS:
diff --git a/superdesk/roles/roles.py b/superdesk/roles/roles.py
index ff5b94de79..01be6d0efc 100644
--- a/superdesk/roles/roles.py
+++ b/superdesk/roles/roles.py
@@ -11,7 +11,7 @@
 import logging
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.activity import add_activity, ACTIVITY_UPDATE
 from superdesk.services import BaseService
 from superdesk import get_resource_service
@@ -116,6 +116,7 @@ def __send_notification(self, updates, role):
                 push_notification("activity", _dest=activity["recipients"])
                 privileges_updated = True
             if privileges_updated:
+                app = get_current_app().as_any()
                 app.on_role_privileges_updated(role, role_users)
 
         else:
diff --git a/superdesk/sams/__init__.py b/superdesk/sams/__init__.py
index a51fa3296f..ce79e5c7fb 100644
--- a/superdesk/sams/__init__.py
+++ b/superdesk/sams/__init__.py
@@ -10,7 +10,7 @@
 
 
 def init_app(app: Eve):
-    client = get_sams_client(app)
+    client = get_sams_client()
 
     if not app.config["RENDITIONS"].get("sams"):
         # if SAMS renditions are not defined, then copy them from default settings
diff --git a/superdesk/sams/assets.py b/superdesk/sams/assets.py
index 6b17016117..cec0edaca1 100644
--- a/superdesk/sams/assets.py
+++ b/superdesk/sams/assets.py
@@ -22,11 +22,11 @@
 
 import ast
 import logging
-from flask import request, current_app as app
 from flask_babel import _
 from bson import ObjectId
 
-import superdesk
+from superdesk.core import get_app_config
+from superdesk.flask import request, Blueprint
 from superdesk.errors import SuperdeskApiError
 from superdesk.notification import push_notification
 from superdesk.storage.superdesk_file import generate_response_for_file
@@ -38,7 +38,7 @@
 from .client import get_sams_client
 
 logger = logging.getLogger(__name__)
-assets_bp = superdesk.Blueprint("sams_assets", __name__)
+assets_bp = Blueprint("sams_assets", __name__)
 
 
 @assets_bp.route("/sams/assets", methods=["GET"])
@@ -98,9 +98,9 @@ def create():
     if post_response.status_code == 201:
         if response.get("mimetype", "").startswith("image/"):
             # Create renditions.
-            renditions = [k for k in app.config["RENDITIONS"]["sams"].keys()]
+            renditions = [k for k in get_app_config("RENDITIONS")["sams"].keys()]
             for rendition in renditions:
-                dimensions = app.config["RENDITIONS"]["sams"][rendition]
+                dimensions = get_app_config("RENDITIONS")["sams"][rendition]
                 rendition_response = sams_client.images.generate_rendition(
                     response["_id"],
                     width=dimensions.get("width"),
diff --git a/superdesk/sams/client.py b/superdesk/sams/client.py
index ea41acbd33..8972037295 100644
--- a/superdesk/sams/client.py
+++ b/superdesk/sams/client.py
@@ -1,22 +1,17 @@
-from typing import Optional
-from flask import current_app
-from eve.flaskapp import Eve
 from sams_client import SamsClient
+from superdesk.core import get_app_config
 
 _client: SamsClient = None
 
 
-def get_sams_client(app: Optional[Eve] = None) -> SamsClient:
+def get_sams_client() -> SamsClient:
     global _client
 
     if not _client:
-        if app is None:
-            app = current_app
-
         _client = SamsClient(
             {
-                "HOST": app.config.get("SAMS_HOST"),
-                "PORT": app.config.get("SAMS_PORT"),
+                "HOST": get_app_config("SAMS_HOST"),
+                "PORT": get_app_config("SAMS_PORT"),
             }
         )
 
diff --git a/superdesk/sams/media_storage.py b/superdesk/sams/media_storage.py
index 5cdc9aa2b4..eccbb44a03 100644
--- a/superdesk/sams/media_storage.py
+++ b/superdesk/sams/media_storage.py
@@ -8,16 +8,17 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from typing import Dict, Any, BinaryIO, Union, Optional
+from typing import Dict, Any, BinaryIO, Union, Optional, cast
 import logging
 from os import path
 
 from eve.flaskapp import Eve
 from eve.io.mongo.media import MediaStorage
 from bson import ObjectId
-from flask import current_app, request
 from werkzeug.utils import secure_filename
 
+from superdesk.core import get_current_app, get_app_config
+from superdesk.flask import request
 from superdesk.factory.app import get_media_storage_class
 from superdesk.default_settings import strtobool
 from superdesk.storage.mimetype_mixin import MimetypeMixin
@@ -42,7 +43,7 @@
 
 
 def get_sams_values_from_resource_schema(resource: str, data: Dict[str, Any]):
-    schema = current_app.config["DOMAIN"][resource]["schema"]
+    schema = cast(Dict[str, Any], get_app_config("DOMAIN"))[resource]["schema"]
 
     def _get_field(field: str):
         sams_mapping = (schema.get(field) or {}).get("sams") or {}
@@ -69,11 +70,11 @@ class SAMSMediaStorage(MediaStorage, MimetypeMixin):
     """
 
     def __init__(self, app: Eve = None):
-        super(SAMSMediaStorage, self).__init__(app or current_app)
+        super(SAMSMediaStorage, self).__init__(app or get_current_app())
 
         fallback_klass = get_media_storage_class(self.app.config, False)
         self._fallback = fallback_klass(self.app)
-        self._client: SamsClient = get_sams_client(self.app)
+        self._client: SamsClient = get_sams_client()
 
     def url_for_external(self, media_id: str, resource: Optional[str] = None) -> str:
         """Returns a URL for external use
diff --git a/superdesk/sams/sets.py b/superdesk/sams/sets.py
index e12330fadf..46d8eac350 100644
--- a/superdesk/sams/sets.py
+++ b/superdesk/sams/sets.py
@@ -22,14 +22,16 @@
 
 import logging
 import superdesk
-from flask import request, current_app as app
+
+from superdesk.core import get_current_app
+from superdesk.flask import Blueprint, request
 from superdesk.errors import SuperdeskApiError
 from superdesk.notification import push_notification
 from apps.auth import get_auth, get_user_id
 from .client import get_sams_client
 
 logger = logging.getLogger(__name__)
-sets_bp = superdesk.Blueprint("sams_sets", __name__)
+sets_bp = Blueprint("sams_sets", __name__)
 
 
 @sets_bp.route("/sams/sets", methods=["GET"])
@@ -101,7 +103,7 @@ def remove_set_restriction_from_desks(set_id_to_remove: str):
     """
 
     desk_service = superdesk.get_resource_service("desks")
-    desks_db = app.data.mongo.pymongo("desks").db["desks"]
+    desks_db = get_current_app().data.mongo.pymongo("desks").db["desks"]
 
     # Use pymongo directly as this query doesn't work through services
     for desk in desks_db.find({"sams_settings.allowed_sets": set_id_to_remove}):
diff --git a/superdesk/sams/storage_destinations.py b/superdesk/sams/storage_destinations.py
index 8d3c8625ee..953395ec40 100644
--- a/superdesk/sams/storage_destinations.py
+++ b/superdesk/sams/storage_destinations.py
@@ -21,11 +21,11 @@
 """
 
 import logging
-import superdesk
+from superdesk.flask import Blueprint
 from .client import get_sams_client
 
 logger = logging.getLogger(__name__)
-destinations_bp = superdesk.Blueprint("sams_destinations", __name__)
+destinations_bp = Blueprint("sams_destinations", __name__)
 
 
 @destinations_bp.route("/sams/destinations", methods=["GET"])
diff --git a/superdesk/sams/utils.py b/superdesk/sams/utils.py
index f9f8205bdc..f4c0a6b791 100644
--- a/superdesk/sams/utils.py
+++ b/superdesk/sams/utils.py
@@ -13,10 +13,10 @@
 from bson import ObjectId
 from eve.utils import str_to_date
 from pymongo.cursor import Cursor as MongoCursor
-from flask import current_app as app
 from requests import Response
 from requests.exceptions import HTTPError
 
+from superdesk.core import get_current_app
 from superdesk.errors import SuperdeskApiError
 from superdesk.storage.superdesk_file import SuperdeskFile
 from sams_client import SamsClient
@@ -118,7 +118,7 @@ def get_attachments_from_asset_id(asset_id: Union[ObjectId, str]) -> MongoCursor
     """Returns the list of Attachments that use this SAMS Asset"""
 
     # use PyMongo directly as searching with `media: str` doesnt work for Superdesk's datalayer
-    db_attachments = app.data.get_mongo_collection("attachments")
+    db_attachments = get_current_app().data.get_mongo_collection("attachments")
     return db_attachments.find({"media": str(asset_id)})
 
 
diff --git a/superdesk/server_config.py b/superdesk/server_config.py
index 3b91231e5e..9dcf671e2f 100644
--- a/superdesk/server_config.py
+++ b/superdesk/server_config.py
@@ -1,6 +1,6 @@
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_current_app
 from apps.auth import is_current_user_admin
 from superdesk.utc import utcnow
 
@@ -34,7 +34,7 @@ def create(self, docs, **kwargs):
         return ids
 
     def set(self, key, val, namespace="superdesk"):
-        coll = app.data.mongo.get_collection_with_write_concern("config", "config")
+        coll = get_current_app().data.mongo.get_collection_with_write_concern("config", "config")
         if isinstance(val, dict):
             updates = {f"val.{k}": v for k, v in val.items()} if val else {}
         else:
diff --git a/superdesk/services.py b/superdesk/services.py
index 3d952e0a4b..73909b9631 100644
--- a/superdesk/services.py
+++ b/superdesk/services.py
@@ -13,9 +13,11 @@
 import logging
 
 from typing import Dict, Any, List, Optional, Union
-from flask import current_app as app, json, g
-from eve.utils import ParsedRequest, config
+from eve.utils import ParsedRequest
 from eve.methods.common import resolve_document_etag
+
+from superdesk.resource_fields import ETAG
+from superdesk.flask import g
 from superdesk.errors import SuperdeskApiError
 from superdesk.utc import utcnow
 from superdesk.cache import cache
@@ -127,6 +129,8 @@ def get_from_mongo(self, req, lookup, projection=None):
         if req is None:
             req = ParsedRequest()
         if not req.projection and projection:
+            from superdesk.core import json
+
             req.projection = json.dumps(projection)
         return self.backend.get_from_mongo(self.datasource, req=req, lookup=lookup)
 
@@ -160,12 +164,17 @@ def get_all_batch(self, size=500, max_iterations=10000, lookup=None):
             logger.warning("Not enough iterations for resource %s", self.datasource)
 
     def _validator(self, skip_validation=False):
-        resource_def = app.config["DOMAIN"][self.datasource]
+        from superdesk.core import get_app_config, get_current_app
+
+        resource_def = get_app_config("DOMAIN")[self.datasource]
         schema = resource_def["schema"]
+
         return (
             None
             if skip_validation
-            else app.validator(schema, resource=self.datasource, allow_unknown=resource_def["allow_unknown"])
+            else get_current_app().validator(
+                schema, resource=self.datasource, allow_unknown=resource_def["allow_unknown"]
+            )
         )
 
     def _resolve_defaults(self, doc):
@@ -184,13 +193,15 @@ def post(self, docs, **kwargs):
         return ids
 
     def patch(self, id, updates):
+        from superdesk.core import get_app_config
+
         original = self.find_one(req=None, _id=id)
         updated = original.copy()
         self.on_update(updates, original)
         updated.update(updates)
-        if config.IF_MATCH:
+        if get_app_config("IF_MATCH"):
             resolve_document_etag(updated, self.datasource)
-            updates[config.ETAG] = updated[config.ETAG]
+            updates[ETAG] = updated[ETAG]
         res = self.update(id, updates, original)
         self.on_updated(updates, original)
         return res
diff --git a/superdesk/storage/__init__.py b/superdesk/storage/__init__.py
index 293181e201..1113914321 100644
--- a/superdesk/storage/__init__.py
+++ b/superdesk/storage/__init__.py
@@ -10,13 +10,13 @@
 
 """Superdesk storage module."""
 
-from flask import current_app as app
 from typing import Optional
 import abc
 
 from eve.io.media import MediaStorage
 from eve.io.mongo.media import GridFSMediaStorage, GridFS
 
+from superdesk.core import get_current_app
 from .mimetype_mixin import MimetypeMixin
 
 
@@ -48,7 +48,7 @@ def fetch_rendition(self, rendition, resource=None):
 
 class SimpleMediaStorage(GridFSMediaStorage):
     def fs(self, resource):
-        driver = app.data.mongo
+        driver = get_current_app().data.mongo
 
         px = driver.current_mongo_prefix(resource)
         if px not in self._fs:
diff --git a/superdesk/storage/desk_media_storage.py b/superdesk/storage/desk_media_storage.py
index e4b5fe63f8..a2f58a03c9 100644
--- a/superdesk/storage/desk_media_storage.py
+++ b/superdesk/storage/desk_media_storage.py
@@ -9,7 +9,6 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 from typing import Optional
-from flask import current_app as app
 from flask_babel import _
 import logging
 import json
@@ -22,6 +21,7 @@
 
 from eve.io.mongo.media import GridFSMediaStorage
 
+from superdesk.core import get_current_app
 from superdesk.errors import SuperdeskApiError
 from . import SuperdeskMediaStorage
 
@@ -61,14 +61,14 @@ def url_for_media(self, media_id, content_type=None):
         ext = mimetypes.guess_extension(content_type or "") or ""
         if ext in (".jpe", ".jpeg"):
             ext = ".jpg"
-        return app.upload_url(str(media_id) + ext)
+        return get_current_app().upload_url(str(media_id) + ext)
 
     def url_for_download(self, media_id, content_type=None):
         """Return url for download.
 
         :param media_id: media id from media_id method
         """
-        return app.download_url(str(media_id))
+        return get_current_app().download_url(str(media_id))
 
     def url_for_external(self, media_id: str, resource: Optional[str] = None) -> str:
         """Returns a URL for external use
@@ -140,7 +140,7 @@ def put(self, content, filename=None, content_type=None, metadata=None, resource
 
     def fs(self, resource=None):
         resource = resource or "upload"
-        driver = app.data.mongo
+        driver = get_current_app().data.mongo
         px = driver.current_mongo_prefix(resource)
         if px not in self._fs:
             self._fs[px] = gridfs.GridFS(driver.pymongo(prefix=px).db)
diff --git a/superdesk/storage/fix_links.py b/superdesk/storage/fix_links.py
index 2c18a0c545..f1ded3a808 100644
--- a/superdesk/storage/fix_links.py
+++ b/superdesk/storage/fix_links.py
@@ -2,7 +2,7 @@
 import logging
 import superdesk
 
-from flask import current_app as app
+from superdesk.core import get_current_app
 from superdesk.editor_utils import get_field_content_state, get_field_value, set_field_value
 from superdesk.errors import SuperdeskApiError
 
@@ -76,6 +76,7 @@ def get_updates(self, item, prefix, hrefs):
 
         # check renditions
         renditions = item.get("renditions") or {}
+        app = get_current_app()
         for rendition in renditions.values():
             if rendition and rendition.get("href") and rendition["href"].startswith(prefix):
                 media = rendition.get("media")
diff --git a/superdesk/storage/migrate.py b/superdesk/storage/migrate.py
index 2c9bdce68b..6d4ed1f03b 100644
--- a/superdesk/storage/migrate.py
+++ b/superdesk/storage/migrate.py
@@ -1,7 +1,6 @@
 import codecs
 import superdesk
-
-from flask import current_app as app
+from superdesk.core import get_current_app
 
 
 class MigrateMediaCommand(superdesk.Command):
@@ -14,6 +13,7 @@ class MigrateMediaCommand(superdesk.Command):
     # ]
 
     def run(self, limit, skip, delete):
+        app = get_current_app()
         mongo = app.media._storage[1]
         amazon = app.media._storage[0]
 
diff --git a/superdesk/storage/superdesk_file.py b/superdesk/storage/superdesk_file.py
index d537cf6ee5..500ce61382 100644
--- a/superdesk/storage/superdesk_file.py
+++ b/superdesk/storage/superdesk_file.py
@@ -13,8 +13,9 @@
 from datetime import datetime
 
 from werkzeug.wsgi import wrap_file
-from flask import request, current_app as app
 
+from superdesk.core import get_current_app
+from superdesk.flask import request
 from superdesk.default_settings import strtobool
 
 
@@ -38,7 +39,7 @@ def generate_response_for_file(
     content_disposition: Optional[str] = None,
 ):
     data = wrap_file(request.environ, file, buffer_size=buffer_size)
-    response = app.response_class(data, mimetype=file.content_type, direct_passthrough=True)
+    response = get_current_app().response_class(data, mimetype=file.content_type, direct_passthrough=True)
     response.content_length = file.length
     response.last_modified = file.upload_date
     response.set_etag(file.md5)
diff --git a/superdesk/system/health.py b/superdesk/system/health.py
index 70f4092686..86e4479d02 100644
--- a/superdesk/system/health.py
+++ b/superdesk/system/health.py
@@ -17,7 +17,9 @@
 import superdesk
 
 from typing import Callable, List, Tuple
-from flask import Blueprint, current_app as app
+
+from superdesk.core import get_app_config, get_current_app
+from superdesk.flask import Blueprint
 
 
 bp = Blueprint("system", __name__)
@@ -25,23 +27,23 @@
 
 
 def mongo_health() -> bool:
-    info = app.data.mongo.pymongo().cx.server_info()
+    info = get_current_app().data.mongo.pymongo().cx.server_info()
     return bool(info["ok"])
 
 
 def elastic_health() -> bool:
-    health = app.data.elastic.es.cluster.health()
+    health = get_current_app().data.elastic.es.cluster.health()
     return health["status"] in ("green", "yellow")
 
 
 def celery_health() -> bool:
-    with app.celery.connection_for_write() as conn:
+    with get_current_app().celery.connection_for_write() as conn:
         conn.connect()
         return conn.connected
 
 
 def redis_health() -> bool:
-    info = app.redis.info()
+    info = get_current_app().redis.info()
     return bool(info)
 
 
@@ -60,7 +62,7 @@ def human(status: bool) -> str:
 @bp.route("/system/health", methods=["GET", "OPTIONS"])
 def health():
     output = {
-        "application_name": app.config.get("APPLICATION_NAME"),
+        "application_name": get_app_config("APPLICATION_NAME"),
     }
 
     status = True
diff --git a/superdesk/tests/__init__.py b/superdesk/tests/__init__.py
index 04ea947301..79a1892d2a 100644
--- a/superdesk/tests/__init__.py
+++ b/superdesk/tests/__init__.py
@@ -19,8 +19,8 @@
 from base64 import b64encode
 from unittest.mock import patch
 
-from flask import json, Config
-
+from superdesk.core import json
+from superdesk.flask import Config
 from apps.ldap import ADAuth
 from superdesk import get_resource_service
 from superdesk.cache import cache
@@ -79,7 +79,6 @@ def update_config(conf):
     conf["CELERY_BEAT_SCHEDULE_FILENAME"] = "./testschedule.db"
     conf["CELERY_BEAT_SCHEDULE"] = {}
     conf["CONTENT_EXPIRY_MINUTES"] = 99
-    conf["VERSION"] = "_current_version"
     conf["SECRET_KEY"] = "test-secret"
     conf["JSON_SORT_KEYS"] = True
     conf["ELASTICSEARCH_INDEXES"] = {
diff --git a/superdesk/tests/async_test_client.py b/superdesk/tests/async_test_client.py
index 4d0ef87342..032000be40 100644
--- a/superdesk/tests/async_test_client.py
+++ b/superdesk/tests/async_test_client.py
@@ -14,7 +14,8 @@
 from urllib.request import Request as U2Request
 from asgiref.wsgi import WsgiToAsgi
 from werkzeug.datastructures import Headers, Authorization
-from flask import Flask
+
+from superdesk.flask import Flask
 from quart import Response
 from quart.datastructures import FileStorage
 from quart.utils import decode_headers
diff --git a/superdesk/tests/asyncio.py b/superdesk/tests/asyncio.py
index 273289e9d9..09f6cc11ea 100644
--- a/superdesk/tests/asyncio.py
+++ b/superdesk/tests/asyncio.py
@@ -16,6 +16,7 @@
 
 from superdesk.factory.app import SuperdeskApp
 from superdesk.core.app import SuperdeskAsyncApp
+from superdesk.core import app as core_app
 
 from . import setup_config, setup
 from .async_test_client import AsyncTestClient
@@ -41,6 +42,8 @@ def setupApp(self):
         if getattr(self, "app", None):
             self.app.stop()
 
+        core_app._global_app = None
+
         self.app_config = setup_config(self.app_config)
         self.app = SuperdeskAsyncApp(MockWSGI(config=self.app_config))
         self.app.start()
diff --git a/superdesk/tests/environment.py b/superdesk/tests/environment.py
index 06785c9eb1..62bb31dc7b 100644
--- a/superdesk/tests/environment.py
+++ b/superdesk/tests/environment.py
@@ -9,9 +9,9 @@
 # at https://www.sourcefabric.org/superdesk/license
 
 import os
-from flask import json
 from copy import deepcopy
 
+from superdesk.core import json
 from apps.prepopulate.app_populate import AppPopulateCommand
 from apps.prepopulate.app_initialize import AppInitializeWithDataCommand
 from superdesk import tests
diff --git a/superdesk/tests/mocks/search_provider.py b/superdesk/tests/mocks/search_provider.py
index 0f832ab2f8..570f2405d5 100644
--- a/superdesk/tests/mocks/search_provider.py
+++ b/superdesk/tests/mocks/search_provider.py
@@ -1,6 +1,6 @@
-from flask import json
 from eve.utils import ParsedRequest
 
+from superdesk.core import json
 from superdesk import SearchProvider, register_search_provider, get_resource_service
 from superdesk.errors import SuperdeskApiError
 from apps.search_providers import allowed_search_providers
diff --git a/superdesk/tests/publish_steps.py b/superdesk/tests/publish_steps.py
index c15b8d4ee1..7583c07976 100644
--- a/superdesk/tests/publish_steps.py
+++ b/superdesk/tests/publish_steps.py
@@ -10,8 +10,8 @@
 
 
 import requests_mock
-from flask import json
 from behave import when, then  # @UnresolvedImport
+from superdesk.core import json
 from apps.publish.enqueue import enqueue_published
 from superdesk.tests.steps import assert_200, apply_placeholders, json_match, get_json_data, test_json, format_items
 from wooper.general import fail_and_print_body
diff --git a/superdesk/tests/steps.py b/superdesk/tests/steps.py
index 7ccd549b99..5f81b0cc36 100644
--- a/superdesk/tests/steps.py
+++ b/superdesk/tests/steps.py
@@ -31,8 +31,7 @@
 from bson import ObjectId
 from eve.io.mongo import MongoJSONEncoder
 from eve.methods.common import parse
-from eve.utils import ParsedRequest, config
-from flask import json, render_template_string
+from eve.utils import ParsedRequest
 from wooper.assertions import assert_in, assert_equal, assertions
 from wooper.general import fail_and_print_body, apply_path, parse_json_response, WooperAssertionError
 from wooper.expect import (
@@ -44,7 +43,10 @@
     expect_headers_contain,
 )
 
+from superdesk.resource_fields import ID_FIELD, LAST_UPDATED, DATE_CREATED, VERSION, ETAG
 import superdesk
+from superdesk.core import json
+from superdesk.flask import render_template_string
 from superdesk import tests
 from superdesk import get_resource_service, utc, etree
 from superdesk.io import get_feeding_service
@@ -2565,7 +2567,7 @@ def we_assert_content_api_item_is_not_published(context, item_id):
 @then("we ensure that archived schema extra fields are not present in duplicated item")
 def we_ensure_that_archived_schema_extra_fields_are_not_present(context):
     with context.app.test_request_context(context.app.config["URL_PREFIX"]):
-        eve_keys = set([config.ID_FIELD, config.LAST_UPDATED, config.DATE_CREATED, config.VERSION, config.ETAG])
+        eve_keys = set([ID_FIELD, LAST_UPDATED, DATE_CREATED, VERSION, ETAG])
         archived_schema_keys = set(context.app.config["DOMAIN"]["archived"]["schema"].keys()).union(eve_keys)
         archive_schema_keys = set(context.app.config["DOMAIN"]["archive"]["schema"].keys()).union(eve_keys)
         extra_fields = [key for key in archived_schema_keys if key not in archive_schema_keys]
diff --git a/superdesk/text_checkers/ai/imatrics.py b/superdesk/text_checkers/ai/imatrics.py
index 0fa78aebab..7dac1b4755 100644
--- a/superdesk/text_checkers/ai/imatrics.py
+++ b/superdesk/text_checkers/ai/imatrics.py
@@ -11,10 +11,11 @@
 import requests
 import superdesk
 
-from flask import current_app
 from collections import OrderedDict
 from typing import Optional, Dict, List, Tuple
 from urllib.parse import urljoin
+
+from superdesk.core import get_app_config
 from superdesk.text_utils import get_text
 from superdesk.errors import SuperdeskApiError
 from .base import AIServiceBase
@@ -68,23 +69,23 @@ def __init__(self, app):
 
     @property
     def base_url(self):
-        return current_app.config.get("IMATRICS_BASE_URL", os.environ.get("IMATRICS_BASE_URL"))
+        return get_app_config("IMATRICS_BASE_URL", os.environ.get("IMATRICS_BASE_URL"))
 
     @property
     def user(self):
-        return current_app.config.get("IMATRICS_USER", os.environ.get("IMATRICS_USER"))
+        return get_app_config("IMATRICS_USER", os.environ.get("IMATRICS_USER"))
 
     @property
     def key(self):
-        return current_app.config.get("IMATRICS_KEY", os.environ.get("IMATRICS_KEY"))
+        return get_app_config("IMATRICS_KEY", os.environ.get("IMATRICS_KEY"))
 
     @property
     def image_base_url(self):
-        return current_app.config.get("IMATRICS_IMAGE_BASE_URL", os.environ.get("IMATRICS_IMAGE_BASE_URL"))
+        return get_app_config("IMATRICS_IMAGE_BASE_URL", os.environ.get("IMATRICS_IMAGE_BASE_URL"))
 
     @property
     def image_key(self):
-        return current_app.config.get("IMATRICS_IMAGE_KEY", os.environ.get("IMATRICS_IMAGE_KEY"))
+        return get_app_config("IMATRICS_IMAGE_KEY", os.environ.get("IMATRICS_IMAGE_KEY"))
 
     def concept2tag_data(self, concept: dict) -> Tuple[dict, str]:
         """Convert an iMatrics concept to Superdesk friendly data"""
@@ -139,7 +140,7 @@ def concept2tag_data(self, concept: dict) -> Tuple[dict, str]:
         return tag_data, tag_type
 
     def find_subject(self, topic_id):
-        SCHEME_ID = current_app.config.get("IMATRICS_SUBJECT_SCHEME")
+        SCHEME_ID = get_app_config("IMATRICS_SUBJECT_SCHEME")
         if not SCHEME_ID:
             return
         if not self._subjects:
diff --git a/superdesk/text_utils.py b/superdesk/text_utils.py
index 8744749769..78f3084774 100644
--- a/superdesk/text_utils.py
+++ b/superdesk/text_utils.py
@@ -15,7 +15,6 @@
 from superdesk import etree as sd_etree
 from lxml import html as lxml_html
 from lxml.html import clean
-from flask import current_app as app
 import chardet
 import html
 
@@ -139,7 +138,9 @@ def get_reading_time(html, word_count=None, language=None):
     :return int: estimated number of minute to read the text
     """
     if language and language.startswith("ja"):
-        return round(len(re.sub(r"[\s]", "", get_text(html))) / app.config["JAPANESE_CHARACTERS_PER_MINUTE"])
+        from superdesk.core import get_app_config
+
+        return round(len(re.sub(r"[\s]", "", get_text(html))) / get_app_config("JAPANESE_CHARACTERS_PER_MINUTE"))
     if not word_count:
         word_count = get_word_count(html)
     reading_time_float = word_count / 250
diff --git a/superdesk/upload.py b/superdesk/upload.py
index 7465cd53a1..727efe2320 100644
--- a/superdesk/upload.py
+++ b/superdesk/upload.py
@@ -11,10 +11,9 @@
 """Upload module"""
 import logging
 
-from eve.utils import config
-from flask import request, current_app as app, redirect, make_response, jsonify
-
 import superdesk
+from superdesk.core import get_app_config, get_current_app
+from superdesk.flask import request, redirect, make_response, jsonify, Blueprint
 import json
 import os
 from superdesk.errors import SuperdeskApiError
@@ -35,7 +34,7 @@
 from .services import BaseService
 
 
-bp = superdesk.Blueprint("upload_raw", __name__)
+bp = Blueprint("upload_raw", __name__)
 logger = logging.getLogger(__name__)
 
 
@@ -60,6 +59,8 @@ def get_upload_as_data_uri_bc(media_id):
 @bp.route("/upload-raw/<path:media_id>", methods=["GET", "OPTIONS"])
 @blueprint_auth()
 def get_upload_as_data_uri(media_id):
+    app = get_current_app()
+
     if request.method == "OPTIONS":
         return handle_cors()
     if not request.args.get("resource"):
@@ -117,11 +118,11 @@ def upload_config_file():
 
 
 def url_for_media(media_id, mimetype=None):
-    return app.media.url_for_media(media_id, mimetype)
+    return get_current_app().media.url_for_media(media_id, mimetype)
 
 
 def upload_url(media_id, view="upload_raw.get_upload_as_data_uri"):
-    media_prefix = app.config.get("MEDIA_PREFIX").rstrip("/")
+    media_prefix = get_app_config("MEDIA_PREFIX").rstrip("/")
     return "%s/%s" % (media_prefix, media_id)
 
 
@@ -199,7 +200,7 @@ def crop_and_store_file(self, doc, content, filename, content_type):
         try:
             logger.debug("Going to save media file with %s " % file_name)
             out.seek(0)
-            file_id = app.media.put(
+            file_id = get_current_app().media.put(
                 out, filename=file_name, content_type=content_type, resource=self.datasource, metadata=metadata
             )
             doc["media"] = file_id
@@ -207,7 +208,7 @@ def crop_and_store_file(self, doc, content, filename, content_type):
             set_filemeta(doc, decode_metadata(metadata))
             inserted = [doc["media"]]
             file_type = content_type.split("/")[0]
-            rendition_spec = config.RENDITIONS["avatar"]
+            rendition_spec = get_app_config("RENDITIONS", {}).get("avatar")
             renditions = generate_renditions(
                 out, file_id, inserted, file_type, content_type, rendition_spec, url_for_media
             )
diff --git a/superdesk/users/services.py b/superdesk/users/services.py
index d132da81c1..51e75d3312 100644
--- a/superdesk/users/services.py
+++ b/superdesk/users/services.py
@@ -8,11 +8,12 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import logging
 from bson import ObjectId
-from flask import current_app as app
-from eve.utils import config
+
+from superdesk.resource_fields import ID_FIELD, VERSION, LAST_UPDATED
+from superdesk.core import get_app_config
+from superdesk.flask import g
 from superdesk.activity import add_activity, ACTIVITY_CREATE, ACTIVITY_UPDATE
 from superdesk.metadata.item import SIGN_OFF
 from superdesk.services import BaseService
@@ -75,9 +76,9 @@ def current_user_has_privilege(privilege):
 
     :param privilege
     """
-    if not getattr(flask.g, "user", None):  # no user - worker can do it
+    if not getattr(g, "user", None):  # no user - worker can do it
         return True
-    privileges = get_privileges(flask.g.user, getattr(flask.g, "role", None))
+    privileges = get_privileges(g.user, getattr(g, "role", None))
     return privileges.get(privilege, False)
 
 
@@ -103,7 +104,7 @@ def set_sign_off(user):
     """
 
     if SIGN_OFF not in user or user[SIGN_OFF] is None:
-        sign_off_mapping = app.config.get("SIGN_OFF_MAPPING", None)
+        sign_off_mapping = get_app_config("SIGN_OFF_MAPPING", None)
         if sign_off_mapping and sign_off_mapping in user:
             user[SIGN_OFF] = user[sign_off_mapping]
         elif SIGN_OFF in user and user[SIGN_OFF] is None:
@@ -119,7 +120,7 @@ def update_sign_off(updates):
     Update sign_off property on user if the mapped field is changed.
     """
 
-    sign_off_mapping = app.config.get("SIGN_OFF_MAPPING", None)
+    sign_off_mapping = get_app_config("SIGN_OFF_MAPPING", None)
     if sign_off_mapping and sign_off_mapping in updates:
         updates[SIGN_OFF] = updates[sign_off_mapping]
 
@@ -149,20 +150,20 @@ def __is_invalid_operation(self, user, updates, method):
         :return: error message if invalid.
         """
 
-        if "user" in flask.g:
+        if "user" in g:
             if method == "PATCH":
                 if "is_active" in updates or "is_enabled" in updates:
-                    if str(user["_id"]) == str(flask.g.user["_id"]):
+                    if str(user["_id"]) == str(g.user["_id"]):
                         return "Not allowed to change your own status"
                     elif not current_user_has_privilege("users"):
                         return "Insufficient privileges to change user state"
                 if (
-                    str(user["_id"]) != str(flask.g.user["_id"])
+                    str(user["_id"]) != str(g.user["_id"])
                     and user.get("session_preferences")
                     and is_sensitive_update(updates)
                 ):
                     return "Not allowed to change the role/user_type/privileges of a logged-in user"
-            elif method == "DELETE" and str(user["_id"]) == str(flask.g.user["_id"]):
+            elif method == "DELETE" and str(user["_id"]) == str(g.user["_id"]):
                 return "Not allowed to disable your own profile."
 
         if method == "PATCH" and is_sensitive_update(updates) and not current_user_has_privilege("users"):
@@ -327,7 +328,7 @@ def __clear_locked_items(self, user_id):
         if items_locked_by_user and items_locked_by_user.count():
             for item in items_locked_by_user:
                 # delete the item if nothing is saved so far
-                if item[config.VERSION] == 0 and item["state"] == "draft":
+                if item[VERSION] == 0 and item["state"] == "draft":
                     get_resource_service("archive").delete(lookup={"_id": item["_id"]})
                 else:
                     archive_service.update(item["_id"], doc_to_unlock, item)
@@ -364,7 +365,7 @@ def __update_user_defaults(self, doc):
         doc.setdefault("display_name", get_display_name(doc))
         doc.setdefault("is_enabled", doc.get("is_active"))
         doc.setdefault(SIGN_OFF, set_sign_off(doc))
-        doc["dateline_source"] = app.config["ORGANIZATION_NAME_ABBREVIATION"]
+        doc["dateline_source"] = get_app_config("ORGANIZATION_NAME_ABBREVIATION")
 
     def user_is_waiting_activation(self, doc):
         return doc.get("needs_activation", False)
@@ -456,16 +457,16 @@ def update_stage_visibility_for_user(self, user):
         if not self._updating_stage_visibility:
             return
         try:
-            logger.info("Updating Stage Visibility for user {}.".format(user.get(config.ID_FIELD)))
-            stages = self.get_invisible_stages_ids(user.get(config.ID_FIELD))
-            self.system_update(user.get(config.ID_FIELD), {"invisible_stages": stages}, user)
+            logger.info("Updating Stage Visibility for user {}.".format(user.get(ID_FIELD)))
+            stages = self.get_invisible_stages_ids(user.get(ID_FIELD))
+            self.system_update(user.get(ID_FIELD), {"invisible_stages": stages}, user)
             user["invisible_stages"] = stages
-            logger.info("Updated Stage Visibility for user {}.".format(user.get(config.ID_FIELD)))
+            logger.info("Updated Stage Visibility for user {}.".format(user.get(ID_FIELD)))
         except Exception:
-            logger.exception("Failed to update the stage visibility " "for user: {}".format(user.get(config.ID_FIELD)))
+            logger.exception("Failed to update the stage visibility " "for user: {}".format(user.get(ID_FIELD)))
 
     def stop_updating_stage_visibility(self):
-        if not app.config.get("SUPERDESK_TESTING"):
+        if not get_app_config("SUPERDESK_TESTING"):
             raise RuntimeError("Only allowed during testing")
         self._updating_stage_visibility = False
 
@@ -482,13 +483,13 @@ def on_create(self, docs):
         super().on_create(docs)
         for doc in docs:
             if doc.get("password", None) and not is_hashed(doc.get("password")):
-                doc["password"] = get_hash(doc.get("password"), app.config.get("BCRYPT_GENSALT_WORK_FACTOR", 12))
+                doc["password"] = get_hash(doc.get("password"), get_app_config("BCRYPT_GENSALT_WORK_FACTOR", 12))
 
     def on_created(self, docs):
         """Send email to user with reset password token."""
         super().on_created(docs)
         resetService = get_resource_service("reset_user_password")
-        activate_ttl = app.config["ACTIVATE_ACCOUNT_TOKEN_TIME_TO_LIVE"]
+        activate_ttl = get_app_config("ACTIVATE_ACCOUNT_TOKEN_TIME_TO_LIVE")
         for doc in docs:
             if self.user_is_waiting_activation(doc) and doc["user_type"] != "external":
                 tokenDoc = {"user": doc["_id"], "email": doc["email"]}
@@ -526,9 +527,9 @@ def update_password(self, user_id, password):
             raise UserInactiveError()
 
         updates = {
-            "password": get_hash(password, app.config.get("BCRYPT_GENSALT_WORK_FACTOR", 12)),
+            "password": get_hash(password, get_app_config("BCRYPT_GENSALT_WORK_FACTOR", 12)),
             "password_changed_on": utcnow(),
-            app.config["LAST_UPDATED"]: utcnow(),
+            LAST_UPDATED: utcnow(),
         }
 
         if self.user_is_waiting_activation(user):
@@ -551,8 +552,8 @@ def _process_external_data(self, _data, update=False):
             role = get_resource_service("roles").find_one(req=None, name=ignorecase_query(role_name))
             if role:
                 data["role"] = role["_id"]
-        if not update and (data.get("desk") or app.config.get("USER_EXTERNAL_DESK")):
-            desk_name = data.pop("desk", None) or app.config.get("USER_EXTERNAL_DESK")
+        if not update and (data.get("desk") or get_app_config("USER_EXTERNAL_DESK")):
+            desk_name = data.pop("desk", None) or get_app_config("USER_EXTERNAL_DESK")
             desk = get_resource_service("desks").find_one(req=None, name=ignorecase_query(desk_name))
             if desk:
                 data["desk"] = desk["_id"]
@@ -562,7 +563,7 @@ def _process_external_data(self, _data, update=False):
             data.pop("email", None)
             data.pop("username", None)
         elif data.get("username"):
-            if app.config.get("USER_EXTERNAL_USERNAME_STRIP_DOMAIN"):
+            if get_app_config("USER_EXTERNAL_USERNAME_STRIP_DOMAIN"):
                 data["username"] = data["username"].split("@")[0]
             data["username"] = data["username"].replace("@", ".")  # @ breaks mentioning
         validator = self._validator()
diff --git a/superdesk/utils.py b/superdesk/utils.py
index 5df558b1d6..6746ed7f58 100644
--- a/superdesk/utils.py
+++ b/superdesk/utils.py
@@ -8,6 +8,8 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
+from typing import Any, Dict, Iterator, Mapping, Optional, cast
+
 import os
 import re
 import sys
@@ -19,18 +21,17 @@
 import tempfile
 import string
 import logging
-import flask
 
 from uuid import uuid4
 from datetime import datetime, timezone, timedelta
 from bson import ObjectId
 from enum import Enum
 from importlib import import_module
-from eve.utils import config, document_etag
-from typing import Any, Dict, Iterator, Mapping, Optional
+from eve.utils import document_etag
+
+from superdesk.flask import make_response, abort as flask_abort
 from superdesk.default_settings import ELASTIC_DATE_FORMAT, ELASTIC_DATETIME_FORMAT
 from superdesk.text_utils import get_text
-from flask import current_app as app
 
 
 logger = logging.getLogger(__name__)
@@ -219,7 +220,9 @@ def json_serialize_datetime_objectId(obj):
     Serialize so that objectid and date are converted to appropriate format.
     """
     if isinstance(obj, datetime):
-        return str(datetime.strftime(obj, config.DATE_FORMAT))
+        from superdesk.core import get_app_config
+
+        return str(datetime.strftime(obj, get_app_config("DATE_FORMAT")))
 
     if isinstance(obj, ObjectId):
         return str(obj)
@@ -332,26 +335,34 @@ def __iter__(self) -> Iterator[str]:
 
 
 def jwt_encode(payload: Dict, expiry=None) -> str:
+    from superdesk.core import get_app_config
+
     if expiry:
         payload["exp"] = datetime.now(tz=timezone.utc) + timedelta(days=expiry)
-    payload["iss"] = app.config["APPLICATION_NAME"]
-    token = jwt.encode(payload, app.config["SECRET_KEY"], JWT_ALGO)
+    payload["iss"] = get_app_config("APPLICATION_NAME")
+    secret_key = cast(str, get_app_config("SECRET_KEY"))
+    token = jwt.encode(payload, secret_key, JWT_ALGO)
     if isinstance(token, str):
         return token
     return token.decode()
 
 
 def jwt_decode(token) -> Optional[Dict]:
+    from superdesk.core import get_app_config
+
+    secret_key = cast(str, get_app_config("SECRET_KEY"))
     try:
-        return jwt.decode(token, app.config["SECRET_KEY"], algorithms=[JWT_ALGO])
+        return jwt.decode(token, secret_key, algorithms=[JWT_ALGO])
     except jwt.InvalidSignatureError:
         return None
 
 
 def get_cors_headers(methods="*"):
+    from superdesk.core import get_app_config
+
     return [
-        ("Access-Control-Allow-Origin", app.config["CLIENT_URL"]),
-        ("Access-Control-Allow-Headers", ",".join(app.config["X_HEADERS"])),
+        ("Access-Control-Allow-Origin", get_app_config("CLIENT_URL")),
+        ("Access-Control-Allow-Headers", ",".join(get_app_config("X_HEADERS"))),
         ("Access-Control-Allow-Credentials", "true"),
         ("Access-Control-Allow-Methods", methods),
     ]
@@ -359,10 +370,10 @@ def get_cors_headers(methods="*"):
 
 def abort(status: int, message: str) -> None:
     """Will return a json response with proper CORS headers."""
-    response = flask.make_response({"message": message}, status)
+    response = make_response({"message": message}, status)
     for key, val in get_cors_headers():
         response.headers[key] = val
-    flask.abort(response)
+    flask_abort(response)
 
 
 def get_list_chunks(items, chunk_size=100):
diff --git a/superdesk/validator.py b/superdesk/validator.py
index 3127c8c4ad..66db9a713b 100644
--- a/superdesk/validator.py
+++ b/superdesk/validator.py
@@ -16,13 +16,14 @@
 from bson.errors import InvalidId
 from cerberus import errors
 from eve.io.mongo import Validator
-from eve.utils import config
 from eve.auth import auth_field_and_value
-from flask import current_app as app
 from flask_babel import _
 from eve.validation import SingleErrorAsStringErrorHandler
 from werkzeug.datastructures import FileStorage
 
+from superdesk.core import get_app_config, get_current_app
+from superdesk.resource_fields import ID_FIELD
+
 
 ERROR_PATTERN = "pattern"
 ERROR_UNIQUE = "unique"
@@ -158,9 +159,9 @@ def _validate_unique(self, unique, field, value):
     def _set_id_query(self, query):
         if self.document_id:
             try:
-                query[config.ID_FIELD] = {"$ne": ObjectId(self.document_id)}
+                query[ID_FIELD] = {"$ne": ObjectId(self.document_id)}
             except InvalidId:
-                query[config.ID_FIELD] = {"$ne": self.document_id}
+                query[ID_FIELD] = {"$ne": self.document_id}
 
     def _validate_iunique(self, unique, field, value):
         """
@@ -249,7 +250,7 @@ def _validate_unique_template(self, unique, field, value):
         query["template_name"] = re.compile("^{}$".format(re.escape(template_name.strip())), re.IGNORECASE)
 
         if self.document_id:
-            id_field = config.DOMAIN[self.resource]["id_field"]
+            id_field = get_app_config("DOMAIN", {})[self.resource]["id_field"]
             query[id_field] = {"$ne": self.document_id}
 
         if superdesk.get_resource_service(self.resource).find_one(req=None, **query):
@@ -268,8 +269,8 @@ def _validate_username_pattern(self, enabled, field, value):
         """
         if (
             enabled
-            and app.config.get("USER_USERNAME_PATTERN")
-            and not re.match(app.config["USER_USERNAME_PATTERN"], value or "")
+            and get_app_config("USER_USERNAME_PATTERN")
+            and not re.match(get_app_config("USER_USERNAME_PATTERN"), value or "")
         ):
             self._error(field, ERROR_PATTERN)
 
@@ -299,14 +300,14 @@ def _validate_content_type_single_item_type(self, checked, field, value):
         {'type': 'boolean'}
         """
         if checked and value not in {"text", None}:
-            if app.data.find_one("content_types", req=None, type=value) is not None:
+            if get_current_app().data.find_one("content_types", req=None, type=value) is not None:
                 self._error(field, _("Only 1 instance is allowed."))
 
     def _validate_scope(self, checked, field, value):
         """
         {"type": "boolean"}
         """
-        registered = app.config.get("item_scope") or {}
+        registered = get_app_config("item_scope") or {}
         if checked and value not in registered:
             self._error(field, _("Unknown scope %(name)s", name=value))
 
diff --git a/superdesk/vocabularies/keywords.py b/superdesk/vocabularies/keywords.py
index 9d59ee5051..a64004de89 100644
--- a/superdesk/vocabularies/keywords.py
+++ b/superdesk/vocabularies/keywords.py
@@ -1,7 +1,7 @@
-from flask import current_app as app
+from superdesk.core import get_app_config
 from superdesk import get_resource_service
 
 
 def add_missing_keywords(sender, item, **kwargs):
-    if app.config.get("KEYWORDS_ADD_MISSING_ON_PUBLISH") and item.get("keywords"):
+    if get_app_config("KEYWORDS_ADD_MISSING_ON_PUBLISH") and item.get("keywords"):
         get_resource_service("vocabularies").add_missing_keywords(item["keywords"], item.get("language"))
diff --git a/superdesk/vocabularies/vocabularies.py b/superdesk/vocabularies/vocabularies.py
index b861ecc394..c88ad55bd4 100644
--- a/superdesk/vocabularies/vocabularies.py
+++ b/superdesk/vocabularies/vocabularies.py
@@ -13,12 +13,12 @@
 import json
 from typing import List, Any, Dict, Optional
 
-from flask import request, current_app as app
-from eve.utils import config
 from eve.methods.common import serialize_value
 from flask_babel import _, lazy_gettext
-from superdesk.cache import cache
 
+from superdesk.resource_fields import ID_FIELD, ITEMS, LAST_UPDATED, DATE_CREATED
+from superdesk.flask import request
+from superdesk.cache import cache
 from superdesk import privilege, get_resource_service
 from superdesk.notification import push_notification
 from superdesk.resource import Resource
@@ -227,10 +227,8 @@ def on_created(self, docs):
 
     def on_replace(self, document, original):
         self._validate_items(document)
-        document[app.config["LAST_UPDATED"]] = utcnow()
-        document[app.config["DATE_CREATED"]] = (
-            original.get(app.config["DATE_CREATED"], utcnow()) if original else utcnow()
-        )
+        document[LAST_UPDATED] = utcnow()
+        document[DATE_CREATED] = original.get(DATE_CREATED, utcnow()) if original else utcnow()
         logger.info("updating vocabulary item: %s", document["_id"])
 
     def on_fetched(self, doc):
@@ -244,7 +242,7 @@ def on_fetched(self, doc):
             if where_clause.get("type") == "manageable":
                 return doc
 
-        for item in doc[config.ITEMS]:
+        for item in doc[ITEMS]:
             self._filter_inactive_vocabularies(item)
             self._cast_items(item)
 
@@ -336,7 +334,7 @@ def _send_notification(self, updated_vocabulary, event="vocabularies:updated"):
         push_notification(
             event,
             vocabulary=updated_vocabulary.get("display_name"),
-            user=str(user[config.ID_FIELD]) if user else None,
+            user=str(user[ID_FIELD]) if user else None,
             vocabulary_id=updated_vocabulary["_id"],
         )
 
diff --git a/superdesk/websockets_comms.py b/superdesk/websockets_comms.py
index 7f1c5e48d0..278e457682 100644
--- a/superdesk/websockets_comms.py
+++ b/superdesk/websockets_comms.py
@@ -22,12 +22,12 @@
 from typing import Dict, Set, Optional, Union
 from superdesk.types import WebsocketMessageData, WebsocketMessageFilterConditions
 
-from flask import json
 from datetime import timedelta, datetime
 from threading import Thread
 from kombu import Queue, Exchange, Connection
 from kombu.mixins import ConsumerMixin
 from kombu.pools import producers
+from superdesk.core import json
 from superdesk.utc import utcnow
 from superdesk.utils import get_random_string, json_serialize_datetime_objectId
 from superdesk.default_settings import WS_HEART_BEAT
diff --git a/tests/archive/archive_test.py b/tests/archive/archive_test.py
index 77d3cdfa31..faebd0bbe0 100644
--- a/tests/archive/archive_test.py
+++ b/tests/archive/archive_test.py
@@ -17,6 +17,7 @@
 from unittest import mock
 
 import superdesk
+from superdesk.core import json
 from superdesk.metadata.utils import generate_guid, GUID_TAG
 import superdesk.signals as signals
 from superdesk.errors import SuperdeskApiError
@@ -38,7 +39,6 @@
 
 from werkzeug.datastructures import ImmutableMultiDict
 from eve.utils import ParsedRequest
-from flask import json
 
 NOW = utcnow()
 
diff --git a/tests/auth/auth_test.py b/tests/auth/auth_test.py
index 54ca0df73d..ceef7d48b7 100644
--- a/tests/auth/auth_test.py
+++ b/tests/auth/auth_test.py
@@ -1,8 +1,8 @@
 from unittest.mock import patch
 from bson import ObjectId
 from datetime import timedelta
-from flask import session
 
+from superdesk.flask import session
 from superdesk.utc import utcnow
 from superdesk.tests import TestCase
 from apps.auth.session_purge import RemoveExpiredSessions
diff --git a/tests/auth/saml_test.py b/tests/auth/saml_test.py
index 318f7a3372..ed669a923f 100644
--- a/tests/auth/saml_test.py
+++ b/tests/auth/saml_test.py
@@ -1,5 +1,4 @@
-import flask
-import unittest
+from superdesk.flask import session
 import superdesk.tests as tests
 import superdesk.auth.saml as saml
 
@@ -40,8 +39,8 @@ def setUp(self):
     @patch("superdesk.auth.saml.init_saml_auth")
     def test_create_missing_user(self, init_mock):
         with self.app.test_client() as c:
-            flask.session[saml.SESSION_NAME_ID] = "foo.bar@example.com"
-            flask.session[saml.SESSION_USERDATA_KEY] = SAML_DATA
+            session[saml.SESSION_NAME_ID] = "foo.bar@example.com"
+            session[saml.SESSION_USERDATA_KEY] = SAML_DATA
 
             resp = saml.index()
             self.assertIn(ERROR, resp)
@@ -76,9 +75,9 @@ def test_create_missing_user(self, init_mock):
     def test_create_missing_user_missing_userdata(self, init_mock):
         with self.app.test_client() as c:
             # with missing data it can't work
-            flask.session[saml.SESSION_NAME_ID] = "foo.bar@example.com"
-            flask.session[saml.SESSION_USERDATA_KEY] = SAML_DATA.copy()
-            flask.session[saml.SESSION_USERDATA_KEY].update(
+            session[saml.SESSION_NAME_ID] = "foo.bar@example.com"
+            session[saml.SESSION_USERDATA_KEY] = SAML_DATA.copy()
+            session[saml.SESSION_USERDATA_KEY].update(
                 {
                     "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/name": [],
                 }
@@ -91,8 +90,8 @@ def test_create_missing_user_missing_userdata(self, init_mock):
     def test_handle_saml_name_id_not_email(self, init_mock):
         with self.app.test_client() as c:
             # with missing data it can't work
-            flask.session[saml.SESSION_NAME_ID] = "something_weird_like_guid"
-            flask.session[saml.SESSION_USERDATA_KEY] = SAML_DATA.copy()
+            session[saml.SESSION_NAME_ID] = "something_weird_like_guid"
+            session[saml.SESSION_USERDATA_KEY] = SAML_DATA.copy()
             with patch.dict(self.app.config, {"USER_EXTERNAL_CREATE": True}):
                 resp = saml.index()
             self.assertNotIn(ERROR, resp)
@@ -101,8 +100,8 @@ def test_handle_saml_name_id_not_email(self, init_mock):
     def test_update_user_data_when_it_changes(self, init_mock):
         with self.app.test_client() as c:
             # with missing data it can't work
-            flask.session[saml.SESSION_NAME_ID] = "nameId"
-            flask.session[saml.SESSION_USERDATA_KEY] = SAML_DATA.copy()
+            session[saml.SESSION_NAME_ID] = "nameId"
+            session[saml.SESSION_USERDATA_KEY] = SAML_DATA.copy()
             with patch.dict(
                 self.app.config,
                 {
@@ -126,7 +125,7 @@ def test_update_user_data_when_it_changes(self, init_mock):
                 user,
             )
 
-            flask.session[saml.SESSION_USERDATA_KEY].update(
+            session[saml.SESSION_USERDATA_KEY].update(
                 {
                     "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/givenname": ["John"],
                     "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/surname": ["Doe"],
diff --git a/tests/backend_meta_test.py b/tests/backend_meta_test.py
index 9e74da4c42..882386608f 100644
--- a/tests/backend_meta_test.py
+++ b/tests/backend_meta_test.py
@@ -6,19 +6,19 @@
 
 
 class BackendMetaTestCase(unittest.TestCase):
-    @patch.object(backend_meta, "config", object)
-    def test_get_commit_href(self):
+    @patch.object(backend_meta, "get_app_config", return_value={})
+    def test_get_commit_href(self, *args):
         href = backend_meta.BackendMetaService.get_commit_href("superdesk", "xyz")
         self.assertEqual(href, "https://github.com/superdesk/superdesk/commit/xyz")
 
-    @patch.object(backend_meta.config, "REPO_OVERRIDE", {"superdesk": "superdesk-test"}, create=True)
-    def test_get_commit_href_override(self):
+    @patch.object(backend_meta, "get_app_config", return_value={"superdesk": "superdesk-test"})
+    def test_get_commit_href_override(self, *args):
         href = backend_meta.BackendMetaService.get_commit_href("superdesk", "xyz")
         self.assertEqual(href, "https://github.com/superdesk/superdesk-test/commit/xyz")
 
-    @patch.object(backend_meta, "pkg_version")
-    def test_get_package_version_dev(self, pkg_version):
-        pkg_version.return_value = "2.0.123.dev123+g01abcde"
+    @patch.object(backend_meta, "pkg_version", return_value="2.0.123.dev123+g01abcde")
+    @patch.object(backend_meta, "get_app_config", return_value={})
+    def test_get_package_version_dev(self, *args):
         version_data = backend_meta.BackendMetaService.get_package_version("superdesk-core")
         self.assertEqual(
             version_data,
@@ -45,10 +45,13 @@ def test_get_package_version_stable(self, pkg_version):
             },
         )
 
-    @patch.object(backend_meta.BackendMetaService, "find_dir")
-    def test_get_nodemod_version_rev(self, find_dir):
-        fixtures_path = Path(__file__).parent / "fixtures" / "backend_meta" / "dev"
-        find_dir.return_value = fixtures_path
+    @patch.object(
+        backend_meta.BackendMetaService,
+        "find_dir",
+        return_value=Path(__file__).parent / "fixtures" / "backend_meta" / "dev",
+    )
+    @patch.object(backend_meta, "get_app_config", return_value={})
+    def test_get_nodemod_version_rev(self, *args):
         backend_meta_service = backend_meta.BackendMetaService()
         version_data = backend_meta_service.get_nodemod_version("superdesk-core", repo="superdesk-client-core")
         self.assertEqual(
diff --git a/tests/backend_test.py b/tests/backend_test.py
index 6dba57d9f5..173cd0949c 100644
--- a/tests/backend_test.py
+++ b/tests/backend_test.py
@@ -10,6 +10,8 @@
 
 from datetime import timedelta
 from unittest.mock import patch, ANY
+
+from superdesk.resource_fields import DATE_CREATED, LAST_UPDATED, ETAG
 from superdesk.tests import TestCase
 from superdesk import get_backend
 from superdesk.utc import utcnow
@@ -25,7 +27,7 @@ def test_update_change_etag(self):
             doc_old = backend.find_one("ingest", None, _id=ids[0])
             backend.update("ingest", ids[0], updates, doc_old)
             doc_new = backend.find_one("ingest", None, _id=ids[0])
-            self.assertNotEqual(doc_old[self.app.config["ETAG"]], doc_new[self.app.config["ETAG"]])
+            self.assertNotEqual(doc_old[ETAG], doc_new[ETAG])
 
     def test_check_default_dates_on_create(self):
         backend = get_backend()
@@ -33,24 +35,24 @@ def test_check_default_dates_on_create(self):
         with self.app.app_context():
             ids = backend.create("ingest", [item])
             doc = backend.find_one("ingest", None, _id=ids[0])
-            self.assertIn(self.app.config["DATE_CREATED"], doc)
-            self.assertIn(self.app.config["LAST_UPDATED"], doc)
+            self.assertIn(DATE_CREATED, doc)
+            self.assertIn(LAST_UPDATED, doc)
 
     def test_check_default_dates_on_update(self):
         backend = get_backend()
         past = (utcnow() + timedelta(seconds=-2)).replace(microsecond=0)
-        item = {"name": "foo", self.app.config["DATE_CREATED"]: past, self.app.config["LAST_UPDATED"]: past}
+        item = {"name": "foo", DATE_CREATED: past, LAST_UPDATED: past}
         updates = {"name": "bar"}
         with self.app.app_context():
             ids = backend.create("ingest", [item])
             doc_old = backend.find_one("ingest", None, _id=ids[0])
             backend.update("ingest", ids[0], updates, doc_old)
             doc_new = backend.find_one("ingest", None, _id=ids[0])
-            date1 = doc_old[self.app.config["LAST_UPDATED"]]
-            date2 = doc_new[self.app.config["LAST_UPDATED"]]
+            date1 = doc_old[LAST_UPDATED]
+            date2 = doc_new[LAST_UPDATED]
             self.assertGreaterEqual(date2, date1)
-            date1 = doc_old[self.app.config["DATE_CREATED"]]
-            date2 = doc_new[self.app.config["DATE_CREATED"]]
+            date1 = doc_old[DATE_CREATED]
+            date2 = doc_new[DATE_CREATED]
             self.assertEqual(date1, date2)
 
     @patch("superdesk.eve_backend._push_notification")
diff --git a/tests/config_test.py b/tests/config_test.py
index eaaa7a5908..34cf836a21 100644
--- a/tests/config_test.py
+++ b/tests/config_test.py
@@ -1,7 +1,8 @@
 import os
-import flask
 import unittest
 
+from superdesk.flask import Config
+
 
 class TestConfig:
     X = True
@@ -9,7 +10,7 @@ class TestConfig:
 
 class ConfigTestCase(unittest.TestCase):
     def test_config_update(self):
-        config = flask.Config(os.path.abspath(os.path.dirname(__file__)))
+        config = Config(os.path.abspath(os.path.dirname(__file__)))
         config.update({"X": True})
         self.assertTrue(config.get("X"))
         config.update({"X": False})
diff --git a/tests/content_api_test.py b/tests/content_api_test.py
index 58073bb5dc..71a07384a9 100644
--- a/tests/content_api_test.py
+++ b/tests/content_api_test.py
@@ -5,8 +5,10 @@
 
 from bson import ObjectId
 from copy import copy
-from flask import json, request
 from datetime import timedelta
+
+from superdesk.core import json
+from superdesk.flask import request
 from superdesk.tests import TestCase
 from superdesk.utc import utcnow
 from content_api.publish import MONGO_PREFIX
diff --git a/tests/editor_utils_test.py b/tests/editor_utils_test.py
index 7bb2ddd418..48dd00f0cf 100644
--- a/tests/editor_utils_test.py
+++ b/tests/editor_utils_test.py
@@ -12,9 +12,10 @@
 import json
 import uuid
 import unittest
-import flask
 import lxml.etree
 import pathlib
+
+from superdesk.flask import Flask
 import superdesk.editor_utils as editor_utils
 
 from superdesk.editor_utils import Editor3Content
@@ -24,7 +25,7 @@ class Editor3TestCase(unittest.TestCase):
     maxDiff = None
 
     def setUp(self):
-        self.app = flask.Flask(__name__)
+        self.app = Flask(__name__)
         self.app.app_context().push()
         super().setUp()
         if "EMBED_PRE_PROCESS" in self.app.config:
diff --git a/tests/emails/superdesk_message_test.py b/tests/emails/superdesk_message_test.py
index eee39e2374..6fd6028ca4 100644
--- a/tests/emails/superdesk_message_test.py
+++ b/tests/emails/superdesk_message_test.py
@@ -1,10 +1,9 @@
 import unittest
-
-import flask
 import flask_mail
 from email.parser import Parser
 from email.header import decode_header
 
+from superdesk.flask import Flask
 from superdesk.emails import SuperdeskMessage
 
 
@@ -12,7 +11,7 @@ class SuperdeskMessageTestCase(unittest.TestCase):
     subject = "темы для выделения выделения выделения"
 
     def test_unicode_subject(self):
-        app = flask.Flask(__name__)
+        app = Flask(__name__)
         flask_mail.Mail(app)
         with app.app_context():
             msg = SuperdeskMessage(self.subject, sender="root", body="test")
diff --git a/tests/http_proxy_tests.py b/tests/http_proxy_tests.py
index 7a4f713742..846b091b5e 100644
--- a/tests/http_proxy_tests.py
+++ b/tests/http_proxy_tests.py
@@ -4,8 +4,9 @@
 from urllib.parse import urlencode
 import requests
 import requests_mock
-from flask import json, url_for, Response as FlaskResponse
 
+from superdesk.core import json
+from superdesk.flask import url_for, Response as FlaskResponse
 from superdesk import __version__ as superdesk_version
 from superdesk.tests import TestCase, setup_auth_user
 from superdesk.http_proxy import HTTPProxy, register_http_proxy
@@ -70,7 +71,6 @@ def test_proxies_request_to_external_service(self, mock_request):
 
     def test_http_methods(self, mock_request):
         mock_request.request(requests_mock.ANY, requests_mock.ANY, status_code=200)
-        self.setupAuthUser()
         second_proxy = HTTPProxy(
             "second_proxy",
             internal_url="test/proxy2",
@@ -78,6 +78,7 @@ def test_http_methods(self, mock_request):
             http_methods=["GET", "DELETE"],
         )
         register_http_proxy(self.app, second_proxy)
+        self.setupAuthUser()
 
         # Test already registered proxy, allowing all methods
         self.assertEqual(self.client.options("/api/test/proxy", headers=self.headers).status_code, 200)
@@ -114,9 +115,9 @@ def test_passes_on_errors_from_external_service(self, mock_request):
     def test_supports_multiple_proxies(self, mock_request):
         mock_request.get("http://localhost:5001/api", status_code=200)
         mock_request.get("http://localhost:5025/api/v3", status_code=201)
-        self.setupAuthUser()
         third_proxy = HTTPProxy("third_proxy", internal_url="test/proxy3", external_url="http://localhost:5025/api/v3")
         register_http_proxy(self.app, third_proxy)
+        self.setupAuthUser()
 
         response = self.client.get("/api/test/proxy", headers=self.headers)
         self.assertEqual(response.status_code, 200)
diff --git a/tests/io/feed_parsers/newsml2_parser_test.py b/tests/io/feed_parsers/newsml2_parser_test.py
index 4fbc740a4f..01af6b50fe 100644
--- a/tests/io/feed_parsers/newsml2_parser_test.py
+++ b/tests/io/feed_parsers/newsml2_parser_test.py
@@ -11,16 +11,17 @@
 
 import os
 import unittest
-import flask
 
 from xml.etree import ElementTree
+
+from superdesk.flask import Flask
 from superdesk.io.feed_parsers.newsml_2_0 import NewsMLTwoFeedParser
 from superdesk.io.subjectcodes import init_app as init_subjects
 
 
 class BaseNewMLTwoTestCase(unittest.TestCase):
     def setUp(self):
-        app = flask.Flask(__name__)
+        app = Flask(__name__)
         app.api_prefix = "/api"
         init_subjects(app)
         dirname = os.path.dirname(os.path.realpath(__file__))
diff --git a/tests/io/feed_parsers/nitf_tests.py b/tests/io/feed_parsers/nitf_tests.py
index f5c0c40a24..2b69e121ea 100644
--- a/tests/io/feed_parsers/nitf_tests.py
+++ b/tests/io/feed_parsers/nitf_tests.py
@@ -10,9 +10,9 @@
 
 
 import os
-from superdesk import config
-from superdesk.tests import TestCase
 
+from superdesk.core import get_current_app
+from superdesk.tests import TestCase
 from superdesk.etree import etree
 from superdesk.io.feed_parsers.nitf import NITFFeedParser
 
@@ -233,7 +233,7 @@ class MappingTestCase(TestCase):
     }
 
     def setUp(self):
-        config.NITF_MAPPING = self.mapping
+        get_current_app().config["NITF_MAPPING"] = self.mapping
         dirname = os.path.dirname(os.path.realpath(__file__))
         fixture = os.path.normpath(os.path.join(dirname, "../fixtures", self.filename))
         provider = {"name": "Test"}
@@ -251,7 +251,7 @@ def test_update_and_hook(self):
         self.assertIn("TEST OK", subjects)
 
     def tearDown(self):
-        del config.NITF_MAPPING
+        get_current_app().config.pop("NITF_MAPPING", None)
 
 
 class HandleInvalidFieldsTestCase(NITFTestCase):
diff --git a/tests/io/update_ingest_tests.py b/tests/io/update_ingest_tests.py
index 8acc76f8be..dfd5292a98 100644
--- a/tests/io/update_ingest_tests.py
+++ b/tests/io/update_ingest_tests.py
@@ -18,6 +18,7 @@
 from eve.utils import ParsedRequest
 from nose.tools import assert_raises
 
+from superdesk.flask import g
 from superdesk import get_resource_service, etree
 from superdesk.utc import utcnow
 from superdesk.errors import SuperdeskApiError, ProviderError
@@ -36,7 +37,6 @@
     get_is_idle,
     ingest_item,
 )
-import flask
 
 reuters_guid = "tag_reuters.com_2014_newsml_KBN0FL0NM:10"
 
@@ -682,7 +682,7 @@ def test_edited_planning_item_is_not_update(self):
                 "all_day": True,
             },
         }
-        flask.g.user = {"_id": "current_user_id"}
+        g.user = {"_id": "current_user_id"}
 
         provider = {
             "_id": "asdnjsandkajsdnjkasnd",
@@ -734,7 +734,7 @@ def test_unpublished_event_is_not_update(self):
                 "all_day": True,
             },
         }
-        flask.g.user = {"_id": "current_user_id"}
+        g.user = {"_id": "current_user_id"}
 
         provider = {
             "_id": "asdnjsandkajsdnjkasnd",
diff --git a/tests/media/media_editor_test.py b/tests/media/media_editor_test.py
index 5182d429df..89c3566b63 100644
--- a/tests/media/media_editor_test.py
+++ b/tests/media/media_editor_test.py
@@ -10,6 +10,8 @@
 
 import mimetypes
 from datetime import datetime
+
+from superdesk.core import get_current_app
 from superdesk.tests import TestCase
 from superdesk import get_resource_service
 from superdesk.media.media_operations import process_file_from_stream
@@ -20,7 +22,6 @@
 from superdesk.metadata import utils
 from superdesk.upload import url_for_media
 from superdesk import filemeta
-from flask import current_app as app
 from PIL import Image
 import json
 
@@ -44,7 +45,7 @@ def setUp(self):
         with open(image_path, "rb") as f:
             _, content_type, file_metadata = process_file_from_stream(f, content_type=content_type)
             f.seek(0)
-            file_id = app.media.put(f, filename=self.filename, content_type=content_type, metadata=file_metadata)
+            file_id = self.app.media.put(f, filename=self.filename, content_type=content_type, metadata=file_metadata)
             filemeta.set_filemeta(self.item, file_metadata)
             f.seek(0)
             rendition_spec = get_renditions_spec()
@@ -84,7 +85,7 @@ def do_edit(self, edit, item=None):
 
     def image(self, item, rendition):
         media_id = item["renditions"][rendition]["media"]
-        media = app.media.get(media_id)
+        media = get_current_app().media.get(media_id)
         return Image.open(media)
 
 
diff --git a/tests/media/media_operations_test.py b/tests/media/media_operations_test.py
index f0f3715285..871a29ab5a 100644
--- a/tests/media/media_operations_test.py
+++ b/tests/media/media_operations_test.py
@@ -1,14 +1,14 @@
 import io
-import flask
 import unittest
 import requests_mock
 
+from superdesk.flask import Flask
 from superdesk.media.media_operations import download_file_from_url
 
 
 class MediaOperationsTestCase(unittest.TestCase):
     def test_download_file_from_url_relative(self):
-        app = flask.Flask(__name__)
+        app = Flask(__name__)
         app.config["SERVER_NAME"] = "localhost"
         body = io.BytesIO(b"data")
         with app.app_context():
diff --git a/tests/pagination_test.py b/tests/pagination_test.py
index 0c350be8d4..e15de0d6b0 100644
--- a/tests/pagination_test.py
+++ b/tests/pagination_test.py
@@ -1,4 +1,4 @@
-from flask import json
+from superdesk.core import json
 from superdesk.tests import TestCase, setup_auth_user
 
 
diff --git a/tests/prepopulate/app_initialization_test.py b/tests/prepopulate/app_initialization_test.py
index f23565a4d5..bb787704e5 100644
--- a/tests/prepopulate/app_initialization_test.py
+++ b/tests/prepopulate/app_initialization_test.py
@@ -1,9 +1,9 @@
 import os
 import shutil
 import tempfile
-from flask import json
 from unittest.mock import patch
 
+from superdesk.core import json
 from apps.prepopulate.app_initialize import AppInitializeWithDataCommand
 from apps.prepopulate.app_scaffold_data import AppScaffoldDataCommand
 from apps.prepopulate.app_initialize import fillEnvironmentVariables
diff --git a/tests/publish/get_queue_items_tests.py b/tests/publish/get_queue_items_tests.py
index 9f6ac673c7..412db35262 100644
--- a/tests/publish/get_queue_items_tests.py
+++ b/tests/publish/get_queue_items_tests.py
@@ -162,8 +162,9 @@ def test_enqueue_dict(self, *mocks):
         # because the tuple should be in (published_seq_num, formatted_item) format
         self.assertFalse(fake_post.called)
 
-    @mock.patch.object(publish_queue, "app")
-    def test_delete_encoded_item(self, fake_app):
+    @mock.patch.object(publish_queue, "get_current_app", return_value=MagicMock())
+    def test_delete_encoded_item(self, get_fake_app):
+        fake_app = get_fake_app()
         fake_storage = fake_app.storage
         fake_storage_delete = fake_storage.delete
         service = publish_queue.PublishQueueService(backend=MagicMock())
diff --git a/tests/publish/transmitters/http_push_transmitter_tests.py b/tests/publish/transmitters/http_push_transmitter_tests.py
index f995fc9b05..8178be18f9 100644
--- a/tests/publish/transmitters/http_push_transmitter_tests.py
+++ b/tests/publish/transmitters/http_push_transmitter_tests.py
@@ -13,10 +13,10 @@
 import os
 import hmac
 import json
-import flask
 import unittest
 import requests
 
+from superdesk.flask import Flask
 from superdesk.publish import SUBSCRIBER_TYPES
 from superdesk.publish.transmitters.http_push import HTTPPushService
 
@@ -107,7 +107,7 @@ def setUp(self):
         }
 
         self.destination = self.item.get("destination", {})
-        self.app = flask.Flask(__name__)
+        self.app = Flask(__name__)
 
     def is_item_published(self, item_id):
         """Return True if the item was published, False otherwise.
@@ -199,11 +199,12 @@ def test_server_publish_error_thrown(self, fake_post, fake_notifiers):
             with self.app.app_context():
                 service._push_item(self.destination, json.dumps(self.item))
 
-    @mock.patch("superdesk.publish.transmitters.http_push.app")
+    @mock.patch("superdesk.publish.transmitters.http_push.get_current_app", return_value=mock.MagicMock())
+    @mock.patch("superdesk.publish.transmitters.http_push.get_app_config", return_value=(5, 30))
     @mock.patch("superdesk.publish.transmitters.http_push.requests.Session.send", return_value=CreatedResponse)
     @mock.patch("requests.get", return_value=NotFoundResponse)
-    def test_push_associated_assets(self, get_mock, send_mock, app_mock):
-        app_mock.config = {}
+    def test_push_associated_assets(self, get_mock, send_mock, get_config_mock, get_app_mock):
+        app_mock = get_app_mock()
         app_mock.media.get.return_value = TestMedia(b"bin")
 
         dest = {"config": {"assets_url": "http://example.com"}}
@@ -229,11 +230,12 @@ def test_push_associated_assets(self, get_mock, send_mock, app_mock):
         for media in images:
             get_mock.assert_any_call("http://example.com/%s" % media, timeout=(5, 30))
 
-    @mock.patch("superdesk.publish.transmitters.http_push.app")
+    @mock.patch("superdesk.publish.transmitters.http_push.get_current_app", return_value=mock.MagicMock())
+    @mock.patch("superdesk.publish.transmitters.http_push.get_app_config", return_value=(5, 30))
     @mock.patch("superdesk.publish.transmitters.http_push.requests.Session.send", return_value=CreatedResponse)
     @mock.patch("requests.get", return_value=NotFoundResponse)
-    def test_push_attachments(self, get_mock, send_mock, app_mock):
-        app_mock.config = {}
+    def test_push_attachments(self, get_mock, send_mock, get_config_mock, get_app_mock):
+        app_mock = get_app_mock()
         app_mock.media.get.return_value = TestMedia(b"bin")
 
         dest = {"config": {"assets_url": "http://example.com", "secret_token": "foo"}}
@@ -260,11 +262,11 @@ def test_push_attachments(self, get_mock, send_mock, app_mock):
             request.headers["x-superdesk-signature"], "sha1=%s" % hmac.new(b"foo", request.body, "sha1").hexdigest()
         )
 
-    @mock.patch("superdesk.publish.transmitters.http_push.app")
+    @mock.patch("superdesk.publish.transmitters.http_push.get_current_app", return_value=mock.MagicMock())
+    @mock.patch("superdesk.publish.transmitters.http_push.get_app_config", return_value=(5, 30))
     @mock.patch("superdesk.publish.transmitters.http_push.requests.Session.send", return_value=CreatedResponse)
     @mock.patch("requests.get", return_value=NotFoundResponse)
-    def test_push_binaries(self, get_mock, send_mock, app_mock):
-        app_mock.config = {}
+    def test_push_binaries(self, get_mock, send_mock, *args):
         media = TestMedia(b"content")
         dest = {"config": {"assets_url": "http://example.com", "secret_token": "foo"}}
         service = HTTPPushService()
diff --git a/tests/publish/transmitters/imatrics_test.py b/tests/publish/transmitters/imatrics_test.py
index 0a0ef58273..d63c7abb2e 100644
--- a/tests/publish/transmitters/imatrics_test.py
+++ b/tests/publish/transmitters/imatrics_test.py
@@ -1,5 +1,4 @@
 import pytz
-import flask
 import base64
 import unittest
 import responses
@@ -8,6 +7,9 @@
 from datetime import datetime, timedelta
 from unittest.mock import patch
 from tests.mock import resources
+
+from superdesk.core import json
+from superdesk.flask import Flask
 from superdesk.json_utils import SuperdeskJSONEncoder
 from superdesk.publish.formatters.imatrics import IMatricsFormatter
 from superdesk.publish.transmitters.imatrics import IMatricsTransmitter
@@ -15,7 +17,7 @@
 
 class IMatricsTransmitterTestCase(unittest.TestCase):
     def setUp(self):
-        self.app = flask.Flask(__name__)
+        self.app = Flask(__name__)
         self.app.config.update(
             {
                 "IMATRICS_BASE_URL": "https://webdemo.imatrics.com/api/",
@@ -170,5 +172,5 @@ def test_publish_article(self):
                             "another one",
                         ],
                     },
-                    flask.json.loads(responses.calls[0].request.body.decode()),
+                    json.loads(responses.calls[0].request.body.decode()),
                 )
diff --git a/tests/push_notification/push_content_test.py b/tests/push_notification/push_content_test.py
index 0b5a546815..2c3b266cd8 100644
--- a/tests/push_notification/push_content_test.py
+++ b/tests/push_notification/push_content_test.py
@@ -1,12 +1,12 @@
-import flask
 import unittest.mock
 
+from superdesk.flask import Flask
 from apps.content import push_content_notification
 
 
 class PushContentNotificationTestCase(unittest.TestCase):
     def setUp(self):
-        self.app = flask.Flask(__name__)
+        self.app = Flask(__name__)
         self.ctx = self.app.app_context()
         self.ctx.push()
 
diff --git a/tests/sentry_tests.py b/tests/sentry_tests.py
index 9114685973..c5b0cb8a05 100644
--- a/tests/sentry_tests.py
+++ b/tests/sentry_tests.py
@@ -1,11 +1,12 @@
-import flask
 import unittest
+
+from superdesk.flask import Flask
 from superdesk.factory.sentry import SuperdeskSentry
 
 
 class SentryTestCase(unittest.TestCase):
     def test_sentry_not_configured(self):
-        app = flask.Flask(__name__)
+        app = Flask(__name__)
         sentry = SuperdeskSentry(app)
         self.assertIsNone(sentry.captureMessage("test"))
         self.assertIsNone(sentry.captureException())
diff --git a/tests/storage/proxy_test.py b/tests/storage/proxy_test.py
index 9737fc30e5..3ab3fcb083 100644
--- a/tests/storage/proxy_test.py
+++ b/tests/storage/proxy_test.py
@@ -1,9 +1,9 @@
 import io
 import bson
-import flask
 import unittest
-
 from unittest.mock import create_autospec, patch
+
+from superdesk.flask import Flask
 from superdesk.storage.proxy import ProxyMediaStorage
 from superdesk.storage.desk_media_storage import SuperdeskGridFSMediaStorage
 from superdesk.storage.amazon_media_storage import AmazonMediaStorage
@@ -11,7 +11,7 @@
 
 class SuperdeskMediaStorageTestCase(unittest.TestCase):
     def setUp(self):
-        self.app = flask.Flask(__name__)
+        self.app = Flask(__name__)
         self.app.config.update(
             dict(
                 AMAZON_ACCESS_KEY_ID="foo",
diff --git a/tests/subjectcodes_test.py b/tests/subjectcodes_test.py
index 72689eb80e..8d7e12cf01 100644
--- a/tests/subjectcodes_test.py
+++ b/tests/subjectcodes_test.py
@@ -1,13 +1,12 @@
-import flask
 import unittest
 
-
+from superdesk.flask import Flask
 from superdesk.io.subjectcodes import init_app
 
 
 class SubjectsTestCase(unittest.TestCase):
     def test_app_subjects(self):
-        app = flask.Flask(__name__)
+        app = Flask(__name__)
         app.api_prefix = "/test"
         init_app(app)
         with app.app_context():
diff --git a/tests/templates/filters_test.py b/tests/templates/filters_test.py
index d24600b7e6..401fa7c4ee 100644
--- a/tests/templates/filters_test.py
+++ b/tests/templates/filters_test.py
@@ -8,7 +8,7 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-from flask import render_template_string
+from superdesk.flask import render_template_string
 from superdesk.tests import TestCase
 import datetime
 
diff --git a/tests/templates/render_templates_test.py b/tests/templates/render_templates_test.py
index a76c281c1e..39c938b2c8 100644
--- a/tests/templates/render_templates_test.py
+++ b/tests/templates/render_templates_test.py
@@ -8,19 +8,18 @@
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
 
-import flask
 import unittest
-
 from unittest.mock import patch
 from datetime import datetime, timedelta
 
+from superdesk.flask import Flask
 from apps.templates.filters import format_datetime_filter
 from apps.templates.content_templates import get_item_from_template, render_content_template
 
 
 class RenderTemplateTestCase(unittest.TestCase):
     def setUp(self):
-        self.app = flask.Flask(__name__)
+        self.app = Flask(__name__)
         self.app.app_context().push()
         self.app.jinja_env.filters["format_datetime"] = format_datetime_filter
 
diff --git a/tests/text_checkers/spellcheckers/default_test.py b/tests/text_checkers/spellcheckers/default_test.py
index 3646ce81a6..c03b5a9cbd 100644
--- a/tests/text_checkers/spellcheckers/default_test.py
+++ b/tests/text_checkers/spellcheckers/default_test.py
@@ -10,8 +10,9 @@
 
 from functools import partial
 from unittest.mock import MagicMock, patch
-from flask import Flask
 from .utils import mock_dictionaries
+
+from superdesk.flask import Flask
 from superdesk.tests import TestCase
 from superdesk.text_checkers import tools
 from superdesk.text_checkers import spellcheckers
diff --git a/tests/text_checkers/spellcheckers/grammalecte_test.py b/tests/text_checkers/spellcheckers/grammalecte_test.py
index e5f8e3ad49..5cad84c326 100644
--- a/tests/text_checkers/spellcheckers/grammalecte_test.py
+++ b/tests/text_checkers/spellcheckers/grammalecte_test.py
@@ -10,7 +10,8 @@
 
 from urllib.parse import urljoin
 import responses
-from flask import Flask
+
+from superdesk.flask import Flask
 from superdesk.tests import TestCase
 from superdesk.text_checkers import tools
 from superdesk.text_checkers import spellcheckers
diff --git a/tests/text_checkers/spellcheckers/leuven_dutch_test.py b/tests/text_checkers/spellcheckers/leuven_dutch_test.py
index 6bddcaf4bd..52bf0b8ee3 100644
--- a/tests/text_checkers/spellcheckers/leuven_dutch_test.py
+++ b/tests/text_checkers/spellcheckers/leuven_dutch_test.py
@@ -12,7 +12,8 @@
 from unittest.mock import MagicMock, patch
 from .utils import mock_dictionaries
 import responses
-from flask import Flask
+
+from superdesk.flask import Flask
 from superdesk.tests import TestCase
 from superdesk.text_checkers import tools
 from superdesk.text_checkers.spellcheckers.base import registered_spellcheckers, SpellcheckerBase
diff --git a/tests/users/privileges_test.py b/tests/users/privileges_test.py
index f1d5e2bcf9..ea385a0825 100644
--- a/tests/users/privileges_test.py
+++ b/tests/users/privileges_test.py
@@ -7,8 +7,8 @@
 # For the full copyright and license information, please see the
 # AUTHORS and LICENSE files distributed with this source code, or
 # at https://www.sourcefabric.org/superdesk/license
-import flask
 
+from superdesk.flask import g
 from superdesk import get_backend
 from superdesk.tests import TestCase
 from superdesk.users.services import UsersService, compare_preferences
@@ -35,7 +35,7 @@ def test_user_has_merged_privileges(self):
 
     def test_user_with_privilege_can_change_his_role(self):
         with self.app.app_context():
-            flask.g.user = {"user_type": "administrator"}
+            g.user = {"user_type": "administrator"}
             ids = self.service.create([{"name": "user", "user_type": "administrator"}])
             doc_old = self.service.find_one(None, _id=ids[0])
             self.service.update(ids[0], {"role": "1"}, doc_old)
diff --git a/tests/video_edit/video_edit_test.py b/tests/video_edit/video_edit_test.py
index 5f75b2c88c..ef42f91bde 100644
--- a/tests/video_edit/video_edit_test.py
+++ b/tests/video_edit/video_edit_test.py
@@ -18,7 +18,7 @@
 from werkzeug.datastructures import FileStorage
 
 import superdesk
-from superdesk import config
+from superdesk.resource_fields import ID_FIELD
 from superdesk.errors import SuperdeskApiError
 from superdesk.tests import TestCase
 
@@ -77,13 +77,13 @@ def test_upload_video(self):
         )
 
     def test_missing_video_id(self):
-        doc = {"item": {config.ID_FIELD: "123", "renditions": {"original": {}}}}
+        doc = {"item": {ID_FIELD: "123", "renditions": {"original": {}}}}
         with self.assertRaises(SuperdeskApiError) as ex:
             self.video_edit.create([doc])
         self.assertEqual(ex.exception.message, '"video_editor_id" is required')
 
     def test_missing_action(self):
-        doc = {"item": {config.ID_FIELD: "123", "renditions": {"original": {"video_editor_id": "video_id"}}}}
+        doc = {"item": {ID_FIELD: "123", "renditions": {"original": {"video_editor_id": "video_id"}}}}
         with self.assertRaises(SuperdeskApiError) as ex:
             self.video_edit.create([doc])
         self.assertEqual(ex.exception.message, '"capture" or "edit" is required')
@@ -92,7 +92,7 @@ def test_edit_video(self):
         project_data = copy.deepcopy(self.project_data)
         doc = {
             "item": {
-                config.ID_FIELD: self.item[config.ID_FIELD],
+                ID_FIELD: self.item[ID_FIELD],
                 "renditions": self.item["renditions"],
             },
             "edit": {"crop": "0,0,200,500", "rotate": -90, "trim": "5,15"},
@@ -118,7 +118,7 @@ def test_edit_video(self):
     def test_capture_thumbnail(self):
         doc = {
             "item": {
-                config.ID_FIELD: self.item[config.ID_FIELD],
+                ID_FIELD: self.item[ID_FIELD],
                 "renditions": self.item["renditions"],
             },
             "capture": {"crop": "0,0,200,500", "rotate": -90, "trim": "5,10"},
@@ -173,5 +173,5 @@ def test_capture_timeline(self):
             req = Req()
             setattr(req, "args", {"action": "timeline"})
             res = self.video_edit.find_one(req, _id=self.item["_id"])
-            self.assertEqual(res[config.ID_FIELD], video_info["_id"])
+            self.assertEqual(res[ID_FIELD], video_info["_id"])
             self.assertTrue(res["processing"])