From dda6b0e514b00199638f31ddff06638e4c53c220 Mon Sep 17 00:00:00 2001
From: Chase Yakaboski
Date: Tue, 8 Mar 2022 06:50:46 -0500
Subject: [PATCH] Saving work and chp_look_up should work under current build.

---
 chp_api/{apis => apis-bk}/__init__.py          |   0
 .../{apis => apis-bk}/chp_core/__init__.py     |   0
 chp_api/{apis => apis-bk}/chp_core/admin.py    |   0
 chp_api/{apis => apis-bk}/chp_core/apps.py     |   0
 chp_api/{apis => apis-bk}/chp_core/hosts       |   0
 .../chp_core/migrations/__init__.py            |   0
 chp_api/{apis => apis-bk}/chp_core/models.py   |   0
 .../{apis => apis-bk}/chp_core/serializers.py  |   0
 chp_api/{apis => apis-bk}/chp_core/tests.py    |   0
 chp_api/{apis => apis-bk}/chp_core/urls.py     |   0
 chp_api/{apis => apis-bk}/chp_core/views.py    |   0
 chp_api/chp_api/settings/base.py               |  18 +-
 chp_api/chp_api/urls.py                        |  20 +-
 chp_api/chp_api/{views.py => views.py.bk}      |   0
 chp_api/dispatcher/__init__.py                 | 278 +----------------
 chp_api/dispatcher/base.py                     | 279 ++++++++++++++++++
 chp_api/dispatcher/serializers.py              |  21 ++
 chp_api/dispatcher/urls.py                     |   3 +-
 chp_api/dispatcher/views.py                    |   8 +-
 .../mixins/chp_core_query_processor_mixin.py   |   0
 .../trapi_query_processor.py                   |   0
 unittests/test_chp_core_api.py                 |   3 +-
 22 files changed, 323 insertions(+), 307 deletions(-)
 rename chp_api/{apis => apis-bk}/__init__.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/__init__.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/admin.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/apps.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/hosts (100%)
 rename chp_api/{apis => apis-bk}/chp_core/migrations/__init__.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/models.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/serializers.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/tests.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/urls.py (100%)
 rename chp_api/{apis => apis-bk}/chp_core/views.py (100%)
 rename chp_api/chp_api/{views.py => views.py.bk} (100%)
 create mode 100644 chp_api/dispatcher/base.py
 create mode 100644 chp_api/dispatcher/serializers.py
 rename chp_api/{utils => utils-bk}/mixins/chp_core_query_processor_mixin.py (100%)
 rename chp_api/{utils => utils-bk}/trapi_query_processor.py (100%)

diff --git a/chp_api/apis/__init__.py b/chp_api/apis-bk/__init__.py
similarity index 100%
rename from chp_api/apis/__init__.py
rename to chp_api/apis-bk/__init__.py
diff --git a/chp_api/apis/chp_core/__init__.py b/chp_api/apis-bk/chp_core/__init__.py
similarity index 100%
rename from chp_api/apis/chp_core/__init__.py
rename to chp_api/apis-bk/chp_core/__init__.py
diff --git a/chp_api/apis/chp_core/admin.py b/chp_api/apis-bk/chp_core/admin.py
similarity index 100%
rename from chp_api/apis/chp_core/admin.py
rename to chp_api/apis-bk/chp_core/admin.py
diff --git a/chp_api/apis/chp_core/apps.py b/chp_api/apis-bk/chp_core/apps.py
similarity index 100%
rename from chp_api/apis/chp_core/apps.py
rename to chp_api/apis-bk/chp_core/apps.py
diff --git a/chp_api/apis/chp_core/hosts b/chp_api/apis-bk/chp_core/hosts
similarity index 100%
rename from chp_api/apis/chp_core/hosts
rename to chp_api/apis-bk/chp_core/hosts
diff --git a/chp_api/apis/chp_core/migrations/__init__.py b/chp_api/apis-bk/chp_core/migrations/__init__.py
similarity index 100%
rename from chp_api/apis/chp_core/migrations/__init__.py
rename to chp_api/apis-bk/chp_core/migrations/__init__.py
diff --git a/chp_api/apis/chp_core/models.py b/chp_api/apis-bk/chp_core/models.py
similarity index 100%
rename from chp_api/apis/chp_core/models.py
rename to chp_api/apis-bk/chp_core/models.py
diff --git a/chp_api/apis/chp_core/serializers.py b/chp_api/apis-bk/chp_core/serializers.py
similarity index 100%
rename from chp_api/apis/chp_core/serializers.py
rename to chp_api/apis-bk/chp_core/serializers.py
diff --git a/chp_api/apis/chp_core/tests.py b/chp_api/apis-bk/chp_core/tests.py
similarity index 100%
rename from chp_api/apis/chp_core/tests.py
rename to chp_api/apis-bk/chp_core/tests.py
diff --git a/chp_api/apis/chp_core/urls.py b/chp_api/apis-bk/chp_core/urls.py
similarity index 100%
rename from chp_api/apis/chp_core/urls.py
rename to chp_api/apis-bk/chp_core/urls.py
diff --git a/chp_api/apis/chp_core/views.py b/chp_api/apis-bk/chp_core/views.py
similarity index 100%
rename from chp_api/apis/chp_core/views.py
rename to chp_api/apis-bk/chp_core/views.py
diff --git a/chp_api/chp_api/settings/base.py b/chp_api/chp_api/settings/base.py
index c7fff17..71d3e5d 100644
--- a/chp_api/chp_api/settings/base.py
+++ b/chp_api/chp_api/settings/base.py
@@ -13,7 +13,7 @@ import os
 
 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
-BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 DATA_UPLOAD_MAX_MEMORY_SIZE = None
 
@@ -26,7 +26,7 @@
 
 # Application definition
 
-INSTALLED_APPS = [
+INSTALLED_BASE_APPS = [
     'django.contrib.admin',
     'django.contrib.auth',
     'django.contrib.contenttypes',
@@ -34,14 +34,20 @@
     'django.contrib.messages',
     'django.contrib.staticfiles',
     'rest_framework',
-    'chp_look_up',
-    'chp',
+    'dispatcher',
+    'chp_utils',
     #'chp.app.apps.ChpApiConfig',
     #'utils',
-    #'django_hosts',
-
+    #'django_hosts'
 ]
 
+INSTALLED_CHP_APPS = [
+    'chp_look_up',
+    #'chp',
+    ]
+
+INSTALLED_APPS = INSTALLED_BASE_APPS + INSTALLED_CHP_APPS
+
 MIDDLEWARE = [
     #'django_hosts.middleware.HostsRequestMiddleware',
     #'django_hosts.middleware.HostsResponseMiddleware',
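
The settings change above is the pivot of the patch: INSTALLED_APPS is now assembled from a
static base list plus a CHP-specific list, so dispatch code can enumerate just the CHP apps
without hard-coding them. A minimal sketch of the resulting pattern (app entries shown are
illustrative, not a claim about the full settings file):

    # settings/base.py (sketch) -- Django only ever reads the combined list;
    # the dispatcher iterates over INSTALLED_CHP_APPS on its own.
    INSTALLED_BASE_APPS = [
        'django.contrib.admin',
        'rest_framework',
        'dispatcher',
        'chp_utils',
    ]

    # Each entry must be an importable package that exposes an `app` submodule
    # (see dispatcher/base.py later in this patch).
    INSTALLED_CHP_APPS = [
        'chp_look_up',
    ]

    INSTALLED_APPS = INSTALLED_BASE_APPS + INSTALLED_CHP_APPS
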
diff --git a/chp_api/chp_api/urls.py b/chp_api/chp_api/urls.py
index 4835a81..148c33e 100644
--- a/chp_api/chp_api/urls.py
+++ b/chp_api/chp_api/urls.py
@@ -17,22 +17,8 @@
 from django.urls import path
 from rest_framework.urlpatterns import format_suffix_patterns
 
-from . import views
+from dispatcher import views
 
 urlpatterns = [
-    path('query/', views.query.as_view()),
-    path('query', views.query.as_view()),
-    path('meta_knowledge_graph/', views.meta_knowledge_graph.as_view()),
-    path('curies/', views.curies.as_view()),
-    path('versions/', views.versions.as_view()),
-    path('v1.1/query/', views.query.as_view(trapi_version='1.1')),
-    path('v1.1/meta_knowledge_graph/', views.meta_knowledge_graph.as_view()),
-    path('v1.1/curies/', views.curies.as_view(trapi_version='1.1')),
-    path('v1.1/versions/', views.versions.as_view(trapi_version='1.1')),
-    path('v1.2/query/', views.query.as_view(trapi_version='1.2')),
-    path('v1.2/meta_knowledge_graph/', views.meta_knowledge_graph.as_view(trapi_version='1.2')),
-    path('v1.2/curies/', views.curies.as_view(trapi_version='1.2')),
-    path('v1.2/versions/', views.versions.as_view(trapi_version='1.2')),
-    path('transactions/', views.TransactionList.as_view(), name='transaction-list'),
-    path('transactions//', views.TransactionDetail.as_view(), name='transactions-detail')
-]
\ No newline at end of file
+    path('', include('dispatcher.urls')),
+    ]
diff --git a/chp_api/chp_api/views.py b/chp_api/chp_api/views.py.bk
similarity index 100%
rename from chp_api/chp_api/views.py
rename to chp_api/chp_api/views.py.bk
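
With the project-level urlpatterns reduced to a single include(), every TRAPI route now lives
in dispatcher/urls.py. Note that the new pattern uses include(), which the hunk above never
imports (the context line still reads `from django.urls import path`); a working version of
the file would look roughly like this sketch:

    # chp_api/chp_api/urls.py (sketch) -- include() must be imported alongside path().
    from django.urls import include, path

    urlpatterns = [
        # Delegate all routing to the dispatcher app's URLconf.
        path('', include('dispatcher.urls')),
    ]
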
diff --git a/chp_api/dispatcher/__init__.py b/chp_api/dispatcher/__init__.py
index f812bea..10438f0 100644
--- a/chp_api/dispatcher/__init__.py
+++ b/chp_api/dispatcher/__init__.py
@@ -1,277 +1 @@
-import logging
-from copy import deepcopy
-from re import A
-from django.http import JsonResponse
-from importlib import import_module
-from collections import defaultdict
-import time
-from chp_utils.trapi_query_processor import BaseQueryProcessor
-from chp_utils.curie_database import merge_curies_databases
-from trapi_model.meta_knowledge_graph import merge_meta_knowledge_graphs
-from trapi_model.query import Query
-from django.apps import apps
-from trapi_model.biolink import TOOLKIT
-
-# Setup logging
-logging.addLevelName(25, "NOTE")
-# Add a special logging function
-def note(self, message, *args, **kwargs):
-    self._log(25, message, args, kwargs)
-logging.Logger.note = note
-logger = logging.getLogger(__name__)
-
-# Installed CHP Apps
-CHP_APPS = [
-    "chp.app",
-    "chp_look_up.app",
-    ]
-
-# Import CHP Apps
-APPS = [import_module(app) for app in CHP_APPS]
-
-class Dispatcher(BaseQueryProcessor):
-    def __init__(self, request, trapi_version):
-        """ Base API Query Processor class used to abstract the processing infrastructure from
-        the views. Inherits from the CHP Utilities Trapi Query Processor which handles
-        node normalization, curie ontology expansion, and semantic operations.
-
-        :param request: Incoming POST request with a TRAPI message.
-        :type request: requests.request
-        """
-        self.request_data = deepcopy(request.data)
-
-        #self.chp_config, self.passed_subdomain = self.get_app_config(request)
-        self.trapi_version = trapi_version
-        super().__init__(None)
-
-    def get_curies(self):
-        curies_dbs = []
-        for app in APPS:
-            get_app_curies_fn = getattr(app, 'get_curies')
-            curies_dbs.append(get_app_curies_fn())
-        return merge_curies_databases(curies_dbs)
-
-    def get_meta_knowledge_graph(self):
-        meta_kgs = []
-        for app in APPS:
-            get_app_meta_kg_fn = getattr(app, 'get_meta_knowledge_graph')
-            meta_kgs.append(get_app_meta_kg_fn())
-        return merge_meta_knowledge_graphs(meta_kgs)
-
-    def process_invalid_trapi(self, request):
-        invalid_query_json = request.data
-        invalid_query_json['status'] = 'Bad TRAPI.'
-        return JsonResponse(invalid_query_json, status=400)
-
-    def process_invalid_workflow(self, request, status_msg):
-        invalid_query_json = request.data
-        invalid_query_json['status'] = status_msg
-        return JsonResponse(invalid_query_json, status=400)
-
-    def process_request(self, request, trapi_version):
-        """ Helper function that extracts the query from the message.
-        """
-        logger.info('Starting query.')
-        query = Query.load(
-            self.trapi_version,
-            biolink_version=None,
-            query=request.data
-        )
-
-        # Setup query in Base Processor
-        self.setup_query(query)
-
-        logger.info('Query loaded')
-
-        return query
-
-    def get_app_configs(self, query):
-        """ Should get a base app configuration for your app or nothing.
-        """
-        app_configs = []
-        for app in APPS:
-            get_app_config_fn = getattr(app, 'get_app_config')
-            app_configs.append(get_app_config_fn(query))
-        return app_configs
-
-    def get_trapi_interfaces(self, app_configs):
-        """ Should load a base interface of your app.
-        """
-        if len(app_configs) != len(APPS):
-            raise ValueError('You should be loading base configs (if any) so at this point there should be one config per app (or just the app).')
-        base_interfaces = []
-        for app, app_config in zip(APPS, app_configs):
-            get_trapi_interface_fn = getattr(app, 'get_trapi_interface')
-            base_interfaces.append(get_trapi_interface_fn(app_config))
-        return base_interfaces
-
-    def collect_app_queries(self, queries_list_of_lists):
-        all_queries = []
-        for queries in queries_list_of_lists:
-            if type(queries) == list:
-                all_queries.extend(queries)
-            else:
-                all_queries.append(queries)
-        return all_queries
-
-    def get_response(self, query):
-        """ Main function of the processor that handles primary logic for obtaining
-        a cached or calculated query response.
-        """
-        query_copy = query.get_copy()
-        start_time = time.time()
-        logger.info('Running query.')
-
-        base_app_configs = self.get_app_configs(query_copy)
-        base_interfaces = self.get_trapi_interfaces(base_app_configs)
-
-        # Expand
-        expand_queries = self.expand_batch_query(query)
-
-        # For each app run the normalization and semops pipline
-
-        # Make a copy of the expanded queries for each app
-        app_queries = [expand_queries for _ in range(len(base_interfaces))]
-        consistent_app_queries = []
-        inconsistent_app_queries = []
-        app_normalization_maps = []
-        for interface, _expand_queries in zip(base_interfaces, app_queries):
-            _ex_copy = []
-            # Normalize to Preferred Curies
-            normalization_time = time.time()
-            normalize_queries, normalization_map = self.normalize_to_preferred(
-                _expand_queries,
-                meta_knowledge_graph=interface.get_meta_knowledge_graph(),
-                with_normalization_map=True,
-            )
-            app_normalization_maps.append(normalization_map)
-            logger.info('Normalizaion time: {} seconds.'.format(time.time() - normalization_time))
-            # Conflate
-            conflation_time = time.time()
-
-            conflate_queries = self.conflate_categories(
-                normalize_queries,
-                conflation_map=interface.get_conflation_map(),
-            )
-            logger.info('Conflation time: {} seconds.'.format(time.time() - conflation_time))
-            # Onto Expand
-            onto_time = time.time()
-            onto_queries = self.expand_supported_ontological_descendants(
-                conflate_queries,
-                curies_database=interface.get_curies(),
-            )
-            logger.info('Ontological expansion time: {} seconds.'.format(time.time() - onto_time))
-            # Semantic Ops Expand
-            semops_time = time.time()
-            semops_queries = self.expand_with_semantic_ops(
-                onto_queries,
-                meta_knowledge_graph=interface.get_meta_knowledge_graph(),
-            )
-            logger.info('Sem ops time: {} seconds.'.format(time.time() - semops_time))
-            # Filter out inconsistent queries
-            filter_time = time.time()
-            consistent_queries, inconsistent_queries = self.filter_queries_inconsistent_with_meta_knowledge_graph(
-                semops_queries,
-                meta_knowledge_graph=interface.get_meta_knowledge_graph(),
-                with_inconsistent_queries=True
-            )
-            logger.info('Consistency filter time: {} seconds.'.format(time.time() - filter_time))
-
-            logger.info('Number of consistent queries derived from passed query: {}.'.format(len(consistent_queries)))
-            consistent_app_queries.append(consistent_queries)
-            inconsistent_app_queries.append(inconsistent_queries)
-        # Ensure that there are actually consistent queries that have been extracted
-        if sum([len(_qs) for _qs in consistent_app_queries]) == 0:
-            # Add all logs from inconsistent queries of all apps
-            all_inconsistent_queries = self.collect_app_queries(inconsistent_queries)
-            query_copy = self.add_logs_from_query_list(query_copy, all_inconsistent_queries)
-            query_copy.set_status('Bad request. See description.')
-            query_copy.set_description('Could not extract any supported queries from query graph.')
-            self.add_transaction(query_copy)
-            return JsonResponse(query_copy.to_dict())
-        # Collect responses from each CHP app
-        app_responses = []
-        app_logs = []
-        app_status = []
-        app_descriptions = []
-        for app, consistent_queries in zip(APPS, consistent_app_queries):
-            get_app_response_fn = getattr(app, 'get_response')
-            responses, logs, status, description = get_app_response_fn(consistent_queries)
-            app_responses.extend(responses)
-            app_logs.extend(logs)
-            app_status.append(status)
-            app_descriptions.append(description)
-        # Check if any responses came back from any apps
-        if len(app_responses) == 0:
-            # Add logs from consistent queries of all apps
-            all_consistent_queries = self.collect_app_queries(consistent_app_queries)
-            query_copy = self.add_logs_from_query_list(query_copy, all_consistent_queries)
-            # Add app level logs
-            query_copy.logger.add_logs(app_logs)
-            query_copy.set_status('No results.')
-            self.add_transaction(query_copy)
-            return JsonResponse(query_copy.to_dict())
-
-        # Add responses into database
-        self.add_transactions(app_responses)
-
-        # Construct merged response
-        response = self.merge_responses(query_copy, app_responses)
-
-        # Now merge all app level log messages from each app
-        response.logger.add_logs(app_logs)
-
-        # Log any error messages for apps
-        for app_name, status, description in zip(CHP_APPS, app_status, app_descriptions):
-            if status != 'Success':
-                response.warning('CHP App: {} reported a unsuccessful status: {} with description: {}'.format(
-                    app_name, status, description)
-                )
-
-        # Unnormalize with each apps normalization map
-        unnormalized_response = response
-        for normalization_map in app_normalization_maps:
-            unnormalized_response = self.undo_normalization(unnormalized_response, normalization_map)
-
-        logger.info('Constructed TRAPI response.')
-
-        logger.info('Responded in {} seconds'.format(time.time() - start_time))
-        unnormalized_response.set_status('Success')
-
-        # Add workflow
-        unnormalized_response.add_workflow("lookup")
-
-        # Set the used biolink version
-        unnormalized_response.biolink_version = TOOLKIT.get_model_version()
-
-        # Add response to database
-        self.add_transaction(unnormalized_response)
-
-        return JsonResponse(unnormalized_response.to_dict())
-
-    def add_logs_from_query_list(self, target_query, query_list):
-        for query in query_list:
-            target_query.logger.add_logs(query.logger.to_dict())
-        return target_query
-
-    def add_transaction(self, response):
-        # Save the transaction
-        transaction = Transaction(
-            id = response.id,
-            status = response.status,
-            query = response.to_dict(),
-            chp_version = chp.__version__,
-            chp_data_version = chp_data.__version__,
-            pybkb_version = pybkb.__version__,
-            chp_client_version = chp_client.__version__,
-            chp_utils_version = chp_utils.__version__,
-        )
-        transaction.save()
-
-    def add_transactions(self, responses):
-        for response in responses:
-            self.add_transaction(response)
-
-    def add_transaction(self, response):
-        pass
-
+#import trapi_model
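
dispatcher/__init__.py is reduced to a stub so that importing the dispatcher package no longer
pulls in the whole query processor; the logic moves to dispatcher/base.py below, where the
hard-coded CHP_APPS list is replaced by settings-driven discovery. The pattern, sketched in
isolation (call_all is a hypothetical helper for illustration, not part of the patch):

    from importlib import import_module
    from django.conf import settings

    # Import the `app` submodule of every registered CHP app,
    # e.g. 'chp_look_up' -> chp_look_up.app
    APPS = [import_module(name + '.app') for name in settings.INSTALLED_CHP_APPS]

    def call_all(hook_name, *args):
        """Invoke the same hook on every registered CHP app and collect the results."""
        return [getattr(app, hook_name)(*args) for app in APPS]
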
+ """ + logger.info('Starting query.') + query = Query.load( + self.trapi_version, + biolink_version=None, + query=request.data + ) + + # Setup query in Base Processor + self.setup_query(query) + + logger.info('Query loaded') + + return query + + def get_app_configs(self, query): + """ Should get a base app configuration for your app or nothing. + """ + app_configs = [] + for app in APPS: + get_app_config_fn = getattr(app, 'get_app_config') + app_configs.append(get_app_config_fn(query)) + return app_configs + + def get_trapi_interfaces(self, app_configs): + """ Should load a base interface of your app. + """ + if len(app_configs) != len(APPS): + raise ValueError('You should be loading base configs (if any) so at this point there should be one config per app (or just the app).') + base_interfaces = [] + for app, app_config in zip(APPS, app_configs): + get_trapi_interface_fn = getattr(app, 'get_trapi_interface') + base_interfaces.append(get_trapi_interface_fn(app_config)) + return base_interfaces + + def collect_app_queries(self, queries_list_of_lists): + all_queries = [] + for queries in queries_list_of_lists: + if type(queries) == list: + all_queries.extend(queries) + else: + all_queries.append(queries) + return all_queries + + def get_response(self, query): + """ Main function of the processor that handles primary logic for obtaining + a cached or calculated query response. + """ + query_copy = query.get_copy() + start_time = time.time() + logger.info('Running query.') + + base_app_configs = self.get_app_configs(query_copy) + base_interfaces = self.get_trapi_interfaces(base_app_configs) + + # Expand + expand_queries = self.expand_batch_query(query) + + # For each app run the normalization and semops pipline + + # Make a copy of the expanded queries for each app + app_queries = [expand_queries for _ in range(len(base_interfaces))] + consistent_app_queries = [] + inconsistent_app_queries = [] + app_normalization_maps = [] + for interface, _expand_queries in zip(base_interfaces, app_queries): + _ex_copy = [] + # Normalize to Preferred Curies + normalization_time = time.time() + normalize_queries, normalization_map = self.normalize_to_preferred( + _expand_queries, + meta_knowledge_graph=interface.get_meta_knowledge_graph(), + with_normalization_map=True, + ) + app_normalization_maps.append(normalization_map) + logger.info('Normalizaion time: {} seconds.'.format(time.time() - normalization_time)) + # Conflate + conflation_time = time.time() + + conflate_queries = self.conflate_categories( + normalize_queries, + conflation_map=interface.get_conflation_map(), + ) + logger.info('Conflation time: {} seconds.'.format(time.time() - conflation_time)) + # Onto Expand + onto_time = time.time() + onto_queries = self.expand_supported_ontological_descendants( + conflate_queries, + curies_database=interface.get_curies(), + ) + logger.info('Ontological expansion time: {} seconds.'.format(time.time() - onto_time)) + # Semantic Ops Expand + semops_time = time.time() + semops_queries = self.expand_with_semantic_ops( + onto_queries, + meta_knowledge_graph=interface.get_meta_knowledge_graph(), + ) + logger.info('Sem ops time: {} seconds.'.format(time.time() - semops_time)) + # Filter out inconsistent queries + filter_time = time.time() + consistent_queries, inconsistent_queries = self.filter_queries_inconsistent_with_meta_knowledge_graph( + semops_queries, + meta_knowledge_graph=interface.get_meta_knowledge_graph(), + with_inconsistent_queries=True + ) + logger.info('Consistency filter time: {} 
+            logger.info('Consistency filter time: {} seconds.'.format(time.time() - filter_time))
+
+            logger.info('Number of consistent queries derived from passed query: {}.'.format(len(consistent_queries)))
+            consistent_app_queries.append(consistent_queries)
+            inconsistent_app_queries.append(inconsistent_queries)
+        # Ensure that there are actually consistent queries that have been extracted
+        if sum([len(_qs) for _qs in consistent_app_queries]) == 0:
+            # Add all logs from inconsistent queries of all apps
+            all_inconsistent_queries = self.collect_app_queries(inconsistent_queries)
+            query_copy = self.add_logs_from_query_list(query_copy, all_inconsistent_queries)
+            query_copy.set_status('Bad request. See description.')
+            query_copy.set_description('Could not extract any supported queries from query graph.')
+            self.add_transaction(query_copy)
+            return JsonResponse(query_copy.to_dict())
+        # Collect responses from each CHP app
+        app_responses = []
+        app_logs = []
+        app_status = []
+        app_descriptions = []
+        for app, consistent_queries in zip(APPS, consistent_app_queries):
+            get_app_response_fn = getattr(app, 'get_response')
+            responses, logs, status, description = get_app_response_fn(consistent_queries)
+            app_responses.extend(responses)
+            app_logs.extend(logs)
+            app_status.append(status)
+            app_descriptions.append(description)
+        # Check if any responses came back from any apps
+        if len(app_responses) == 0:
+            # Add logs from consistent queries of all apps
+            all_consistent_queries = self.collect_app_queries(consistent_app_queries)
+            query_copy = self.add_logs_from_query_list(query_copy, all_consistent_queries)
+            # Add app level logs
+            query_copy.logger.add_logs(app_logs)
+            query_copy.set_status('No results.')
+            self.add_transaction(query_copy)
+            return JsonResponse(query_copy.to_dict())
+
+        # Add responses into database
+        self.add_transactions(app_responses)
+
+        # Construct merged response
+        response = self.merge_responses(query_copy, app_responses)
+
+        # Now merge all app level log messages from each app
+        response.logger.add_logs(app_logs)
+
+        # Log any error messages for apps
+        for app_name, status, description in zip(settings.INSTALLED_CHP_APPS, app_status, app_descriptions):
+            if status != 'Success':
+                response.warning('CHP App: {} reported an unsuccessful status: {} with description: {}'.format(
+                    app_name, status, description)
+                )
+
+        # Unnormalize with each app's normalization map
+        unnormalized_response = response
+        for normalization_map in app_normalization_maps:
+            unnormalized_response = self.undo_normalization(unnormalized_response, normalization_map)
+
+        logger.info('Constructed TRAPI response.')
+
+        logger.info('Responded in {} seconds'.format(time.time() - start_time))
+        unnormalized_response.set_status('Success')
+
+        # Add workflow
+        unnormalized_response.add_workflow("lookup")
+
+        # Set the used biolink version
+        unnormalized_response.biolink_version = TOOLKIT.get_model_version()
+
+        # Add response to database
+        self.add_transaction(unnormalized_response)
+
+        return JsonResponse(unnormalized_response.to_dict())
+
+    def add_logs_from_query_list(self, target_query, query_list):
+        for query in query_list:
+            target_query.logger.add_logs(query.logger.to_dict())
+        return target_query
+
+    def add_transaction(self, response):
+        # Save the transaction
+        transaction = Transaction(
+            id = response.id,
+            status = response.status,
+            query = response.to_dict(),
+            chp_version = chp.__version__,
+            chp_data_version = chp_data.__version__,
+            pybkb_version = pybkb.__version__,
+            chp_client_version = chp_client.__version__,
+            chp_utils_version = chp_utils.__version__,
+        )
+        transaction.save()
+
+    def add_transactions(self, responses):
+        for response in responses:
+            self.add_transaction(response)
+
+    def add_transaction(self, response):
+        pass
+
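
Dispatcher treats each imported app module as a duck-typed plugin: it looks hooks up with
getattr and merges whatever comes back. A hypothetical skeleton of the app.py module a CHP
app such as chp_look_up would need to ship, inferred from the calls in base.py above (the
return types are assumptions based on how the results are used):

    # <chp_app>/app.py (hypothetical skeleton, not part of this patch)

    def get_curies():
        """Return this app's curie database (merged across apps by Dispatcher)."""

    def get_meta_knowledge_graph():
        """Return this app's TRAPI meta knowledge graph."""

    def get_app_config(query):
        """Return a base config for this query, or None."""

    def get_trapi_interface(app_config):
        """Return an interface exposing get_meta_knowledge_graph(),
        get_conflation_map(), and get_curies()."""

    def get_response(consistent_queries):
        """Run the queries; return (responses, logs, status, description)."""
        responses, logs = [], []
        return responses, logs, 'Success', ''
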
diff --git a/chp_api/dispatcher/serializers.py b/chp_api/dispatcher/serializers.py
new file mode 100644
index 0000000..bfdfd3a
--- /dev/null
+++ b/chp_api/dispatcher/serializers.py
@@ -0,0 +1,21 @@
+from rest_framework import serializers
+from .models import Transaction
+
+class TransactionListSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Transaction
+        fields = [
+            'id',
+            'date_time',
+            'status',
+            ]
+
+class TransactionDetailSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Transaction
+        fields = [
+            'id',
+            'date_time',
+            'query',
+            'status',
+            ]
diff --git a/chp_api/dispatcher/urls.py b/chp_api/dispatcher/urls.py
index a591dc3..5ca1ed4 100644
--- a/chp_api/dispatcher/urls.py
+++ b/chp_api/dispatcher/urls.py
@@ -24,7 +24,6 @@
     path('query', views.query.as_view()),
     path('meta_knowledge_graph/', views.meta_knowledge_graph.as_view()),
     path('curies/', views.curies.as_view()),
-    path('curies', views.curies.as_view()),
     path('versions/', views.versions.as_view()),
     path('v1.1/query/', views.query.as_view(trapi_version='1.1')),
     path('v1.1/meta_knowledge_graph/', views.meta_knowledge_graph.as_view()),
@@ -36,4 +35,4 @@
     path('v1.2/versions/', views.versions.as_view(trapi_version='1.2')),
     path('transactions/', views.TransactionList.as_view(), name='transaction-list'),
     path('transactions//', views.TransactionDetail.as_view(), name='transactions-detail')
-]
\ No newline at end of file
+]
diff --git a/chp_api/dispatcher/views.py b/chp_api/dispatcher/views.py
index fb6cf0a..593776b 100644
--- a/chp_api/dispatcher/views.py
+++ b/chp_api/dispatcher/views.py
@@ -3,8 +3,9 @@
 from jsonschema import ValidationError
 from copy import deepcopy
 
-from apis.chp_core.models import Transaction
-from apis.chp_core.serializers import TransactionListSerializer, TransactionDetailSerializer
+from .base import Dispatcher
+from .models import Transaction
+from .serializers import TransactionListSerializer, TransactionDetailSerializer
 
 from django.http import HttpResponse, JsonResponse
 from django.shortcuts import get_object_or_404
@@ -14,7 +15,6 @@
 from rest_framework import mixins
 from rest_framework import generics
 
-from dispatcher import Dispatcher
 
 class query(APIView):
     trapi_version = '1.2'
@@ -63,7 +63,7 @@ def get(self, request):
         if request.method == 'GET':
             # Initialize Dispatcher
             dispatcher = Dispatcher(request, self.trapi_version)
-            
+
             # Get merged meta KG
             meta_knowledge_graph = dispatcher.get_meta_knowledge_graph()
             return JsonResponse(meta_knowledge_graph.to_dict())
diff --git a/chp_api/utils/mixins/chp_core_query_processor_mixin.py b/chp_api/utils-bk/mixins/chp_core_query_processor_mixin.py
similarity index 100%
rename from chp_api/utils/mixins/chp_core_query_processor_mixin.py
rename to chp_api/utils-bk/mixins/chp_core_query_processor_mixin.py
diff --git a/chp_api/utils/trapi_query_processor.py b/chp_api/utils-bk/trapi_query_processor.py
similarity index 100%
rename from chp_api/utils/trapi_query_processor.py
rename to chp_api/utils-bk/trapi_query_processor.py
diff --git a/unittests/test_chp_core_api.py b/unittests/test_chp_core_api.py
index 632e0ab..7dec28f 100644
--- a/unittests/test_chp_core_api.py
+++ b/unittests/test_chp_core_api.py
@@ -5,7 +5,7 @@
 import requests
 from trapi_model import *
 
-LOCAL_URL = 'http://breast.localhost:8000'
+LOCAL_URL = 'http://localhost:8000'
 #LOCAL_URL = 'http://localhost:80'
 #LOCAL_URL = 'http://chp-dev.thayer.dartmouth.edu'
@@ -20,6 +20,7 @@ def setUp(self):
 
     def _get(url, params=None):
         params = params or {}
         res = requests.get(url, json=params)
+        print(res.content)
         ret = res.json()
         return ret
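
One loose end in this work-in-progress patch: views.py and serializers.py both import
Transaction from dispatcher.models, but no models.py is added here (and base.py defines
add_transaction twice, so the trailing pass stub currently wins). A hypothetical model
consistent with the fields the serializers and add_transaction reference might look like:

    # chp_api/dispatcher/models.py (hypothetical; not included in this patch)
    from django.db import models

    class Transaction(models.Model):
        id = models.UUIDField(primary_key=True)
        date_time = models.DateTimeField(auto_now_add=True)
        status = models.CharField(max_length=120)
        query = models.JSONField(default=dict)
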