diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..5a8565682 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: Bug report +about: Let us know about something that isn't working right +title: '' +labels: bug +assignees: '' + +--- + +### What went wrong? + +Describe what happened. + +### Expected behavior + +What did you expect to happen? + +### Screenshots + +If applicable, please add a screenshot of the problem! + +### Which version? + +Please specify where you encountered the issue: + +- [ ] https://ianalyzer.hum.uu.nl +- [ ] https://peopleandparliament.hum.uu.nl +- [ ] https://peace.sites.uu.nl/ +- [ ] a server hosted elsewhere (i.e. not by the research software lab) +- [ ] a local server + +If this happened on a local or third-party server, it helps if you can be more specific about the version. Please include the version number (e.g. "3.2.4") or a commit hash if you know it! + +### To reproduce + +How can a developer replicate the issue? Please provide any information you can. For example: "I went to https://ianalyzer.hum.uu.nl/search/troonredes?date=1814-01-01:1972-01-01 and then clicked on *Download CSV*. I pressed *cancel* and then I clicked *Download CSV* again." diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..042278843 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for something new +title: '' +labels: enhancement +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/backend/addcorpus/es_mappings.py b/backend/addcorpus/es_mappings.py index a2f58418f..54bddbaa0 100644 --- a/backend/addcorpus/es_mappings.py +++ b/backend/addcorpus/es_mappings.py @@ -1,4 +1,4 @@ -def main_content_mapping(token_counts=True, stopword_analysis=False, stemming_analysis=False, updated_highlighting=False): +def main_content_mapping(token_counts=True, stopword_analysis=False, stemming_analysis=False, updated_highlighting=True): ''' Mapping for the main content field. 
Options: diff --git a/backend/api/tests/test_api_views.py b/backend/api/tests/test_api_views.py index 4cfb5204e..e207dedf5 100644 --- a/backend/api/tests/test_api_views.py +++ b/backend/api/tests/test_api_views.py @@ -2,19 +2,11 @@ from addcorpus.models import Corpus from rest_framework.status import is_success -def test_search_history_view(admin_user, admin_client): - corpus = Corpus.objects.create(name = 'mock-corpus') - - # get search history - response = admin_client.get('/api/search_history/') - assert is_success(response.status_code) - assert len(response.data) == 0 - - # add a query to search history - data = { +def mock_query_data(user, corpus_name): + return { 'aborted': False, - 'corpus': corpus.name, - 'user': admin_user.id, + 'corpus': corpus_name, + 'user': user.id, 'started': datetime.now().isoformat(), 'completed': datetime.now().isoformat(), 'query_json': { @@ -25,6 +17,17 @@ def test_search_history_view(admin_user, admin_client): 'total_results': 10, 'transferred': 0, } + +def test_search_history_view(admin_user, admin_client): + corpus = Corpus.objects.create(name = 'mock-corpus') + + # get search history + response = admin_client.get('/api/search_history/') + assert is_success(response.status_code) + assert len(response.data) == 0 + + # add a query to search history + data = mock_query_data(admin_user, 'mock-corpus') response = admin_client.post('/api/search_history/', data, content_type='application/json') assert is_success(response.status_code) @@ -34,6 +37,20 @@ def test_search_history_view(admin_user, admin_client): assert len(response.data) == 1 +def test_delete_search_history(auth_client, auth_user, db): + mock_corpus = 'mock-corpus' + corpus = Corpus.objects.create(name = mock_corpus) + query = mock_query_data(auth_user, mock_corpus) + auth_client.post('/api/search_history/', query, content_type='application/json') + + assert len(auth_user.queries.all()) == 1 + + response = auth_client.post('/api/search_history/delete_all/') + assert is_success(response.status_code) + + assert len(auth_user.queries.all()) == 0 + + def test_task_status_view(transactional_db, admin_client, celery_worker): bad_request = { 'bad_key': 'data' diff --git a/backend/api/views.py b/backend/api/views.py index b8efc8f2b..2dbff15fb 100644 --- a/backend/api/views.py +++ b/backend/api/views.py @@ -4,8 +4,8 @@ from api.serializers import QuerySerializer from rest_framework.permissions import IsAuthenticated from rest_framework.exceptions import APIException +from rest_framework.decorators import action import logging -from rest_framework.permissions import IsAuthenticated from api.utils import check_json_keys from celery import current_app as celery_app @@ -23,6 +23,12 @@ class QueryViewset(viewsets.ModelViewSet): def get_queryset(self): return self.request.user.queries.all() + @action(detail=False, methods=['post']) + def delete_all(self, request): + queries = self.get_queryset() + queries.delete() + return Response('success') + class TaskStatusView(APIView): ''' Get the status of an array of backend tasks (working/done/failed), diff --git a/backend/corpora/dutchannualreports/dutchannualreports.py b/backend/corpora/dutchannualreports/dutchannualreports.py index d4f4c7038..6a7c89168 100644 --- a/backend/corpora/dutchannualreports/dutchannualreports.py +++ b/backend/corpora/dutchannualreports/dutchannualreports.py @@ -12,8 +12,8 @@ from addcorpus.corpus import XMLCorpusDefinition, FieldDefinition from media.image_processing import get_pdf_info, retrieve_pdf, pdf_pages, build_partial_pdf from 
addcorpus.load_corpus import corpus_dir - from addcorpus.es_mappings import keyword_mapping, main_content_mapping +from addcorpus.es_settings import es_settings from media.media_url import media_url @@ -48,6 +48,10 @@ class DutchAnnualReports(XMLCorpusDefinition): dutchannualreports_map = {} + @property + def es_settings(self): + return es_settings(self.languages[0], stopword_analyzer=True, stemming_analyzer=True) + with open(op.join(corpus_dir('dutchannualreports'), 'dutchannualreports_mapping.csv')) as f: reader = csv.DictReader(f) for line in reader: diff --git a/backend/corpora/ecco/ecco.py b/backend/corpora/ecco/ecco.py index d23ef196b..a6b517dac 100644 --- a/backend/corpora/ecco/ecco.py +++ b/backend/corpora/ecco/ecco.py @@ -29,11 +29,6 @@ class Ecco(XMLCorpusDefinition): description_page = 'ecco.md' min_date = datetime(year=1700, month=1, day=1) max_date = datetime(year=1800, month=12, day=31) - - @property - def es_settings(self): - return es_settings(self.languages[0], stopword_analyzer=True, stemming_analyzer=True) - data_directory = settings.ECCO_DATA es_index = getattr(settings, 'ECCO_ES_INDEX', 'ecco') image = 'ecco.jpg' @@ -47,6 +42,10 @@ def es_settings(self): meta_pattern = re.compile('^\d+\_DocMetadata\.xml$') + @property + def es_settings(self): + return es_settings(self.languages[0], stopword_analyzer=True, stemming_analyzer=True) + def sources(self, start=min_date, end=max_date): logging.basicConfig(filename='ecco.log', level=logging.INFO) diff --git a/backend/corpora/parliament/finland-old.py b/backend/corpora/parliament/finland-old.py index 59bf354c6..7e654b941 100644 --- a/backend/corpora/parliament/finland-old.py +++ b/backend/corpora/parliament/finland-old.py @@ -14,7 +14,7 @@ class ParliamentFinlandOld(Parliament, CSVCorpusDefinition): title = 'People and Parliament (Finland, 1863-1905)' description = 'Speeches from the early Finnish estates' - max_date = datetime(year=1905, month=12, day=31) + max_date = datetime(year=1906, month=12, day=31) min_date = datetime(year=1863, month=1, day=1) data_directory = settings.PP_FINLAND_OLD_DATA es_index = getattr(settings, 'PP_FINLAND_OLD_INDEX', 'parliament-finland-old') diff --git a/backend/corpora/parliament/netherlands.py b/backend/corpora/parliament/netherlands.py index 49f4ba5a1..7e3f51e6b 100644 --- a/backend/corpora/parliament/netherlands.py +++ b/backend/corpora/parliament/netherlands.py @@ -124,7 +124,7 @@ class ParliamentNetherlands(Parliament, XMLCorpusDefinition): title = "People & Parliament (Netherlands)" description = "Speeches from the Eerste Kamer and Tweede Kamer" min_date = datetime(year=1815, month=1, day=1) - max_date = datetime(year=2020, month=12, day=31) + max_date = datetime(year=2022, month=12, day=31) data_directory = settings.PP_NL_DATA data_directory_recent = settings.PP_NL_RECENT_DATA word_model_path = getattr(settings, 'PP_NL_WM', None) diff --git a/backend/download/conftest.py b/backend/download/conftest.py index 1eb5e9b54..5ef03873d 100644 --- a/backend/download/conftest.py +++ b/backend/download/conftest.py @@ -50,18 +50,22 @@ def index_ml_mock_corpus(es_client, ml_mock_corpus): def index_mock_corpus(es_client, mock_corpus, index_small_mock_corpus, index_large_mock_corpus, index_ml_mock_corpus): yield mock_corpus -def save_all_results_csv(mock_corpus, mock_corpus_specs): +def all_results_request_json(mock_corpus, mock_corpus_specs): fields = mock_corpus_specs['fields'] query = mock_corpus_specs['example_query'] - request_json = { + return { 'corpus': mock_corpus, 'es_query': MATCH_ALL, 
'fields': fields, 'route': '/search/{};query={}'.format(mock_corpus, query) } + +def save_all_results_csv(mock_corpus, mock_corpus_specs): + request_json = all_results_request_json(mock_corpus, mock_corpus_specs) results = tasks.download_scroll(request_json) - filename = tasks.make_csv(results, request_json) + fake_id = mock_corpus + '_all_results' + filename = tasks.make_csv(results, request_json, fake_id) return filename diff --git a/backend/download/create_csv.py b/backend/download/create_csv.py index 01e01092b..b807f7f10 100644 --- a/backend/download/create_csv.py +++ b/backend/download/create_csv.py @@ -5,6 +5,7 @@ from django.conf import settings +from visualization.query import get_query_text from visualization.term_frequency import parse_datestring def write_file(filename, fieldnames, rows, dialect = 'excel'): @@ -21,12 +22,10 @@ def write_file(filename, fieldnames, rows, dialect = 'excel'): return filepath -def create_filename(descriptive_part, essential_suffix = '.csv'): - max_length = 255 - (len(essential_suffix) + len(settings.CSV_FILES_PATH)) - truncated = descriptive_part[:min(max_length, len(descriptive_part))] - return truncated + essential_suffix +def create_filename(download_id): + return f'{download_id}.csv' -def search_results_csv(results, fields, query): +def search_results_csv(results, fields, query, download_id): entries = [] field_set = set(fields) field_set.update(['query']) @@ -50,14 +49,14 @@ def search_results_csv(results, fields, query): entry.update({highlight_field_name: soup.get_text()}) entries.append(entry) - filename = create_filename(query) + filename = create_filename(download_id) field_set.discard('context') fieldnames = sorted(field_set) filepath = write_file(filename, fieldnames, entries, dialect = 'resultsDialect') return filepath -def term_frequency_csv(queries, results, field_name, unit = None): +def term_frequency_csv(queries, results, field_name, download_id, unit = None): has_token_counts = results[0].get('token_count', None) != None query_column = ['Query'] if len(queries) > 1 else [] freq_columns = ['Term frequency', 'Relative term frequency (by # documents)', 'Total documents'] @@ -66,17 +65,10 @@ def term_frequency_csv(queries, results, field_name, unit = None): rows = term_frequency_csv_rows(queries, results, field_name, unit) - filename = term_frequency_filename(queries, field_name) + filename = create_filename(download_id) filepath = write_file(filename, fieldnames, rows) return filepath -def term_frequency_filename(queries, field_name): - querystring = '_'.join(queries) - timestamp = datetime.now().isoformat(sep='_', timespec='minutes') # ensure csv filenames are unique with timestamp - suffix = '_' + timestamp + '.csv' - description = 'term_frequency_{}_{}'.format(field_name, querystring) - return create_filename(description, suffix) - def term_frequency_csv_rows(queries, results, field_name, unit): for result in results: field_value = format_field_value(result['key'], unit) @@ -108,4 +100,22 @@ def format_field_value(value, unit): 'week': '%Y-%m-%d', 'day': '%Y-%m-%d' } - return date.strftime(formats[unit]) + return date.strftime(formats[unit]) + +def ngram_csv(results, log_id): + rows = ngram_table(results) + fieldnames = ['date', 'N-gram', 'Frequency'] + filename = create_filename(log_id) + filepath = write_file(filename, fieldnames, rows) + return filepath + +def ngram_table(results): + rows = [] + for index, time_point in enumerate(results['time_points']): + for ngram in results['words']: + rows.append({ + 'date': time_point, + 
'N-gram': ngram['label'], + 'Frequency': ngram['data'][index] + }) + return rows diff --git a/backend/download/mail.py b/backend/download/mail.py index 5dc840ec1..0220edd58 100644 --- a/backend/download/mail.py +++ b/backend/download/mail.py @@ -20,8 +20,8 @@ def send_csv_email(user_email, username, download_id): subject = 'I-Analyzer CSV download' from_email = settings.DEFAULT_FROM_EMAIL - path = Download.objects.get(id=download_id).filename - _, filename = os.path.split(path) + download = Download.objects.get(id=download_id) + filename = download.descriptive_filename() context = { 'email_title': 'Download CSV', diff --git a/backend/download/migrations/0002_alter_download_download_type.py b/backend/download/migrations/0002_alter_download_download_type.py new file mode 100644 index 000000000..dd44e9d2f --- /dev/null +++ b/backend/download/migrations/0002_alter_download_download_type.py @@ -0,0 +1,18 @@ +# Generated by Django 4.1.10 on 2023-10-18 12:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('download', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='download', + name='download_type', + field=models.CharField(choices=[('search_results', 'Search results'), ('date_term_frequency', 'Term frequency (timeline)'), ('aggregate_term_frequency', 'Term frequency (histogram)'), ('ngram', 'Neighbouring words')], help_text='Type of download (search results or a type of visualisation)', max_length=126), + ), + ] diff --git a/backend/download/models.py b/backend/download/models.py index ca49e8db6..3afe3074f 100644 --- a/backend/download/models.py +++ b/backend/download/models.py @@ -1,8 +1,9 @@ from django.db import models +from django.conf import settings +from django.utils import timezone + from users.models import CustomUser from addcorpus.models import Corpus -from django.conf import settings -from datetime import datetime MAX_LENGTH_FILENAME = 254 @@ -17,6 +18,7 @@ class Download(models.Model): ('search_results', 'Search results'), ('date_term_frequency', 'Term frequency (timeline)'), ('aggregate_term_frequency', 'Term frequency (histogram)'), + ('ngram', 'Neighbouring words') ], help_text='Type of download (search results or a type of visualisation)') corpus = models.ForeignKey(Corpus, on_delete=models.CASCADE, to_field='name', related_name='downloads') @@ -49,5 +51,12 @@ def complete(self, filename = None): ''' self.filename = filename - self.completed = datetime.now() + self.completed = timezone.now() self.save() + + def descriptive_filename(self): + corpus_name = self.corpus.name + type_name = self.download_type + timestamp = self.completed.strftime('%Y-%m-%d %H:%M') + + return f'{type_name}__{corpus_name}__{timestamp}.csv' diff --git a/backend/download/tasks.py b/backend/download/tasks.py index 91c88f358..ecc9b5f0c 100644 --- a/backend/download/tasks.py +++ b/backend/download/tasks.py @@ -2,13 +2,12 @@ import re from django.conf import settings from celery import shared_task, chain, group -from django.urls import reverse from es import download as es_download from download import create_csv from download.models import Download from addcorpus.models import Corpus -from visualization.tasks import histogram_term_frequency_tasks, timeline_term_frequency_tasks +from visualization.tasks import histogram_term_frequency_tasks, timeline_term_frequency_tasks, ngram_data_tasks from visualization import query from download.mail import send_csv_email @@ -37,9 +36,9 @@ def download_scroll(request_json, 
download_size=10000): return results @shared_task() -def make_csv(results, request_json): +def make_csv(results, request_json, log_id): query = create_query(request_json) - filepath = create_csv.search_results_csv(results, request_json['fields'], query) + filepath = create_csv.search_results_csv(results, request_json['fields'], query, log_id) return filepath @@ -82,7 +81,7 @@ def download_search_results(request_json, user): make_chain = lambda: chain( download_scroll.s(request_json, download_limit), - make_csv.s(request_json), + make_csv.s(request_json, download.id), complete_download.s(download.id), csv_data_email.s(user.email, user.username), ).on_error(complete_failed_download.s(download.id)) @@ -90,12 +89,14 @@ def download_search_results(request_json, user): return try_download(make_chain, download) @shared_task() -def make_term_frequency_csv(results_per_series, parameters_per_series): +def make_full_data_csv(results_per_series, visualization_type, parameters_per_series, log_id): ''' Export term frequency results to a csv. ''' + if visualization_type == 'ngram': + return create_csv.ngram_csv(results_per_series, log_id) query_per_series, field_name, unit = extract_term_frequency_download_metadata(parameters_per_series) - return create_csv.term_frequency_csv(query_per_series, results_per_series, field_name, unit = unit) + return create_csv.term_frequency_csv(query_per_series, results_per_series, field_name, log_id, unit = unit) def term_frequency_full_data_tasks(parameters_per_series, visualization_type): @@ -110,6 +111,10 @@ def term_frequency_full_data_tasks(parameters_per_series, visualization_type): task_function(series_parameters, True) for series_parameters in parameters_unlimited ) +def ngram_full_data_tasks(ngram_parameters, dummy): + ngram_parameters['max_size_per_interval'] = None + return ngram_data_tasks(ngram_parameters) + def extract_term_frequency_download_metadata(parameters_per_series): ''' Get some relevant metadata for a term frequency request: @@ -148,16 +153,16 @@ def download_full_data(request_json, user): ''' Download the full data for a visualisation ''' - visualization_type = request_json['visualization'] task_per_type = { 'date_term_frequency': term_frequency_full_data_tasks, - 'aggregate_term_frequency': term_frequency_full_data_tasks + 'aggregate_term_frequency': term_frequency_full_data_tasks, + 'ngram': ngram_full_data_tasks, } parameters = request_json['parameters'] - corpus_name = request_json['corpus'] + corpus_name = request_json['corpus_name'] corpus = Corpus.objects.get(name=corpus_name) task = task_per_type[visualization_type](parameters, visualization_type) @@ -166,7 +171,7 @@ def download_full_data(request_json, user): make_chain = lambda : chain( task, - make_term_frequency_csv.s(parameters), + make_full_data_csv.s(visualization_type, parameters, download.id), complete_download.s(download.id), csv_data_email.s(user.email, user.username), ).on_error(complete_failed_download.s(download.id)) diff --git a/backend/download/tests/test_csv_results.py b/backend/download/tests/test_csv_results.py index b6ff8b0da..6abc899e7 100644 --- a/backend/download/tests/test_csv_results.py +++ b/backend/download/tests/test_csv_results.py @@ -45,7 +45,7 @@ def result_csv_with_highlights(csv_directory): route = 'parliament-netherlands_query=test' fields = ['speech'] - file = create_csv.search_results_csv(hits(mock_es_result), fields, route) + file = create_csv.search_results_csv(hits(mock_es_result), fields, route, 0) return file def 
test_create_csv(result_csv_with_highlights): @@ -190,7 +190,7 @@ def test_csv_encoding(ml_mock_corpus_results_csv): @pytest.fixture() def term_frequency_file(index_small_mock_corpus, csv_directory): - filename = create_csv.term_frequency_csv(mock_queries, mock_timeline_result, 'date', unit = 'year') + filename = create_csv.term_frequency_csv(mock_queries, mock_timeline_result, 'date', 0, unit = 'year') return filename @@ -208,3 +208,26 @@ def test_date_format(): for value, unit, expected in cases: assert create_csv.format_field_value(value, unit) == expected + + +mock_ngram_data = { + 'words': [ + {'label': 'ex parrot', 'data': [2, 3]}, + {'label': 'this parrot what', 'data': [4, 8]}, + {'label': 'dead parrot when', 'data': [4, 6]}, + ], + 'time_points': ['1960-1965', '1962-1967'] +} + +expected_csv_table = [ + {'date': '1960-1965', 'N-gram': 'ex parrot', 'Frequency': 2}, + {'date': '1960-1965', 'N-gram': 'this parrot what', 'Frequency': 4}, + {'date': '1960-1965', 'N-gram': 'dead parrot when', 'Frequency': 4}, + {'date': '1962-1967', 'N-gram': 'ex parrot', 'Frequency': 3}, + {'date': '1962-1967', 'N-gram': 'this parrot what', 'Frequency': 8}, + {'date': '1962-1967', 'N-gram': 'dead parrot when', 'Frequency': 6}, +] + +def test_ngram_table(): + table = create_csv.ngram_table(mock_ngram_data) + assert table == expected_csv_table \ No newline at end of file diff --git a/backend/download/tests/test_download_views.py b/backend/download/tests/test_download_views.py index 6d9993815..42607a238 100644 --- a/backend/download/tests/test_download_views.py +++ b/backend/download/tests/test_download_views.py @@ -6,7 +6,7 @@ from download import SEARCH_RESULTS_DIALECT from addcorpus.models import Corpus import io -from visualization.query import MATCH_ALL +from visualization import query from es.search import hits from tag.models import Tag, TaggedDocument @@ -48,21 +48,7 @@ def term_frequency_parameters(mock_corpus, mock_corpus_specs): # TODO: construct query from query module, which is much more convenient query_text = mock_corpus_specs['example_query'] search_field = mock_corpus_specs['content_field'] - query = { - "query": { - "bool": { - "must": { - "simple_query_string": { - "query": query_text, - "fields": [search_field], - "lenient": True, - "default_operator": "or" - } - }, - "filter": [] - } - } - } + query = mock_es_query(query_text, search_field) return { 'es_query': query, 'corpus_name': mock_corpus, @@ -78,14 +64,40 @@ def term_frequency_parameters(mock_corpus, mock_corpus_specs): 'unit': 'year', } +def ngram_parameters(mock_corpus, mock_corpus_specs): + query_text = mock_corpus_specs['example_query'] + search_field = mock_corpus_specs['content_field'] + return { + 'corpus_name': mock_corpus, + 'es_query': mock_es_query(query_text, search_field), + 'field': search_field, + 'ngram_size': 2, + 'term_position': 'any', + 'freq_compensation': True, + 'subfield': 'clean', + 'max_size_per_interval': 50, + 'number_of_ngrams': 10, + 'date_field': 'date' + } + +def mock_es_query(query_text, search_field): + q = query.MATCH_ALL + q = query.set_query_text(q, query_text) + q = query.set_search_fields(q, [search_field]) + return q + +@pytest.mark.parametrize("visualization_type, request_parameters", [('date_term_frequency', term_frequency_parameters), ('ngram', ngram_parameters)]) def test_full_data_download_view(transactional_db, admin_client, small_mock_corpus, index_small_mock_corpus, small_mock_corpus_specs, celery_worker, - csv_directory): - parameters = term_frequency_parameters(small_mock_corpus, 
small_mock_corpus_specs) + csv_directory, visualization_type, request_parameters): + parameters = request_parameters(small_mock_corpus, small_mock_corpus_specs) + if visualization_type != 'ngram': + # timeline and histogram expect a series of parameters + parameters = [parameters] request_json = { - 'visualization': 'date_term_frequency', - 'parameters': [parameters], - 'corpus': small_mock_corpus + 'visualization': visualization_type, + 'parameters': parameters, + 'corpus_name': small_mock_corpus } response = admin_client.post( '/api/download/full_data', @@ -160,7 +172,7 @@ def test_csv_download_view(admin_client, finished_download): def some_document_id(admin_client, small_mock_corpus, index_small_mock_corpus): search_response = admin_client.post( f'/api/es/{small_mock_corpus}/_search', - {'es_query': MATCH_ALL}, + {'es_query': query.MATCH_ALL}, content_type='application/json' ) @@ -188,7 +200,7 @@ def test_download_with_tag(db, admin_client, small_mock_corpus, index_small_mock encoding = 'utf-8' download_request_json = { 'corpus': small_mock_corpus, - 'es_query': MATCH_ALL, + 'es_query': query.MATCH_ALL, 'tags': [tag_on_some_document.id], 'fields': ['date','content'], 'size': 3, diff --git a/backend/download/tests/test_file_storage.py b/backend/download/tests/test_file_storage.py index 767b23f46..0f96c30b7 100644 --- a/backend/download/tests/test_file_storage.py +++ b/backend/download/tests/test_file_storage.py @@ -1,7 +1,13 @@ +import os from download import tasks +from download.conftest import all_results_request_json +from download.models import Download -def test_format_route_to_filename(): - route = '/search/mock-corpus;query=test' - request_json = { 'route': route } - output = tasks.create_query(request_json) - assert output == 'mock-corpus_query=test' +def test_download_filename(auth_user, small_mock_corpus, index_small_mock_corpus, small_mock_corpus_specs): + request = all_results_request_json(small_mock_corpus, small_mock_corpus_specs) + tasks.download_search_results(request, auth_user).apply() + download = Download.objects.latest('completed') + _, filename = os.path.split(download.filename) + name, ext = os.path.splitext(filename) + assert name == str(download.id) + assert ext == '.csv' diff --git a/backend/download/tests/test_full_data.py b/backend/download/tests/test_full_data.py index 385fb701b..b3334ee39 100644 --- a/backend/download/tests/test_full_data.py +++ b/backend/download/tests/test_full_data.py @@ -21,10 +21,11 @@ def test_timeline_full_data(small_mock_corpus, index_small_mock_corpus, small_mo ], 'unit': 'year' }] - - group = tasks.term_frequency_full_data_tasks(full_data_parameters, 'date_term_frequency') + visualization_type = 'date_term_frequency' + group = tasks.term_frequency_full_data_tasks(full_data_parameters, visualization_type) results = group.apply().get() - filename = tasks.make_term_frequency_csv(results, full_data_parameters) + log_id = 0 # fake ID + filename = tasks.make_full_data_csv(results, visualization_type, full_data_parameters, log_id) with open(filename) as f: reader = csv.DictReader(f) diff --git a/backend/download/views.py b/backend/download/views.py index 273bc672f..3d4a4e326 100644 --- a/backend/download/views.py +++ b/backend/download/views.py @@ -21,14 +21,15 @@ logger = logging.getLogger() -def send_csv_file(directory, filename, download_type, encoding, format=None): +def send_csv_file(download, directory, encoding, format=None): ''' Perform final formatting and send a CSV file as a FileResponse ''' converted_filename = 
convert_csv.convert_csv( - directory, filename, download_type, encoding, format) + directory, download.filename, download.download_type, encoding, format) path = os.path.join(directory, converted_filename) - return FileResponse(open(path, 'rb'), filename=filename, as_attachment=True) + + return FileResponse(open(path, 'rb'), filename=download.descriptive_filename(), as_attachment=True) class ResultsDownloadView(APIView): ''' @@ -51,13 +52,13 @@ def post(self, request, *args, **kwargs): handle_tags_in_request(request) search_results = es_download.normal_search( corpus_name, request.data['es_query'], request.data['size']) - csv_path = tasks.make_csv(search_results, request.data) - directory, filename = os.path.split(csv_path) - # Create download for download history download = Download.objects.create( download_type='search_results', corpus=corpus, parameters=request.data, user=request.user) + csv_path = tasks.make_csv(search_results, request.data, download.id) + directory, filename = os.path.split(csv_path) + # mark the download as completed in the download history download.complete(filename=filename) - return send_csv_file(directory, filename, 'search_results', request.data['encoding']) + return send_csv_file(download, directory, request.data['encoding']) except Exception as e: logger.error(e) raise APIException(detail = 'Download failed: could not generate csv file') @@ -97,10 +98,10 @@ class FullDataDownloadTaskView(APIView): permission_classes = [IsAuthenticated, CorpusAccessPermission] def post(self, request, *args, **kwargs): - check_json_keys(request, ['visualization', 'parameters', 'corpus']) + check_json_keys(request, ['visualization', 'parameters', 'corpus_name']) visualization_type = request.data['visualization'] - known_visualisations = ['date_term_frequency', 'aggregate_term_frequency'] + known_visualisations = ['date_term_frequency', 'aggregate_term_frequency', 'ngram'] if visualization_type not in known_visualisations: raise ParseError(f'Download failed: unknown visualisation type "{visualization_type}"') @@ -138,13 +139,13 @@ def get(self, request, *args, **kwargs): encoding = request.query_params.get('encoding', 'utf-8') format = request.query_params.get('table_format', None) - record = Download.objects.get(id=id) - if not record.user == request.user: + download = Download.objects.get(id=id) + if not download.user == request.user: raise PermissionDenied(detail='User has no access to this download') directory = settings.CSV_FILES_PATH - if not os.path.isfile(os.path.join(directory, record.filename)): + if not os.path.isfile(os.path.join(directory, download.filename)): raise NotFound(detail='File does not exist') - return send_csv_file(directory, record.filename, record.download_type, encoding, format) + return send_csv_file(download, directory, encoding, format) diff --git a/backend/es/download.py b/backend/es/download.py index d4da32a63..49e2c5614 100644 --- a/backend/es/download.py +++ b/backend/es/download.py @@ -43,6 +43,6 @@ def normal_search(corpus, query_model, size): result = search( corpus = corpus, query_model=query_model, - size = size, + size=size, ) return hits(result) diff --git a/backend/users/admin.py b/backend/users/admin.py index cf2ee2107..ec9ee1c56 100644 --- a/backend/users/admin.py +++ b/backend/users/admin.py @@ -1,5 +1,11 @@ from django.contrib import admin -from .models import CustomUser +from .models import CustomUser, UserProfile from django.contrib.auth.admin import UserAdmin -admin.site.register(CustomUser, UserAdmin) +class 
InlineUserProfileAdmin(admin.StackedInline): + model = UserProfile + +class CustomUserAdmin(UserAdmin): + inlines = [InlineUserProfileAdmin] + +admin.site.register(CustomUser, CustomUserAdmin) diff --git a/backend/users/apps.py b/backend/users/apps.py index 746cf1ff6..232707d8d 100644 --- a/backend/users/apps.py +++ b/backend/users/apps.py @@ -1,6 +1,5 @@ from django.apps import AppConfig - class UsersConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'users' diff --git a/backend/users/migrations/0004_userprofile.py b/backend/users/migrations/0004_userprofile.py new file mode 100644 index 000000000..b1237d8d4 --- /dev/null +++ b/backend/users/migrations/0004_userprofile.py @@ -0,0 +1,34 @@ +# Generated by Django 4.1.9 on 2023-08-09 11:41 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + +def add_user_profile(apps, schema_editor): + CustomUser = apps.get_model('users', 'CustomUser') + UserProfile = apps.get_model('users', 'UserProfile') + db_alias = schema_editor.connection.alias + + for user in CustomUser.objects.all(): + UserProfile.objects.get_or_create(user=user) + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0003_sitedomain'), + ] + + operations = [ + migrations.CreateModel( + name='UserProfile', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('enable_search_history', models.BooleanField(default=True, help_text='Whether to save the search history of this user')), + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.RunPython( + add_user_profile, + reverse_code=migrations.RunPython.noop, + ) + ] diff --git a/backend/users/models.py b/backend/users/models.py index 04abbe933..a0ed7b049 100644 --- a/backend/users/models.py +++ b/backend/users/models.py @@ -18,3 +18,24 @@ def has_access(self, corpus_name): # check if any corpus added to the user's group(s) match the corpus name return any(corpus for group in self.groups.all() for corpus in group.corpora.filter(name=corpus_name)) + + +class UserProfile(models.Model): + ''' User information that is not relevant to authentication. + E.g. settings, preferences, optional personal information. 
+ ''' + + user = models.OneToOneField( + to=CustomUser, + on_delete=models.CASCADE, + related_name='profile', + ) + + enable_search_history = models.BooleanField( + help_text='Whether to save the search history of this user', + default=True, + ) + + def __str__(self): + return f'Profile of {self.user.username}' + diff --git a/backend/users/serializers.py b/backend/users/serializers.py index 06713819e..28b04e199 100644 --- a/backend/users/serializers.py +++ b/backend/users/serializers.py @@ -2,15 +2,32 @@ from dj_rest_auth.registration.serializers import RegisterSerializer from rest_framework import serializers from django.db import transaction +from users.models import UserProfile + + +class UserProfileSerializer(serializers.ModelSerializer): + class Meta: + model = UserProfile + fields = ['enable_search_history'] class CustomUserDetailsSerializer(UserDetailsSerializer): is_admin = serializers.BooleanField(source='is_staff') + profile = UserProfileSerializer() class Meta(UserDetailsSerializer.Meta): fields = ('id', 'username', 'email', 'saml', - 'download_limit', 'is_admin') + 'download_limit', 'is_admin', 'profile') + + + def update(self, instance, validated_data): + profile_data = validated_data.pop('profile', None) + + if profile_data: + profile_serializer = UserProfileSerializer() + profile_serializer.update(instance.profile, profile_data) + return super().update(instance, validated_data) class CustomRegistrationSerializer(RegisterSerializer): saml = serializers.BooleanField(default=False) diff --git a/backend/users/signals.py b/backend/users/signals.py index 6a4119254..2939086e6 100644 --- a/backend/users/signals.py +++ b/backend/users/signals.py @@ -3,7 +3,7 @@ from django.db.models.signals import post_save from django.dispatch import receiver -from .models import CustomUser +from .models import CustomUser, UserProfile @receiver(post_save, sender=CustomUser) @@ -28,3 +28,10 @@ def ensure_admin_email(sender, instance, created, **kwargs): print(f'Automatically verified email {instance.email} for {instance}') except Exception as e: print('Failed to automatically verify admin email', e, sep='\n') + +@receiver(post_save, sender=CustomUser) +def create_user_profile(sender, instance, created, **kwargs): + if created: + UserProfile.objects.create( + user=instance + ) diff --git a/backend/users/tests/test_user_models.py b/backend/users/tests/test_user_models.py index f6ac7c7be..3d9153b89 100644 --- a/backend/users/tests/test_user_models.py +++ b/backend/users/tests/test_user_models.py @@ -10,11 +10,13 @@ def test_user_crud(db, user_credentials, admin_credentials): assert len(User.objects.all()) == 2 assert admin.username == 'admin' assert user.email == 'basicuser@ianalyzer.com' + assert admin.profile admin.is_superuser = True admin.is_staff = True admin.save() + admin.delete() user.delete() diff --git a/backend/users/tests/test_user_serializer.py b/backend/users/tests/test_user_serializer.py index 700716486..bd3858e82 100644 --- a/backend/users/tests/test_user_serializer.py +++ b/backend/users/tests/test_user_serializer.py @@ -11,7 +11,10 @@ def test_user_serializer(auth_client, 'email': user_credentials['email'], 'download_limit': 10000, 'is_admin': False, - 'saml': False + 'saml': False, + 'profile': { + 'enable_search_history': True, + }, } @@ -24,6 +27,20 @@ def test_admin_serializer(admin_client, admin_credentials): 'email': admin_credentials['email'], 'download_limit': 10000, 'is_admin': True, - 'saml': False - + 'saml': False, + 'profile': { + 'enable_search_history': True, + }, } + 
+def test_user_updates(auth_client): + route = '/users/user/' + details = lambda: auth_client.get(route) + search_history_enabled = lambda: details().data.get('profile').get('enable_search_history') + + assert search_history_enabled() + + response = auth_client.patch(route, {'profile': {'enable_search_history': False}}, content_type='application/json') + assert response.status_code == 200 + + assert not search_history_enabled() diff --git a/backend/users/views.py b/backend/users/views.py index 778d774ca..0e5268c53 100644 --- a/backend/users/views.py +++ b/backend/users/views.py @@ -6,8 +6,10 @@ from rest_framework.response import Response from rest_framework.status import HTTP_404_NOT_FOUND from rest_framework.views import APIView - +from rest_framework.viewsets import ModelViewSet +from rest_framework.permissions import IsAuthenticated from djangosaml2.views import LogoutView +from .serializers import CustomUserDetailsSerializer def redirect_confirm(request, key): @@ -44,3 +46,8 @@ class SamlLogoutView(LogoutView): @csrf_exempt def post(self, request, *args, **kwargs): return super().post(request, *args, **kwargs) + +class UserViewSet(ModelViewSet): + permission_classes = [IsAuthenticated] + serializer_class = CustomUserDetailsSerializer + diff --git a/backend/visualization/ngram.py b/backend/visualization/ngram.py index 3e568a612..07d54108b 100644 --- a/backend/visualization/ngram.py +++ b/backend/visualization/ngram.py @@ -5,6 +5,7 @@ from addcorpus.models import CorpusConfiguration from datetime import datetime from es.search import get_index, search +from es.download import scroll from ianalyzer.elasticsearch import elasticsearch from visualization import query, termvectors @@ -50,7 +51,7 @@ def get_total_time_interval(es_query, corpus): def get_time_bins(es_query, corpus): """Wide bins for a query. 
Depending on the total time range of the query, time intervals are 10 years (>100 yrs), 5 years (100-20 yrs), or 1 year (<20 yrs).""" - + min_date, max_date = get_total_time_interval(es_query, corpus) min_year, max_year = min_date.year, max_date.year time_range = max_year - min_year @@ -77,9 +78,9 @@ def get_time_bins(es_query, corpus): return bins -def tokens_by_time_interval(corpus, es_query, field, bin, ngram_size, term_position, freq_compensation, subfield, max_size_per_interval, date_field): - index = get_index(corpus) - client = elasticsearch(corpus) +def tokens_by_time_interval(corpus_name, es_query, field, bin, ngram_size, term_position, freq_compensation, subfield, max_size_per_interval, date_field, **kwargs): + index = get_index(corpus_name) + client = elasticsearch(corpus_name) positions_dict = { 'any': list(range(ngram_size)), 'first': [0], @@ -100,21 +101,21 @@ def tokens_by_time_interval(corpus, es_query, field, bin, ngram_size, term_posit date_filter = query.make_date_filter(start_date, end_date, date_field) narrow_query = query.add_filter(es_query, date_filter) #search for the query text - search_results = search( - corpus=corpus, - query_model = narrow_query, - client = client, - size = max_size_per_interval, + search_results, _total = scroll( + corpus=corpus_name, + query_model=narrow_query, + client=client, + download_size=max_size_per_interval, ) bin_ngrams = Counter() - for hit in search_results['hits']['hits']: + for hit in search_results: identifier = hit['_id'] # get the term vectors for the hit result = client.termvectors( index=index, id=identifier, term_statistics=freq_compensation, - fields = [field] + fields=[field] ) terms = termvectors.get_terms(result, field) if terms: diff --git a/backend/visualization/tasks.py index 3b7dcf88a..51658115d 100644 --- a/backend/visualization/tasks.py +++ b/backend/visualization/tasks.py @@ -25,7 +25,7 @@ def ngram_data_tasks(request_json): return chord(group([ get_ngram_data_bin.s( - corpus=corpus, + corpus_name=corpus, es_query=es_query, field=request_json['field'], bin=b, @@ -40,7 +40,7 @@ def ngram_data_tasks(request_json): ]), integrate_ngram_results.s( number_of_ngrams=request_json['number_of_ngrams'] ) - )() + ) @shared_task() def get_histogram_term_frequency_bin(es_query, corpus_name, field_name, field_value, size, include_query_in_result = False): diff --git a/backend/visualization/tests/test_ngrams.py index 93de4bbdb..16a797d3d 100644 --- a/backend/visualization/tests/test_ngrams.py +++ b/backend/visualization/tests/test_ngrams.py @@ -111,10 +111,10 @@ def test_top_10_ngrams(): for w in target_data } assert dataset_relative['data'] == relative_frequencies[word] -def get_binned_results(corpus, query, time_bins=CENTURY_BINS, ngram_size=2, term_position='any', freq_compensation=None, subfield='none', max_size_per_interval=20, date_field='date'): +def get_binned_results(corpus_name, query, time_bins=CENTURY_BINS, ngram_size=2, term_position='any', freq_compensation=None, subfield='none', max_size_per_interval=20, date_field='date'): return [ ngram.tokens_by_time_interval( - corpus, query, 'content', bin, ngram_size, term_position, freq_compensation, subfield, max_size_per_interval, date_field) + corpus_name, query, 'content', bin, ngram_size, term_position, freq_compensation, subfield, max_size_per_interval, date_field) for bin in time_bins ] diff --git a/backend/visualization/views.py index 
034a7d584..c608b784e 100644 --- a/backend/visualization/views.py +++ b/backend/visualization/views.py @@ -57,7 +57,7 @@ def post(self, request, *args, **kwargs): try: handle_tags_in_request(request) - chord = tasks.ngram_data_tasks(request.data) + chord = tasks.ngram_data_tasks(request.data)() subtasks = [chord, *chord.parent.children] return Response({'task_ids': [task.id for task in subtasks]}) except Exception as e: diff --git a/frontend/src/app/app.module.ts b/frontend/src/app/app.module.ts index dd7e5dcd4..fb5f2acd6 100644 --- a/frontend/src/app/app.module.ts +++ b/frontend/src/app/app.module.ts @@ -37,6 +37,8 @@ import { ResetPasswordComponent } from './login/reset-password/reset-password.co import { VerifyEmailComponent } from './login/verify-email/verify-email.component'; import { ManualComponent } from './manual/manual.component'; import { ManualModule } from './manual/manual.module'; +import { SettingsComponent } from './settings/settings.component'; +import { SettingsModule } from './settings/settings.module'; import { MenuComponent } from './menu/menu.component'; import { NotificationsComponent } from './notifications/notifications.component'; import { PrivacyComponent } from './privacy/privacy.component'; @@ -116,6 +118,11 @@ export const appRoutes: Routes = [ path: 'confirm-email/:key', component: VerifyEmailComponent, }, + { + path: 'settings', + component: SettingsComponent, + canActivate: [LoggedOnGuard], + }, { path: '', redirectTo: 'home', @@ -144,6 +151,7 @@ export const imports: any[] = [ ManualModule, MenuModule, SearchModule, + SettingsModule, WordModelsModule, RouterModule.forRoot(appRoutes, { relativeLinkResolution: 'legacy' }), ]; diff --git a/frontend/src/app/document-view/document-view.component.html b/frontend/src/app/document-view/document-view.component.html index dde15325d..c7505664b 100644 --- a/frontend/src/app/document-view/document-view.component.html +++ b/frontend/src/app/document-view/document-view.component.html @@ -29,7 +29,7 @@
- + diff --git a/frontend/src/app/document-view/document-view.component.spec.ts b/frontend/src/app/document-view/document-view.component.spec.ts index 863dd2631..6fa470a3a 100644 --- a/frontend/src/app/document-view/document-view.component.spec.ts +++ b/frontend/src/app/document-view/document-view.component.spec.ts @@ -41,4 +41,11 @@ describe('DocumentViewComponent', () => { const element = debug[0].nativeElement; expect(element.textContent).toBe('Hello world!'); }); + + it('should create tabs', () => { + const debug = fixture.debugElement.queryAll(By.css('a[role=tab]')); + expect(debug.length).toBe(2); + expect(debug[0].attributes['id']).toBe('tab-speech'); + expect(debug[1].attributes['id']).toBe('tab-scan'); + }); }); diff --git a/frontend/src/app/history/download-history/download-history.component.ts b/frontend/src/app/history/download-history/download-history.component.ts index 24b495aab..bf926fdcb 100644 --- a/frontend/src/app/history/download-history/download-history.component.ts +++ b/frontend/src/app/history/download-history/download-history.component.ts @@ -39,7 +39,8 @@ export class DownloadHistoryComponent extends HistoryDirective implements OnInit const displayNames = { search_results: 'Search results', date_term_frequency: 'Term frequency', - aggregate_term_frequency: 'Term frequency' + aggregate_term_frequency: 'Term frequency', + ngram: 'Neighbouring words' // timeline/histogram distinction is relevant for backend but not for the user }; return displayNames[type]; diff --git a/frontend/src/app/menu/menu.component.ts b/frontend/src/app/menu/menu.component.ts index e7645c1e3..c031b3ed8 100644 --- a/frontend/src/app/menu/menu.component.ts +++ b/frontend/src/app/menu/menu.component.ts @@ -104,6 +104,13 @@ export class MenuComponent implements OnDestroy, OnInit { this.router.navigate(['download-history']); }, }, + { + label: 'Settings', + icon: 'fa fa-cog', + command: (click) => { + this.router.navigate(['settings']); + } + }, ...(this.isAdmin ? [ { diff --git a/frontend/src/app/models/search-results.ts b/frontend/src/app/models/search-results.ts index 150df3c74..a9d5edbff 100644 --- a/frontend/src/app/models/search-results.ts +++ b/frontend/src/app/models/search-results.ts @@ -102,7 +102,7 @@ export type TermFrequencyDownloadParameters = DateTermFrequencyParameters[] | Ag export type LimitedResultsDownloadParameters = ResultsDownloadParameters & { size: number } & DownloadOptions; -export type DownloadType = 'search_results' | 'aggregate_term_frequency' | 'date_term_frequency'; +export type DownloadType = 'search_results' | 'aggregate_term_frequency' | 'date_term_frequency' | 'ngram'; export type DownloadStatus = 'done' | 'working' | 'error'; export type DownloadParameters = TermFrequencyDownloadParameters | ResultsDownloadParameters; diff --git a/frontend/src/app/models/user.ts b/frontend/src/app/models/user.ts index 9d266096c..481558979 100644 --- a/frontend/src/app/models/user.ts +++ b/frontend/src/app/models/user.ts @@ -1,5 +1,9 @@ import * as _ from 'lodash'; +interface UserProfileResponse { + enable_search_history: boolean; +} + export interface UserResponse { id: number; username: string; @@ -7,6 +11,7 @@ export interface UserResponse { download_limit: number; is_admin: boolean; saml: boolean; + profile: UserProfileResponse; } export class User { @@ -15,6 +20,7 @@ export class User { public name, public isAdmin: boolean, public downloadLimit: number = 0, // The download limit for this user, will be 0 if there is no limit. 
- public isSamlLogin: boolean + public isSamlLogin: boolean, + public enableSearchHistory: boolean, ) {} } diff --git a/frontend/src/app/services/api.service.ts b/frontend/src/app/services/api.service.ts index 383d10661..47ced1eb4 100644 --- a/frontend/src/app/services/api.service.ts +++ b/frontend/src/app/services/api.service.ts @@ -2,7 +2,7 @@ import { Injectable } from '@angular/core'; import { HttpClient } from '@angular/common/http'; -import { timer } from 'rxjs'; +import { Observable, timer } from 'rxjs'; import { filter, switchMap, take, tap } from 'rxjs/operators'; import { ImageInfo } from '../image-view/image-view.component'; import { @@ -53,6 +53,11 @@ export class ApiService { constructor(private http: HttpClient) {} + + public deleteSearchHistory(): Observable { + return this.http.post('/api/search_history/delete_all/', {}); + } + // General / misc public saveQuery(options: QueryDb) { return this.http.post('/api/search_history/', options).toPromise(); @@ -170,12 +175,18 @@ export class ApiService { | { visualization: 'date_term_frequency'; parameters: DateTermFrequencyParameters[]; - corpus: string; + corpus_name: string; } | { visualization: 'aggregate_term_frequency'; parameters: AggregateTermFrequencyParameters[]; - corpus: string; + corpus_name: string; + } + | + { + visualization: 'ngram'; + parameters: NGramRequestParameters; + corpus_name: string; } ): Promise { const url = this.apiRoute(this.downloadApiURL, 'full_data'); @@ -294,6 +305,14 @@ export class ApiService { ); } + /** send PATCH request to update settings for the user */ + public updateUserSettings(details: Partial): Observable { + return this.http.patch( + this.authApiRoute('user'), + details + ); + } + public solisLogin(data: any): Promise { return this.http.get('/api/solislogin').toPromise(); } diff --git a/frontend/src/app/services/auth.service.spec.ts b/frontend/src/app/services/auth.service.spec.ts index c81025273..63ca3a5d8 100644 --- a/frontend/src/app/services/auth.service.spec.ts +++ b/frontend/src/app/services/auth.service.spec.ts @@ -24,4 +24,5 @@ describe('AuthService', () => { it('should be created', () => { expect(service).toBeTruthy(); }); + }); diff --git a/frontend/src/app/services/auth.service.ts b/frontend/src/app/services/auth.service.ts index 0d60c73e2..4852e35a3 100644 --- a/frontend/src/app/services/auth.service.ts +++ b/frontend/src/app/services/auth.service.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/member-ordering */ import { Injectable, OnDestroy } from '@angular/core'; import { Router } from '@angular/router'; import { @@ -16,6 +17,8 @@ import { environment } from '../../environments/environment'; import { User, UserResponse } from '../models'; import { ApiService } from './api.service'; import { SessionService } from './session.service'; +import * as _ from 'lodash'; +import { encodeUserData, parseUserData } from '../utils/user'; @Injectable({ providedIn: 'root', @@ -77,7 +80,7 @@ export class AuthService implements OnDestroy { .getUser() .pipe(takeUntil(this.destroy$)) .subscribe( - (result) => this.setAuth(this.transformUserResponse(result)), + (result) => this.setAuth(parseUserData(result)), () => this.purgeAuth() ); } @@ -91,41 +94,6 @@ export class AuthService implements OnDestroy { return Promise.resolve(currentUser); } - /** - * Transforms backend user response to User object - * - * @param result User response data - * @returns User object - */ - private transformUserResponse( - result: UserResponse - ): User { - return new User( - result.id, - 
result.username, - result.is_admin, - result.download_limit == null ? 0 : result.download_limit, - result.saml - ); - } - - /** - * Deserializes localStorage user - * - * @param serializedUser serialized currentUser - * @returns User object - */ - private deserializeUser(serializedUser: string): User { - const parsed = JSON.parse(serializedUser); - return new User( - parsed['id'], - parsed['username'], - parsed['is_admin'], - parsed['download_limit'], - parsed['isSamlLogin'] - ); - } - checkUser(): Observable { return this.apiService.getUser(); } @@ -137,7 +105,7 @@ export class AuthService implements OnDestroy { const loginRequest$ = this.apiService.login(username, password); return loginRequest$.pipe( mergeMap(() => this.checkUser()), - tap((res) => this.setAuth(this.transformUserResponse(res))), + tap((res) => this.setAuth(parseUserData(res))), catchError((error) => { console.error(error); return throwError(error); @@ -200,4 +168,15 @@ export class AuthService implements OnDestroy { newPassword2 ); } + + public updateSettings(update: Partial) { + return this.apiService.updateUserSettings(encodeUserData(update)).pipe( + tap((res) => this.setAuth(parseUserData(res))), + catchError((error) => { + console.error(error); + return throwError(error); + }) + ); + } + } diff --git a/frontend/src/app/services/corpus.service.spec.ts b/frontend/src/app/services/corpus.service.spec.ts index 26364246a..3d4d30dff 100644 --- a/frontend/src/app/services/corpus.service.spec.ts +++ b/frontend/src/app/services/corpus.service.spec.ts @@ -199,6 +199,8 @@ describe('CorpusService', () => { expect(items.length).toBe(1); const corpus = _.first(items); + expect(corpus.scan_image_type).toBe('png'); + const fieldData = [ { description: 'Banking concern to which the report belongs.', @@ -275,6 +277,7 @@ describe('CorpusService', () => { expect(result[key]).toEqual(expected[key]); }); }); + }); }); }); diff --git a/frontend/src/app/services/search.service.spec.ts b/frontend/src/app/services/search.service.spec.ts index 1d466d068..0b4e67275 100644 --- a/frontend/src/app/services/search.service.spec.ts +++ b/frontend/src/app/services/search.service.spec.ts @@ -12,6 +12,10 @@ import { SessionService } from './session.service'; import { WordmodelsService } from './wordmodels.service'; import { WordmodelsServiceMock } from '../../mock-data/wordmodels'; import { HttpClientTestingModule } from '@angular/common/http/testing'; +import { QueryModel } from '../models'; +import { mockCorpus } from '../../mock-data/corpus'; +import { AuthService } from './auth.service'; +import { AuthServiceMock } from '../../mock-data/auth'; describe('SearchService', () => { beforeEach(() => { @@ -23,6 +27,7 @@ describe('SearchService', () => { providers: [ SearchService, ApiRetryService, + { provide: AuthService, useValue: new AuthServiceMock() }, { provide: ApiService, useValue: new ApiServiceMock() }, { provide: ElasticSearchService, @@ -30,10 +35,6 @@ describe('SearchService', () => { }, QueryService, SessionService, - { - provide: WordmodelsService, - useValue: new WordmodelsServiceMock(), - }, ], }); }); @@ -41,4 +42,12 @@ describe('SearchService', () => { it('should be created', inject([SearchService], (service: SearchService) => { expect(service).toBeTruthy(); })); + + it('should search', inject([SearchService], async (service: SearchService) => { + const queryModel = new QueryModel(mockCorpus); + const results = await service.search(queryModel); + expect(results).toBeTruthy(); + expect(results.total.value).toBeGreaterThan(0); + })); + 
}); diff --git a/frontend/src/app/services/search.service.ts index e82b7bc58..4f446a0b9 100644 --- a/frontend/src/app/services/search.service.ts +++ b/frontend/src/app/services/search.service.ts @@ -5,7 +5,7 @@ import { ElasticSearchService } from './elastic-search.service'; import { QueryService } from './query.service'; import { Corpus, QueryModel, SearchResults, - AggregateQueryFeedback, QueryDb + AggregateQueryFeedback, QueryDb, User } from '../models/index'; import { AuthService } from './auth.service'; @@ -41,21 +41,15 @@ export class SearchService { public async search(queryModel: QueryModel ): Promise<SearchResults> { const user = await this.authService.getCurrentUserPromise(); - const esQuery = queryModel.toEsQuery(); - const query = new QueryDb(esQuery, queryModel.corpus.name, user.id); - query.started = new Date(Date.now()); - const results = await this.elasticSearchService.search( - queryModel - ); - query.total_results = results.total.value; - query.completed = new Date(Date.now()); - this.queryService.save(query); + const request = () => this.elasticSearchService.search(queryModel); - return { - fields: queryModel.corpus.fields.filter((field) => field.resultsOverview), - total: results.total, - documents: results.documents, - } as SearchResults; + const resultsPromise = user.enableSearchHistory ? + this.searchAndSave(queryModel, user, request) : + request(); + + return resultsPromise.then(results => + this.filterResultsFields(results, queryModel) + ); } public async aggregateSearch( @@ -84,4 +78,40 @@ ); } + /** execute a search request and save the action to the search history log */ + private searchAndSave(queryModel: QueryModel, user: User, searchRequest: () => Promise<SearchResults>): Promise<SearchResults> { + return this.recordTime(searchRequest).then(([results, started, completed]) => { + this.saveQuery(queryModel, user, results, started, completed); + return results; + }); + } + + /** execute a promise while noting the start and end time */ + private recordTime<T>(makePromise: () => Promise<T>): Promise<[result: T, started: Date, completed: Date]> { + const started = new Date(Date.now()); + + return makePromise().then(result => { + const completed = new Date(Date.now()); + return [result, started, completed]; + }); + } + + /** save query data to search history */ + private saveQuery(queryModel: QueryModel, user: User, results: SearchResults, started: Date, completed: Date) { + const esQuery = queryModel.toEsQuery(); + const query = new QueryDb(esQuery, queryModel.corpus.name, user.id); + query.started = started; + query.total_results = results.total.value; + query.completed = completed; + this.queryService.save(query); + } + + /** filter search results for fields included in resultsOverview of the corpus */ + private filterResultsFields(results: SearchResults, queryModel: QueryModel): SearchResults { + return { + fields: queryModel.corpus.fields.filter((field) => field.resultsOverview), + total: results.total, + documents: results.documents, + } as SearchResults; + } } diff --git a/frontend/src/app/services/visualization.service.ts b/frontend/src/app/services/visualization.service.ts index 5b3d4245d..945f379bf 100644 --- a/frontend/src/app/services/visualization.service.ts +++ b/frontend/src/app/services/visualization.service.ts @@ -4,13 +4,13 @@ import { AggregateTermFrequencyParameters, Corpus, DateTermFrequencyParameters, + NGramRequestParameters, NgramParameters, QueryModel, TaskResult, TimeCategory, } from '../models'; import { ApiService 
diff --git a/frontend/src/app/services/visualization.service.ts b/frontend/src/app/services/visualization.service.ts
index 5b3d4245d..945f379bf 100644
--- a/frontend/src/app/services/visualization.service.ts
+++ b/frontend/src/app/services/visualization.service.ts
@@ -4,13 +4,13 @@ import {
     AggregateTermFrequencyParameters,
     Corpus,
     DateTermFrequencyParameters,
+    NGramRequestParameters,
     NgramParameters,
     QueryModel,
     TaskResult,
     TimeCategory,
 } from '../models';
 import { ApiService } from './api.service';
-import { ElasticSearchService } from './elastic-search.service';

 @Injectable({
     providedIn: 'root'
 })
@@ -71,17 +71,14 @@ export class VisualizationService {
         };
     }

-    public async dateTermFrequencySearch(
-        corpus: Corpus, queryModel: QueryModel, fieldName: string, bins: {size: number; start_date: Date; end_date?: Date}[],
-        unit: TimeCategory,
-    ): Promise<TaskResult> {
-        const params = this.makeDateTermFrequencyParameters(corpus, queryModel, fieldName, bins, unit);
-        return this.apiService.getDateTermFrequency(params);
-    }
-
-    getNgramTasks(queryModel: QueryModel, corpus: Corpus, field: string, params: NgramParameters): Promise<TaskResult> {
+    public makeNgramRequestParameters(
+        corpus: Corpus,
+        queryModel: QueryModel,
+        field: string,
+        params: NgramParameters
+    ): NGramRequestParameters {
         const esQuery = queryModel.toEsQuery();
-        return this.apiService.ngramTasks({
+        return {
             es_query: esQuery,
             corpus_name: corpus.name,
             field,
@@ -91,8 +88,21 @@
             subfield: params.analysis,
             max_size_per_interval: params.maxDocuments,
             number_of_ngrams: params.numberOfNgrams,
-            date_field: params.dateField,
-        });
+            date_field: params.dateField
+        };
+    }
+
+    public async dateTermFrequencySearch(
+        corpus: Corpus, queryModel: QueryModel, fieldName: string, bins: {size: number; start_date: Date; end_date?: Date}[],
+        unit: TimeCategory,
+    ): Promise<TaskResult> {
+        const params = this.makeDateTermFrequencyParameters(corpus, queryModel, fieldName, bins, unit);
+        return this.apiService.getDateTermFrequency(params);
+    }
+
+    getNgramTasks(queryModel: QueryModel, corpus: Corpus, field: string, params: NgramParameters): Promise<TaskResult> {
+        const ngramRequestParams = this.makeNgramRequestParameters(corpus, queryModel, field, params);
+        return this.apiService.ngramTasks(ngramRequestParams);
+    }
diff --git a/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.html b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.html
new file mode 100644
index 000000000..45f55cec3
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.html
@@ -0,0 +1,25 @@
+
+
+ +
+
+
+
diff --git a/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.scss b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.scss
new file mode 100644
index 000000000..e69de29bb
diff --git a/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.spec.ts b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.spec.ts
new file mode 100644
index 000000000..e3094c04e
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.spec.ts
@@ -0,0 +1,23 @@
+import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
+
+import { DeleteSearchHistoryComponent } from './delete-search-history.component';
+import { commonTestBed } from '../../../common-test-bed';
+
+describe('DeleteSearchHistoryComponent', () => {
+    let component: DeleteSearchHistoryComponent;
+    let fixture: ComponentFixture<DeleteSearchHistoryComponent>;
+
+    beforeEach(waitForAsync(() => {
+        commonTestBed().testingModule.compileComponents();
+    }));
+
+    beforeEach(() => {
+        fixture = TestBed.createComponent(DeleteSearchHistoryComponent);
+        component = fixture.componentInstance;
+        fixture.detectChanges();
+    });
+
+    it('should create', () => {
+        expect(component).toBeTruthy();
+    });
+});
diff --git a/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.ts b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.ts
new file mode 100644
index 000000000..c44008584
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/delete-search-history/delete-search-history.component.ts
@@ -0,0 +1,26 @@
+import { Component } from '@angular/core';
+import { faTrash } from '@fortawesome/free-solid-svg-icons';
+import { ApiService, NotificationService } from '../../../services';
+import { tap } from 'rxjs/operators';
+
+@Component({
+    selector: 'ia-delete-search-history',
+    templateUrl: './delete-search-history.component.html',
+    styleUrls: ['./delete-search-history.component.scss']
+})
+export class DeleteSearchHistoryComponent {
+    faTrash = faTrash;
+
+    showConfirm = false;
+
+    constructor(private apiService: ApiService, private notificationService: NotificationService) { }
+
+    deleteHistory() {
+        this.apiService.deleteSearchHistory().pipe(
+            tap(() => this.showConfirm = false)
+        ).subscribe(
+            res => this.notificationService.showMessage('Search history deleted', 'success'),
+            err => this.notificationService.showMessage('Deleting search history failed', 'danger'),
+        );
+    }
+}
diff --git a/frontend/src/app/settings/search-history-settings/search-history-settings.component.html b/frontend/src/app/settings/search-history-settings/search-history-settings.component.html
new file mode 100644
index 000000000..16177e1db
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/search-history-settings.component.html
@@ -0,0 +1,25 @@
+

Search history

+ +
+ More information + +

+ Saving your search history allows you to look up earlier queries. + You can view your saved history on the search history page. + It can be used to quickly get back to earlier queries, or to log your research process. +

+

+ Search histories are stored on the I-analyzer server. + They are not shared with others, but developers may use search history + statistics to assess the level of interest in different corpora. + See our privacy statement for more information. +

+
+ +
+ +
+ +
+ +
diff --git a/frontend/src/app/settings/search-history-settings/search-history-settings.component.scss b/frontend/src/app/settings/search-history-settings/search-history-settings.component.scss
new file mode 100644
index 000000000..e69de29bb
diff --git a/frontend/src/app/settings/search-history-settings/search-history-settings.component.spec.ts b/frontend/src/app/settings/search-history-settings/search-history-settings.component.spec.ts
new file mode 100644
index 000000000..b2de05b2b
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/search-history-settings.component.spec.ts
@@ -0,0 +1,23 @@
+import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
+
+import { SearchHistorySettingsComponent } from './search-history-settings.component';
+import { commonTestBed } from '../../common-test-bed';
+
+describe('SearchHistorySettingsComponent', () => {
+    let component: SearchHistorySettingsComponent;
+    let fixture: ComponentFixture<SearchHistorySettingsComponent>;
+
+    beforeEach(waitForAsync(() => {
+        commonTestBed().testingModule.compileComponents();
+    }));
+
+    beforeEach(() => {
+        fixture = TestBed.createComponent(SearchHistorySettingsComponent);
+        component = fixture.componentInstance;
+        fixture.detectChanges();
+    });
+
+    it('should create', () => {
+        expect(component).toBeTruthy();
+    });
+});
diff --git a/frontend/src/app/settings/search-history-settings/search-history-settings.component.ts b/frontend/src/app/settings/search-history-settings/search-history-settings.component.ts
new file mode 100644
index 000000000..4096cfee3
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/search-history-settings.component.ts
@@ -0,0 +1,12 @@
+import { Component } from '@angular/core';
+
+@Component({
+    selector: 'ia-search-history-settings',
+    templateUrl: './search-history-settings.component.html',
+    styleUrls: ['./search-history-settings.component.scss']
+})
+export class SearchHistorySettingsComponent {
+
+    constructor() { }
+
+}
diff --git a/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.html b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.html
new file mode 100644
index 000000000..9c7ad1e92
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.html
@@ -0,0 +1,14 @@
+
+
+ + +

+ You can change this setting at any time. Doing so will not delete + your existing search history. +

+
+
diff --git a/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.scss b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.scss
new file mode 100644
index 000000000..e69de29bb
diff --git a/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.spec.ts b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.spec.ts
new file mode 100644
index 000000000..109ca90b6
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.spec.ts
@@ -0,0 +1,23 @@
+import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
+
+import { ToggleSearchHistoryComponent } from './toggle-search-history.component';
+import { commonTestBed } from '../../../common-test-bed';
+
+describe('ToggleSearchHistoryComponent', () => {
+    let component: ToggleSearchHistoryComponent;
+    let fixture: ComponentFixture<ToggleSearchHistoryComponent>;
+
+    beforeEach(waitForAsync(() => {
+        commonTestBed().testingModule.compileComponents();
+    }));
+
+    beforeEach(() => {
+        fixture = TestBed.createComponent(ToggleSearchHistoryComponent);
+        component = fixture.componentInstance;
+        fixture.detectChanges();
+    });
+
+    it('should create', () => {
+        expect(component).toBeTruthy();
+    });
+});
diff --git a/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.ts b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.ts
new file mode 100644
index 000000000..c6e6d1f7e
--- /dev/null
+++ b/frontend/src/app/settings/search-history-settings/toggle-search-history/toggle-search-history.component.ts
@@ -0,0 +1,32 @@
+import { Component } from '@angular/core';
+import { AuthService, NotificationService } from '../../../services';
+import { map } from 'rxjs/operators';
+import { Observable } from 'rxjs';
+
+@Component({
+    selector: 'ia-toggle-search-history',
+    templateUrl: './toggle-search-history.component.html',
+    styleUrls: ['./toggle-search-history.component.scss']
+})
+export class ToggleSearchHistoryComponent {
+    searchHistoryEnabled$: Observable<boolean>;
+
+    constructor(private authService: AuthService, private notificationService: NotificationService) {
+        this.searchHistoryEnabled$ = this.authService.currentUser$.pipe(
+            map(user => user?.enableSearchHistory)
+        );
+    }
+
+    emitChange(setting: boolean) {
+        const data = { enableSearchHistory: setting };
+        const successMessage = `Search history will ${setting ? '' : 'not '}be saved from now on`;
+        this.authService.updateSettings(data).subscribe(
+            res => this.notificationService.showMessage(successMessage, 'success'),
+            err => this.notificationService.showMessage(
+                'An error occurred while trying to save your search history setting',
+                'danger'
+            ),
+        );
+    }
+
+}
diff --git a/frontend/src/app/settings/settings.component.html b/frontend/src/app/settings/settings.component.html
new file mode 100644
index 000000000..13910a889
--- /dev/null
+++ b/frontend/src/app/settings/settings.component.html
@@ -0,0 +1,9 @@
+
+
+

Settings

+ +
+ +
+
+
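How the settings page is mounted in the application is not shown in this diff. A minimal sketch, assuming the page is meant to be reachable at a dedicated route (the `settings` path here is an assumption, not part of the changeset):

```typescript
import { Routes } from '@angular/router';
import { SettingsComponent } from './settings/settings.component';

// assumption: the app's existing route configuration gains an entry
// like this; the actual wiring is not included in this diff
const routes: Routes = [
    { path: 'settings', component: SettingsComponent },
];
```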
diff --git a/frontend/src/app/settings/settings.component.scss b/frontend/src/app/settings/settings.component.scss
new file mode 100644
index 000000000..e69de29bb
diff --git a/frontend/src/app/settings/settings.component.spec.ts b/frontend/src/app/settings/settings.component.spec.ts
new file mode 100644
index 000000000..365de8e9b
--- /dev/null
+++ b/frontend/src/app/settings/settings.component.spec.ts
@@ -0,0 +1,24 @@
+import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
+
+import { SettingsComponent } from './settings.component';
+import { commonTestBed } from '../common-test-bed';
+
+describe('SettingsComponent', () => {
+    let component: SettingsComponent;
+    let fixture: ComponentFixture<SettingsComponent>;
+
+    beforeEach(waitForAsync(() => {
+        commonTestBed().testingModule.compileComponents();
+    }));
+
+    beforeEach(() => {
+        fixture = TestBed.createComponent(SettingsComponent);
+        component = fixture.componentInstance;
+        fixture.detectChanges();
+    });
+
+    it('should create', () => {
+        expect(component).toBeTruthy();
+    });
+});
diff --git a/frontend/src/app/settings/settings.component.ts b/frontend/src/app/settings/settings.component.ts
new file mode 100644
index 000000000..855fc747c
--- /dev/null
+++ b/frontend/src/app/settings/settings.component.ts
@@ -0,0 +1,12 @@
+import { Component } from '@angular/core';
+
+@Component({
+    selector: 'ia-settings',
+    templateUrl: './settings.component.html',
+    styleUrls: ['./settings.component.scss']
+})
+export class SettingsComponent {
+
+    constructor() { }
+
+}
diff --git a/frontend/src/app/settings/settings.module.ts b/frontend/src/app/settings/settings.module.ts
new file mode 100644
index 000000000..6f1f833e3
--- /dev/null
+++ b/frontend/src/app/settings/settings.module.ts
@@ -0,0 +1,23 @@
+import { NgModule } from '@angular/core';
+import { SharedModule } from '../shared/shared.module';
+import { SettingsComponent } from './settings.component';
+import { SearchHistorySettingsComponent } from './search-history-settings/search-history-settings.component';
+import { DeleteSearchHistoryComponent } from './search-history-settings/delete-search-history/delete-search-history.component';
+import { ToggleSearchHistoryComponent } from './search-history-settings/toggle-search-history/toggle-search-history.component';
+
+@NgModule({
+    declarations: [
+        DeleteSearchHistoryComponent,
+        SearchHistorySettingsComponent,
+        SettingsComponent,
+        ToggleSearchHistoryComponent,
+    ],
+    imports: [
+        SharedModule
+    ],
+    exports: [
+        SettingsComponent,
+    ]
+})
+export class SettingsModule { }
diff --git a/frontend/src/app/utils/user.spec.ts b/frontend/src/app/utils/user.spec.ts
new file mode 100644
index 000000000..a43600813
--- /dev/null
+++ b/frontend/src/app/utils/user.spec.ts
@@ -0,0 +1,79 @@
+import * as _ from 'lodash';
+import { User, UserResponse } from '../models';
+import { parseUserData, encodeUserData } from './user';
+
+/**
+ * check if an object is a partial version of another object
+ *
+ * Verify that each key in `part` has the same value in `whole`,
+ * but ignore any properties of `whole` that are omitted in `part`.
+ */ +const isPartialOf = (part: Partial, whole: T): boolean => { + const picked = _.pick(whole, _.keys(part)); + return _.isEqual(part, picked); +}; + +const customMatchers = { + /** expect an object to be a partial version of another object */ + toBePartialOf: (matchersUtil) => ({ + compare: (actual: Partial, expected: T) => { + const pass = isPartialOf(actual, expected); + return { pass }; + } + }) +}; + +describe('user API conversion', () => { + let user: User; + let userResponse: UserResponse; + + beforeEach(() => { + jasmine.addMatchers(customMatchers); + }); + + beforeEach(() => { + user = new User( + 1, + 'Hamlet', + false, + 10000, + false, + true, + ); + }); + + beforeEach(() => { + userResponse = { + id: 1, + username: 'Hamlet', + email: 'hamlet@elsinore.dk', + download_limit: 10000, + is_admin: false, + saml: false, + profile: { + enable_search_history: true, + } + }; + }); + + it('should convert a user response to a user object', () => { + expect(parseUserData(userResponse)).toEqual(user); + }); + + it('should convert a user to a user response object', () => { + const encoded = encodeUserData(user); + (expect(encoded) as any).toBePartialOf(userResponse); + }); + + it('should define inverse functions', () => { + const encoded = encodeUserData(user); + const decoded = parseUserData(encoded as UserResponse); + expect(decoded).toEqual(user); + + const parsed = parseUserData(userResponse); + const unparsed = encodeUserData(parsed); + // this has to be a partial match because User contains a subset of the information + // in the API + (expect(unparsed) as any).toBePartialOf(userResponse); + }); +}); diff --git a/frontend/src/app/utils/user.ts b/frontend/src/app/utils/user.ts new file mode 100644 index 000000000..c0d44e644 --- /dev/null +++ b/frontend/src/app/utils/user.ts @@ -0,0 +1,44 @@ +import * as _ from 'lodash'; +import { User, UserResponse } from '../models'; + +/* Transforms backend user response to User object +* +* @param result User response data +* @returns User object +*/ +export const parseUserData = (result: UserResponse): User => new User( + result.id, + result.username, + result.is_admin, + result.download_limit == null ? 0 : result.download_limit, + result.saml, + result.profile.enable_search_history, +); + +/** + * Transfroms User data to backend UserResponse object + * + * Because this is used for patching, the data can be partial + * + * @param data (partial) User object + * @returns UserResponse object + */ +export const encodeUserData = (data: Partial): Partial => { + const changeKeys = { + name: 'username', + isAdmin: 'is_admin', + downloadLimit: 'download_limit', + isSamlLogin: 'saml', + enableSearchHistory: 'profile.enable_search_history' + }; + + const encoded = {}; + + _.keys(data).forEach(key => { + const value = data[key]; + const path = changeKeys[key] ? 
+            _.toPath(changeKeys[key]) : key;
+        _.set(encoded, path, value);
+    });
+
+    return encoded;
+};
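As a usage sketch of the encoder above (the values are illustrative): `changeKeys` renames camelCase `User` fields to the backend's snake_case names, and `_.set` with the `profile.enable_search_history` path produces the nested object shape a PATCH request needs.

```typescript
import { encodeUserData } from './user';

// only the changed setting needs to be supplied
const patchBody = encodeUserData({ enableSearchHistory: false });
// => { profile: { enable_search_history: false } }

// downloadLimit is renamed via changeKeys; id has no entry and passes through
const partial = encodeUserData({ id: 1, downloadLimit: 500 });
// => { id: 1, download_limit: 500 }
```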
diff --git a/frontend/src/app/visualization/barchart/histogram.component.ts b/frontend/src/app/visualization/barchart/histogram.component.ts
index 6847288c9..74a74c736 100644
--- a/frontend/src/app/visualization/barchart/histogram.component.ts
+++ b/frontend/src/app/visualization/barchart/histogram.component.ts
@@ -87,7 +87,7 @@ export class HistogramComponent extends BarchartDirective im
         return this.apiService.requestFullData({
             visualization: 'aggregate_term_frequency',
             parameters: paramsPerSeries,
-            corpus: this.corpus.name,
+            corpus_name: this.corpus.name,
         });
     }
diff --git a/frontend/src/app/visualization/barchart/timeline.component.ts b/frontend/src/app/visualization/barchart/timeline.component.ts
index 60292d2e1..0e3341da8 100644
--- a/frontend/src/app/visualization/barchart/timeline.component.ts
+++ b/frontend/src/app/visualization/barchart/timeline.component.ts
@@ -113,7 +113,7 @@ export class TimelineComponent extends BarchartDirective impl
         return this.apiService.requestFullData({
             visualization: 'date_term_frequency',
             parameters: paramsPerSeries,
-            corpus: this.corpus.name,
+            corpus_name: this.corpus.name,
         });
     }
diff --git a/frontend/src/app/visualization/ngram/ngram.component.html b/frontend/src/app/visualization/ngram/ngram.component.html
index 80e71570b..52fbb94ec 100644
--- a/frontend/src/app/visualization/ngram/ngram.component.html
+++ b/frontend/src/app/visualization/ngram/ngram.component.html
@@ -115,3 +115,4 @@
+
diff --git a/frontend/src/app/visualization/ngram/ngram.component.ts b/frontend/src/app/visualization/ngram/ngram.component.ts
index 5f0c432b9..479f73d0b 100644
--- a/frontend/src/app/visualization/ngram/ngram.component.ts
+++ b/frontend/src/app/visualization/ngram/ngram.component.ts
@@ -1,7 +1,7 @@
 import { Component, ElementRef, EventEmitter, Input, OnChanges, Output, SimpleChanges, ViewChild } from '@angular/core';
 import * as _ from 'lodash';
 import { Corpus, FreqTableHeaders, QueryModel, CorpusField, NgramResults, NgramParameters } from '../../models';
-import { ApiService, ParamService, VisualizationService } from '../../services';
+import { ApiService, NotificationService, ParamService, VisualizationService } from '../../services';
 import { faCheck, faTimes } from '@fortawesome/free-solid-svg-icons';
 import { ParamDirective } from '../../param/param-directive';
 import { ActivatedRoute, ParamMap, Params, Router } from '@angular/router';
@@ -58,9 +58,10 @@ export class NgramComponent extends ParamDirective implements OnChanges {
     constructor(
         private apiService: ApiService,
         private visualizationService: VisualizationService,
+        private notificationService: NotificationService,
         route: ActivatedRoute,
         router: Router,
-        paramService: ParamService
+        paramService: ParamService,
     ) {
         super(route, router, paramService);
         this.currentParameters = new NgramParameters(
@@ -251,4 +252,30 @@ export class NgramComponent extends ParamDirective implements OnChanges {
         return `${value}`;
     }
+
+    requestFullData() {
+        const parameters = this.visualizationService.makeNgramRequestParameters(
+            this.corpus,
+            this.queryModel,
+            this.visualizedField.name,
+            this.currentParameters
+        );
+        this.apiService.requestFullData({
+            corpus_name: this.corpus.name,
+            visualization: 'ngram',
+            parameters
+        }).then(() =>
+            this.notificationService.showMessage(
+                'Full data requested! You will receive an email when your download is ready.',
+                'success',
+                {
+                    text: 'view downloads',
+                    route: ['/download-history']
+                }
+            )
+        ).catch(error => {
+            console.error(error);
+            this.notificationService.showMessage('Could not set up data generation.', 'danger');
+        });
+    }
 }
diff --git a/frontend/src/mock-data/api.ts b/frontend/src/mock-data/api.ts
index ed635bebd..a7ddcf108 100644
--- a/frontend/src/mock-data/api.ts
+++ b/frontend/src/mock-data/api.ts
@@ -63,7 +63,11 @@ export class ApiServiceMock {
         return Promise.resolve({});
     }

-    requestMedia() {
+    saveQuery() {
+        return Promise.resolve();
+    }
+
+    requestMedia() {
         return Promise.resolve({});
     }
 }
diff --git a/frontend/src/mock-data/auth.ts b/frontend/src/mock-data/auth.ts
new file mode 100644
index 000000000..b4385c88f
--- /dev/null
+++ b/frontend/src/mock-data/auth.ts
@@ -0,0 +1,5 @@
+import { mockUser } from './user';
+
+export class AuthServiceMock {
+    getCurrentUserPromise = () => Promise.resolve(mockUser);
+}
diff --git a/frontend/src/mock-data/elastic-search.ts b/frontend/src/mock-data/elastic-search.ts
index 13c61af23..4a983a201 100644
--- a/frontend/src/mock-data/elastic-search.ts
+++ b/frontend/src/mock-data/elastic-search.ts
@@ -1,4 +1,4 @@
-import { FoundDocument } from '../app/models';
+import { FoundDocument, SearchResults } from '../app/models';
 import { makeDocument } from './constructor-helpers';

 export class ElasticSearchServiceMock {
@@ -11,4 +11,15 @@ export class ElasticSearchServiceMock {
     getDocumentById(): Promise<FoundDocument> {
         return Promise.resolve(makeDocument({content: 'Hello world!'}));
     }
+
+    search(): Promise<SearchResults> {
+        return Promise.resolve({
+            total: {
+                relation: 'eq',
+                value: 1
+            },
+            documents: [makeDocument({content: 'Hello world!'})]
+        });
+    }
+
 }
diff --git a/frontend/src/mock-data/user.ts b/frontend/src/mock-data/user.ts
index a56a6c07c..564cb28da 100644
--- a/frontend/src/mock-data/user.ts
+++ b/frontend/src/mock-data/user.ts
@@ -1,7 +1,7 @@
 import { User, UserResponse } from '../app/models/index';

-export const mockUser: User = new User(42, 'mouse', false, 10000, false);
+export const mockUser: User = new User(42, 'mouse', false, 10000, false, true);

 export const mockUserResponse: UserResponse = {
     id: 42,
@@ -10,4 +10,7 @@
     email: 'mighty@mouse.com',
     download_limit: 10000,
     saml: false,
+    profile: {
+        enable_search_history: true,
+    },
 };
diff --git a/package.json b/package.json
index fbae7a7a5..33cae8816 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "i-analyzer",
-  "version": "5.0.1",
+  "version": "5.1.0",
   "license": "MIT",
   "scripts": {
     "postinstall": "yarn install-back && yarn install-front",
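One note on the mocks: `ApiServiceMock` gains `saveQuery`, but `DeleteSearchHistoryComponent` also calls `apiService.deleteSearchHistory()`, which is not stubbed in this changeset. A minimal stub for its spec could look like the sketch below (a suggestion only; the class name is hypothetical, and the method would live on `ApiServiceMock`). The component subscribes to the call, so the stub returns an observable rather than a promise:

```typescript
import { Observable, of } from 'rxjs';

// suggested stub, not part of this changeset: mirrors how
// DeleteSearchHistoryComponent consumes deleteSearchHistory()
export class ApiServiceMockSketch {
    deleteSearchHistory(): Observable<unknown> {
        return of({});
    }
}
```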