From aede0d1280d5f03966c83a100fd0a3bdaab80470 Mon Sep 17 00:00:00 2001
From: Martijn de Vos
Date: Tue, 11 Oct 2022 15:34:23 +0200
Subject: [PATCH] Added tests for snippet generation when searching

---
 .../restapi/tests/test_search_endpoint.py          | 67 ++++++++++++++++++-
 1 file changed, 66 insertions(+), 1 deletion(-)

diff --git a/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py b/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py
index 0d362410fa6..734a677ee78 100644
--- a/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py
+++ b/src/tribler/core/components/metadata_store/restapi/tests/test_search_endpoint.py
@@ -1,5 +1,6 @@
 import os
-from typing import Set
+from binascii import unhexlify
+from typing import Set, List
 from unittest.mock import patch
 
 from aiohttp.web_app import Application
@@ -8,6 +9,7 @@
 
 import pytest
 
+from tribler.core.components.metadata_store.db.serialization import SNIPPET, REGULAR_TORRENT
 from tribler.core.components.metadata_store.restapi.search_endpoint import SearchEndpoint
 from tribler.core.components.restapi.rest.base_api_test import do_request
 from tribler.core.components.tag.db.tag_db import TagDatabase
@@ -161,3 +163,66 @@ async def test_search_with_space(rest_api, metadata_store):
     parsed = await do_request(rest_api, f'search?txt_filter={s2}', expected_code=200)
     results = {item["name"] for item in parsed["results"]}
     assert results == {'abc.def', 'abc def', 'abc defxyz'}  # but not 'abcxyz def'
+
+
+async def test_single_snippet_in_search(rest_api, metadata_store, tags_db):
+    """
+    Test building a simple snippet of a single item.
+    """
+    with db_session:
+        content_ih = random_infohash()
+        metadata_store.TorrentMetadata(title='abc', infohash=content_ih)
+
+    def mocked_get_subjects(*_, **__) -> List[str]:
+        return ["Abc"]
+
+    with patch.object(TagDatabase, 'get_subjects', wraps=mocked_get_subjects):
+        s1 = to_fts_query("abc")
+        results = await do_request(rest_api, f'search?txt_filter={s1}', expected_code=200)
+
+        assert len(results["results"]) == 1
+        snippet = results["results"][0]
+        assert snippet["type"] == SNIPPET
+        assert snippet["torrents"] == 1
+        assert len(snippet["torrents_in_snippet"]) == 1
+        assert snippet["torrents_in_snippet"][0]["infohash"] == hexlify(content_ih)
+
+
+async def test_multiple_snippets_in_search(rest_api, metadata_store, tags_db):
+    """
+    Test two snippets with two torrents in each snippet.
+    """
+    with db_session:
+        infohashes = [random_infohash() for _ in range(5)]
+        for ind, infohash in enumerate(infohashes):
+            torrent_state = metadata_store.TorrentState(infohash=infohash, seeders=ind)
+            metadata_store.TorrentMetadata(title='abc %d' % ind, infohash=infohash, health=torrent_state)
+
+    def mocked_get_subjects(obj, *_, **__) -> List[str]:
+        obj = unhexlify(obj)
+        if obj == infohashes[0] or obj == infohashes[1]:
+            return ["Content item 1"]
+        elif obj == infohashes[2] or obj == infohashes[3]:
+            return ["Content item 2"]
+        return []
+
+    with patch.object(TagDatabase, 'get_subjects', wraps=mocked_get_subjects):
+        s1 = to_fts_query("abc")
+        parsed = await do_request(rest_api, f'search?txt_filter={s1}', expected_code=200)
+        results = parsed["results"]
+
+        assert len(results) == 3
+        for snippet in results[:2]:
+            assert snippet["type"] == SNIPPET
+            assert snippet["torrents"] == 2
+
+        # Test that the right torrents have been assigned to the appropriate content items, and that they are in the
+        # right sorted order.
+        assert results[0]["torrents_in_snippet"][0]["infohash"] == hexlify(infohashes[3])
+        assert results[0]["torrents_in_snippet"][1]["infohash"] == hexlify(infohashes[2])
+        assert results[1]["torrents_in_snippet"][0]["infohash"] == hexlify(infohashes[1])
+        assert results[1]["torrents_in_snippet"][1]["infohash"] == hexlify(infohashes[0])
+
+        # There is one item that has not been assigned to the snippet.
+        assert results[2]["type"] == REGULAR_TORRENT
+        assert results[2]["infohash"] == hexlify(infohashes[4])