diff --git a/laboratory/Makefile b/laboratory/Makefile index be69491..551199c 100755 --- a/laboratory/Makefile +++ b/laboratory/Makefile @@ -6,8 +6,8 @@ server: coverage: coverage run --source='.' manage.py test - coverage report --omit=laboratory/asgi.py,laboratory/wsgi.py,manage.py --fail-under=100 - coverage html --omit=laboratory/asgi.py,laboratory/wsgi.py,manage.py + coverage report --omit=laboratory/asgi.py,laboratory/wsgi.py,manage.py,analysis/management/* --fail-under=100 + coverage html --omit=laboratory/asgi.py,laboratory/wsgi.py,manage.py,analysis/management/* migrate: python manage.py migrate \ No newline at end of file diff --git a/laboratory/README.md b/laboratory/README.md index 8165141..923e6d6 100644 --- a/laboratory/README.md +++ b/laboratory/README.md @@ -30,4 +30,25 @@ Or if you don't like UPPER_CASE: from django.conf import settings find_similar = settings.FIND_SIMILAR find_similar('none', ['one', 'two']) +``` + +## Management commands + +### Get tokens from one text + +Input: +```commandline +python manage.py tokenize_one "some text" "other text" +``` + +Output: +```commandline +Get tokens for some text... +Done: +{'text', 'some'} +End +Get tokens for other text... 
+Done: +{'text', 'other'} +End ``` \ No newline at end of file diff --git a/laboratory/analysis/functions.py b/laboratory/analysis/functions.py new file mode 100644 index 0000000..8c40cef --- /dev/null +++ b/laboratory/analysis/functions.py @@ -0,0 +1,16 @@ +""" +Analysis functions +""" +from django.conf import settings + + +def analyze_one_item(item, dictionary=None, language="russian", printer=print): + """ + Analyze one item for tokenize + """ + printer(f'Get tokens for {item}...') + tokens = settings.TOKENIZE(item, language=language, dictionary=dictionary) + printer('Done:') + printer(tokens) + printer('End') + return tokens diff --git a/laboratory/analysis/management/__init__.py b/laboratory/analysis/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/laboratory/analysis/management/commands/__init__.py b/laboratory/analysis/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/laboratory/analysis/management/commands/tokenize_one.py b/laboratory/analysis/management/commands/tokenize_one.py new file mode 100644 index 0000000..085dc13 --- /dev/null +++ b/laboratory/analysis/management/commands/tokenize_one.py @@ -0,0 +1,33 @@ +""" +Command to get tokens from one text +""" +from django.core.management.base import BaseCommand +from analysis.functions import analyze_one_item + + +class Command(BaseCommand): + """ + >> python manage.py tokenize_one "some text" "other text" + Get tokens for some text... + Done: + {'text', 'some'} + End + Get tokens for other text... 
+ Done: + {'other', 'text'} + End + """ + help = "Get tokens from one text" + + def add_arguments(self, parser): + """ + Add arguments to console command + """ + parser.add_argument("text", nargs="+", type=str) + + def handle(self, *args, **options): + """ + Run command handler + """ + for text_item in options["text"]: + analyze_one_item(text_item) diff --git a/laboratory/analysis/models.py b/laboratory/analysis/models.py index 1841c22..7dbc312 100644 --- a/laboratory/analysis/models.py +++ b/laboratory/analysis/models.py @@ -1,5 +1,5 @@ """ -Analysis models +Analysis models """ # from django.db import models diff --git a/laboratory/analysis/tests/tests_functions.py b/laboratory/analysis/tests/tests_functions.py new file mode 100644 index 0000000..db31baa --- /dev/null +++ b/laboratory/analysis/tests/tests_functions.py @@ -0,0 +1,53 @@ +""" +Tests for Analysis functions +""" +from django.test import SimpleTestCase + +from analysis.functions import analyze_one_item + + +class TestFunctions(SimpleTestCase): + """ + Class for test all functions + """ + def setUp(self): + self.printer = print + + def mock_printer(*args, **kwargs): # pylint: disable=unused-argument + """ + This is mock printer. This printer do nothing + """ + + self.mock_printer = mock_printer + + class TestingPrinter: + """ + Save prints to variable. 
+ To check the results + """ + + def __init__(self): + """ + Init printer + """ + self.results = [] + + def __call__(self, text, *args, **kwargs): + self.results.append(str(text)) + + self.testing_printer = TestingPrinter() + + def test_analyze_one_item(self): + """ + Test for analyze one item + """ + text = 'one two' + tokens = analyze_one_item('one two', printer=self.testing_printer) + expected_tokens = {'one', 'two'} + self.assertEqual(tokens, expected_tokens) + expected_prints = [ + f'Get tokens for {text}...', + 'Done:', + f'{expected_tokens}', + 'End', + ] + self.assertEqual(self.testing_printer.results, expected_prints) diff --git a/laboratory/analysis/tests/tests_views.py b/laboratory/analysis/tests/tests_views.py index d4b27e5..45942c0 100644 --- a/laboratory/analysis/tests/tests_views.py +++ b/laboratory/analysis/tests/tests_views.py @@ -1,11 +1,11 @@ """ Tests form views """ -from django.test import TestCase +from django.test import SimpleTestCase from analysis.forms import OneTextForm -class TestTokenizeOneView(TestCase): +class TestTokenizeOneView(SimpleTestCase): """ Test TokenizeOneView """ diff --git a/laboratory/laboratory/settings.py b/laboratory/laboratory/settings.py index 4491729..935290d 100644 --- a/laboratory/laboratory/settings.py +++ b/laboratory/laboratory/settings.py @@ -16,7 +16,9 @@ sys.path.append("../") from find_similar import find_similar # pylint: disable=wrong-import-position +from find_similar.tokenize import tokenize # pylint: disable=wrong-import-position FIND_SIMILAR = find_similar +TOKENIZE = tokenize # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent