diff --git a/src/dsw_seed_maker/cli.py b/src/dsw_seed_maker/cli.py
index 8d23ef0..552b7f2 100644
--- a/src/dsw_seed_maker/cli.py
+++ b/src/dsw_seed_maker/cli.py
@@ -59,20 +59,11 @@ def example():
                    ' locales, document_templates, projects, documents)')
 def list_resources(output, resource_type):
     Config.check()
-    # TODO: Implement list command (do it in logic, import & use here)
     resources = list_logic(resource_type)
     json_output = json.dumps(resources, indent=4)
     output.write(json_output)
 
 
-# just for testing the download
-# @cli.command(help='List all available seed resources', name='download')
-# def download_resources():
-#     Config.check()
-#     # TODO: Implement list command (do it in logic, import & use here)
-#     download_file_logic("documents/1034a4b0-d867-4b4b-b2a0-a3956b43cf95", "test.pdf")
-
-
 @cli.command(help='Create a seed package from input', name='make-seed')
 @click.option('-i', '--input', 'input_fp',
               type=click.File('r', encoding=DEFAULT_ENCODING), default='-',
@@ -83,7 +74,4 @@ def list_resources(output, resource_type):
 def make_seed(input_fp, output_dir):
     Config.check()
     data = json.load(input_fp)
-    out_dir = pathlib.Path(output_dir)
-    out_dir.mkdir(parents=True, exist_ok=True)
-    # TODO: Implement list command (do it in logic, import & use here)
-    process_input(data, output_dir)
+    process_input(data, pathlib.Path(output_dir))
diff --git a/src/dsw_seed_maker/config.py b/src/dsw_seed_maker/config.py
index e0ea2ad..1769286 100644
--- a/src/dsw_seed_maker/config.py
+++ b/src/dsw_seed_maker/config.py
@@ -17,16 +17,18 @@ class Config:
-    API_ROOT_PATH = os.getenv('API_ROOT_PATH', '')
-    DSW_DB_CONN_STR = os.getenv('DSW_DB_CONN_STR')
-    DSW_S3_URL = os.getenv('DSW_S3_URL')
-    DSW_S3_USERNAME = os.getenv('DSW_S3_USERNAME')
-    DSW_S3_PASSWORD = os.getenv('DSW_S3_PASSWORD')
-    DSW_S3_BUCKET = os.getenv('DSW_S3_BUCKET')
-    DSW_S3_REGION = os.getenv('DSW_S3_REGION', 'eu-central-1')
+    API_ROOT_PATH: str = os.getenv('API_ROOT_PATH', '')
+    DSW_DB_CONN_STR: str = os.getenv('DSW_DB_CONN_STR', '')
+    DSW_S3_URL: str = os.getenv('DSW_S3_URL', '')
+    DSW_S3_USERNAME: str = os.getenv('DSW_S3_USERNAME', '')
+    DSW_S3_PASSWORD: str = os.getenv('DSW_S3_PASSWORD', '')
+    DSW_S3_BUCKET: str = os.getenv('DSW_S3_BUCKET', '')
+    DSW_S3_REGION: str = os.getenv('DSW_S3_REGION', 'eu-central-1')
 
-    LOG_LEVEL = os.getenv('LOG_LEVEL', DEFAULT_LOG_LEVEL)
-    LOG_FORMAT = os.getenv('LOG_FORMAT', DEFAULT_LOG_FORMAT)
+    OUT_DIR: pathlib.Path = pathlib.Path.cwd() / 'out'
+
+    LOG_LEVEL: str = os.getenv('LOG_LEVEL', DEFAULT_LOG_LEVEL)
+    LOG_FORMAT: str = os.getenv('LOG_FORMAT', DEFAULT_LOG_FORMAT)
 
     @classmethod
     def check(cls):
@@ -48,3 +50,7 @@ def apply_logging(cls):
             format=cls.LOG_FORMAT,
         )
         LOG.debug('Logging configured with level: %s', cls.LOG_LEVEL)
+
+    @classmethod
+    def ensure_out_dir(cls):
+        cls.OUT_DIR.mkdir(parents=True, exist_ok=True)
diff --git a/src/dsw_seed_maker/logic.py b/src/dsw_seed_maker/logic.py
index 1a58392..eae1964 100644
--- a/src/dsw_seed_maker/logic.py
+++ b/src/dsw_seed_maker/logic.py
@@ -1,26 +1,29 @@
+import datetime
 import json
 import os
+import pathlib
 import re
+import typing
 import uuid
-from datetime import datetime
-from dotenv import load_dotenv
-import pathlib
-from typing import Any
 
-from .comm import S3Storage
+from .comm import Database, S3Storage
+from .config import Config
+from .consts import DEFAULT_ENCODING
 from .models import ExampleRequestDTO, ExampleResponseDTO
-from .comm.db import Database
-
-load_dotenv()
 
 
 def connect_to_db_logic() -> Database:
-    return Database(name=os.getenv("DSW_DB_CONN_NAME"), dsn=os.getenv("DSW_DB_CONN_STR"))
+    return Database(
+        name='main',
+        dsn=Config.DSW_DB_CONN_STR,
+    )
+
+
+RECIPE_TEMPLATE = pathlib.Path(__file__).parent / 'recipe_template.json'
 
 
 processed_resources = set()
 db = connect_to_db_logic()
-output_dir = "-"
 
 
 def example_logic(req_dto: ExampleRequestDTO) -> ExampleResponseDTO:
@@ -31,45 +34,49 @@ def example_logic(req_dto: ExampleRequestDTO) -> ExampleResponseDTO:
 
 def connect_to_s3_logic() -> S3Storage:
     return S3Storage(
-        url=os.getenv("DSW_S3_URL"),
-        username=os.getenv("DSW_S3_USERNAME"),
-        password=os.getenv("DSW_S3_PASSWORD"),
-        bucket=os.getenv("DSW_S3_BUCKET"),
-        region=os.getenv("DSW_S3_REGION"),
+        url=Config.DSW_S3_URL,
+        username=Config.DSW_S3_USERNAME,
+        password=Config.DSW_S3_PASSWORD,
+        bucket=Config.DSW_S3_BUCKET,
+        region=Config.DSW_S3_REGION,
         multi_tenant=True
     )
 
 
 def generate_insert_query(data, table):
     columns = ', '.join(data.keys())
-    values = ", ".join(format_for_sql(data))
-    return f"INSERT INTO {table} ({columns}) VALUES ({values})\;"
+    values = ', '.join(format_for_sql(data))
+    return f'INSERT INTO {table} ({columns}) VALUES ({values})'
 
 
 def generate_select_query(resource_type, attr, value):
-    return "SELECT * FROM {table} WHERE {attr} = '{value}'".format(value=value, table=resource_tables[resource_type], attr=attr)
+    table = resource_tables[resource_type]
+    return f'SELECT * FROM {table} WHERE {attr} = \'{value}\''
 
 
 def generate_select_all_query(resource_type):
-    return "SELECT * FROM {table}".format(table=resource_tables[resource_type])
+    table = resource_tables[resource_type]
+    return f'SELECT * FROM {table}'
 
 
-def list_logic(resource_type: str) -> dict[str, list[dict[str, Any]]] | list[dict[str, Any]]:
-    if resource_type == "all":
-        resource = {}
-        for each in resource_attributes.keys():
-            resource[each] = list_resource(each, resource_attributes[each])
-        return resource
-    else:
-        return {resource_type: list_resource(resource_type, resource_attributes[resource_type])}
+def list_logic(resource_type: str) -> dict[str, list[dict[str, typing.Any]]]:
+    if resource_type == 'all':
+        return {
+            resource_key: list_resource(resource_key, attributes)
+            for resource_key, attributes in resource_attributes.items()
+        }
+    return {
+        resource_type: list_resource(resource_type, resource_attributes[resource_type])
+    }
 
 
-def list_resource(resource_type, attributes):
+def list_resource(resource_type, attributes) -> list[dict[str, typing.Any]]:
     query = generate_select_all_query(resource_type)
     resources = db.execute_query(query)
+    # Convert 'uuid' to string; others as-is
     parsed_resources = [
         {
-            attr: str(row[attr]) if attr == 'uuid' else row[attr]  # Convert 'uuid' to string; others as-is
+            attr: str(row[attr]) if attr == 'uuid' else row[attr]
             for attr in attributes if attr in row
         }
         for row in resources
@@ -81,36 +88,35 @@ def download_file_s3(s3_path: str) -> bool:
     s3 = connect_to_s3_logic()
     s3.ensure_bucket()
-    target_path = str(output_dir + "/app/" + s3_path).replace(":", "_")
+    target_path = (Config.OUT_DIR / 'app' / s3_path).as_posix().replace(':', '_')
     target = pathlib.Path(target_path)
     downloaded_file = s3.download_file(s3_path, target)
-    if downloaded_file:
-        return downloaded_file
-    else:
-        print(f"File '{s3_path}' not found in bucket.")
+    if not downloaded_file:
+        print(f'File \'{s3_path}\' not found.')
+    return downloaded_file
 
 
 # Create a copy of tmp.js to output_dir
 def create_recipe_file():
-    if not os.path.exists(output_dir):
-        os.makedirs(output_dir)
-    with open("recipe_tmp.json", 'r') as template_recipe:
-        data = template_recipe.read()
-    with open(os.path.join(output_dir, 'recipe.json'), 'w') as recipe:
-        recipe.write(data)
+    Config.ensure_out_dir()
+
+    data = RECIPE_TEMPLATE.read_text(encoding=DEFAULT_ENCODING)
+    recipe_file = Config.OUT_DIR / 'recipe.json'
+    recipe_file.write_text(data, encoding=DEFAULT_ENCODING)
 
 
 # Add a seed file (its name) to the recipe (the structure)
 def add_seed_file_to_recipe(recipe_path, db_file_name):
-    with open(recipe_path, 'r') as recipe_file:
+    with open(recipe_path, 'r', encoding=DEFAULT_ENCODING) as recipe_file:
         recipe_data = json.load(recipe_file)
 
-    if not any(script.get("filename") == db_file_name for script in recipe_data["db"]["scripts"]):
+    if not any(script.get('filename') == db_file_name
+               for script in recipe_data['db']['scripts']):
         # If not, append it to the scripts list
-        recipe_data["db"]["scripts"].append({"filename": db_file_name})
+        recipe_data['db']['scripts'].append({'filename': db_file_name})
 
-    with open(recipe_path, 'w') as recipe_file:
+    with open(recipe_path, 'w', encoding=DEFAULT_ENCODING) as recipe_file:
         json.dump(recipe_data, recipe_file, ensure_ascii=False, indent=4)
@@ -118,34 +124,37 @@ def create_seed_files_db(resource_type, output_dir):
     if not os.path.exists(output_dir):
         os.makedirs(output_dir)
 
-    file_path = os.path.join(output_dir, f"add_{resource_type}.sql")
-    file = open(file_path, 'w', encoding='utf-8')
-    return file
+    file_path = os.path.join(output_dir, f'add_{resource_type}.sql')
+    with open(file_path, 'w', encoding=DEFAULT_ENCODING) as _:
+        pass
 
 
 def process_input(data, output):
-    global output_dir
-    output_dir = output
+    Config.OUT_DIR = output
     create_recipe_file()
     for resource_type, items in data.items():
-        create_seed_files_db(resource_type, output_dir)
+        create_seed_files_db(resource_type, Config.OUT_DIR)
         for item in items:
             handle_resource(resource_type, item[resource_identification[resource_type]])
 
 
-def write_seed_files_db(output_dir, resource_type, query):
-    with open(os.path.join(output_dir, f"add_{resource_type}.sql"), 'a', encoding='utf-8') as file:
+def write_seed_files_db(output_dir: pathlib.Path, resource_type: str, query: str):
+    with open(
+        file=output_dir / f'add_{resource_type}.sql',
+        mode='a',
+        encoding=DEFAULT_ENCODING
+    ) as file:
         if file is None:
-            print("File not found")
-        file.write(query + "\n")
+            print('File not found')
+        file.write(query + '\n')
 
 
-def has_placeholder_in_s3_objects(resource_s3_objects):
-    # Regular expression to match placeholders, e.g., "{some_placeholder}"
-    placeholder_pattern = re.compile(r"{placeholder}")
+def has_placeholder_in_s3_objects(x_resource_s3_objects):
+    # Regular expression to match placeholders, e.g., '{some_placeholder}'
+    placeholder_pattern = re.compile(r'{placeholder}')
 
     # Check if the input is a single string
-    if placeholder_pattern.search(resource_s3_objects):
+    if placeholder_pattern.search(x_resource_s3_objects):
         return True
 
     return False
@@ -154,19 +163,18 @@ def has_placeholder_in_s3_objects(resource_s3_objects):
 # TODO needs help a lot
 def format_for_sql(data_dict):
     sql_values = []
-    for key, value in data_dict.items():
+    for value in data_dict.values():
         if isinstance(value, uuid.UUID):
             # UUID should be wrapped in single quotes in the SQL query
-            sql_values.append(f"'{str(value)}'")
+            sql_values.append(f'\'{str(value)}\'')
 
-        elif isinstance(value, datetime):
+        elif isinstance(value, datetime.datetime):
             # Format datetime as 'YYYY-MM-DD HH:MM:SS' (no timezone)
sql_values.append(f"'{value.strftime('%Y-%m-%d %H:%M:%S')}'") + sql_values.append(f'\'{value.strftime('%Y-%m-%d %H:%M:%S')}\'') elif isinstance(value, list): # Handle lists, convert to PostgreSQL-style array format (e.g., {'value1', 'value2'}) - formatted_list = '\'{' + ', '.join( - [f"{item}" if isinstance(item, str) else str(item) for item in value]) + '}\'' + formatted_list = '\'{' + ', '.join([str(item) for item in value]) + '}\'' sql_values.append(formatted_list) elif value is None: @@ -178,7 +186,7 @@ def format_for_sql(data_dict): else: # For any other data type (strings, numbers), ensure they are wrapped in single quotes - sql_values.append(f"'{str(value)}'") + sql_values.append(f'\'{str(value)}\'') return sql_values @@ -190,164 +198,188 @@ def return_fkey_dependency(resource_type, dependent_resource_type): def handle_resource(resource_type, resource_id): - if resource_id not in processed_resources: - processed_resources.add(resource_id) - query = generate_select_query(resource_type, resource_identification[resource_type], resource_id) - resources = db.execute_query(query) - - for resource in resources: - # Dependencies - for dependency in resource_dependencies.get(resource_type, []): - dep_type = dependency - dep_id_key = return_fkey_dependency(resource_type, dep_type) - if dep_id_key in resource: - dep_id = resource[dep_id_key] - handle_resource(dep_type, dep_id) - - # S3 objects - if resource_id != 'wizard:default:1.0.0' and resource_s3_objects[resource_type] != "": - s3_object = resource_s3_objects[resource_type] - # If the S3 object contains a placeholder, replace it with the dependent resource's value - if has_placeholder_in_s3_objects(resource_s3_objects[resource_type]): - dependent_key = return_fkey_dependency(resource_type, resource_dependencies[resource_type][0]) - dependent_value = resource.get(dependent_key) - s3_object = s3_object.format(placeholder=dependent_value) - download_file_s3(s3_object + str(resource_id)) - - else: - download_file_s3(s3_object + str(resource_id)) - - add_seed_file_to_recipe(output_dir + "/recipe.json", "add_" + resource_type + ".sql") - insert_query = generate_insert_query(resource, resource_tables[resource_type]) - write_seed_files_db(output_dir, resource_type, insert_query) - - # Dependent resources of this one, that users can't see (document_template_asset, document_template_file) - for dependent_resource_type in resources_part_of.get(resource_type, []): - dependent_resource_id_key = return_fkey_dependency(dependent_resource_type, resource_type) - query = generate_select_query(dependent_resource_type, dependent_resource_id_key, - resource[resource_identification[resource_type]]) - dependent_resources = db.execute_query(query) - for dependent_resource in dependent_resources: - handle_resource(dependent_resource_type, dependent_resource[resource_identification[dependent_resource_type]]) - return - else: + if resource_id in processed_resources: return + processed_resources.add(resource_id) + query = generate_select_query( + resource_type, + resource_identification[resource_type], + resource_id, + ) + resources = db.execute_query(query) + + for resource in resources: + process_resource(resource, resource_id, resource_type) + + +def process_resource(resource, resource_id, resource_type): + # Dependencies + for dependency in resource_dependencies.get(resource_type, []): + dep_type = dependency + dep_id_key = return_fkey_dependency( + resource_type=resource_type, + dependent_resource_type=dep_type, + ) + if dep_id_key in resource: + dep_id = 
+            dep_id = resource[dep_id_key]
+            handle_resource(dep_type, dep_id)
+
+    process_resource_s3(resource, resource_id, resource_type)
+
+    add_seed_file_to_recipe(Config.OUT_DIR / 'recipe.json', f'add_{resource_type}.sql')
+    insert_query = generate_insert_query(resource, resource_tables[resource_type])
+    write_seed_files_db(Config.OUT_DIR, resource_type, insert_query)
+
+    # Dependent resources of this one, that users can't
+    # see (document_template_asset, document_template_file)
+    for dependent_resource_type in resources_part_of.get(resource_type, []):
+        dependent_resource_id_key = return_fkey_dependency(
+            resource_type=dependent_resource_type,
+            dependent_resource_type=resource_type,
+        )
+        query = generate_select_query(dependent_resource_type, dependent_resource_id_key,
+                                      resource[resource_identification[resource_type]])
+        dependent_resources = db.execute_query(query)
+        for dependent_resource in dependent_resources:
+            handle_resource(
+                dependent_resource_type,
+                dependent_resource[resource_identification[dependent_resource_type]],
+            )
+
+
+def process_resource_s3(resource, resource_id, resource_type):
+    if resource_id != 'wizard:default:1.0.0' and resource_s3_objects[resource_type] != '':
+        s3_object = resource_s3_objects[resource_type]
+        # If the S3 object contains a placeholder, replace
+        # it with the dependent resource's value
+        if has_placeholder_in_s3_objects(resource_s3_objects[resource_type]):
+            dependent_key = return_fkey_dependency(
+                resource_type=resource_type,
+                dependent_resource_type=resource_dependencies[resource_type][0],
+            )
+            dependent_value = resource.get(dependent_key)
+            s3_object = s3_object.format(placeholder=dependent_value)
+            download_file_s3(s3_object + str(resource_id))
+
+        else:
+            download_file_s3(s3_object + str(resource_id))
+
+
 # Map resources to their dependencies
-resources_part_of = {
-    "users": [],
-    "projects": [],
-    "documents": [],
-    "project_importers": [],
-    "knowledge_models": [],
-    "locales": [],
-    "document_templates": ["document_template_asset", "document_template_file"],
-    "document_template_asset": [],
-    "document_template_file": []
+resources_part_of: dict[str, list] = {
+    'users': [],
+    'projects': [],
+    'documents': [],
+    'project_importers': [],
+    'knowledge_models': [],
+    'locales': [],
+    'document_templates': ['document_template_asset', 'document_template_file'],
+    'document_template_asset': [],
+    'document_template_file': []
 }
 
 
 # Map resources to their dependencies
-resource_dependencies = {
-    "users": [],
-    "projects": ["knowledge_models", "document_templates"],
-    "documents": ["document_templates", "projects"],
-    "project_importers": [],
-    "knowledge_models": ["knowledge_models"],
-    "locales": [],
-    "document_templates": [],
-    "document_template_asset": ["document_templates"],
-    "document_template_file": ["document_templates"]
+resource_dependencies: dict[str, list] = {
+    'users': [],
+    'projects': ['knowledge_models', 'document_templates'],
+    'documents': ['document_templates', 'projects'],
+    'project_importers': [],
+    'knowledge_models': ['knowledge_models'],
+    'locales': [],
+    'document_templates': [],
+    'document_template_asset': ['document_templates'],
+    'document_template_file': ['document_templates']
 }
 
 
 # Map resources to their dependencies
-resource_dependencies_keys = {
-    "users": [],
-    "projects": [
-        {"knowledge_models": "package_id"},
-        {"document_templates": "document_template_id"}
+resource_dependencies_keys: dict[str, list] = {
+    'users': [],
+    'projects': [
+        {'knowledge_models': 'package_id'},
+        {'document_templates': 'document_template_id'}
     ],
-    "documents": [
-        {"document_templates": "document_template_id"},
-        {"projects": "questionnaire_uuid"}
+    'documents': [
+        {'document_templates': 'document_template_id'},
+        {'projects': 'questionnaire_uuid'}
     ],
-    "project_importers" : [],
-    "knowledge_models" : [
-        {"knowledge_models": "previous_package_id"}
+    'project_importers': [],
+    'knowledge_models': [
+        {'knowledge_models': 'previous_package_id'}
     ],
-    "locales": [],
-    "document_templates": [],
-    "document_template_asset" : [
-        {"document_templates": "document_template_id"}
+    'locales': [],
+    'document_templates': [],
+    'document_template_asset': [
+        {'document_templates': 'document_template_id'}
     ],
-    "document_template_file" : [
-        {"document_templates": "document_template_id"}
+    'document_template_file': [
+        {'document_templates': 'document_template_id'}
     ]
 }
 
 
 # Map resources to their s3 objects
 resource_s3_objects = {
-    "users": "",
-    "projects": "",
-    "documents": "documents/",
-    "project_importers": "",
-    "knowledge_models": "",
-    "locales": "locales/",
-    "document_templates": "",
-    "document_template_asset": "templates/{placeholder}/",
-    "document_template_file": ""
+    'users': '',
+    'projects': '',
+    'documents': 'documents/',
+    'project_importers': '',
+    'knowledge_models': '',
+    'locales': 'locales/',
+    'document_templates': '',
+    'document_template_asset': 'templates/{placeholder}/',
+    'document_template_file': ''
 }
 
 
 # Map resources to their s3 objects' file names
 resource_s3_objects_fileNames = {
-    "locales": "name",
-    "document_templates": [],
-    "document_template_asset": ["templates/"],
-    "document_template_file": ["templates/"]
+    'locales': 'name',
+    'document_templates': [],
+    'document_template_asset': ['templates/'],
+    'document_template_file': ['templates/']
 }
 
 
 # Map resources to their identification attribute
 resource_identification = {
-    "users": "uuid",
-    "projects": "uuid",
-    "documents": "uuid",
-    "project_importers": "id",
-    "knowledge_models": "id",
-    "locales": "id",
-    "document_templates": "id",
-    "document_template_asset": "uuid",
-    "document_template_file": "uuid"
+    'users': 'uuid',
+    'projects': 'uuid',
+    'documents': 'uuid',
+    'project_importers': 'id',
+    'knowledge_models': 'id',
+    'locales': 'id',
+    'document_templates': 'id',
+    'document_template_asset': 'uuid',
+    'document_template_file': 'uuid'
 }
 
 
 # Map resources to their table names
 resource_tables = {
-    "users": "user_entity",
-    "projects": "questionnaire",
-    "documents": "document",
-    "project_importers": "questionnaire_importer",
-    "knowledge_models": "package",
-    "locales": "locale",
-    "document_templates": "document_template",
-    "document_template_asset": "document_template_asset",
-    "document_template_file": "document_template_file"
+    'users': 'user_entity',
+    'projects': 'questionnaire',
+    'documents': 'document',
+    'project_importers': 'questionnaire_importer',
+    'knowledge_models': 'package',
+    'locales': 'locale',
+    'document_templates': 'document_template',
+    'document_template_asset': 'document_template_asset',
+    'document_template_file': 'document_template_file'
 }
 
 
 # Map resources to attributes visible to users
 resource_attributes = {
-    "users": ['uuid', 'first_name', 'last_name', 'role', 'email'],
-    "projects": ['uuid', 'name'],
-    "documents": ['uuid', 'name'],
-    "project_importers": ['id', 'name', 'description'],
-    "knowledge_models": ['id', 'name', 'km_id', 'description'],
-    "locales": ['id', 'name', 'code', 'description'],
-    "document_templates": ['id', 'name', 'template_id'],
-    "document_template_asset": ['uuid', 'document_template_id'],
-    "document_template_file": ['uuid', 'document_template_id']
+    'users': ['uuid', 'first_name', 'last_name', 'role', 'email'],
+    'projects': ['uuid', 'name'],
+    'documents': ['uuid', 'name'],
+    'project_importers': ['id', 'name', 'description'],
+    'knowledge_models': ['id', 'name', 'km_id', 'description'],
+    'locales': ['id', 'name', 'code', 'description'],
+    'document_templates': ['id', 'name', 'template_id'],
+    'document_template_asset': ['uuid', 'document_template_id'],
+    'document_template_file': ['uuid', 'document_template_id']
 }
diff --git a/src/dsw_seed_maker/recipe_tmp.json b/src/dsw_seed_maker/recipe_template.json
similarity index 100%
rename from src/dsw_seed_maker/recipe_tmp.json
rename to src/dsw_seed_maker/recipe_template.json
diff --git a/src/dsw_seed_maker/tmp.json b/src/dsw_seed_maker/tmp.json
deleted file mode 100644
index 5efd9fa..0000000
--- a/src/dsw_seed_maker/tmp.json
+++ /dev/null
@@ -1,85 +0,0 @@
-{
-    "users": [
-        {
-            "uuid": "e1c58e52-0824-4526-8ebe-ec38eec67030",
-            "first_name": "Isaac",
-            "last_name": "Newton",
-            "role": "researcher"
-        },
-        {
-            "uuid": "30d48cf4-8c8a-496f-bafe-585bd238f798",
-            "first_name": "Nikola",
-            "last_name": "Tesla",
-            "role": "dataSteward"
-        },
-        {
-            "uuid": "00000000-0000-0000-0000-000000000000",
-            "first_name": "System",
-            "last_name": "User",
-            "role": "admin"
-        },
-        {
-            "uuid": "ec6f8e90-2a91-49ec-aa3f-9eab2267fc66",
-            "first_name": "Albert",
-            "last_name": "Einstein",
-            "role": "admin"
-        }
-    ],
-    "project_importers": [
-        {
-            "id": "dsw:replies-importer:0.1.0",
-            "name": "DSW Replies (JSON)",
-            "description": "Import from replies in JSON exported from DSW"
-        }
-    ],
-    "knowledge_models": [
-        {
-            "id": "myorg:km-for-seeding:0.0.1",
-            "name": "KM for seeding",
-            "km_id": "km-for-seeding",
-            "description": ""
-        }
-    ],
-    "locales": [
-        {
-            "id": "wizard:default:1.0.0",
-            "name": "English",
-            "code": "en",
-            "description": "Default English locale for Wizard UI"
-        }
-    ],
-    "document_templates": [
-        {
-            "id": "myorg:dsw-seeding:0.0.1",
-            "name": "DSW seeding",
-            "template_id": "dsw-seeding"
-        },
-        {
-            "id": "myorg:dsw-seeding2:0.0.1",
-            "name": "DSW seeding 2",
-            "template_id": "dsw-seeding2"
-        },
-        {
-            "id": "myorg:dsw-seeding2:0.1.0",
-            "name": "DSW seeding 2",
-            "template_id": "dsw-seeding2"
-        },
-        {
-            "id": "dsw:questionnaire-report:2.12.0",
-            "name": "Questionnaire Report",
-            "template_id": "questionnaire-report"
-        }
-    ],
-    "projects": [
-        {
-            "uuid": "7ec5c86a-946a-4386-ba3c-b27481288a62",
-            "name": "Jana Mart\u00ednkov\u00e1"
-        }
-    ],
-    "documents": [
-        {
-            "uuid": "1034a4b0-d867-4b4b-b2a0-a3956b43cf95",
-            "name": "Jana Mart\u00ednkov\u00e1"
-        }
-    ]
-}
\ No newline at end of file