From 0e4cb1dd11b7867876287b91c7e66b06aad22235 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Tue, 24 Nov 2020 16:29:02 +0200 Subject: [PATCH 1/3] Update setup.py for tuf-client Modify setup.py with tuf-client information Signed-off-by: Teodora Sechkova --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index c87e2292b4..3881fbcbd1 100755 --- a/setup.py +++ b/setup.py @@ -77,7 +77,7 @@ setup( - name = 'tuf', + name = 'tuf-client', version = '0.15.0', # If updating version, also update it in tuf/__init__.py description = 'A secure updater framework for Python', long_description = long_description, @@ -124,7 +124,6 @@ ], packages = find_packages(exclude=['tests']), scripts = [ - 'tuf/scripts/repo.py', 'tuf/scripts/client.py' ] ) From b61982ca77985848a5be7f6b775c1cc4ea257670 Mon Sep 17 00:00:00 2001 From: Teodora Sechkova Date: Tue, 24 Nov 2020 16:33:38 +0200 Subject: [PATCH 2/3] Remove repository source code Split TUF in client and repository side: remove repository-only source code. Signed-off-by: Teodora Sechkova --- tuf/developer_tool.py | 1029 ------------- tuf/repository_lib.py | 2314 ---------------------------- tuf/repository_tool.py | 3302 ---------------------------------------- tuf/scripts/repo.py | 1154 -------------- 4 files changed, 7799 deletions(-) delete mode 100755 tuf/developer_tool.py delete mode 100644 tuf/repository_lib.py delete mode 100755 tuf/repository_tool.py delete mode 100755 tuf/scripts/repo.py diff --git a/tuf/developer_tool.py b/tuf/developer_tool.py deleted file mode 100755 index e3269b088b..0000000000 --- a/tuf/developer_tool.py +++ /dev/null @@ -1,1029 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - developer_tool.py - - - Santiago Torres - Zane Fisher - - Based on the work done for 'repository_tool.py' by Vladimir Diaz. - - - January 22, 2014. - - - See LICENCE-MIT OR LICENCE for licensing information. - - - See 'tuf/README-developer-tools.md' for a complete guide on using - 'developer_tool.py'. -""" - -# Help with Python 3 compatibility, where the print statement is a function, an -# implicit relative import is invalid, and the '/' operator performs true -# division. Example: print 'hello world' raises a 'SyntaxError' exception. 
-from __future__ import print_function -from __future__ import absolute_import -from __future__ import division - -import os -import errno -import logging -import shutil -import tempfile -import json - -import tuf -import tuf.formats -import tuf.keydb -import tuf.roledb -import tuf.sig -import tuf.log -import tuf.repository_lib as repo_lib -import tuf.repository_tool - -import securesystemslib -import securesystemslib.util -import securesystemslib.keys - -import six - -from tuf.repository_tool import Targets -from tuf.repository_lib import _check_role_keys -from tuf.repository_lib import _metadata_is_partially_loaded - - -# Copy API -# pylint: disable=unused-import - -# Copy generic repository API functions to be used via `developer_tool` -from tuf.repository_lib import ( - generate_targets_metadata, - create_tuf_client_directory, - disable_console_log_messages) - -# Copy key-related API functions to be used via `developer_tool` -from tuf.repository_lib import ( - import_rsa_privatekey_from_file) - -from securesystemslib.keys import ( - format_keyval_to_metadata) - -from securesystemslib.interface import ( - generate_and_write_rsa_keypair, - generate_and_write_rsa_keypair_with_prompt, - generate_and_write_unencrypted_rsa_keypair, - generate_and_write_ecdsa_keypair, - generate_and_write_ecdsa_keypair_with_prompt, - generate_and_write_unencrypted_ecdsa_keypair, - generate_and_write_ed25519_keypair, - generate_and_write_ed25519_keypair_with_prompt, - generate_and_write_unencrypted_ed25519_keypair, - import_rsa_publickey_from_file, - import_ed25519_publickey_from_file, - import_ed25519_privatekey_from_file) - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -# The extension of TUF metadata. -from tuf.repository_lib import METADATA_EXTENSION as METADATA_EXTENSION - -# Project configuration filename. This file is intended to hold all of the -# supporting information about the project that's not contained in a usual -# TUF metadata file. 'project.cfg' consists of the following fields: -# -# targets_location: the location of the targets folder. -# -# prefix: the directory location to prepend to the metadata so it -# matches the metadata signed in the repository. -# -# metadata_location: the location of the metadata files. -# -# threshold: the threshold for this project object, it is fixed to -# one in the current version. -# -# public_keys: a list of the public keys used to verify the metadata -# in this project. -# -# layout_type: a field describing the directory layout: -# -# repo-like: matches the layout of the repository tool. -# the targets and metadata folders are -# located under a common directory for the -# project. -# -# flat: the targets directory and the -# metadata directory are located in different -# paths. -# -# project_name: The name of the current project, this value is used to -# match the resulting filename with the one in upstream. -PROJECT_FILENAME = 'project.cfg' - -# The targets and metadata directory names. Metadata files are written -# to the staged metadata directory instead of the "live" one. -from tuf.repository_tool import METADATA_DIRECTORY_NAME -from tuf.repository_tool import TARGETS_DIRECTORY_NAME - - -class Project(Targets): - """ - - Simplify the publishing process of third-party projects by handling all of - the bookkeeping, signature handling, and integrity checks of delegated TUF - metadata. 
'repository_tool.py' is responsible for publishing and - maintaining metadata of the top-level roles, and 'developer_tool.py' is - used by projects that have been delegated responsibility for a delegated - projects role. Metadata created by this module may then be added to other - metadata available in a TUF repository. - - Project() is the representation of a project's metadata file(s), with the - ability to modify this data in an OOP manner. Project owners do not have to - manually verify that metadata files are properly formatted or that they - contain valid data. - - - project_name: - The name of the metadata file as it should be named in the upstream - repository. - - metadata_directory: - The metadata sub-directory contains the metadata file(s) of this project, - including any of its delegated roles. - - targets_directory: - The targets sub-directory contains the project's target files that are - downloaded by clients and are referenced in its metadata. The hashes and - file lengths are listed in Metadata files so that they are securely - downloaded. Metadata files are similarly referenced in the top-level - metadata. - - file_prefix: - The path string that will be prepended to the generated metadata - (e.g., targets/foo -> targets/prefix/foo) so that it matches the actual - targets location in the upstream repository. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Creates a project Targets role object, with the same object attributes of - the top-level targets role. - - - None. - """ - - def __init__(self, project_name, metadata_directory, targets_directory, - file_prefix, repository_name='default'): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly - # formatted. - securesystemslib.formats.NAME_SCHEMA.check_match(project_name) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(file_prefix) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - self.metadata_directory = metadata_directory - self.targets_directory = targets_directory - self.project_name = project_name - self.prefix = file_prefix - self.repository_name = repository_name - - # Layout type defaults to "flat" unless explicitly specified in - # create_new_project(). - self.layout_type = 'flat' - - # Set the top-level Targets object. Set the rolename to be the project's - # name. - super(Project, self).__init__(self.targets_directory, project_name) - - - - - - def write(self, write_partial=False): - """ - - Write all the JSON Metadata objects to their corresponding files. - write() raises an exception if any of the role metadata to be written to - disk is invalid, such as an insufficient threshold of signatures, missing - private keys, etc. - - - write_partial: - A boolean indicating whether partial metadata should be written to - disk. Partial metadata may be written to allow multiple maintainters - to independently sign and update role metadata. write() raises an - exception if a metadata role cannot be written due to not having enough - signatures. 
- - - securesystemslib.exceptions.Error, if any of the project roles do not - have a minimum threshold of signatures. - - - Creates metadata files in the project's metadata directory. - - - None. - """ - - # Does 'write_partial' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_partial) - - # At this point the tuf.keydb and tuf.roledb stores must be fully - # populated, otherwise write() throwns a 'tuf.Repository' exception if - # any of the project roles are missing signatures, keys, etc. - - # Write the metadata files of all the delegated roles of the project. - delegated_rolenames = tuf.roledb.get_delegated_rolenames(self.project_name, - self.repository_name) - - for delegated_rolename in delegated_rolenames: - delegated_filename = os.path.join(self.metadata_directory, - delegated_rolename + METADATA_EXTENSION) - - # Ensure the parent directories of 'metadata_filepath' exist, otherwise an - # IO exception is raised if 'metadata_filepath' is written to a - # sub-directory. - securesystemslib.util.ensure_parent_dir(delegated_filename) - - _generate_and_write_metadata(delegated_rolename, delegated_filename, - write_partial, self.targets_directory, prefix=self.prefix, - repository_name=self.repository_name) - - - # Generate the 'project_name' metadata file. - targets_filename = self.project_name + METADATA_EXTENSION - targets_filename = os.path.join(self.metadata_directory, targets_filename) - junk, targets_filename = _generate_and_write_metadata(self.project_name, - targets_filename, write_partial, self.targets_directory, - prefix=self.prefix, repository_name=self.repository_name) - - # Save configuration information that is not stored in the project's - # metadata - _save_project_configuration(self.metadata_directory, - self.targets_directory, self.keys, self.prefix, self.threshold, - self.layout_type, self.project_name) - - - - - - def add_verification_key(self, key, expires=None): - """ - - Function as a thin wrapper call for the project._targets call - with the same name. This wrapper is only for usability purposes. - - - key: - The role key to be added, conformant to - 'securesystemslib.formats.ANYKEY_SCHEMA'. Adding a public key to a - role means that its corresponding private key must generate and add - its signture to the role. - - - securesystemslib.exceptions.FormatError, if the 'key' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if the project already contains a key. - - - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are updated. - - - None - """ - - # Verify that this role does not already contain a key. The parent project - # role is restricted to one key. Any of its delegated roles may have - # more than one key. - # TODO: Add condition check for the requirement stated above. - if len(self.keys) > 0: - raise securesystemslib.exceptions.Error("This project already contains a key.") - - super(Project, self).add_verification_key(key, expires) - - - - - - def status(self): - """ - - Determine the status of the project, including its delegated roles. - status() checks if each role provides sufficient public keys, signatures, - and that a valid metadata file is generated if write() were to be called. 
- Metadata files are temporarily written to check that proper metadata files - is written, where file hashes and lengths are calculated and referenced - by the project. status() does not do a simple check for number of - threshold keys and signatures. - - - None. - - - securesystemslib.exceptions.Error, if the project, or any of its - delegated roles, do not have a minimum threshold of signatures. - - - Generates and writes temporary metadata files. - - - None. - """ - - temp_project_directory = None - - try: - temp_project_directory = tempfile.mkdtemp() - - metadata_directory = os.path.join(temp_project_directory, 'metadata') - targets_directory = self.targets_directory - - os.makedirs(metadata_directory) - - # TODO: We should do the schema check. - filenames = {} - filenames['targets'] = os.path.join(metadata_directory, self.project_name) - - # Delegated roles. - delegated_roles = tuf.roledb.get_delegated_rolenames(self.project_name, - self.repository_name) - insufficient_keys = [] - insufficient_signatures = [] - - for delegated_role in delegated_roles: - try: - _check_role_keys(delegated_role, self.repository_name) - - except tuf.exceptions.InsufficientKeysError: - insufficient_keys.append(delegated_role) - continue - - try: - signable = _generate_and_write_metadata(delegated_role, - filenames['targets'], False, targets_directory, False, - repository_name=self.repository_name) - self._log_status(delegated_role, signable[0], self.repository_name) - - except securesystemslib.exceptions.Error: - insufficient_signatures.append(delegated_role) - - if len(insufficient_keys): - message = 'Delegated roles with insufficient keys: ' +\ - repr(insufficient_keys) - logger.info(message) - return - - if len(insufficient_signatures): - message = 'Delegated roles with insufficient signatures: ' +\ - repr(insufficient_signatures) - logger.info(message) - return - - # Targets role. - try: - _check_role_keys(self.rolename, self.repository_name) - - except tuf.exceptions.InsufficientKeysError as e: - logger.info(str(e)) - return - - try: - signable, junk = _generate_and_write_metadata(self.project_name, - filenames['targets'], False, targets_directory, metadata_directory, - self.repository_name) - self._log_status(self.project_name, signable, self.repository_name) - - except tuf.exceptions.UnsignedMetadataError as e: - # This error is raised if the metadata has insufficient signatures to - # meet the threshold. - self._log_status(self.project_name, e.signable, self.repository_name) - return - - finally: - shutil.rmtree(temp_project_directory, ignore_errors=True) - - - - - - def _log_status(self, rolename, signable, repository_name): - """ - Non-public function prints the number of (good/threshold) signatures of - 'rolename'. - """ - - status = tuf.sig.get_signature_status(signable, rolename, repository_name) - - message = repr(rolename) + ' role contains ' +\ - repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) +\ - ' signatures.' - logger.info(message) - - - - - -def _generate_and_write_metadata(rolename, metadata_filename, write_partial, - targets_directory, prefix='', repository_name='default'): - """ - Non-public function that can generate and write the metadata of the - specified 'rolename'. It also increments version numbers if: - - 1. write_partial==True and the metadata is the first to be written. - - 2. write_partial=False (i.e., write()), the metadata was not loaded as - partially written, and a write_partial is not needed. 
- """ - - metadata = None - - # Retrieve the roleinfo of 'rolename' to extract the needed metadata - # attributes, such as version number, expiration, etc. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) - - metadata = generate_targets_metadata(targets_directory, roleinfo['paths'], - roleinfo['version'], roleinfo['expires'], roleinfo['delegations'], - False) - - # Prepend the prefix to the project's filepath to avoid signature errors in - # upstream. - for element in list(metadata['targets']): - junk, relative_target = os.path.split(element) - prefixed_path = os.path.join(prefix, relative_target) - metadata['targets'][prefixed_path] = metadata['targets'][element] - if prefix != '': - del(metadata['targets'][element]) - - signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename, repository_name) - - # Check if the version number of 'rolename' may be automatically incremented, - # depending on whether if partial metadata is loaded or if the metadata is - # written with write() / write_partial(). - # Increment the version number if this is the first partial write. - if write_partial: - temp_signable = repo_lib.sign_metadata(metadata, [], metadata_filename, - repository_name) - temp_signable['signatures'].extend(roleinfo['signatures']) - status = tuf.sig.get_signature_status(temp_signable, rolename, - repository_name) - if len(status['good_sigs']) == 0: - metadata['version'] = metadata['version'] + 1 - signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename, repository_name) - - # non-partial write() - else: - if tuf.sig.verify(signable, rolename, repository_name): - metadata['version'] = metadata['version'] + 1 - signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'], - metadata_filename, repository_name) - - # Write the metadata to file if contains a threshold of signatures. - signable['signatures'].extend(roleinfo['signatures']) - - if tuf.sig.verify(signable, rolename, repository_name) or write_partial: - repo_lib._remove_invalid_and_duplicate_signatures(signable, repository_name) - storage_backend = securesystemslib.storage.FilesystemBackend() - filename = repo_lib.write_metadata_file(signable, metadata_filename, - metadata['version'], False, storage_backend) - - # 'signable' contains an invalid threshold of signatures. - else: - message = 'Not enough signatures for ' + repr(metadata_filename) - raise securesystemslib.exceptions.Error(message, signable) - - return signable, filename - - - - -def create_new_project(project_name, metadata_directory, - location_in_repository = '', targets_directory=None, key=None, - repository_name='default'): - """ - - Create a new project object, instantiate barebones metadata for the - targets, and return a blank project object. On disk, create_new_project() - only creates the directories needed to hold the metadata and targets files. - The project object returned can be directly modified to meet the designer's - criteria and then written using the method project.write(). - - The project name provided is the one that will be added to the resulting - metadata file as it should be named in upstream. - - - project_name: - The name of the project as it should be called in upstream. For example, - targets/unclaimed/django should have its project_name set to "django" - - metadata_directory: - The directory that will eventually hold the metadata and target files of - the project. 
- - location_in_repository: - An optional argument to hold the "prefix" or the expected location for - the project files in the "upstream" repository. This value is only - used to sign metadata in a way that it matches the future location - of the files. - - For example, targets/unclaimed/django should have its project name set to - "targets/unclaimed" - - targets_directory: - An optional argument to point the targets directory somewhere else than - the metadata directory if, for example, a project structure already - exists and the user does not want to move it. - - key: - The public key to verify the project's metadata. Projects can only - handle one key with a threshold of one. If a project were to modify it's - key it should be removed and updated. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted or if the public key is not a valid one (if it's not none.) - - OSError, if the filepaths provided do not have write permissions. - - - The 'metadata_directory' and 'targets_directory' directories are created - if they do not exist. - - - A 'tuf.developer_tool.Project' object. - """ - - # Does 'metadata_directory' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - - # Do the same for the location in the repo and the project name, we must - # ensure they are valid pathnames. - securesystemslib.formats.NAME_SCHEMA.check_match(project_name) - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(location_in_repository) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - # for the targets directory we do the same, but first, let's find out what - # layout the user needs, layout_type is a variable that is usually set to - # 1, which means "flat" (i.e. the cfg file is where the metadata folder is - # located), with a two, the cfg file goes to the "metadata" folder, and a - # new metadata folder is created inside the tree, to separate targets and - # metadata. - layout_type = 'flat' - if targets_directory is None: - targets_directory = os.path.join(metadata_directory, TARGETS_DIRECTORY_NAME) - metadata_directory = \ - os.path.join(metadata_directory, METADATA_DIRECTORY_NAME) - layout_type = 'repo-like' - - if targets_directory is not None: - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - - if key is not None: - securesystemslib.formats.KEY_SCHEMA.check_match(key) - - # Set the metadata and targets directories. These directories - # are created if they do not exist. - metadata_directory = os.path.abspath(metadata_directory) - targets_directory = os.path.abspath(targets_directory) - - # Try to create the metadata directory that will hold all of the metadata - # files, such as 'root.txt' and 'release.txt'. - try: - message = 'Creating ' + repr(metadata_directory) - logger.info(message) - os.makedirs(metadata_directory) - - # 'OSError' raised if the leaf directory already exists or cannot be created. - # Check for case where 'repository_directory' has already been created. - except OSError as e: - if e.errno == errno.EEXIST: - # Should check if we have write permissions here. 
- pass - - # Testing of non-errno.EEXIST exceptions have been verified on all - # supported # OSs. An unexpected exception (the '/' directory exists, - # rather than disallowed path) is possible on Travis, so the '#pragma: no - # branch' below is included to prevent coverage failure. - else: #pragma: no branch - raise - - # Try to create the targets directory that will hold all of the target files. - try: - message = 'Creating ' + repr(targets_directory) - logger.info(message) - os.mkdir(targets_directory) - - except OSError as e: - if e.errno == errno.EEXIST: - pass - - else: - raise - - # Create the bare bones project object, where project role contains default - # values (e.g., threshold of 1, expires 1 year into the future, etc.) - project = Project(project_name, metadata_directory, targets_directory, - location_in_repository, repository_name) - - # Add 'key' to the project. - # TODO: Add check for expected number of keys for the project (must be 1) and - # its delegated roles (may be greater than one.) - if key is not None: - project.add_verification_key(key) - - # Save the layout information. - project.layout_type = layout_type - - return project - - - - - - -def _save_project_configuration(metadata_directory, targets_directory, - public_keys, prefix, threshold, layout_type, project_name): - """ - - Persist the project's information to a file. The saved project information - can later be loaded with Project.load_project(). - - - metadata_directory: - Where the project's metadata is located. - - targets_directory: - The location of the target files for this project. - - public_keys: - A list containing the public keys for the project role. - - prefix: - The project's prefix (if any.) - - threshold: - The threshold value for the project role. - - layout_type: - The layout type being used by the project, "flat" stands for separated - targets and metadata directories, "repo-like" emulates the layout used - by the repository tools - - project_name: - The name given to the project, this sets the metadata filename so it - matches the one stored in upstream. - - - securesystemslib.exceptions.FormatError are also expected if any of the arguments are malformed. - - OSError may rise if the metadata_directory/project.cfg file exists and - is non-writeable - - - A 'project.cfg' configuration file is created or overwritten. - - - None. - """ - - # Schema check for the arguments. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(prefix) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.RELPATH_SCHEMA.check_match(project_name) - - cfg_file_directory = metadata_directory - - # Check whether the layout type is 'flat' or 'repo-like'. - # If it is, the .cfg file should be saved in the previous directory. - if layout_type == 'repo-like': - cfg_file_directory = os.path.dirname(metadata_directory) - junk, targets_directory = os.path.split(targets_directory) - - junk, metadata_directory = os.path.split(metadata_directory) - - # Can the file be opened? - project_filename = os.path.join(cfg_file_directory, PROJECT_FILENAME) - - # Build the fields of the configuration file. 
- project_config = {} - project_config['prefix'] = prefix - project_config['public_keys'] = {} - project_config['metadata_location'] = metadata_directory - project_config['targets_location'] = targets_directory - project_config['threshold'] = threshold - project_config['layout_type'] = layout_type - project_config['project_name'] = project_name - - # Build a dictionary containing the actual keys. - for key in public_keys: - key_info = tuf.keydb.get_key(key) - key_metadata = format_keyval_to_metadata(key_info['keytype'], - key_info['scheme'], key_info['keyval']) - project_config['public_keys'][key] = key_metadata - - # Save the actual file. - with open(project_filename, 'wt') as fp: - json.dump(project_config, fp) - - - - - -def load_project(project_directory, prefix='', new_targets_location=None, - repository_name='default'): - """ - - Return a Project object initialized with the contents of the metadata - files loaded from 'project_directory'. - - - project_directory: - The path to the project's metadata and configuration file. - - prefix: - The prefix for the metadata, if defined. It will replace the current - prefix, by first removing the existing one (saved). - - new_targets_location: - For flat project configurations, project owner might want to reload the - project with a new location for the target files. This overwrites the - previous path to search for the target files. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if 'project_directory' or any of - the metadata files are improperly formatted. - - - All the metadata files found in the project are loaded and their contents - stored in a libtuf.Repository object. - - - A tuf.developer_tool.Project object. - """ - - # Does 'repository_directory' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(project_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - # Do the same for the prefix - securesystemslib.formats.ANY_STRING_SCHEMA.check_match(prefix) - - # Clear the role and key databases since we are loading in a new project. - tuf.roledb.clear_roledb(clear_all=True) - tuf.keydb.clear_keydb(clear_all=True) - - # Locate metadata filepaths and targets filepath. - project_directory = os.path.abspath(project_directory) - - # Load the cfg file and the project. - config_filename = os.path.join(project_directory, PROJECT_FILENAME) - - project_configuration = securesystemslib.util.load_json_file(config_filename) - tuf.formats.PROJECT_CFG_SCHEMA.check_match(project_configuration) - - targets_directory = os.path.join(project_directory, - project_configuration['targets_location']) - - if project_configuration['layout_type'] == 'flat': - project_directory, junk = os.path.split(project_directory) - targets_directory = project_configuration['targets_location'] - - if new_targets_location is not None: - targets_directory = new_targets_location - - metadata_directory = os.path.join(project_directory, - project_configuration['metadata_location']) - - new_prefix = None - - if prefix != '': - new_prefix = prefix - - prefix = project_configuration['prefix'] - - # Load the project's filename. - project_name = project_configuration['project_name'] - project_filename = project_name + METADATA_EXTENSION - - # Create a blank project on the target directory. 
- project = Project(project_name, metadata_directory, targets_directory, prefix, - repository_name) - - project.threshold = project_configuration['threshold'] - project.prefix = project_configuration['prefix'] - project.layout_type = project_configuration['layout_type'] - - # Traverse the public keys and add them to the project. - keydict = project_configuration['public_keys'] - - for keyid in keydict: - key, junk = securesystemslib.keys.format_metadata_to_key(keydict[keyid]) - project.add_verification_key(key) - - # Load the project's metadata. - targets_metadata_path = os.path.join(project_directory, metadata_directory, - project_filename) - signable = securesystemslib.util.load_json_file(targets_metadata_path) - try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - targets_metadata = signable['signed'] - - # Remove the prefix from the metadata. - targets_metadata = _strip_prefix_from_targets_metadata(targets_metadata, - prefix) - for signature in signable['signatures']: - project.add_signature(signature) - - # Update roledb.py containing the loaded project attributes. - roleinfo = tuf.roledb.get_roleinfo(project_name, repository_name) - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = targets_metadata['version'] - roleinfo['paths'] = targets_metadata['targets'] - roleinfo['delegations'] = targets_metadata['delegations'] - roleinfo['partial_loaded'] = False - - # Check if the loaded metadata was partially written and update the - # flag in 'roledb.py'. - if _metadata_is_partially_loaded(project_name, signable, - repository_name=repository_name): - roleinfo['partial_loaded'] = True - - tuf.roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - for key_metadata in targets_metadata['delegations']['keys'].values(): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) - tuf.keydb.add_key(key_object, repository_name=repository_name) - - for role in targets_metadata['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], - 'signing_keyids': [], 'signatures': [], 'partial_loaded':False, - 'delegations': {'keys':{}, 'roles':[]} - } - tuf.roledb.add_role(rolename, roleinfo, repository_name=repository_name) - - # Load the delegated metadata and generate their fileinfo. - targets_objects = {} - loaded_metadata = [project_name] - targets_objects[project_name] = project - metadata_directory = os.path.join(project_directory, metadata_directory) - - if os.path.exists(metadata_directory) and \ - os.path.isdir(metadata_directory): - for metadata_role in os.listdir(metadata_directory): - metadata_path = os.path.join(metadata_directory, metadata_role) - metadata_name = \ - metadata_path[len(metadata_directory):].lstrip(os.path.sep) - - # Strip the extension. The roledb does not include an appended '.json' - # extension for each role. 
- if metadata_name.endswith(METADATA_EXTENSION): - extension_length = len(METADATA_EXTENSION) - metadata_name = metadata_name[:-extension_length] - - else: - continue - - if metadata_name in loaded_metadata: - continue - - signable = None - signable = securesystemslib.util.load_json_file(metadata_path) - - # Strip the prefix from the local working copy, it will be added again - # when the targets metadata is written to disk. - metadata_object = signable['signed'] - metadata_object = _strip_prefix_from_targets_metadata(metadata_object, - prefix) - - roleinfo = tuf.roledb.get_roleinfo(metadata_name, repository_name) - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = metadata_object['version'] - roleinfo['expires'] = metadata_object['expires'] - roleinfo['paths'] = {} - - for filepath, fileinfo in six.iteritems(metadata_object['targets']): - roleinfo['paths'].update({filepath: fileinfo.get('custom', {})}) - roleinfo['delegations'] = metadata_object['delegations'] - roleinfo['partial_loaded'] = False - - # If the metadata was partially loaded, update the roleinfo flag. - if _metadata_is_partially_loaded(metadata_name, signable, - repository_name=repository_name): - roleinfo['partial_loaded'] = True - - - tuf.roledb.update_roleinfo(metadata_name, roleinfo, - mark_role_as_dirty=False, repository_name=repository_name) - - # Append to list of elements to avoid reloading repeated metadata. - loaded_metadata.append(metadata_name) - - # Generate the Targets objects of the delegated roles. - new_targets_object = Targets(targets_directory, metadata_name, roleinfo, - repository_name=repository_name) - targets_object = targets_objects[project_name] - - targets_object._delegated_roles[metadata_name] = new_targets_object - - # Add the keys specified in the delegations field of the Targets role. - for key_metadata in metadata_object['delegations']['keys'].values(): - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) - - try: - tuf.keydb.add_key(key_object, repository_name=repository_name) - - except tuf.exceptions.KeyAlreadyExistsError: - pass - - for role in metadata_object['delegations']['roles']: - rolename = role['name'] - roleinfo = {'name': role['name'], 'keyids': role['keyids'], - 'threshold': role['threshold'], - 'signing_keyids': [], 'signatures': [], - 'partial_loaded': False, - 'delegations': {'keys': {}, - 'roles': []}} - tuf.roledb.add_role(rolename, roleinfo, repository_name=repository_name) - - if new_prefix: - project.prefix = new_prefix - - return project - - - - - -def _strip_prefix_from_targets_metadata(targets_metadata, prefix): - """ - Non-public method that removes the prefix from each of the target paths in - 'targets_metadata' so they can be used again in compliance with the local - copies. The prefix is needed in metadata to match the layout of the remote - repository. 
- """ - - unprefixed_targets_metadata = {} - - for targets in targets_metadata['targets'].keys(): - unprefixed_target = os.path.relpath(targets, prefix) - unprefixed_targets_metadata[unprefixed_target] = \ - targets_metadata['targets'][targets] - targets_metadata['targets'] = unprefixed_targets_metadata - - return targets_metadata - - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running 'developer_tool.py' as a standalone module: - # $ python developer_tool.py - import doctest - doctest.testmod() diff --git a/tuf/repository_lib.py b/tuf/repository_lib.py deleted file mode 100644 index 1f64f66413..0000000000 --- a/tuf/repository_lib.py +++ /dev/null @@ -1,2314 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2014 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - repository_lib.py - - - Vladimir Diaz - - - June 1, 2014. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a library for the repository tool that can create a TUF repository. - The repository tool can be used with the Python interpreter in interactive - mode, or imported directly into a Python module. See 'tuf/README' for the - complete guide to using 'tuf.repository_tool.py'. -""" - -# Help with Python 3 compatibility, where the print statement is a function, an -# implicit relative import is invalid, and the '/' operator performs true -# division. Example: print 'hello world' raises a 'SyntaxError' exception. -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -import os -import errno -import time -import logging -import shutil -import json -import tempfile - -import tuf -import tuf.formats -import tuf.exceptions -import tuf.keydb -import tuf.roledb -import tuf.sig -import tuf.log -import tuf.settings - -import securesystemslib -import securesystemslib.hash -import securesystemslib.interface -import securesystemslib.util -import six - -import securesystemslib.storage - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -# The extension of TUF metadata. -METADATA_EXTENSION = '.json' - -# The targets and metadata directory names. Metadata files are written -# to the staged metadata directory instead of the "live" one. -METADATA_STAGED_DIRECTORY_NAME = 'metadata.staged' -METADATA_DIRECTORY_NAME = 'metadata' -TARGETS_DIRECTORY_NAME = 'targets' - -# The metadata filenames of the top-level roles. -ROOT_FILENAME = 'root' + METADATA_EXTENSION -TARGETS_FILENAME = 'targets' + METADATA_EXTENSION -SNAPSHOT_FILENAME = 'snapshot' + METADATA_EXTENSION -TIMESTAMP_FILENAME = 'timestamp' + METADATA_EXTENSION - -# Log warning when metadata expires in n days, or less. -# root = 1 month, snapshot = 1 day, targets = 10 days, timestamp = 1 day. -ROOT_EXPIRES_WARN_SECONDS = 2630000 -SNAPSHOT_EXPIRES_WARN_SECONDS = 86400 -TARGETS_EXPIRES_WARN_SECONDS = 864000 -TIMESTAMP_EXPIRES_WARN_SECONDS = 86400 - -# Supported key types. -SUPPORTED_KEY_TYPES = ['rsa', 'ed25519', 'ecdsa-sha2-nistp256'] - -# The algorithm used by the repository to generate the path hash prefixes -# of hashed bin delegations. 
Please see delegate_hashed_bins() -HASH_FUNCTION = tuf.settings.DEFAULT_HASH_ALGORITHM - - - - -def _generate_and_write_metadata(rolename, metadata_filename, - targets_directory, metadata_directory, storage_backend, - consistent_snapshot=False, filenames=None, allow_partially_signed=False, - increment_version_number=True, repository_name='default', - use_existing_fileinfo=False, use_timestamp_length=True, - use_timestamp_hashes=True, use_snapshot_length=False, - use_snapshot_hashes=False): - """ - Non-public function that can generate and write the metadata for the - specified 'rolename'. It also increments the version number of 'rolename' if - the 'increment_version_number' argument is True. - """ - - metadata = None - - # Retrieve the roleinfo of 'rolename' to extract the needed metadata - # attributes, such as version number, expiration, etc. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) - previous_keyids = roleinfo.get('previous_keyids', []) - previous_threshold = roleinfo.get('previous_threshold', 1) - signing_keyids = sorted(set(roleinfo['signing_keyids'])) - - # Generate the appropriate role metadata for 'rolename'. - if rolename == 'root': - metadata = generate_root_metadata(roleinfo['version'], roleinfo['expires'], - consistent_snapshot, repository_name) - - _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], - ROOT_EXPIRES_WARN_SECONDS) - - - - elif rolename == 'snapshot': - metadata = generate_snapshot_metadata(metadata_directory, - roleinfo['version'], roleinfo['expires'], - storage_backend, consistent_snapshot, repository_name, - use_length=use_snapshot_length, use_hashes=use_snapshot_hashes) - - - _log_warning_if_expires_soon(SNAPSHOT_FILENAME, roleinfo['expires'], - SNAPSHOT_EXPIRES_WARN_SECONDS) - - elif rolename == 'timestamp': - # If filenames don't have "snapshot_filename" key, defaults to "snapshot.json" - snapshot_file_path = (filenames and filenames['snapshot']) \ - or SNAPSHOT_FILENAME - - metadata = generate_timestamp_metadata(snapshot_file_path, roleinfo['version'], - roleinfo['expires'], storage_backend, repository_name, - use_length=use_timestamp_length, use_hashes=use_timestamp_hashes) - - _log_warning_if_expires_soon(TIMESTAMP_FILENAME, roleinfo['expires'], - TIMESTAMP_EXPIRES_WARN_SECONDS) - - # All other roles are either the top-level 'targets' role, or - # a delegated role. - else: - # Only print a warning if the top-level 'targets' role expires soon. - if rolename == 'targets': - _log_warning_if_expires_soon(TARGETS_FILENAME, roleinfo['expires'], - TARGETS_EXPIRES_WARN_SECONDS) - - # Don't hash-prefix consistent target files if they are handled out of band - consistent_targets = consistent_snapshot and not use_existing_fileinfo - - metadata = generate_targets_metadata(targets_directory, - roleinfo['paths'], roleinfo['version'], roleinfo['expires'], - roleinfo['delegations'], consistent_targets, use_existing_fileinfo, - storage_backend, repository_name) - - # Update roledb with the latest delegations info collected during - # generate_targets_metadata() - tuf.roledb.update_roleinfo(rolename, roleinfo, - repository_name=repository_name) - - - # Before writing 'rolename' to disk, automatically increment its version - # number (if 'increment_version_number' is True) so that the caller does not - # have to manually perform this action. The version number should be - # incremented in both the metadata file and roledb (required so that Snapshot - # references the latest version). 
- - # Store the 'current_version' in case the version number must be restored - # (e.g., if 'rolename' cannot be written to disk because its metadata is not - # properly signed). - current_version = metadata['version'] - if increment_version_number: - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) - metadata['version'] = metadata['version'] + 1 - roleinfo['version'] = roleinfo['version'] + 1 - tuf.roledb.update_roleinfo(rolename, roleinfo, - repository_name=repository_name) - - else: - logger.debug('Not incrementing ' + repr(rolename) + '\'s version number.') - - if rolename in tuf.roledb.TOP_LEVEL_ROLES and not allow_partially_signed: - # Verify that the top-level 'rolename' is fully signed. Only a delegated - # role should not be written to disk without full verification of its - # signature(s), since it can only be considered fully signed depending on - # the delegating role. - signable = sign_metadata(metadata, signing_keyids, metadata_filename, - repository_name) - - - def should_write(): - # Root must be signed by its previous keys and threshold. - if rolename == 'root' and len(previous_keyids) > 0: - if not tuf.sig.verify(signable, rolename, repository_name, - previous_threshold, previous_keyids): - return False - - else: - logger.debug('Root is signed by a threshold of its previous keyids.') - - # In the normal case, we should write metadata if the threshold is met. - return tuf.sig.verify(signable, rolename, repository_name, - roleinfo['threshold'], roleinfo['signing_keyids']) - - - if should_write(): - _remove_invalid_and_duplicate_signatures(signable, repository_name) - - # Root should always be written as if consistent_snapshot is True (i.e., - # write .root.json and root.json to disk). - if rolename == 'root': - consistent_snapshot = True - filename = write_metadata_file(signable, metadata_filename, - metadata['version'], consistent_snapshot, storage_backend) - - # 'signable' contains an invalid threshold of signatures. - else: - # Since new metadata cannot be successfully written, restore the current - # version number. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) - roleinfo['version'] = current_version - tuf.roledb.update_roleinfo(rolename, roleinfo, - repository_name=repository_name) - - # Note that 'signable' is an argument to tuf.UnsignedMetadataError(). - raise tuf.exceptions.UnsignedMetadataError('Not enough' - ' signatures for ' + repr(metadata_filename), signable) - - # 'rolename' is a delegated role or a top-level role that is partially - # signed, and thus its signatures should not be verified. - else: - signable = sign_metadata(metadata, signing_keyids, metadata_filename, - repository_name) - _remove_invalid_and_duplicate_signatures(signable, repository_name) - - # Root should always be written as if consistent_snapshot is True (i.e., - # .root.json and root.json). - if rolename == 'root': - filename = write_metadata_file(signable, metadata_filename, - metadata['version'], consistent_snapshot=True, - storage_backend=storage_backend) - - else: - filename = write_metadata_file(signable, metadata_filename, - metadata['version'], consistent_snapshot, storage_backend) - - return signable, filename - - - - - -def _metadata_is_partially_loaded(rolename, signable, repository_name): - """ - Non-public function that determines whether 'rolename' is loaded with - at least zero good signatures, but an insufficient threshold (which means - 'rolename' was written to disk with repository.write_partial()). 
A repository - maintainer may write partial metadata without including a valid signature. - However, the final repository.write() must include a threshold number of - signatures. - - If 'rolename' is found to be partially loaded, mark it as partially loaded in - its 'tuf.roledb' roleinfo. This function exists to assist in deciding whether - a role's version number should be incremented when write() or write_parital() - is called. Return True if 'rolename' was partially loaded, False otherwise. - """ - - # The signature status lists the number of good signatures, including - # bad, untrusted, unknown, etc. - status = tuf.sig.get_signature_status(signable, rolename, repository_name) - - if len(status['good_sigs']) < status['threshold'] and \ - len(status['good_sigs']) >= 0: - return True - - else: - return False - - - - - -def _check_role_keys(rolename, repository_name): - """ - Non-public function that verifies the public and signing keys of 'rolename'. - If either contain an invalid threshold of keys, raise an exception. - """ - - # Extract the total number of public and private keys of 'rolename' from its - # roleinfo in 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) - total_keyids = len(roleinfo['keyids']) - threshold = roleinfo['threshold'] - total_signatures = len(roleinfo['signatures']) - total_signing_keys = len(roleinfo['signing_keyids']) - - # Raise an exception for an invalid threshold of public keys. - if total_keyids < threshold: - raise tuf.exceptions.InsufficientKeysError(repr(rolename) + ' role contains' - ' ' + repr(total_keyids) + ' / ' + repr(threshold) + ' public keys.') - - # Raise an exception for an invalid threshold of signing keys. - if total_signatures == 0 and total_signing_keys < threshold: - raise tuf.exceptions.InsufficientKeysError(repr(rolename) + ' role contains' - ' ' + repr(total_signing_keys) + ' / ' + repr(threshold) + ' signing keys.') - - - - - -def _remove_invalid_and_duplicate_signatures(signable, repository_name): - """ - Non-public function that removes invalid or duplicate signatures from - 'signable'. 'signable' may contain signatures (invalid) from previous - versions of the metadata that were loaded with load_repository(). Invalid, - or duplicate signatures, are removed from 'signable'. - """ - - # Store the keyids of valid signatures. 'signature_keyids' is checked for - # duplicates rather than comparing signature objects because PSS may generate - # duplicate valid signatures for the same data, yet contain different - # signatures. - signature_keyids = [] - - for signature in signable['signatures']: - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - keyid = signature['keyid'] - key = None - - # Remove 'signature' from 'signable' if the listed keyid does not exist - # in 'tuf.keydb'. - try: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) - - except tuf.exceptions.UnknownKeyError: - signable['signatures'].remove(signature) - continue - - # Remove 'signature' from 'signable' if it is an invalid signature. - if not securesystemslib.keys.verify_signature(key, signature, signed): - logger.debug('Removing invalid signature for ' + repr(keyid)) - signable['signatures'].remove(signature) - - # Although valid, it may still need removal if it is a duplicate. Check - # the keyid, rather than the signature, to remove duplicate PSS signatures. - # PSS may generate multiple different signatures for the same keyid. 
- else: - if keyid in signature_keyids: - signable['signatures'].remove(signature) - - # 'keyid' is valid and not a duplicate, so add it to 'signature_keyids'. - else: - signature_keyids.append(keyid) - - - - - -def _delete_obsolete_metadata(metadata_directory, snapshot_metadata, - consistent_snapshot, repository_name, storage_backend): - """ - Non-public function that deletes metadata files marked as removed by - 'repository_tool.py'. Revoked metadata files are not actually deleted until - this function is called. Obsolete metadata should *not* be retained in - "metadata.staged", otherwise they may be re-loaded by 'load_repository()'. - - Note: Obsolete metadata may not always be easily detected (by inspecting - top-level metadata during loading) due to partial metadata and top-level - metadata that have not been written yet. - """ - - # Walk the repository's metadata sub-directory, which is where all metadata - # is stored (including delegated roles). The 'django.json' role (e.g., - # delegated by Targets) would be located in the - # '{repository_directory}/metadata/' directory. - metadata_files = sorted(storage_backend.list_folder(metadata_directory)) - for metadata_role in metadata_files: - if metadata_role.endswith('root.json'): - continue - - metadata_path = os.path.join(metadata_directory, metadata_role) - - # Strip the version number if 'consistent_snapshot' is True. Example: - # '10.django.json' --> 'django.json'. Consistent and non-consistent - # metadata might co-exist if write() and - # write(consistent_snapshot=True) are mixed, so ensure only - # '.filename' metadata is stripped. - - # Should we check if 'consistent_snapshot' is True? It might have been - # set previously, but 'consistent_snapshot' can potentially be False - # now. We'll proceed with the understanding that 'metadata_name' can - # have a prepended version number even though the repository is now - # a non-consistent one. - if metadata_role not in snapshot_metadata['meta']: - metadata_role, junk = _strip_version_number(metadata_role, - consistent_snapshot) - - else: - logger.debug(repr(metadata_role) + ' found in the snapshot role.') - - # Strip metadata extension from filename. The role database does not - # include the metadata extension. - if metadata_role.endswith(METADATA_EXTENSION): - metadata_role = metadata_role[:-len(METADATA_EXTENSION)] - else: - logger.debug(repr(metadata_role) + ' does not match' - ' supported extension ' + repr(METADATA_EXTENSION)) - - if metadata_role in tuf.roledb.TOP_LEVEL_ROLES: - logger.debug('Not removing top-level metadata ' + repr(metadata_role)) - return - - # Delete the metadata file if it does not exist in 'tuf.roledb'. - # 'repository_tool.py' might have removed 'metadata_name,' - # but its metadata file is not actually deleted yet. Do it now. - if not tuf.roledb.role_exists(metadata_role, repository_name): - logger.info('Removing outdated metadata: ' + repr(metadata_path)) - storage_backend.remove(metadata_path) - - else: - logger.debug('Not removing metadata: ' + repr(metadata_path)) - - # TODO: Should we delete outdated consistent snapshots, or does it make - # more sense for integrators to remove outdated consistent snapshots? - - - - -def _get_written_metadata(metadata_signable): - """ - Non-public function that returns the actual content of written metadata. - """ - - # Explicitly specify the JSON separators for Python 2 + 3 consistency. 
- written_metadata_content = json.dumps(metadata_signable, indent=1, - separators=(',', ': '), sort_keys=True).encode('utf-8') - - return written_metadata_content - - - - - -def _strip_version_number(metadata_filename, consistent_snapshot): - """ - Strip from 'metadata_filename' any version number (in the - expected '{dirname}/.rolename.' format) that - it may contain, and return the stripped filename and version number, - as a tuple. 'consistent_snapshot' is a boolean indicating if a version - number is prepended to 'metadata_filename'. - """ - - # Strip the version number if 'consistent_snapshot' is True. - # Example: '10.django.json' --> 'django.json' - if consistent_snapshot: - dirname, basename = os.path.split(metadata_filename) - version_number, basename = basename.split('.', 1) - stripped_metadata_filename = os.path.join(dirname, basename) - - if not version_number.isdigit(): - return metadata_filename, '' - - else: - return stripped_metadata_filename, version_number - - else: - return metadata_filename, '' - - - - -def _load_top_level_metadata(repository, top_level_filenames, repository_name): - """ - Load the metadata of the Root, Timestamp, Targets, and Snapshot roles. At a - minimum, the Root role must exist and load successfully. - """ - - root_filename = top_level_filenames[ROOT_FILENAME] - targets_filename = top_level_filenames[TARGETS_FILENAME] - snapshot_filename = top_level_filenames[SNAPSHOT_FILENAME] - timestamp_filename = top_level_filenames[TIMESTAMP_FILENAME] - - root_metadata = None - targets_metadata = None - snapshot_metadata = None - timestamp_metadata = None - - # Load 'root.json'. A Root role file without a version number is always - # written. - try: - # Initialize the key and role metadata of the top-level roles. - signable = securesystemslib.util.load_json_file(root_filename) - try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - root_metadata = signable['signed'] - tuf.keydb.create_keydb_from_root_metadata(root_metadata, repository_name) - tuf.roledb.create_roledb_from_root_metadata(root_metadata, repository_name) - - # Load Root's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('root', repository_name) - roleinfo['consistent_snapshot'] = root_metadata['consistent_snapshot'] - roleinfo['signatures'] = [] - for signature in signable['signatures']: - if signature not in roleinfo['signatures']: - roleinfo['signatures'].append(signature) - - else: - logger.debug('Found a Root signature that is already loaded:' - ' ' + repr(signature)) - - # By default, roleinfo['partial_loaded'] of top-level roles should be set - # to False in 'create_roledb_from_root_metadata()'. Update this field, if - # necessary, now that we have its signable object. - if _metadata_is_partially_loaded('root', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('Root was not partially loaded.') - - _log_warning_if_expires_soon(ROOT_FILENAME, roleinfo['expires'], - ROOT_EXPIRES_WARN_SECONDS) - - tuf.roledb.update_roleinfo('root', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - # Ensure the 'consistent_snapshot' field is extracted. 
- consistent_snapshot = root_metadata['consistent_snapshot'] - - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('Cannot load the required' - ' root file: ' + repr(root_filename)), error) - - # Load 'timestamp.json'. A Timestamp role file without a version number is - # always written. - try: - signable = securesystemslib.util.load_json_file(timestamp_filename) - timestamp_metadata = signable['signed'] - for signature in signable['signatures']: - repository.timestamp.add_signature(signature, mark_role_as_dirty=False) - - # Load Timestamp's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) - roleinfo['expires'] = timestamp_metadata['expires'] - roleinfo['version'] = timestamp_metadata['version'] - - if _metadata_is_partially_loaded('timestamp', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('The Timestamp role was not partially loaded.') - - _log_warning_if_expires_soon(TIMESTAMP_FILENAME, roleinfo['expires'], - TIMESTAMP_EXPIRES_WARN_SECONDS) - - tuf.roledb.update_roleinfo('timestamp', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('Cannot load the Timestamp ' - 'file: ' + repr(timestamp_filename)), error) - - # Load 'snapshot.json'. A consistent snapshot.json must be calculated if - # 'consistent_snapshot' is True. - # The Snapshot and Root roles are both accessed by their hashes. - if consistent_snapshot: - snapshot_version = timestamp_metadata['meta'][SNAPSHOT_FILENAME]['version'] - - dirname, basename = os.path.split(snapshot_filename) - basename = basename.split(METADATA_EXTENSION, 1)[0] - snapshot_filename = os.path.join(dirname, - str(snapshot_version) + '.' + basename + METADATA_EXTENSION) - - try: - signable = securesystemslib.util.load_json_file(snapshot_filename) - try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - snapshot_metadata = signable['signed'] - - for signature in signable['signatures']: - repository.snapshot.add_signature(signature, mark_role_as_dirty=False) - - # Load Snapshot's roleinfo and update 'tuf.roledb'. - roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) - roleinfo['expires'] = snapshot_metadata['expires'] - roleinfo['version'] = snapshot_metadata['version'] - - if _metadata_is_partially_loaded('snapshot', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('Snapshot was not partially loaded.') - - _log_warning_if_expires_soon(SNAPSHOT_FILENAME, roleinfo['expires'], - SNAPSHOT_EXPIRES_WARN_SECONDS) - - tuf.roledb.update_roleinfo('snapshot', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('The Snapshot file ' - 'cannot be loaded: '+ repr(snapshot_filename)), error) - - # Load 'targets.json'. A consistent snapshot of the Targets role must be - # calculated if 'consistent_snapshot' is True. 
- if consistent_snapshot: - targets_version = snapshot_metadata['meta'][TARGETS_FILENAME]['version'] - dirname, basename = os.path.split(targets_filename) - targets_filename = os.path.join(dirname, str(targets_version) + '.' + basename) - - try: - signable = securesystemslib.util.load_json_file(targets_filename) - try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. - logger.warning('Unsigned metadata object: ' + repr(signable)) - - targets_metadata = signable['signed'] - - for signature in signable['signatures']: - repository.targets.add_signature(signature, mark_role_as_dirty=False) - - # Update 'targets.json' in 'tuf.roledb.py' - roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) - roleinfo['paths'] = targets_metadata['targets'] - roleinfo['version'] = targets_metadata['version'] - roleinfo['expires'] = targets_metadata['expires'] - roleinfo['delegations'] = targets_metadata['delegations'] - - if _metadata_is_partially_loaded('targets', signable, repository_name): - roleinfo['partial_loaded'] = True - - else: - logger.debug('Targets file was not partially loaded.') - - _log_warning_if_expires_soon(TARGETS_FILENAME, roleinfo['expires'], - TARGETS_EXPIRES_WARN_SECONDS) - - tuf.roledb.update_roleinfo('targets', roleinfo, mark_role_as_dirty=False, - repository_name=repository_name) - - # Add the keys specified in the delegations field of the Targets role. - for keyid, key_metadata in six.iteritems(targets_metadata['delegations']['keys']): - - # Use the keyid found in the delegation - key_object, _ = securesystemslib.keys.format_metadata_to_key(key_metadata, - keyid) - - # Add 'key_object' to the list of recognized keys. Keys may be shared, - # so do not raise an exception if 'key_object' has already been loaded. - # In contrast to the methods that may add duplicate keys, do not log - # a warning as there may be many such duplicate key warnings. The - # repository maintainer should have also been made aware of the duplicate - # key when it was added. - try: - tuf.keydb.add_key(key_object, keyid=None, repository_name=repository_name) - - except tuf.exceptions.KeyAlreadyExistsError: - pass - - except securesystemslib.exceptions.StorageError as error: - six.raise_from(tuf.exceptions.RepositoryError('The Targets file ' - 'can not be loaded: ' + repr(targets_filename)), error) - - return repository, consistent_snapshot - - - - -def _log_warning_if_expires_soon(rolename, expires_iso8601_timestamp, - seconds_remaining_to_warn): - """ - Non-public function that logs a warning if 'rolename' expires in - 'seconds_remaining_to_warn' seconds, or less. - """ - - # Metadata stores expiration datetimes in ISO8601 format. Convert to - # unix timestamp, subtract from current time.time() (also in POSIX time) - # and compare against 'seconds_remaining_to_warn'. Log a warning message - # to console if 'rolename' expires soon. 
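A self-contained sketch of that arithmetic, using only the standard library; the expiry string and the two-week warning window are hypothetical:

    import calendar
    import time
    from datetime import datetime

    expires = '2030-01-01T00:00:00Z'
    datetime_object = datetime.strptime(expires, '%Y-%m-%dT%H:%M:%SZ')
    # Interpret the expiry as UTC and convert it to a Unix timestamp.
    expires_unix_timestamp = calendar.timegm(datetime_object.utctimetuple())
    seconds_until_expires = expires_unix_timestamp - int(time.time())

    if seconds_until_expires <= 0:
        print('expired ' + datetime_object.ctime() + ' (UTC).')
    elif seconds_until_expires <= 14 * 86400:
        print('%d day(s) until it expires.' % (seconds_until_expires // 86400))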
- datetime_object = tuf.formats.expiry_string_to_datetime(
- expires_iso8601_timestamp)
- expires_unix_timestamp = \
- tuf.formats.datetime_to_unix_timestamp(datetime_object)
- seconds_until_expires = expires_unix_timestamp - int(time.time())
-
- if seconds_until_expires <= seconds_remaining_to_warn:
- if seconds_until_expires <= 0:
- logger.warning(
- repr(rolename) + ' expired ' + datetime_object.ctime() + ' (UTC).')
-
- else:
- days_until_expires = seconds_until_expires / 86400
- logger.warning(repr(rolename) + ' expires ' + datetime_object.ctime() +
- ' (UTC). ' + repr(days_until_expires) + ' day(s) until it expires.')
-
-
-
-
-
-
-def import_rsa_privatekey_from_file(filepath, password=None):
- """
- <Purpose>
- Import the encrypted PEM file in 'filepath', decrypt it, and return the key
- object in 'securesystemslib.RSAKEY_SCHEMA' format.
-
- <Arguments>
- filepath:
- file, an RSA encrypted PEM file. Unlike the public RSA PEM
- key file, 'filepath' does not have an extension.
-
- password:
- The passphrase to decrypt 'filepath'.
-
- <Exceptions>
- securesystemslib.exceptions.FormatError, if the arguments are improperly
- formatted.
-
- securesystemslib.exceptions.CryptoError, if 'filepath' is not a valid
- encrypted key file.
-
- <Side Effects>
- The contents of 'filepath' are read, decrypted, and the key stored.
-
- <Returns>
- An RSA key object, conformant to 'securesystemslib.RSAKEY_SCHEMA'.
- """
-
- # Note: securesystemslib.interface.import_rsa_privatekey_from_file() does not
- # allow both 'password' and 'prompt' to be True, nor does it automatically
- # prompt for a password if the key file is encrypted and a password isn't
- # given.
- try:
- private_key = securesystemslib.interface.import_rsa_privatekey_from_file(
- filepath, password)
-
- # The user might not have given a password for an encrypted private key.
- # Prompt for a password for convenience.
- except securesystemslib.exceptions.CryptoError:
- if password is None:
- private_key = securesystemslib.interface.import_rsa_privatekey_from_file(
- filepath, password, prompt=True)
-
- else:
- raise
-
- return private_key
-
-
-
-
-
-
-
-def import_ed25519_privatekey_from_file(filepath, password=None):
- """
- <Purpose>
- Import the encrypted ed25519 TUF key file in 'filepath', decrypt it, and
- return the key object in 'securesystemslib.ED25519KEY_SCHEMA' format.
-
- The TUF private key (which may also contain the public part) is encrypted
- with AES-256 in CTR mode of operation. The password is strengthened with
- PBKDF2-HMAC-SHA256.
-
- <Arguments>
- filepath:
- file, an encrypted TUF ed25519 key file.
-
- password:
- The password, or passphrase, to import the private key (i.e., the
- encrypted key file 'filepath' must be decrypted before the ed25519 key
- object can be returned).
-
- <Exceptions>
- securesystemslib.exceptions.FormatError, if the arguments are improperly
- formatted or the imported key object contains an invalid key type (i.e.,
- not 'ed25519').
-
- securesystemslib.exceptions.CryptoError, if 'filepath' cannot be decrypted.
-
- securesystemslib.exceptions.UnsupportedLibraryError, if 'filepath' cannot be
- decrypted due to an invalid configuration setting (i.e., an invalid
- 'tuf.settings.py' setting).
-
- <Side Effects>
- 'password' is used to decrypt the 'filepath' key file.
-
- <Returns>
- An ed25519 key object of the form: 'securesystemslib.ED25519KEY_SCHEMA'.
- """ - - # Note: securesystemslib.interface.import_ed25519_privatekey_from_file() does - # not allow both 'password' and 'prompt' to be True, nor does it - # automatically prompt for a password if the key file is encrypted and a - # password isn't given. - try: - private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( - filepath, password) - - # The user might not have given a password for an encrypted private key. - # Prompt for a password for convenience. - except securesystemslib.exceptions.CryptoError: - if password is None: - private_key = securesystemslib.interface.import_ed25519_privatekey_from_file( - filepath, password, prompt=True) - - else: - raise - - return private_key - - - -def get_delegated_roles_metadata_filenames(metadata_directory, - consistent_snapshot, storage_backend=None): - """ - Return a dictionary containing all filenames in 'metadata_directory' - except the top-level roles. - If multiple versions of a file exist because of a consistent snapshot, - only the file with biggest version prefix is included. - """ - - filenames = {} - metadata_files = sorted(storage_backend.list_folder(metadata_directory), - reverse=True) - - # Iterate over role metadata files, sorted by their version-number prefix, with - # more recent versions first, and only add the most recent version of any - # (non top-level) metadata to the list of returned filenames. Note that there - # should only be one version of each file, if consistent_snapshot is False. - for metadata_role in metadata_files: - metadata_path = os.path.join(metadata_directory, metadata_role) - - # Strip the version number if 'consistent_snapshot' is True, - # or if 'metadata_role' is Root. - # Example: '10.django.json' --> 'django.json' - consistent = \ - metadata_role.endswith('root.json') or consistent_snapshot == True - metadata_name, junk = _strip_version_number(metadata_role, - consistent) - - if metadata_name.endswith(METADATA_EXTENSION): - extension_length = len(METADATA_EXTENSION) - metadata_name = metadata_name[:-extension_length] - - else: - logger.debug('Skipping file with unsupported metadata' - ' extension: ' + repr(metadata_path)) - continue - - # Skip top-level roles, only interested in delegated roles. - if metadata_name in tuf.roledb.TOP_LEVEL_ROLES: - continue - - # Prevent reloading duplicate versions if consistent_snapshot is True - if metadata_name not in filenames: - filenames[metadata_name] = metadata_path - - return filenames - - - -def get_top_level_metadata_filenames(metadata_directory): - """ - - Return a dictionary containing the filenames of the top-level roles. - If 'metadata_directory' is set to 'metadata', the dictionary - returned would contain: - - filenames = {'root.json': 'metadata/root.json', - 'targets.json': 'metadata/targets.json', - 'snapshot.json': 'metadata/snapshot.json', - 'timestamp.json': 'metadata/timestamp.json'} - - If 'metadata_directory' is not set by the caller, the current directory is - used. - - - metadata_directory: - The directory containing the metadata files. - - - securesystemslib.exceptions.FormatError, if 'metadata_directory' is - improperly formatted. - - - None. - - - A dictionary containing the expected filenames of the top-level - metadata files, such as 'root.json' and 'snapshot.json'. - """ - - # Does 'metadata_directory' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. 
- # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - - # Store the filepaths of the top-level roles, including the - # 'metadata_directory' for each one. - filenames = {} - - filenames[ROOT_FILENAME] = \ - os.path.join(metadata_directory, ROOT_FILENAME) - - filenames[TARGETS_FILENAME] = \ - os.path.join(metadata_directory, TARGETS_FILENAME) - - filenames[SNAPSHOT_FILENAME] = \ - os.path.join(metadata_directory, SNAPSHOT_FILENAME) - - filenames[TIMESTAMP_FILENAME] = \ - os.path.join(metadata_directory, TIMESTAMP_FILENAME) - - return filenames - - - - - -def get_targets_metadata_fileinfo(filename, storage_backend, custom=None): - """ - - Retrieve the file information of 'filename'. The object returned - conforms to 'tuf.formats.TARGETS_FILEINFO_SCHEMA'. The information - generated for 'filename' is stored in metadata files like 'targets.json'. - The fileinfo object returned has the form: - - fileinfo = {'length': 1024, - 'hashes': {'sha256': 1233dfba312, ...}, - 'custom': {...}} - - - filename: - The metadata file whose file information is needed. It must exist. - - custom: - An optional object providing additional information about the file. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - - securesystemslib.exceptions.FormatError, if 'filename' is improperly - formatted. - - - The file is opened and information about the file is generated, - such as file size and its hash. - - - A dictionary conformant to 'tuf.formats.TARGETS_FILEINFO_SCHEMA'. This - dictionary contains the length, hashes, and custom data about the - 'filename' metadata file. SHA256 hashes are generated by default. - """ - - # Does 'filename' and 'custom' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(filename) - if custom is not None: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) - - # Note: 'filehashes' is a dictionary of the form - # {'sha256': 1233dfba312, ...}. 'custom' is an optional - # dictionary that a client might define to include additional - # file information, such as the file's author, version/revision - # numbers, etc. - filesize, filehashes = securesystemslib.util.get_file_details(filename, - tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) - - return tuf.formats.make_targets_fileinfo(filesize, filehashes, custom=custom) - - - - - -def get_metadata_versioninfo(rolename, repository_name): - """ - - Retrieve the version information of 'rolename'. The object returned - conforms to 'tuf.formats.VERSIONINFO_SCHEMA'. The information - generated for 'rolename' is stored in 'snapshot.json'. - The versioninfo object returned has the form: - - versioninfo = {'version': 14} - - - rolename: - The metadata role whose versioninfo is needed. It must exist, otherwise - a 'tuf.exceptions.UnknownRoleError' exception is raised. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if 'rolename' is improperly - formatted. - - tuf.exceptions.UnknownRoleError, if 'rolename' does not exist. - - - None. - - - A dictionary conformant to 'tuf.formats.VERSIONINFO_SCHEMA'. - This dictionary contains the version number of 'rolename'. 
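A rough standalone equivalent of the fileinfo generation performed by get_targets_metadata_fileinfo() above, computing the length and a SHA-256 hash with the standard library instead of securesystemslib's helpers; 'some_file.txt' is a hypothetical path:

    import hashlib

    def make_fileinfo(path, custom=None):
        with open(path, 'rb') as file_object:
            data = file_object.read()
        # The {'length': ..., 'hashes': ...} shape mirrors
        # tuf.formats.TARGETS_FILEINFO_SCHEMA.
        fileinfo = {'length': len(data),
            'hashes': {'sha256': hashlib.sha256(data).hexdigest()}}
        if custom is not None:
            fileinfo['custom'] = custom
        return fileinfo

    # fileinfo = make_fileinfo('some_file.txt', custom={'type': 'text'})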
- """ - - # Does 'rolename' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - roleinfo = tuf.roledb.get_roleinfo(rolename, repository_name) - versioninfo = {'version': roleinfo['version']} - - return versioninfo - - - - - -def create_bin_name(low, high, prefix_len): - """ - - Create a string name of a delegated hash bin, where name will be a range of - zero-padded (up to prefix_len) strings i.e. for low=00, high=07, - prefix_len=3 the returned name would be '000-007'. - - - low: - The low end of the prefix range to be binned - - high: - The high end of the prefix range to be binned - - prefix_len: - The length of the prefix range components - - - A string bin name, with each end of the range zero-padded up to prefix_len - """ - if low == high: - return "{low:0{len}x}".format(low=low, len=prefix_len) - - return "{low:0{len}x}-{high:0{len}x}".format(low=low, high=high, - len=prefix_len) - - - - - -def get_bin_numbers(number_of_bins): - """ - - Given the desired number of bins (number_of_bins) calculate the prefix - length (prefix_length), total number of prefixes (prefix_count) and the - number of prefixes to be stored in each bin (bin_size). - Example: number_of_bins = 32 - prefix_length = 2 - prefix_count = 256 - bin_size = 8 - That is, each of the 32 hashed bins are responsible for 8 hash prefixes, - i.e. 00-07, 08-0f, ..., f8-ff. - - - number_of_bins: - The number of hashed bins in use - - - A tuple of three values: - 1. prefix_length: the length of each prefix - 2. prefix_count: the total number of prefixes in use - 3. bin_size: the number of hash prefixes to be stored in each bin - """ - # Convert 'number_of_bins' to hexadecimal and determine the number of - # hexadecimal digits needed by each hash prefix - prefix_length = len("{:x}".format(number_of_bins - 1)) - # Calculate the total number of hash prefixes (e.g., 000 - FFF total values) - prefix_count = 16 ** prefix_length - # Determine how many prefixes to assign to each bin - bin_size = prefix_count // number_of_bins - - # For simplicity, ensure that 'prefix_count' (16 ^ n) can be evenly - # distributed over 'number_of_bins' (must be 2 ^ n). Each bin will contain - # (prefix_count / number_of_bins) hash prefixes. - if prefix_count % number_of_bins != 0: - # Note: x % y != 0 does not guarantee that y is not a power of 2 for - # arbitrary x and y values. However, due to the relationship between - # number_of_bins and prefix_count, it is true for them. - raise securesystemslib.exceptions.Error('The "number_of_bins" argument' - ' must be a power of 2.') - - return prefix_length, prefix_count, bin_size - - - - - -def find_bin_for_target_hash(target_hash, number_of_bins): - """ - - For a given hashed filename, target_hash, calculate the name of a hashed bin - into which this file would be delegated given number_of_bins bins are in - use. - - - target_hash: - The hash of the target file's path - - number_of_bins: - The number of hashed_bins in use - - - The name of the hashed bin target_hash would be binned into - """ - - prefix_length, _, bin_size = get_bin_numbers(number_of_bins) - - prefix = int(target_hash[:prefix_length], 16) - - low = prefix - (prefix % bin_size) - high = (low + bin_size - 1) - - return create_bin_name(low, high, prefix_length) - - - - - -def get_target_hash(target_filepath): - """ - - Compute the hash of 'target_filepath'. 
This is useful in conjunction with - the "path_hash_prefixes" attribute in a delegated targets role, which - tells us which paths a role is implicitly responsible for. - - The repository may optionally organize targets into hashed bins to ease - target delegations and role metadata management. The use of consistent - hashing allows for a uniform distribution of targets into bins. - - - target_filepath: - The path to the target file on the repository. This will be relative to - the 'targets' (or equivalent) directory on a given mirror. - - - None. - - - None. - - - The hash of 'target_filepath'. - - """ - tuf.formats.RELPATH_SCHEMA.check_match(target_filepath) - - digest_object = securesystemslib.hash.digest(algorithm=HASH_FUNCTION) - digest_object.update(target_filepath.encode('utf-8')) - return digest_object.hexdigest() - - - - -def generate_root_metadata(version, expiration_date, consistent_snapshot, - repository_name='default'): - """ - - Create the root metadata. 'tuf.roledb.py' and 'tuf.keydb.py' - are read and the information returned by these modules is used to generate - the root metadata object. - - - version: - The metadata version number. Clients use the version number to - determine if the downloaded version is newer than the one currently - trusted. - - expiration_date: - The expiration date of the metadata file. Conformant to - 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'. - - consistent_snapshot: - Boolean. If True, a file digest is expected to be prepended to the - filename of any target file located in the targets directory. Each digest - is stripped from the target filename and listed in the snapshot metadata. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the generated root metadata - object could not be generated with the correct format. - - securesystemslib.exceptions.Error, if an error is encountered while - generating the root metadata object (e.g., a required top-level role not - found in 'tuf.roledb'.) - - - The contents of 'tuf.keydb.py' and 'tuf.roledb.py' are read. - - - A root metadata object, conformant to 'tuf.formats.ROOT_SCHEMA'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any of the arguments are - # improperly formatted. - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - # The role and key dictionaries to be saved in the root metadata object. - # Conformant to 'ROLEDICT_SCHEMA' and 'KEYDICT_SCHEMA', respectively. - roledict = {} - keydict = {} - keylist = [] - - # Extract the role, threshold, and keyid information of the top-level roles, - # which Root stores in its metadata. The necessary role metadata is generated - # from this information. - for rolename in tuf.roledb.TOP_LEVEL_ROLES: - - # If a top-level role is missing from 'tuf.roledb.py', raise an exception. 
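Schematically, the object assembled by generate_root_metadata() looks like the following; every keyid, key value, and date here is illustrative and abbreviated, not exact ROOT_SCHEMA output:

    root_metadata = {
        '_type': 'root',
        'version': 1,
        'expires': '2030-01-01T00:00:00Z',
        'consistent_snapshot': False,
        'keys': {
            'f2d5020d08...': {  # hypothetical, truncated keyid
                'keytype': 'ed25519',
                'scheme': 'ed25519',
                'keyval': {'public': '1a2b3c...'}}},
        'roles': {
            'root': {'keyids': ['f2d5020d08...'], 'threshold': 1},
            'targets': {'keyids': ['f2d5020d08...'], 'threshold': 1},
            'snapshot': {'keyids': ['f2d5020d08...'], 'threshold': 1},
            'timestamp': {'keyids': ['f2d5020d08...'], 'threshold': 1}}}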
- if not tuf.roledb.role_exists(rolename, repository_name): - raise securesystemslib.exceptions.Error(repr(rolename) + ' not in' - ' "tuf.roledb".') - - # Collect keys from all roles in a list - keyids = tuf.roledb.get_role_keyids(rolename, repository_name) - for keyid in keyids: - key = tuf.keydb.get_key(keyid, repository_name=repository_name) - keylist.append(key) - - # Generate the authentication information Root establishes for each - # top-level role. - role_threshold = tuf.roledb.get_role_threshold(rolename, repository_name) - role_metadata = tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROLE_SCHEMA, - keyids=keyids, - threshold=role_threshold) - roledict[rolename] = role_metadata - - # Create the root metadata 'keys' dictionary - _, keydict = keys_to_keydict(keylist) - - # Use generalized build_dict_conforming_to_schema func to produce a dict that - # contains all the appropriate information for this type of metadata, - # checking that the result conforms to the appropriate schema. - # TODO: Later, probably after the rewrite for TUF Issue #660, generalize - # further, upward, by replacing generate_targets_metadata, - # generate_root_metadata, etc. with one function that generates - # metadata, possibly rolling that upwards into the calling function. - # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.ROOT_SCHEMA, - version=version, - expires=expiration_date, - keys=keydict, - roles=roledict, - consistent_snapshot=consistent_snapshot) - - - - - -def generate_targets_metadata(targets_directory, target_files, version, - expiration_date, delegations=None, write_consistent_targets=False, - use_existing_fileinfo=False, storage_backend=None, - repository_name='default'): - """ - - Generate the targets metadata object. The targets in 'target_files' must - exist at the same path they should on the repo. 'target_files' is a list - of targets. The 'custom' field of the targets metadata is not currently - supported. - - - targets_directory: - The absolute path to a directory containing the target files and - directories of the repository. - - target_files: - The target files tracked by 'targets.json'. 'target_files' is a - dictionary mapping target paths (relative to the targets directory) to - a dict matching tuf.formats.LOOSE_FILEINFO_SCHEMA. LOOSE_FILEINFO_SCHEMA - can support multiple different value patterns: - 1) an empty dictionary - for when fileinfo should be generated - 2) a dictionary matching tuf.formats.CUSTOM_SCHEMA - for when fileinfo - should be generated, with the supplied custom metadata attached - 3) a dictionary matching tuf.formats.FILEINFO_SCHEMA - for when full - fileinfo is provided in conjunction with use_existing_fileinfo - - version: - The metadata version number. Clients use the version number to - determine if the downloaded version is newer than the one currently - trusted. - - expiration_date: - The expiration date of the metadata file. Conformant to - 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'. - - delegations: - The delegations made by the targets role to be generated. 'delegations' - must match 'tuf.formats.DELEGATIONS_SCHEMA'. - - write_consistent_targets: - Boolean that indicates whether file digests should be prepended to the - target files. - NOTE: it is an error for write_consistent_targets to be True when - use_existing_fileinfo is also True. We can not create consistent targets - for a target file where the fileinfo isn't generated by tuf. 
-
- use_existing_fileinfo:
- Boolean that indicates whether to use the complete fileinfo, including
- hashes, as already exists in the roledb (True) or whether to generate
- hashes (False).
-
- storage_backend:
- An object which implements
- securesystemslib.storage.StorageBackendInterface.
-
- repository_name:
- The name of the repository. If not supplied, 'default' repository
- is used.
-
- <Exceptions>
- securesystemslib.exceptions.FormatError, if an error occurred trying to
- generate the targets metadata object.
-
- securesystemslib.exceptions.Error, if use_existing_fileinfo is False and
- any of the target files cannot be read.
-
- securesystemslib.exceptions.Error, if use_existing_fileinfo is True and
- some of the target files do not have corresponding hashes in the roledb.
-
- securesystemslib.exceptions.Error, if both use_existing_fileinfo and
- write_consistent_targets are True.
-
- <Side Effects>
- If use_existing_fileinfo is False, the target files are read from storage
- and file information about them is generated.
- If 'write_consistent_targets' is True, each target in 'target_files' will be
- copied to a file with a digest prepended to its filename. For example, if
- 'some_file.txt' is one of the targets of 'target_files', consistent targets
- <digest1>.some_file.txt, <digest2>.some_file.txt, etc., are created
- and the content of 'some_file.txt' will be copied into them.
-
- <Returns>
- A targets metadata object, conformant to
- 'tuf.formats.TARGETS_SCHEMA'.
- """
-
- # Do the arguments have the correct format?
- # Ensure the arguments have the appropriate number of objects and object
- # types, and that all dict keys are properly named.
- # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
- securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory)
- tuf.formats.PATH_FILEINFO_SCHEMA.check_match(target_files)
- tuf.formats.METADATAVERSION_SCHEMA.check_match(version)
- securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date)
- securesystemslib.formats.BOOLEAN_SCHEMA.check_match(write_consistent_targets)
- securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_existing_fileinfo)
-
- if write_consistent_targets and use_existing_fileinfo:
- raise securesystemslib.exceptions.Error('Cannot support writing consistent'
- ' targets and using existing fileinfo.')
-
- if delegations is not None:
- tuf.formats.DELEGATIONS_SCHEMA.check_match(delegations)
- # If the targets role has delegations, collect the up-to-date 'keyids' and
- # 'threshold' for each role, and update the delegations keys dictionary.
- delegations_keys = []
- # Update 'keyids' and 'threshold' for each delegated role
- for role in delegations['roles']:
- role['keyids'] = tuf.roledb.get_role_keyids(role['name'],
- repository_name)
- role['threshold'] = tuf.roledb.get_role_threshold(role['name'],
- repository_name)
-
- # Collect all delegations keys for generating the delegations keydict
- for keyid in role['keyids']:
- key = tuf.keydb.get_key(keyid, repository_name=repository_name)
- delegations_keys.append(key)
-
- _, delegations['keys'] = keys_to_keydict(delegations_keys)
-
-
- # Store the file attributes of targets in 'target_files'. 'filedict',
- # conformant to 'tuf.formats.FILEDICT_SCHEMA', is added to the
- # targets metadata object returned.
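The three target_files value shapes listed in the docstring above, side by side; paths, lengths, and hashes are illustrative, and the custom data is assumed to sit under a 'custom' key, matching the fileinfo.get('custom') lookup in _generate_targets_fileinfo() below:

    target_files = {
        # 1) empty dict: fileinfo is generated by reading the file
        'file1.txt': {},
        # 2) custom data only: fileinfo is generated, custom data attached
        'file2.txt': {'custom': {'type': 'document'}},
        # 3) full fileinfo, for use with use_existing_fileinfo=True
        'file3.txt': {'length': 14,
            'hashes': {'sha256': 'deadbeef...'}}}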
- filedict = {} - - if use_existing_fileinfo: - # Use the provided fileinfo dicts, conforming to FILEINFO_SCHEMA, rather than - # generating fileinfo - for target, fileinfo in six.iteritems(target_files): - - # Ensure all fileinfo entries in target_files have a non-empty hashes dict - if not fileinfo.get('hashes', None): - raise securesystemslib.exceptions.Error('use_existing_fileinfo option' - ' set but no hashes exist in fileinfo for ' + repr(target)) - - # and a non-empty length - if fileinfo.get('length', -1) < 0: - raise securesystemslib.exceptions.Error('use_existing_fileinfo option' - ' set but no length exists in fileinfo for ' + repr(target)) - - filedict[target] = fileinfo - - else: - # Generate the fileinfo dicts by accessing the target files on storage. - # Default to accessing files on local storage. - if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() - - filedict = _generate_targets_fileinfo(target_files, targets_directory, - write_consistent_targets, storage_backend) - - # Generate the targets metadata object. - # Use generalized build_dict_conforming_to_schema func to produce a dict that - # contains all the appropriate information for targets metadata, - # checking that the result conforms to the appropriate schema. - # TODO: Later, probably after the rewrite for TUF Issue #660, generalize - # further, upward, by replacing generate_targets_metadata, - # generate_root_metadata, etc. with one function that generates - # metadata, possibly rolling that upwards into the calling function. - # There are very few things that really need to be done differently. - if delegations is not None: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - version=version, - expires=expiration_date, - targets=filedict, - delegations=delegations) - else: - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TARGETS_SCHEMA, - version=version, - expires=expiration_date, - targets=filedict) - # TODO: As an alternative to the odd if/else above where we decide whether or - # not to include the delegations argument based on whether or not it is - # None, consider instead adding a check in - # build_dict_conforming_to_schema that skips a keyword if that keyword - # is optional in the schema and the value passed in is set to None.... - - - - - -def _generate_targets_fileinfo(target_files, targets_directory, - write_consistent_targets, storage_backend): - """ - Iterate over target_files and: - * ensure they exist in the targets_directory - * generate a fileinfo dict for the target file, including hashes - * copy 'target_path' to 'digest_target' if write_consistent_targets - add all generated fileinfo dicts to a dictionary mapping - targetpath: fileinfo and return the dict. - """ - - filedict = {} - - # Generate the fileinfo of all the target files listed in 'target_files'. - for target, fileinfo in six.iteritems(target_files): - - # The root-most folder of the targets directory should not be included in - # target paths listed in targets metadata. - # (e.g., 'targets/more_targets/somefile.txt' -> 'more_targets/somefile.txt') - relative_targetpath = target - - # Note: join() discards 'targets_directory' if 'target' contains a leading - # path separator (i.e., is treated as an absolute path). - target_path = os.path.join(targets_directory, target.lstrip(os.sep)) - - # Add 'custom' if it has been provided. 
Custom data about the target is - # optional and will only be included in metadata (i.e., a 'custom' field in - # the target's fileinfo dictionary) if specified here. - custom_data = fileinfo.get('custom', None) - - filedict[relative_targetpath] = \ - get_targets_metadata_fileinfo(target_path, storage_backend, custom_data) - - # Copy 'target_path' to 'digest_target' if consistent hashing is enabled. - if write_consistent_targets: - for target_digest in six.itervalues(filedict[relative_targetpath]['hashes']): - dirname, basename = os.path.split(target_path) - digest_filename = target_digest + '.' + basename - digest_target = os.path.join(dirname, digest_filename) - shutil.copyfile(target_path, digest_target) - - return filedict - - - -def _get_hashes_and_length_if_needed(use_length, use_hashes, full_file_path, - storage_backend): - """ - Calculate length and hashes only if they are required, - otherwise, for adopters of tuf with lots of delegations, - this will cause unnecessary overhead. - """ - - length = None - hashes = None - if use_length: - length = securesystemslib.util.get_file_length(full_file_path, - storage_backend) - - if use_hashes: - hashes = securesystemslib.util.get_file_hashes(full_file_path, - tuf.settings.FILE_HASH_ALGORITHMS, storage_backend) - - return length, hashes - - - -def generate_snapshot_metadata(metadata_directory, version, expiration_date, - storage_backend, consistent_snapshot=False, - repository_name='default', use_length=False, use_hashes=False): - """ - - Create the snapshot metadata. The minimum metadata must exist (i.e., - 'root.json' and 'targets.json'). This function searches - 'metadata_directory' and the resulting snapshot file will list all the - delegated roles found there. - - - metadata_directory: - The directory containing the 'root.json' and 'targets.json' metadata - files. - - version: - The metadata version number. Clients use the version number to - determine if the downloaded version is newer than the one currently - trusted. - - expiration_date: - The expiration date of the metadata file. - Conformant to 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - consistent_snapshot: - Boolean. If True, a file digest is expected to be prepended to the - filename of any target file located in the targets directory. Each digest - is stripped from the target filename and listed in the snapshot metadata. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - use_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.Error, if an error occurred trying to generate - the snapshot metadata object. 
- - - The 'root.json' and 'targets.json' files are read. - - - The snapshot metadata object, conformant to 'tuf.formats.SNAPSHOT_SCHEMA'. - """ - - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_hashes) - - # Snapshot's 'fileinfodict' shall contain the version number of Root, - # Targets, and all delegated roles of the repository. - fileinfodict = {} - - length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes, - os.path.join(metadata_directory, TARGETS_FILENAME), storage_backend) - - targets_role = TARGETS_FILENAME[:-len(METADATA_EXTENSION)] - - targets_file_version = get_metadata_versioninfo(targets_role, - repository_name) - - # Make file info dictionary with make_metadata_fileinfo because - # in the tuf spec length and hashes are optional for all - # METAFILES in snapshot.json including the top-level targets file. - fileinfodict[TARGETS_FILENAME] = tuf.formats.make_metadata_fileinfo( - targets_file_version['version'], length, hashes) - - # Search the metadata directory and generate the versioninfo of all the role - # files found there. This information is stored in the 'meta' field of - # 'snapshot.json'. - - metadata_files = sorted(storage_backend.list_folder(metadata_directory), - reverse=True) - for metadata_filename in metadata_files: - # Strip the version number if 'consistent_snapshot' is True. - # Example: '10.django.json' --> 'django.json' - metadata_name, junk = _strip_version_number(metadata_filename, - consistent_snapshot) - - # All delegated roles are added to the snapshot file. - if metadata_filename.endswith(METADATA_EXTENSION): - rolename = metadata_filename[:-len(METADATA_EXTENSION)] - - # Obsolete role files may still be found. Ensure only roles loaded - # in the roledb are included in the Snapshot metadata. Since the - # snapshot and timestamp roles are not listed in snapshot.json, do not - # list these roles found in the metadata directory. - if tuf.roledb.role_exists(rolename, repository_name) and \ - rolename not in tuf.roledb.TOP_LEVEL_ROLES: - - length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes, - os.path.join(metadata_directory, metadata_filename), storage_backend) - - file_version = get_metadata_versioninfo(rolename, - repository_name) - - fileinfodict[metadata_name] = tuf.formats.make_metadata_fileinfo( - file_version['version'], length, hashes) - - else: - logger.debug('Metadata file has an unsupported file' - ' extension: ' + metadata_filename) - - # Generate the Snapshot metadata object. - # Use generalized build_dict_conforming_to_schema func to produce a dict that - # contains all the appropriate information for snapshot metadata, - # checking that the result conforms to the appropriate schema. 
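Schematically, the snapshot object returned below has this shape; the version numbers, date, and delegated role name are illustrative, and 'length'/'hashes' entries appear in 'meta' only when use_length/use_hashes are True:

    snapshot_metadata = {
        '_type': 'snapshot',
        'version': 8,
        'expires': '2030-01-01T00:00:00Z',
        'meta': {
            'targets.json': {'version': 3},
            'django.json': {'version': 2}}}  # a hypothetical delegated role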
- # TODO: Later, probably after the rewrite for TUF Issue #660, generalize
- # further, upward, by replacing generate_targets_metadata,
- # generate_root_metadata, etc. with one function that generates
- # metadata, possibly rolling that upwards into the calling function.
- # There are very few things that really need to be done differently.
- return tuf.formats.build_dict_conforming_to_schema(
- tuf.formats.SNAPSHOT_SCHEMA,
- version=version,
- expires=expiration_date,
- meta=fileinfodict)
-
-
-
-
-
-
-def generate_timestamp_metadata(snapshot_file_path, version, expiration_date,
- storage_backend, repository_name, use_length=True, use_hashes=True):
- """
- <Purpose>
- Generate the timestamp metadata object. The 'snapshot.json' file must
- exist.
-
- <Arguments>
- snapshot_file_path:
- Path to the required snapshot metadata file. The timestamp role
- needs to calculate the file size and hash of this file.
-
- version:
- The timestamp's version number. Clients use the version number to
- determine if the downloaded version is newer than the one currently
- trusted.
-
- expiration_date:
- The expiration date of the metadata file, conformant to
- 'securesystemslib.formats.ISO8601_DATETIME_SCHEMA'.
-
- storage_backend:
- An object which implements
- securesystemslib.storage.StorageBackendInterface.
-
- repository_name:
- The name of the repository the timestamp role belongs to.
-
- use_length:
- Whether to include the optional length attribute of the snapshot
- metadata file in the timestamp metadata.
- Default is True.
-
- use_hashes:
- Whether to include the optional hashes attribute of the snapshot
- metadata file in the timestamp metadata.
- Default is True.
-
- <Exceptions>
- securesystemslib.exceptions.FormatError, if the generated timestamp metadata
- object cannot be formatted correctly, or one of the arguments is improperly
- formatted.
-
- <Side Effects>
- None.
-
- <Returns>
- A timestamp metadata object, conformant to 'tuf.formats.TIMESTAMP_SCHEMA'.
- """
-
- # Do the arguments have the correct format?
- # This check ensures arguments have the appropriate number of objects and
- # object types, and that all dict keys are properly named.
- # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
- securesystemslib.formats.PATH_SCHEMA.check_match(snapshot_file_path)
- tuf.formats.METADATAVERSION_SCHEMA.check_match(version)
- securesystemslib.formats.ISO8601_DATETIME_SCHEMA.check_match(expiration_date)
- securesystemslib.formats.NAME_SCHEMA.check_match(repository_name)
- securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_length)
- securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_hashes)
-
- snapshot_fileinfo = {}
-
- length, hashes = _get_hashes_and_length_if_needed(use_length, use_hashes,
- snapshot_file_path, storage_backend)
-
- snapshot_filename = os.path.basename(snapshot_file_path)
- # Retrieve the versioninfo of the Snapshot metadata file.
- snapshot_version = get_metadata_versioninfo('snapshot', repository_name)
- snapshot_fileinfo[snapshot_filename] = \
- tuf.formats.make_metadata_fileinfo(snapshot_version['version'],
- length, hashes)
-
- # Generate the timestamp metadata object.
- # Use generalized build_dict_conforming_to_schema func to produce a dict that
- # contains all the appropriate information for timestamp metadata,
- # checking that the result conforms to the appropriate schema.
- # TODO: Later, probably after the rewrite for TUF Issue #660, generalize
- # further, upward, by replacing generate_targets_metadata,
- # generate_root_metadata, etc.
with one function that generates - # metadata, possibly rolling that upwards into the calling function. - # There are very few things that really need to be done differently. - return tuf.formats.build_dict_conforming_to_schema( - tuf.formats.TIMESTAMP_SCHEMA, - version=version, - expires=expiration_date, - meta=snapshot_fileinfo) - - - - - -def sign_metadata(metadata_object, keyids, filename, repository_name): - """ - - Sign a metadata object. If any of the keyids have already signed the file, - the old signature is replaced. The keys in 'keyids' must already be - loaded in 'tuf.keydb'. - - - metadata_object: - The metadata object to sign. For example, 'metadata' might correspond to - 'tuf.formats.ROOT_SCHEMA' or - 'tuf.formats.TARGETS_SCHEMA'. - - keyids: - The keyids list of the signing keys. - - filename: - The intended filename of the signed metadata object. - For example, 'root.json' or 'targets.json'. This function - does NOT save the signed metadata to this filename. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if a valid 'signable' object could - not be generated or the arguments are improperly formatted. - - securesystemslib.exceptions.Error, if an invalid keytype was found in the - keystore. - - - None. - - - A signable object conformant to 'tuf.formats.SIGNABLE_SCHEMA'. - """ - - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - tuf.formats.ANYROLE_SCHEMA.check_match(metadata_object) - securesystemslib.formats.KEYIDS_SCHEMA.check_match(keyids) - securesystemslib.formats.PATH_SCHEMA.check_match(filename) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - # Make sure the metadata is in 'signable' format. That is, - # it contains a 'signatures' field containing the result - # of signing the 'signed' field of 'metadata' with each - # keyid of 'keyids'. - signable = tuf.formats.make_signable(metadata_object) - - # Sign the metadata with each keyid in 'keyids'. 'signable' should have - # zero signatures (metadata_object contained none). - for keyid in keyids: - - # Load the signing key. - key = tuf.keydb.get_key(keyid, repository_name=repository_name) - # Generate the signature using the appropriate signing method. - if key['keytype'] in SUPPORTED_KEY_TYPES: - if 'private' in key['keyval']: - signed = securesystemslib.formats.encode_canonical(signable['signed']).encode('utf-8') - try: - signature = securesystemslib.keys.create_signature(key, signed) - signable['signatures'].append(signature) - - except Exception: - logger.warning('Unable to create signature for keyid: ' + repr(keyid)) - - else: - logger.debug('Private key unset. Skipping: ' + repr(keyid)) - - else: - raise securesystemslib.exceptions.Error('The keydb contains a key with' - ' an invalid key type.' + repr(key['keytype'])) - - # Raise 'securesystemslib.exceptions.FormatError' if the resulting 'signable' - # is not formatted correctly. - try: - tuf.formats.check_signable_object_format(signable) - except tuf.exceptions.UnsignedMetadataError: - # Downgrade the error to a warning because a use case exists where - # metadata may be generated unsigned on one machine and signed on another. 
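A self-contained sketch of the signing step performed by sign_metadata(), using securesystemslib's in-memory key API instead of the keydb; the metadata dict is a stand-in:

    import securesystemslib.formats
    import securesystemslib.keys

    key = securesystemslib.keys.generate_ed25519_key()
    signed = securesystemslib.formats.encode_canonical(
        {'_type': 'timestamp', 'version': 1}).encode('utf-8')
    signature = securesystemslib.keys.create_signature(key, signed)
    assert securesystemslib.keys.verify_signature(key, signature, signed)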
- logger.warning('Unsigned metadata object: ' + repr(signable)) - - - return signable - - - - - -def write_metadata_file(metadata, filename, version_number, consistent_snapshot, - storage_backend): - """ - - If necessary, write the 'metadata' signable object to 'filename'. - - - metadata: - The object that will be saved to 'filename', conformant to - 'tuf.formats.SIGNABLE_SCHEMA'. - - filename: - The filename of the metadata to be written (e.g., 'root.json'). - - version_number: - The version number of the metadata file to be written. The version - number is needed for consistent snapshots, which prepend the version - number to 'filename'. - - consistent_snapshot: - Boolean that determines whether the metadata file's digest should be - prepended to the filename. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.Error, if the directory of 'filename' does not - exist. - - Any other runtime (e.g., IO) exception. - - - The 'filename' file is created, or overwritten if it exists. - - - The filename of the written file. - """ - - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - tuf.formats.SIGNABLE_SCHEMA.check_match(metadata) - securesystemslib.formats.PATH_SCHEMA.check_match(filename) - tuf.formats.METADATAVERSION_SCHEMA.check_match(version_number) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - - if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() - - # Generate the actual metadata file content of 'metadata'. Metadata is - # saved as JSON and includes formatting, such as indentation and sorted - # objects. The new digest of 'metadata' is also calculated to help determine - # if re-saving is required. - file_content = _get_written_metadata(metadata) - - # We previously verified whether new metadata needed to be written (i.e., has - # not been previously written or has changed). It is now assumed that the - # caller intends to write changes that have been marked as dirty. - - # The 'metadata' object is written to 'file_object'. To avoid partial - # metadata from being written, 'metadata' is first written to a temporary - # location (i.e., 'file_object') and then moved to 'filename'. - file_object = tempfile.TemporaryFile() - - # Serialize 'metadata' to the file-like object and then write 'file_object' - # to disk. The dictionary keys of 'metadata' are sorted and indentation is - # used. - file_object.write(file_content) - - if consistent_snapshot: - dirname, basename = os.path.split(filename) - basename = basename.split(METADATA_EXTENSION, 1)[0] - version_and_filename = str(version_number) + '.' + basename + METADATA_EXTENSION - written_consistent_filename = os.path.join(dirname, version_and_filename) - - # If we were to point consistent snapshots to 'written_filename', they - # would always point to the current version. Example: 1.root.json and - # 2.root.json -> root.json. If consistent snapshot is True, we should save - # the consistent snapshot and point 'written_filename' to it. 
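A minimal sketch of the write-via-temporary-file pattern described above, against the plain filesystem (the storage backend abstraction is bypassed, and the filename is hypothetical):

    import os
    import tempfile

    def atomic_write(content, filename):
        # Write to a temporary file first, then move it into place, so
        # readers never observe a partially written metadata file.
        fd, temporary_path = tempfile.mkstemp(
            dir=os.path.dirname(filename) or '.')
        with os.fdopen(fd, 'wb') as file_object:
            file_object.write(content)
        os.replace(temporary_path, filename)  # atomic rename (Python 3.3+)

    # atomic_write(b'{"signatures": [], "signed": {}}', 'metadata/root.json')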
- logger.debug('Creating a consistent file for ' + repr(filename)) - logger.debug('Saving ' + repr(written_consistent_filename)) - securesystemslib.util.persist_temp_file(file_object, - written_consistent_filename, should_close=False) - - else: - logger.debug('Not creating a consistent snapshot for ' + repr(filename)) - - logger.debug('Saving ' + repr(filename)) - storage_backend.put(file_object, filename) - - file_object.close() - - return filename - - - - - -def _log_status_of_top_level_roles(targets_directory, metadata_directory, - repository_name, storage_backend): - """ - Non-public function that logs whether any of the top-level roles contain an - invalid number of public and private keys, or an insufficient threshold of - signatures. Considering that the top-level metadata have to be verified in - the expected root -> targets -> snapshot -> timestamp order, this function - logs the error message and returns as soon as a required metadata file is - found to be invalid. It is assumed here that the delegated roles have been - written and verified. Example output: - - 'root' role contains 1 / 1 signatures. - 'targets' role contains 1 / 1 signatures. - 'snapshot' role contains 1 / 1 signatures. - 'timestamp' role contains 1 / 1 signatures. - - Note: Temporary metadata is generated so that file hashes & sizes may be - computed and verified against the attached signatures. 'metadata_directory' - should be a directory in a temporary repository directory. - """ - - # The expected full filenames of the top-level roles needed to write them to - # disk. - filenames = get_top_level_metadata_filenames(metadata_directory) - root_filename = filenames[ROOT_FILENAME] - targets_filename = filenames[TARGETS_FILENAME] - snapshot_filename = filenames[SNAPSHOT_FILENAME] - timestamp_filename = filenames[TIMESTAMP_FILENAME] - - # Verify that the top-level roles contain a valid number of public keys and - # that their corresponding private keys have been loaded. - for rolename in ['root', 'targets', 'snapshot', 'timestamp']: - try: - _check_role_keys(rolename, repository_name) - - except tuf.exceptions.InsufficientKeysError as e: - logger.info(str(e)) - - # Do the top-level roles contain a valid threshold of signatures? Top-level - # metadata is verified in Root -> Targets -> Snapshot -> Timestamp order. - # Verify the metadata of the Root role. - dirty_rolenames = tuf.roledb.get_dirty_roles(repository_name) - - root_roleinfo = tuf.roledb.get_roleinfo('root', repository_name) - root_is_dirty = None - if 'root' in dirty_rolenames: - root_is_dirty = True - - else: - root_is_dirty = False - - try: - signable, root_filename = \ - _generate_and_write_metadata('root', root_filename, targets_directory, - metadata_directory, storage_backend, repository_name=repository_name) - _log_status('root', signable, repository_name) - - # 'tuf.exceptions.UnsignedMetadataError' raised if metadata contains an - # invalid threshold of signatures. log the valid/threshold message, where - # valid < threshold. - except tuf.exceptions.UnsignedMetadataError as e: - _log_status('root', e.signable, repository_name) - return - - finally: - tuf.roledb.unmark_dirty(['root'], repository_name) - tuf.roledb.update_roleinfo('root', root_roleinfo, - mark_role_as_dirty=root_is_dirty, repository_name=repository_name) - - # Verify the metadata of the Targets role. 
- targets_roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) - targets_is_dirty = None - if 'targets' in dirty_rolenames: - targets_is_dirty = True - - else: - targets_is_dirty = False - - try: - signable, targets_filename = \ - _generate_and_write_metadata('targets', targets_filename, - targets_directory, metadata_directory, storage_backend, - repository_name=repository_name) - _log_status('targets', signable, repository_name) - - except tuf.exceptions.UnsignedMetadataError as e: - _log_status('targets', e.signable, repository_name) - return - - finally: - tuf.roledb.unmark_dirty(['targets'], repository_name) - tuf.roledb.update_roleinfo('targets', targets_roleinfo, - mark_role_as_dirty=targets_is_dirty, repository_name=repository_name) - - # Verify the metadata of the snapshot role. - snapshot_roleinfo = tuf.roledb.get_roleinfo('snapshot', repository_name) - snapshot_is_dirty = None - if 'snapshot' in dirty_rolenames: - snapshot_is_dirty = True - - else: - snapshot_is_dirty = False - - filenames = {'root': root_filename, 'targets': targets_filename} - try: - signable, snapshot_filename = \ - _generate_and_write_metadata('snapshot', snapshot_filename, - targets_directory, metadata_directory, storage_backend, False, - filenames, repository_name=repository_name) - _log_status('snapshot', signable, repository_name) - - except tuf.exceptions.UnsignedMetadataError as e: - _log_status('snapshot', e.signable, repository_name) - return - - finally: - tuf.roledb.unmark_dirty(['snapshot'], repository_name) - tuf.roledb.update_roleinfo('snapshot', snapshot_roleinfo, - mark_role_as_dirty=snapshot_is_dirty, repository_name=repository_name) - - # Verify the metadata of the Timestamp role. - timestamp_roleinfo = tuf.roledb.get_roleinfo('timestamp', repository_name) - timestamp_is_dirty = None - if 'timestamp' in dirty_rolenames: - timestamp_is_dirty = True - - else: - timestamp_is_dirty = False - - filenames = {'snapshot': snapshot_filename} - try: - signable, timestamp_filename = \ - _generate_and_write_metadata('timestamp', timestamp_filename, - targets_directory, metadata_directory, storage_backend, - False, filenames, repository_name=repository_name) - _log_status('timestamp', signable, repository_name) - - except tuf.exceptions.UnsignedMetadataError as e: - _log_status('timestamp', e.signable, repository_name) - return - - finally: - tuf.roledb.unmark_dirty(['timestamp'], repository_name) - tuf.roledb.update_roleinfo('timestamp', timestamp_roleinfo, - mark_role_as_dirty=timestamp_is_dirty, repository_name=repository_name) - - - -def _log_status(rolename, signable, repository_name): - """ - Non-public function logs the number of (good/threshold) signatures of - 'rolename'. - """ - - status = tuf.sig.get_signature_status(signable, rolename, repository_name) - - logger.info(repr(rolename) + ' role contains ' + \ - repr(len(status['good_sigs'])) + ' / ' + repr(status['threshold']) + \ - ' signatures.') - - - - - -def create_tuf_client_directory(repository_directory, client_directory): - """ - - Create client directory structure as 'tuf.client.updater' expects it. - Metadata files downloaded from a remote TUF repository are saved to - 'client_directory'. - The Root file must initially exist before an update request can be - satisfied. create_tuf_client_directory() ensures the minimum metadata - is copied and that required directories ('previous' and 'current') are - created in 'client_directory'. 
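A standard-library-only sketch of the directory layout created by create_tuf_client_directory(); the repository and client paths are hypothetical:

    import os
    import shutil

    def make_client_directories(repository_directory, client_directory):
        metadata_directory = os.path.join(repository_directory, 'metadata')
        client_metadata = os.path.join(client_directory, 'metadata')
        os.makedirs(client_metadata)  # raises OSError if it already exists
        # 'current' is the working copy; 'previous' supports rollback checks.
        shutil.copytree(metadata_directory,
            os.path.join(client_metadata, 'current'))
        shutil.copytree(metadata_directory,
            os.path.join(client_metadata, 'previous'))

    # make_client_directories('repository/', 'client/')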
Software updaters integrating TUF may - use the client directory created as an initial copy of the repository's - metadata. - - - repository_directory: - The path of the root repository directory. The 'metadata' and 'targets' - sub-directories should be available in 'repository_directory'. The - metadata files of 'repository_directory' are copied to 'client_directory'. - - client_directory: - The path of the root client directory. The 'current' and 'previous' - sub-directories are created and will store the metadata files copied - from 'repository_directory'. 'client_directory' will store metadata - and target files downloaded from a TUF repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - tuf.exceptions.RepositoryError, if the metadata directory in - 'client_directory' already exists. - - - Copies metadata files and directories from 'repository_directory' to - 'client_directory'. Parent directories are created if they do not exist. - - - None. - """ - - # Do the arguments have the correct format? - # This check ensures arguments have the appropriate number of objects and - # object types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if the check fails. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(client_directory) - - # Set the absolute path of the Repository's metadata directory. The metadata - # directory should be the one served by the Live repository. At a minimum, - # the repository's root file must be copied. - repository_directory = os.path.abspath(repository_directory) - metadata_directory = os.path.join(repository_directory, - METADATA_DIRECTORY_NAME) - - # Set the client's metadata directory, which will store the metadata copied - # from the repository directory set above. - client_directory = os.path.abspath(client_directory) - client_metadata_directory = os.path.join(client_directory, - METADATA_DIRECTORY_NAME) - - # If the client's metadata directory does not already exist, create it and - # any of its parent directories, otherwise raise an exception. An exception - # is raised to avoid accidentally overwriting previous metadata. - try: - os.makedirs(client_metadata_directory) - - except OSError as e: - if e.errno == errno.EEXIST: - message = 'Cannot create a fresh client metadata directory: ' +\ - repr(client_metadata_directory) + '. Already exists.' - raise tuf.exceptions.RepositoryError(message) - - # Testing of non-errno.EEXIST exceptions have been verified on all - # supported OSs. An unexpected exception (the '/' directory exists, rather - # than disallowed path) is possible on Travis, so the '#pragma: no branch' - # below is included to prevent coverage failure. - else: #pragma: no branch - raise - - # Move all metadata to the client's 'current' and 'previous' directories. - # The root metadata file MUST exist in '{client_metadata_directory}/current'. - # 'tuf.client.updater' expects the 'current' and 'previous' directories to - # exist under 'metadata'. - client_current = os.path.join(client_metadata_directory, 'current') - client_previous = os.path.join(client_metadata_directory, 'previous') - shutil.copytree(metadata_directory, client_current) - shutil.copytree(metadata_directory, client_previous) - - - -def disable_console_log_messages(): - """ - - Disable logger messages printed to the console. 
For example, repository - maintainers may want to call this function if many roles will be sharing - keys, otherwise detected duplicate keys will continually log a warning - message. - - - None. - - - None. - - - Removes the 'tuf.log' console handler, added by default when - 'tuf.repository_tool.py' is imported. - - - None. - """ - - tuf.log.remove_console_handler() - - - -def keys_to_keydict(keys): - """ - - Iterate over a list of keys and return a list of keyids and a dict mapping - keyid to key metadata - - - keys: - A list of key objects conforming to - securesystemslib.formats.ANYKEYLIST_SCHEMA. - - - keyids: - A list of keyids conforming to securesystemslib.formats.KEYID_SCHEMA - keydict: - A dictionary conforming to securesystemslib.formats.KEYDICT_SCHEMA - """ - keyids = [] - keydict = {} - - for key in keys: - keyid = key['keyid'] - key_metadata_format = securesystemslib.keys.format_keyval_to_metadata( - key['keytype'], key['scheme'], key['keyval']) - - new_keydict = {keyid: key_metadata_format} - keydict.update(new_keydict) - keyids.append(keyid) - return keyids, keydict - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running repository_lib.py as a standalone module: - # $ python repository_lib.py. - import doctest - doctest.testmod() diff --git a/tuf/repository_tool.py b/tuf/repository_tool.py deleted file mode 100755 index 1fe6a51e83..0000000000 --- a/tuf/repository_tool.py +++ /dev/null @@ -1,3302 +0,0 @@ - -#!/usr/bin/env python - -# Copyright 2013 - 2017, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - repository_tool.py - - - Vladimir Diaz - - - October 19, 2013 - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a tool that can create a TUF repository. It can be used with the - Python interpreter in interactive mode, or imported directly into a Python - module. See 'tuf/README' for the complete guide to using - 'tuf.repository_tool.py'. -""" - -# Help with Python 3 compatibility, where the print statement is a function, an -# implicit relative import is invalid, and the '/' operator performs true -# division. Example: print 'hello world' raises a 'SyntaxError' exception. 
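Since this file is removed wholesale, a compact sketch of the workflow it supported may help reviewers. create_new_repository() is defined further down in this module; the key paths are illustrative (keystore/ is assumed to exist), and a single unencrypted ed25519 key is shared across all four top-level roles purely to keep the sketch short:

    import tuf.repository_tool as rt

    # Create the skeleton: metadata.staged/ and targets/ under repository/.
    repository = rt.create_new_repository('repository/')

    # Sharing one key across roles triggers the duplicate-key console
    # warning that disable_console_log_messages(), re-exported below,
    # exists to silence.
    rt.generate_and_write_unencrypted_ed25519_keypair(filepath='keystore/demo_key')
    public_key = rt.import_ed25519_publickey_from_file('keystore/demo_key.pub')
    private_key = rt.import_ed25519_privatekey_from_file('keystore/demo_key')

    for role in (repository.root, repository.targets,
        repository.snapshot, repository.timestamp):
      role.add_verification_key(public_key)
      role.load_signing_key(private_key)

    # Write root.json, targets.json, snapshot.json and timestamp.json
    # to repository/metadata.staged/.
    repository.writeall()
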
-from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -import os -import time -import datetime -import logging -import tempfile -import shutil -import json - -from collections import deque - -import tuf -import tuf.formats -import tuf.roledb -import tuf.sig -import tuf.log -import tuf.exceptions -import tuf.repository_lib as repo_lib - -import securesystemslib.keys -import securesystemslib.formats -import securesystemslib.util -import six - -import securesystemslib.storage - - -# Copy API -# pylint: disable=unused-import - -# Copy generic repository API functions to be used via `repository_tool` -from tuf.repository_lib import ( - create_tuf_client_directory, - disable_console_log_messages) - - -# Copy key-related API functions to be used via `repository_tool` -from tuf.repository_lib import ( - import_rsa_privatekey_from_file, - import_ed25519_privatekey_from_file) - -from securesystemslib.interface import ( - generate_and_write_rsa_keypair, - generate_and_write_rsa_keypair_with_prompt, - generate_and_write_unencrypted_rsa_keypair, - generate_and_write_ecdsa_keypair, - generate_and_write_ecdsa_keypair_with_prompt, - generate_and_write_unencrypted_ecdsa_keypair, - generate_and_write_ed25519_keypair, - generate_and_write_ed25519_keypair_with_prompt, - generate_and_write_unencrypted_ed25519_keypair, - import_rsa_publickey_from_file, - import_ecdsa_publickey_from_file, - import_ed25519_publickey_from_file, - import_ecdsa_privatekey_from_file) - -from securesystemslib.keys import ( - generate_rsa_key, - generate_ecdsa_key, - generate_ed25519_key, - import_rsakey_from_pem, - import_ecdsakey_from_pem) - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -# Add a console handler so that users are aware of potentially unintended -# states, such as multiple roles that share keys. -tuf.log.add_console_handler() -tuf.log.set_console_log_level(logging.INFO) - -# Recommended RSA key sizes: -# https://en.wikipedia.org/wiki/Key_size#Asymmetric_algorithm_key_lengths -# Based on the above, RSA keys of size 3072 are expected to provide security -# through 2031 and beyond. -DEFAULT_RSA_KEY_BITS=3072 - -# The default number of hashed bin delegations -DEFAULT_NUM_BINS=1024 - -# The targets and metadata directory names. Metadata files are written -# to the staged metadata directory instead of the "live" one. -METADATA_STAGED_DIRECTORY_NAME = 'metadata.staged' -METADATA_DIRECTORY_NAME = 'metadata' -TARGETS_DIRECTORY_NAME = 'targets' - -# The extension of TUF metadata. -METADATA_EXTENSION = '.json' - -# Expiration date delta, in seconds, of the top-level roles. A metadata -# expiration date is set by taking the current time and adding the expiration -# seconds listed below. - -# Initial 'root.json' expiration time of 1 year. -ROOT_EXPIRATION = 31556900 - -# Initial 'targets.json' expiration time of 3 months. -TARGETS_EXPIRATION = 7889230 - -# Initial 'snapshot.json' expiration time of 1 week. -SNAPSHOT_EXPIRATION = 604800 - -# Initial 'timestamp.json' expiration time of 1 day. -TIMESTAMP_EXPIRATION = 86400 - - -class Repository(object): - """ - - Represent a TUF repository that contains the metadata of the top-level - roles, including all those delegated from the 'targets.json' role. The - repository object returned provides access to the top-level roles, and any - delegated targets that are added as the repository is modified. 
For - example, a Repository object named 'repository' provides the following - access by default: - - repository.root.version = 2 - repository.timestamp.expiration = datetime.datetime(2015, 8, 8, 12, 0) - repository.snapshot.add_verification_key(...) - repository.targets.delegate('unclaimed', ...) - - Delegating a role from 'targets' updates the attributes of the parent - delegation, which then provides: - - repository.targets('unclaimed').add_verification_key(...) - - - - repository_directory: - The root folder of the repository that contains the metadata and targets - sub-directories. - - metadata_directory: - The metadata sub-directory contains the files of the top-level - roles, including all roles delegated from 'targets.json'. - - targets_directory: - The targets sub-directory contains all the target files that are - downloaded by clients and are referenced in TUF Metadata. The hashes and - file lengths are listed in Metadata files so that they are securely - downloaded. Metadata files are similarly referenced in the top-level - metadata. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - use_timestamp_length: - Whether to include the optional length attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_timestamp_hashes: - Whether to include the optional hashes attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_snapshot_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_snapshot_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Creates top-level role objects and assigns them as attributes. - - - A Repository object that contains default Metadata objects for the top-level - roles. - """ - - def __init__(self, repository_directory, metadata_directory, - targets_directory, storage_backend, repository_name='default', - use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=False, use_snapshot_hashes=False): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
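For reference, the constructor documented above was usually reached through helpers such as create_new_repository(), but it can also be called directly; a direct-construction sketch, assuming the directories already exist and using the filesystem backend from securesystemslib (paths and repository name illustrative):

    import tuf.repository_tool as rt
    from securesystemslib.storage import FilesystemBackend

    repository = rt.Repository(
        'repository/',                  # repository_directory
        'repository/metadata.staged/',  # metadata_directory
        'repository/targets/',          # targets_directory
        FilesystemBackend(),
        repository_name='example',
        use_snapshot_length=True)       # opt in to snapshot length attributes
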
- securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_directory) - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_timestamp_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_timestamp_hashes) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_snapshot_length) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(use_snapshot_hashes) - - self._repository_directory = repository_directory - self._metadata_directory = metadata_directory - self._targets_directory = targets_directory - self._repository_name = repository_name - self._storage_backend = storage_backend - self._use_timestamp_length = use_timestamp_length - self._use_timestamp_hashes = use_timestamp_hashes - self._use_snapshot_length = use_snapshot_length - self._use_snapshot_hashes = use_snapshot_hashes - - try: - tuf.roledb.create_roledb(repository_name) - tuf.keydb.create_keydb(repository_name) - - except securesystemslib.exceptions.InvalidNameError: - logger.debug(repr(repository_name) + ' already exists. Overwriting' - ' its contents.') - - # Set the top-level role objects. - self.root = Root(self._repository_name) - self.snapshot = Snapshot(self._repository_name) - self.timestamp = Timestamp(self._repository_name) - self.targets = Targets(self._targets_directory, 'targets', - repository_name=self._repository_name) - - - - def writeall(self, consistent_snapshot=False, use_existing_fileinfo=False): - """ - - Write all the JSON Metadata objects to their corresponding files for - roles which have changed. - writeall() raises an exception if any of the role metadata to be written - to disk is invalid, such as an insufficient threshold of signatures, - missing private keys, etc. - - - consistent_snapshot: - A boolean indicating whether role metadata files should have their - version numbers as filename prefix when written to disk, i.e - 'VERSION.ROLENAME.json', and target files should be copied to a - filename that has their hex digest as filename prefix, i.e - 'HASH.FILENAME'. Note that: - - root metadata is always written with a version prefix, independently - of 'consistent_snapshot' - - the latest version of each metadata file is always also written - without version prefix - - target files are only copied to a hash-prefixed filename if - 'consistent_snapshot' is True and 'use_existing_fileinfo' is False. - If both are True hash-prefixed target file copies must be created - out-of-band. - - use_existing_fileinfo: - Boolean indicating whether the fileinfo dicts in the roledb should be - written as-is (True) or whether hashes should be generated (False, - requires access to the targets files on-disk). - - - tuf.exceptions.UnsignedMetadataError, if any of the top-level - and delegated roles do not have the minimum threshold of signatures. - - - Creates metadata files in the repository's metadata directory. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly - # formatted. 
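A short sketch of the two writeall() switches described above; when both are set, producing the hash-prefixed target copies out-of-band is the operator's responsibility:

    # Consistent snapshot: every metadata file is also written as
    # VERSION.ROLENAME.json, and target files gain HASH.FILENAME copies.
    repository.writeall(consistent_snapshot=True)

    # Reuse the fileinfo already stored in the roledb instead of
    # re-hashing targets on disk (no hash-prefixed copies are made).
    repository.writeall(consistent_snapshot=True, use_existing_fileinfo=True)
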
- securesystemslib.formats.BOOLEAN_SCHEMA.check_match(consistent_snapshot) - - # At this point, tuf.keydb and tuf.roledb must be fully populated, - # otherwise writeall() throws a 'tuf.exceptions.UnsignedMetadataError' for - # the top-level roles. exception if any of the top-level roles are missing - # signatures, keys, etc. - - # Write the metadata files of all the Targets roles that are dirty (i.e., - # have been modified via roledb.update_roleinfo()). - filenames = {'root': os.path.join(self._metadata_directory, - repo_lib.ROOT_FILENAME), 'targets': os.path.join(self._metadata_directory, - repo_lib.TARGETS_FILENAME), 'snapshot': os.path.join(self._metadata_directory, - repo_lib.SNAPSHOT_FILENAME), 'timestamp': os.path.join(self._metadata_directory, - repo_lib.TIMESTAMP_FILENAME)} - - snapshot_signable = None - dirty_rolenames = tuf.roledb.get_dirty_roles(self._repository_name) - - for dirty_rolename in dirty_rolenames: - - # Ignore top-level roles, they will be generated later in this method. - if dirty_rolename in tuf.roledb.TOP_LEVEL_ROLES: - continue - - dirty_filename = os.path.join(self._metadata_directory, - dirty_rolename + METADATA_EXTENSION) - repo_lib._generate_and_write_metadata(dirty_rolename, dirty_filename, - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, filenames, - repository_name=self._repository_name, - use_existing_fileinfo=use_existing_fileinfo) - - # Metadata should be written in (delegated targets -> root -> targets -> - # snapshot -> timestamp) order. Begin by generating the 'root.json' - # metadata file. _generate_and_write_metadata() raises a - # 'securesystemslib.exceptions.Error' exception if the metadata cannot be - # written. - root_roleinfo = tuf.roledb.get_roleinfo('root', self._repository_name) - old_consistent_snapshot = root_roleinfo['consistent_snapshot'] - if 'root' in dirty_rolenames or consistent_snapshot != old_consistent_snapshot: - repo_lib._generate_and_write_metadata('root', filenames['root'], - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, filenames, - repository_name=self._repository_name) - - # Generate the 'targets.json' metadata file. - if 'targets' in dirty_rolenames: - repo_lib._generate_and_write_metadata('targets', filenames['targets'], - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, - repository_name=self._repository_name, - use_existing_fileinfo=use_existing_fileinfo) - - # Generate the 'snapshot.json' metadata file. - if 'snapshot' in dirty_rolenames: - snapshot_signable, junk = repo_lib._generate_and_write_metadata('snapshot', - filenames['snapshot'], self._targets_directory, - self._metadata_directory, self._storage_backend, - consistent_snapshot, filenames, - repository_name=self._repository_name, - use_snapshot_length=self._use_snapshot_length, - use_snapshot_hashes=self._use_snapshot_hashes) - - # Generate the 'timestamp.json' metadata file. - if 'timestamp' in dirty_rolenames: - repo_lib._generate_and_write_metadata('timestamp', filenames['timestamp'], - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, - filenames, repository_name=self._repository_name, - use_timestamp_length=self._use_timestamp_length, - use_timestamp_hashes=self._use_timestamp_hashes) - - tuf.roledb.unmark_dirty(dirty_rolenames, self._repository_name) - - # Delete the metadata of roles no longer in 'tuf.roledb'. 
Obsolete roles - # may have been revoked and should no longer have their metadata files - # available on disk, otherwise loading a repository may unintentionally - # load them. - if snapshot_signable is not None: - repo_lib._delete_obsolete_metadata(self._metadata_directory, - snapshot_signable['signed'], consistent_snapshot, self._repository_name, - self._storage_backend) - - - - def write(self, rolename, consistent_snapshot=False, increment_version_number=True, - use_existing_fileinfo=False): - """ - - Write the JSON metadata for 'rolename' to its corresponding file on disk. - Unlike writeall(), write() allows the metadata file to contain an invalid - threshold of signatures. - - - rolename: - The name of the role to be written to disk. - - consistent_snapshot: - A boolean indicating whether the role metadata file should have its - version number as filename prefix when written to disk, i.e - 'VERSION.ROLENAME.json'. Note that: - - root metadata is always written with a version prefix, independently - of 'consistent_snapshot' - - the latest version of the metadata file is always also written - without version prefix - - if the metadata is targets metadata and 'consistent_snapshot' is - True, the corresponding target files are copied to a filename with - their hex digest as filename prefix, i.e 'HASH.FILENAME', unless - 'use_existing_fileinfo' is also True. - If 'consistent_snapshot' and 'use_existing_fileinfo' both are True, - hash-prefixed target file copies must be created out-of-band. - - increment_version_number: - Boolean indicating whether the version number of 'rolename' should be - automatically incremented. - - use_existing_fileinfo: - Boolean indicating whether the fileinfo dicts in the roledb should be - written as-is (True) or whether hashes should be generated (False, - requires access to the targets files on-disk). - - - None. - - - Creates metadata files in the repository's metadata directory. - - - None. - """ - - rolename_filename = os.path.join(self._metadata_directory, - rolename + METADATA_EXTENSION) - - filenames = {'root': os.path.join(self._metadata_directory, repo_lib.ROOT_FILENAME), - 'targets': os.path.join(self._metadata_directory, repo_lib.TARGETS_FILENAME), - 'snapshot': os.path.join(self._metadata_directory, repo_lib.SNAPSHOT_FILENAME), - 'timestamp': os.path.join(self._metadata_directory, repo_lib.TIMESTAMP_FILENAME)} - - repo_lib._generate_and_write_metadata(rolename, rolename_filename, - self._targets_directory, self._metadata_directory, - self._storage_backend, consistent_snapshot, - filenames=filenames, allow_partially_signed=True, - increment_version_number=increment_version_number, - repository_name=self._repository_name, - use_existing_fileinfo=use_existing_fileinfo) - - # Ensure 'rolename' is no longer marked as dirty after the successful write(). - tuf.roledb.unmark_dirty([rolename], self._repository_name) - - - - - - def status(self): - """ - - Determine the status of the top-level roles. status() checks if each - role provides sufficient public and private keys, signatures, and that a - valid metadata file is generated if writeall() or write() were to be - called. Metadata files are temporarily written so that file hashes and - lengths may be verified, determine if delegated role trust is fully - obeyed, and target paths valid according to parent roles. status() does - not do a simple check for number of threshold keys and signatures. - - - None. - - - None. - - - Generates and writes temporary metadata files. - - - None. 
- """ - - temp_repository_directory = None - - # Generate and write temporary metadata so that full verification of - # metadata is possible, such as verifying signatures, digests, and file - # content. Ensure temporary files are removed after verification results - # are completed. - try: - temp_repository_directory = tempfile.mkdtemp() - targets_directory = self._targets_directory - metadata_directory = os.path.join(temp_repository_directory, - METADATA_STAGED_DIRECTORY_NAME) - os.mkdir(metadata_directory) - - # Verify the top-level roles and log the results. - repo_lib._log_status_of_top_level_roles(targets_directory, - metadata_directory, self._repository_name, self._storage_backend) - - finally: - shutil.rmtree(temp_repository_directory, ignore_errors=True) - - - - def dirty_roles(self): - """ - - Print/log the roles that have been modified. For example, if some role's - version number is changed (repository.timestamp.version = 2), it is - considered dirty and will be included in the list of dirty roles - printed/logged here. Unlike status(), signatures, public keys, targets, - etc. are not verified. status() should be called instead if the caller - would like to verify if a valid role file is generated if writeall() were - to be called. - - - None. - - - None. - - - None. - - - None. - """ - - logger.info('Dirty roles: ' + str(tuf.roledb.get_dirty_roles(self._repository_name))) - - - - def mark_dirty(self, roles): - """ - - Mark the list of 'roles' as dirty. - - - roles: - A list of roles to mark as dirty. on the next write, these roles - will be written to disk. - - - None. - - - None. - - - None. - """ - - tuf.roledb.mark_dirty(roles, self._repository_name) - - - - def unmark_dirty(self, roles): - """ - - No longer mark the list of 'roles' as dirty. - - - roles: - A list of roles to mark as dirty. on the next write, these roles - will be written to disk. - - - None. - - - None. - - - None. - """ - - tuf.roledb.unmark_dirty(roles, self._repository_name) - - - - @staticmethod - def get_filepaths_in_directory(files_directory, recursive_walk=False, - followlinks=True): - """ - - Walk the given 'files_directory' and build a list of target files found. - - - files_directory: - The path to a directory of target files. - - recursive_walk: - To recursively walk the directory, set recursive_walk=True. - - followlinks: - To follow symbolic links, set followlinks=True. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - securesystemslib.exceptions.Error, if 'file_directory' is not a valid - directory. - - Python IO exceptions. - - - None. - - - A list of absolute paths to target files in the given 'files_directory'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(files_directory) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(recursive_walk) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(followlinks) - - # Ensure a valid directory is given. - if not os.path.isdir(files_directory): - raise securesystemslib.exceptions.Error(repr(files_directory) + ' is not' - ' a directory.') - - # A list of the target filepaths found in 'files_directory'. 
- targets = [] - - # FIXME: We need a way to tell Python 2, but not Python 3, to return - # filenames in Unicode; see #61 and: - # http://docs.python.org/2/howto/unicode.html#unicode-filenames - for dirpath, dirnames, filenames in os.walk(files_directory, - followlinks=followlinks): - for filename in filenames: - full_target_path = os.path.join(os.path.abspath(dirpath), filename) - targets.append(full_target_path) - - # Prune the subdirectories to walk right now if we do not wish to - # recursively walk 'files_directory'. - if recursive_walk is False: - del dirnames[:] - - else: - logger.debug('Not pruning subdirectories ' + repr(dirnames)) - - return targets - - - - - -class Metadata(object): - """ - - Provide a base class to represent a TUF Metadata role. There are four - top-level roles: Root, Targets, Snapshot, and Timestamp. The Metadata - class provides methods that are needed by all top-level roles, such as - adding and removing public keys, private keys, and signatures. Metadata - attributes, such as rolename, version, threshold, expiration, and key list - are also provided by the Metadata base class. - - - None. - - - None. - - - None. - - - None. - """ - - def __init__(self): - self._rolename = None - self._repository_name = None - - - def add_verification_key(self, key, expires=None): - """ - - Add 'key' to the role. Adding a key, which should contain only the - public portion, signifies the corresponding private key and signatures - the role is expected to provide. A threshold of signatures is required - for a role to be considered properly signed. If a metadata file contains - an insufficient threshold of signatures, it must not be accepted. - - >>> - >>> - >>> - - - key: - The role key to be added, conformant to - 'securesystemslib.formats.ANYKEY_SCHEMA'. Adding a public key to a role - means that its corresponding private key must generate and add its - signature to the role. A threshold number of signatures is required - for a role to be fully signed. - - expires: - The date in which 'key' expires. 'expires' is a datetime.datetime() - object. - - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - securesystemslib.exceptions.Error, if the 'expires' datetime has already - expired. - - - The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are - updated. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) - - # If 'expires' is unset, choose a default expiration for 'key'. By - # default, Root, Targets, Snapshot, and Timestamp keys are set to expire - # 1 year, 3 months, 1 week, and 1 day from the current time, respectively. 
- if expires is None: - if self.rolename == 'root': - expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + ROOT_EXPIRATION)) - - elif self.rolename == 'Targets': - expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TARGETS_EXPIRATION)) - - elif self.rolename == 'Snapshot': - expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + SNAPSHOT_EXPIRATION)) - - elif self.rolename == 'Timestamp': - expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) - - else: - expires = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time() + TIMESTAMP_EXPIRATION)) - - # Is 'expires' a datetime.datetime() object? - # Raise 'securesystemslib.exceptions.FormatError' if not. - if not isinstance(expires, datetime.datetime): - raise securesystemslib.exceptions.FormatError(repr(expires) + ' is not a' - ' datetime.datetime() object.') - - # Truncate the microseconds value to produce a correct schema string - # of the form 'yyyy-mm-ddThh:mm:ssZ'. - expires = expires.replace(microsecond = 0) - - # Ensure the expiration has not already passed. - current_datetime = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) - - if expires < current_datetime: - raise securesystemslib.exceptions.Error(repr(key) + ' has already' - ' expired.') - - # Update the key's 'expires' entry. - expires = expires.isoformat() + 'Z' - key['expires'] = expires - - # Ensure 'key', which should contain the public portion, is added to - # 'tuf.keydb.py'. Add 'key' to the list of recognized keys. - # Keys may be shared, so do not raise an exception if 'key' has already - # been loaded. - try: - tuf.keydb.add_key(key, repository_name=self._repository_name) - - except tuf.exceptions.KeyAlreadyExistsError: - logger.warning('Adding a verification key that has already been used.') - - keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - - # Save the keyids that are being replaced since certain roles will need to - # re-sign metadata with these keys (e.g., root). Use list() to make a copy - # of roleinfo['keyids'] to ensure we're modifying distinct lists. - previous_keyids = list(roleinfo['keyids']) - - # Add 'key' to the role's entry in 'tuf.roledb.py', and avoid duplicates. - if keyid not in roleinfo['keyids']: - roleinfo['keyids'].append(keyid) - roleinfo['previous_keyids'] = previous_keyids - - tuf.roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - def remove_verification_key(self, key): - """ - - Remove 'key' from the role's currently recognized list of role keys. - The role expects a threshold number of signatures. - - >>> - >>> - >>> - - - key: - The role's key, conformant to 'securesystemslib.formats.ANYKEY_SCHEMA'. - 'key' should contain only the public portion, as only the public key is - needed. The 'add_verification_key()' method should have previously - added 'key'. - - - securesystemslib.exceptions.FormatError, if the 'key' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if the 'key' argument has not been - previously added. - - - Updates the role's 'tuf.roledb.py' entry. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
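One detail worth noting in add_verification_key() above: the 'Targets', 'Snapshot' and 'Timestamp' comparisons are capitalized while rolenames are stored lowercase, so every non-root role falls through to the one-day TIMESTAMP_EXPIRATION default. Callers who care pass 'expires' explicitly, as in this sketch (key file and date illustrative):

    import datetime
    import tuf.repository_tool as rt

    public_key = rt.import_ed25519_publickey_from_file('keystore/snapshot_key.pub')
    repository.snapshot.add_verification_key(
        public_key, expires=datetime.datetime(2030, 1, 1))
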
- securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) - - keyid = key['keyid'] - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - - if keyid in roleinfo['keyids']: - roleinfo['keyids'].remove(keyid) - - tuf.roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - else: - raise securesystemslib.exceptions.Error('Verification key not found.') - - - - def load_signing_key(self, key): - """ - - Load the role key, which must contain the private portion, so that role - signatures may be generated when the role's metadata file is eventually - written to disk. - - >>> - >>> - >>> - - - key: - The role's key, conformant to 'securesystemslib.formats.ANYKEY_SCHEMA'. - It must contain the private key, so that role signatures may be - generated when writeall() or write() is eventually called to generate - valid metadata files. - - - securesystemslib.exceptions.FormatError, if 'key' is improperly formatted. - - securesystemslib.exceptions.Error, if the private key is not found in 'key'. - - - Updates the role's 'tuf.keydb.py' and 'tuf.roledb.py' entries. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) - - # Ensure the private portion of the key is available, otherwise signatures - # cannot be generated when the metadata file is written to disk. - if 'private' not in key['keyval'] or not len(key['keyval']['private']): - raise securesystemslib.exceptions.Error('This is not a private key.') - - # Has the key, with the private portion included, been added to the keydb? - # The public version of the key may have been previously added. - try: - tuf.keydb.add_key(key, repository_name=self._repository_name) - - except tuf.exceptions.KeyAlreadyExistsError: - tuf.keydb.remove_key(key['keyid'], self._repository_name) - tuf.keydb.add_key(key, repository_name=self._repository_name) - - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - if key['keyid'] not in roleinfo['signing_keyids']: - roleinfo['signing_keyids'].append(key['keyid']) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - def unload_signing_key(self, key): - """ - - Remove a previously loaded role private key (i.e., load_signing_key()). - The keyid of the 'key' is removed from the list of recognized signing - keys. - - >>> - >>> - >>> - - - key: - The role key to be unloaded, conformant to - 'securesystemslib.formats.ANYKEY_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'key' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if the 'key' argument has not been - previously loaded. - - - Updates the signing keys of the role in 'tuf.roledb.py'. - - - None. - """ - - # Does 'key' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.ANYKEY_SCHEMA.check_match(key) - - # Update the role's 'signing_keys' field in 'tuf.roledb.py'. 
- roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - - # TODO: Should we consider removing keys from keydb that are no longer - # associated with any roles? There could be many no-longer-used keys - # stored in the keydb if not. For now, just unload the key. - if key['keyid'] in roleinfo['signing_keyids']: - roleinfo['signing_keyids'].remove(key['keyid']) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - else: - raise securesystemslib.exceptions.Error('Signing key not found.') - - - - def add_signature(self, signature, mark_role_as_dirty=True): - """ - - Add a signature to the role. A role is considered fully signed if it - contains a threshold of signatures. The 'signature' should have been - generated by the private key corresponding to one of the role's expected - keys. - - >>> - >>> - >>> - - - signature: - The signature to be added to the role, conformant to - 'securesystemslib.formats.SIGNATURE_SCHEMA'. - - mark_role_as_dirty: - A boolean indicating whether the updated 'roleinfo' for 'rolename' - should be marked as dirty. The caller might not want to mark - 'rolename' as dirty if it is loading metadata from disk and only wants - to populate roledb.py. Likewise, add_role() would support a similar - boolean to allow the repository tools to successfully load roles via - load_repository() without needing to mark these roles as dirty (default - behavior). - - - securesystemslib.exceptions.FormatError, if the 'signature' argument is - improperly formatted. - - - Adds 'signature', if not already added, to the role's 'signatures' field - in 'tuf.roledb.py'. - - - None. - """ - - # Does 'signature' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(mark_role_as_dirty) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - - # Ensure the roleinfo contains a 'signatures' field. - if 'signatures' not in roleinfo: - roleinfo['signatures'] = [] - - # Update the role's roleinfo by adding 'signature', if it has not been - # added. - if signature not in roleinfo['signatures']: - roleinfo['signatures'].append(signature) - tuf.roledb.update_roleinfo(self.rolename, roleinfo, mark_role_as_dirty, - repository_name=self._repository_name) - - else: - logger.debug('Signature already exists for role: ' + repr(self.rolename)) - - - - def remove_signature(self, signature): - """ - - Remove a previously loaded, or added, role 'signature'. A role must - contain a threshold number of signatures to be considered fully signed. - - >>> - >>> - >>> - - - signature: - The role signature to remove, conformant to - 'securesystemslib.formats.SIGNATURE_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'signature' argument is - improperly formatted. - - securesystemslib.exceptions.Error, if 'signature' has not been previously - added to this role. - - - Updates the 'signatures' field of the role in 'tuf.roledb.py'. - - - None. - """ - - # Does 'signature' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
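Because add_signature() accepts any dict matching SIGNATURE_SCHEMA, signatures produced out-of-band (e.g., on an air-gapped signing machine) can be attached to a role. A sketch, with placeholder payload bytes standing in for the role's canonical signed content:

    import securesystemslib.keys

    # In practice the bytes signed are the canonical JSON of the role's
    # 'signed' object; the literal below is only a stand-in.
    signature = securesystemslib.keys.create_signature(
        private_key, b'canonical-signed-metadata')
    repository.timestamp.add_signature(signature)
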
- securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - - if signature in roleinfo['signatures']: - roleinfo['signatures'].remove(signature) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - else: - raise securesystemslib.exceptions.Error('Signature not found.') - - - - @property - def signatures(self): - """ - - A getter method that returns the role's signatures. A role is considered - fully signed if it contains a threshold number of signatures, where each - signature must be provided by the generated by the private key. Keys - are added to a role with the add_verification_key() method. - - - None. - - - None. - - - None. - - - A list of signatures, conformant to - 'securesystemslib.formats.SIGNATURES_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - signatures = roleinfo['signatures'] - - return signatures - - - - @property - def keys(self): - """ - - A getter method that returns the role's keyids of the keys. The role - is expected to eventually contain a threshold of signatures generated - by the private keys of each of the role's keys (returned here as a keyid.) - - - None. - - - None. - - - None. - - - A list of the role's keyids (i.e., keyids of the keys). - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - keyids = roleinfo['keyids'] - - return keyids - - - - @property - def rolename(self): - """ - - Return the role's name. - Examples: 'root', 'timestamp', 'targets/unclaimed/django'. - - - None. - - - None. - - - None. - - - The role's name, conformant to 'tuf.formats.ROLENAME_SCHEMA'. - Examples: 'root', 'timestamp', 'targets/unclaimed/django'. - """ - - return self._rolename - - - - @property - def version(self): - """ - - A getter method that returns the role's version number, conformant to - 'tuf.formats.VERSION_SCHEMA'. - - - None. - - - None. - - - None. - - - The role's version number, conformant to - 'tuf.formats.VERSION_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - version = roleinfo['version'] - - return version - - - - @version.setter - def version(self, version): - """ - - A setter method that updates the role's version number. TUF clients - download new metadata with version number greater than the version - currently trusted. New metadata start at version 1 when either write() - or write_partial() is called. Version numbers are automatically - incremented, when the write methods are called, as follows: - - 1. write_partial==True and the metadata is the first to be written. - - 2. write_partial=False (i.e., write()), the metadata was not loaded as - partially written, and a write_partial is not needed. - - >>> - >>> - >>> - - - version: - The role's version number, conformant to - 'tuf.formats.VERSION_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'version' argument is - improperly formatted. - - - Modifies the 'version' attribute of the Repository object and updates the - role's version in 'tuf.roledb.py'. - - - None. - """ - - # Does 'version' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. 
- tuf.formats.METADATAVERSION_SCHEMA.check_match(version) - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - roleinfo['version'] = version - - tuf.roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - @property - def threshold(self): - """ - - Return the role's threshold value. A role is considered fully signed if - a threshold number of signatures is available. - - - None. - - - None. - - - None. - - - The role's threshold value, conformant to - 'tuf.formats.THRESHOLD_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) - threshold = roleinfo['threshold'] - - return threshold - - - - @threshold.setter - def threshold(self, threshold): - """ - - A setter method that modified the threshold value of the role. Metadata - is considered fully signed if a 'threshold' number of signatures is - available. - - >>> - >>> - >>> - - - threshold: - An integer value that sets the role's threshold value, or the minimum - number of signatures needed for metadata to be considered fully - signed. Conformant to 'tuf.formats.THRESHOLD_SCHEMA'. - - - securesystemslib.exceptions.FormatError, if the 'threshold' argument is - improperly formatted. - - - Modifies the threshold attribute of the Repository object and updates - the roles threshold in 'tuf.roledb.py'. - - - None. - """ - - # Does 'threshold' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) - - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) - roleinfo['previous_threshold'] = roleinfo['threshold'] - roleinfo['threshold'] = threshold - - tuf.roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - @property - def expiration(self): - """ - - A getter method that returns the role's expiration datetime. - - - None. - - - securesystemslib.exceptions.FormatError, if the expiration cannot be - parsed correctly - - - None. - - - The role's expiration datetime, a datetime.datetime() object. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - expires = roleinfo['expires'] - - return tuf.formats.expiry_string_to_datetime(expires) - - - - @expiration.setter - def expiration(self, datetime_object): - """ - - A setter method for the role's expiration datetime. The top-level - roles have a default expiration (e.g., ROOT_EXPIRATION), but may later - be modified by this setter method. - - >>> - >>> - >>> - - - datetime_object: - The datetime expiration of the role, a datetime.datetime() object. - - - securesystemslib.exceptions.FormatError, if 'datetime_object' is not a - datetime.datetime() object. - - securesystemslib.exceptions.Error, if 'datetime_object' has already - expired. - - - Modifies the expiration attribute of the Repository object. - The datetime given will be truncated to microseconds = 0 - - - None. - """ - - # Is 'datetime_object' a datetime.datetime() object? - # Raise 'securesystemslib.exceptions.FormatError' if not. 
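The version, threshold and expiration properties combine into the usual key-rotation idiom; the date below is illustrative and must lie in the future, since the expiration setter that follows rejects datetimes that have already passed:

    import datetime

    repository.root.threshold = 2
    repository.root.version = repository.root.version + 1
    repository.root.expiration = datetime.datetime(2030, 1, 1)
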
- if not isinstance(datetime_object, datetime.datetime): - raise securesystemslib.exceptions.FormatError( - repr(datetime_object) + ' is not a datetime.datetime() object.') - - # truncate the microseconds value to produce a correct schema string - # of the form yyyy-mm-ddThh:mm:ssZ - datetime_object = datetime_object.replace(microsecond = 0) - - # Ensure the expiration has not already passed. - current_datetime_object = \ - tuf.formats.unix_timestamp_to_datetime(int(time.time())) - - if datetime_object < current_datetime_object: - raise securesystemslib.exceptions.Error(repr(self.rolename) + ' has' - ' already expired.') - - # Update the role's 'expires' entry in 'tuf.roledb.py'. - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - expires = datetime_object.isoformat() + 'Z' - roleinfo['expires'] = expires - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - @property - def signing_keys(self): - """ - - A getter method that returns a list of the role's signing keys. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - A list of keyids of the role's signing keys, conformant to - 'securesystemslib.formats.KEYIDS_SCHEMA'. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - signing_keyids = roleinfo['signing_keyids'] - - return signing_keyids - - - - - -class Root(Metadata): - """ - - Represent a Root role object. The root role is responsible for - listing the public keys and threshold of all the top-level roles, including - itself. Top-level metadata is rejected if it does not comply with what is - specified by the Root role. - - This Root object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) is supported. - Since Root is a top-level role and must exist, a default Root object - is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - - A 'root' role is added to 'tuf.roledb.py'. - - - None. - """ - - def __init__(self, repository_name): - - super(Root, self).__init__() - - self._rolename = 'root' - self._repository_name = repository_name - - # Is 'repository_name' properly formatted? Otherwise, raise a - # tuf.exceptions.FormatError exception. - tuf.formats.ROLENAME_SCHEMA.check_match(repository_name) - - # By default, 'snapshot' metadata is set to expire 1 week from the current - # time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( - int(time.time() + ROOT_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'consistent_snapshot': False, - 'expires': expiration, 'partial_loaded': False} - try: - tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) - - except tuf.exceptions.RoleAlreadyExistsError: - pass - - - - - -class Timestamp(Metadata): - """ - - Represent a Timestamp role object. The timestamp role is responsible for - referencing the latest version of the Snapshot role. Under normal - conditions, it is the only role to be downloaded from a remote repository - without a known file length and hash. An upper length limit is set, though. 
- Also, its signatures are also verified to be valid according to the Root - role. If invalid metadata can only be downloaded by the client, Root - is the only other role that is downloaded without a known length and hash. - This case may occur if a role's signing keys have been revoked and a newer - Root file is needed to list the updated keys. - - This Timestamp object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) is supported. - Since Snapshot is a top-level role and must exist, a default Timestamp - object is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - - A 'timestamp' role is added to 'tuf.roledb.py'. - - - None. - """ - - def __init__(self, repository_name): - - super(Timestamp, self).__init__() - - self._rolename = 'timestamp' - self._repository_name = repository_name - - # Is 'repository_name' properly formatted? Otherwise, raise a - # tuf.exceptions.FormatError exception. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - # By default, 'root' metadata is set to expire 1 year from the current - # time. The expiration may be modified. - expiration = tuf.formats.unix_timestamp_to_datetime( - int(time.time() + TIMESTAMP_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'expires': expiration, - 'partial_loaded': False} - - try: - tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) - - except tuf.exceptions.RoleAlreadyExistsError: - pass - - - - - -class Snapshot(Metadata): - """ - - Represent a Snapshot role object. The snapshot role is responsible for - referencing the other top-level roles (excluding Timestamp) and all - delegated roles. - - This Snapshot object sub-classes Metadata, so the expected - Metadata operations like adding/removing public keys, signatures, private - keys, and updating metadata attributes (e.g., version and expiration) is - supported. Since Snapshot is a top-level role and must exist, a default - Snapshot object is instantiated when a new Repository object is created. - - >>> - >>> - >>> - - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - tuf.exceptions.FormatError, if the argument is improperly formatted. - - - A 'snapshot' role is added to 'tuf.roledb.py'. - - - None. - """ - - def __init__(self, repository_name): - - super(Snapshot, self).__init__() - - self._rolename = 'snapshot' - self._repository_name = repository_name - - # Is 'repository_name' properly formatted? Otherwise, raise a - # tuf.exceptions.FormatError exception. - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - # By default, 'snapshot' metadata is set to expire 1 week from the current - # time. The expiration may be modified. 
- expiration = tuf.formats.unix_timestamp_to_datetime( - int(time.time() + SNAPSHOT_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'signatures': [], 'version': 0, 'expires': expiration, - 'partial_loaded': False} - - try: - tuf.roledb.add_role(self._rolename, roleinfo, self._repository_name) - - except tuf.exceptions.RoleAlreadyExistsError: - pass - - - - - -class Targets(Metadata): - """ - - Represent a Targets role object. Targets roles include the top-level role - 'targets.json' and all delegated roles (e.g., 'targets/unclaimed/django'). - The expected operations of Targets metadata is included, such as adding - and removing repository target files, making and revoking delegations, and - listing the target files provided by it. - - Adding or removing a delegation causes the attributes of the Targets object - to be updated. That is, if the 'django' Targets object is delegated by - 'targets/unclaimed', a new attribute is added so that the following - code statement is supported: - repository.targets('unclaimed')('django').version = 2 - - Likewise, revoking a delegation causes removal of the delegation attribute. - - This Targets object sub-classes Metadata, so the expected Metadata - operations like adding/removing public keys, signatures, private keys, and - updating metadata attributes (e.g., version and expiration) is supported. - Since Targets is a top-level role and must exist, a default Targets object - (for 'targets.json', not delegated roles) is instantiated when a new - Repository object is created. - - >>> - >>> - >>> - - - targets_directory: - The targets directory of the Repository object. - - rolename: - The rolename of this Targets object. - - roleinfo: - An already populated roleinfo object of 'rolename'. Conformant to - 'tuf.formats.ROLEDB_SCHEMA'. - - repository_name: - The name of the repository. If not supplied, 'rolename' is added to the - 'default' repository. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Modifies the roleinfo of the targets role in 'tuf.roledb', or creates - a default one named 'targets'. - - - None. - """ - - def __init__(self, targets_directory, rolename='targets', roleinfo=None, - parent_targets_object=None, repository_name='default'): - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - securesystemslib.formats.PATH_SCHEMA.check_match(targets_directory) - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - if roleinfo is not None: - tuf.formats.ROLEDB_SCHEMA.check_match(roleinfo) - - super(Targets, self).__init__() - self._targets_directory = targets_directory - self._rolename = rolename - self._target_files = [] - self._delegated_roles = {} - self._parent_targets_object = self - self._repository_name = repository_name - - # Keep a reference to the top-level 'targets' object. Any delegated roles - # that may be created, can be added to and accessed via the top-level - # 'targets' object. - if parent_targets_object is not None: - self._parent_targets_object = parent_targets_object - - # By default, Targets objects are set to expire 3 months from the current - # time. May be later modified. 
- expiration = tuf.formats.unix_timestamp_to_datetime( - int(time.time() + TARGETS_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - # If 'roleinfo' is not provided, set an initial default. - if roleinfo is None: - roleinfo = {'keyids': [], 'signing_keyids': [], 'threshold': 1, - 'version': 0, 'expires': expiration, - 'signatures': [], 'paths': {}, 'path_hash_prefixes': [], - 'partial_loaded': False, 'delegations': {'keys': {}, - 'roles': []}} - - # Add the new role to the 'tuf.roledb'. - try: - tuf.roledb.add_role(self.rolename, roleinfo, self._repository_name) - - except tuf.exceptions.RoleAlreadyExistsError: - pass - - - - def __call__(self, rolename): - """ - - Allow callable Targets object so that delegated roles may be referenced - by their string rolenames. Rolenames may include characters like '-' and - are not restricted to Python identifiers. - - - rolename: - The rolename of the delegated role. 'rolename' must be a role - previously delegated by this Targets role. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - tuf.exceptions.UnknownRoleError, if 'rolename' has not been - delegated by this Targets object. - - - Modifies the roleinfo of the targets role in 'tuf.roledb'. - - - The Targets object of 'rolename'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - if rolename in self._delegated_roles: - return self._delegated_roles[rolename] - - else: - raise tuf.exceptions.UnknownRoleError(repr(rolename) + ' has' - ' not been delegated by ' + repr(self.rolename)) - - - - def add_delegated_role(self, rolename, targets_object): - """ - - Add 'targets_object' to this Targets object's list of known delegated - roles. Specifically, delegated Targets roles should call 'super(Targets, - self).add_delegated_role(...)' so that the top-level 'targets' role - contains a dictionary of all the available roles on the repository. - - - rolename: - The rolename of the delegated role. 'rolename' must be a role - previously delegated by this Targets role. - - targets_object: - A Targets() object. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - Updates the Target object's dictionary of delegated targets. - - - The Targets object of 'rolename'. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - if not isinstance(targets_object, Targets): - raise securesystemslib.exceptions.FormatError(repr(targets_object) + ' is' - ' not a Targets object.') - - - if rolename in self._delegated_roles: - logger.debug(repr(rolename) + ' already exists.') - - else: - self._delegated_roles[rolename] = targets_object - - - - def remove_delegated_role(self, rolename): - """ - Remove 'rolename' from this Targets object's list of delegated roles. - This method does not update tuf.roledb and others. - - - rolename: - The rolename of the delegated role to remove. 'rolename' should be a - role previously delegated by this Targets role. 
- - - securesystemslib.exceptions.FormatError, if the argument is improperly - formatted. - - - Updates the Target object's dictionary of delegated targets. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if any are improperly formatted. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - - if rolename not in self._delegated_roles: - logger.debug(repr(rolename) + ' has not been delegated.') - return - - else: - del self._delegated_roles[rolename] - - - - @property - def target_files(self): - """ - - A getter method that returns the target files added thus far to this - Targets object. - - >>> - >>> - >>> - - - None. - - - None. - - - None. - - - None. - """ - - target_files = tuf.roledb.get_roleinfo(self._rolename, - self._repository_name)['paths'] - return target_files - - - - def add_paths(self, paths, child_rolename): - """ - - Add 'paths' to the delegated paths of 'child_rolename'. 'paths' can be a - list of either file paths or glob patterns. The updater client verifies - the target paths specified by child roles, and searches for targets by - visiting these delegated paths. A child role may only provide targets - specifically listed in the delegations field of the delegating role, or a - target that matches a delegated path. - - >>> - >>> - >>> - - - paths: - A list of glob patterns, or file paths, that 'child_rolename' is - trusted to provide. - - child_rolename: - The child delegation that requires an update to its delegated or - trusted paths, as listed in the parent role's delegations (e.g., - 'Django' in 'unclaimed'). - - - securesystemslib.exceptions.FormatError, if a path or glob pattern in - 'paths' is not a string, or if 'child_rolename' is not a formatted - rolename. - - securesystemslib.exceptions.Error, if 'child_rolename' has not been - delegated yet. - - tuf.exceptions.InvalidNameError, if any path in 'paths' does not match - pattern. - - - Modifies this Targets' delegations field. - - - None. - """ - - # Do the argument have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATHS_SCHEMA.check_match(paths) - tuf.formats.ROLENAME_SCHEMA.check_match(child_rolename) - - # Ensure that 'child_rolename' exists, otherwise it will not have an entry - # in the parent role's delegations field. - if not tuf.roledb.role_exists(child_rolename, self._repository_name): - raise securesystemslib.exceptions.Error(repr(child_rolename) + ' does' - ' not exist.') - - for path in paths: - # Check if the delegated paths or glob patterns are relative and use - # forward slash as a separator or raise an exception. Paths' existence - # on the file system is not verified. If the path is incorrect, - # the targetfile won't be matched successfully during a client update. - self._check_path(path) - - # Get the current role's roleinfo, so that its delegations field can be - # updated. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) - - # Update the delegated paths of 'child_rolename' to add relative paths. 
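add_paths() above pairs with delegate(), defined later in this module, and with the callable-Targets access shown earlier; a sketch, with the rolename, glob pattern and keys carried over from the earlier illustrative examples:

    # Delegate 'unclaimed' from the top-level targets role, then widen
    # the paths it is trusted for. Patterns are relative, forward-slash
    # paths; their existence on disk is not checked here.
    repository.targets.delegate('unclaimed', [public_key], ['packages/*.tgz'])
    repository.targets.add_paths(['extras/*.tgz'], 'unclaimed')

    # Delegated roles are then reachable through the callable object.
    repository.targets('unclaimed').load_signing_key(private_key)
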
- for role in roleinfo['delegations']['roles']: - if role['name'] == child_rolename: - for relative_path in paths: - if relative_path not in role['paths']: - role['paths'].append(relative_path) - - else: - logger.debug(repr(relative_path) + ' is already a delegated path.') - else: - logger.debug(repr(role['name']) + ' does not match child rolename.') - - tuf.roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - def add_target(self, filepath, custom=None, fileinfo=None): - """ - - Add a filepath (must be relative to the repository's targets directory) - to the Targets object. - - If 'filepath' has already been added, it will be replaced with any new - file or 'custom' information. - - >>> - >>> - >>> - - - filepath: - The path of the target file. It must be relative to the repository's - targets directory. - - custom: - An optional dictionary providing additional information about the file. - NOTE: if a custom value is passed, the fileinfo parameter must be None. - This parameter will be deprecated in a future release of tuf, use of - the fileinfo parameter is preferred. - - fileinfo: - An optional fileinfo dictionary, conforming to - tuf.formats.TARGETS_FILEINFO_SCHEMA, providing full information about the - file, i.e: - { 'length': 101, - 'hashes': { 'sha256': '123EDF...' }, - 'custom': { 'permissions': '600'} # optional - } - NOTE: if a custom value is passed, the fileinfo parameter must be None. - - - securesystemslib.exceptions.FormatError, if 'filepath' is improperly - formatted. - - tuf.exceptions.InvalidNameError, if 'filepath' does not match pattern. - - - Adds 'filepath' to this role's list of targets. This role's - 'tuf.roledb.py' entry is also updated. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(filepath) - - if fileinfo and custom: - raise securesystemslib.exceptions.Error("Can only take one of" - " custom or fileinfo, not both.") - - if fileinfo: - tuf.formats.TARGETS_FILEINFO_SCHEMA.check_match(fileinfo) - - if custom is None: - custom = {} - else: - tuf.formats.CUSTOM_SCHEMA.check_match(custom) - - # Add 'filepath' (i.e., relative to the targets directory) to the role's - # list of targets. 'filepath' will not be verified as an allowed path - # according to some delegating role. Not verifying 'filepath' here allows - # freedom to add targets and parent restrictions in any order, minimize - # the number of times these checks are performed, and allow any role to - # delegate trust of packages to this Targets role. - - # Check if the target is relative and uses forward slash as a separator - # or raise an exception. File's existence on the file system is not - # verified. If the file does not exist relative to the targets directory, - # later calls to write() will fail. - self._check_path(filepath) - - # Update the role's 'tuf.roledb.py' entry and avoid duplicates. 
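-    # For example (hypothetical filepath and values), after the update below
-    # roleinfo['paths'] may map:
-    #
-    #   'foo/bar.tar.gz' -> {'custom': {'permissions': '600'}}
-    #
-    # or, when full fileinfo is supplied instead of 'custom':
-    #
-    #   'foo/bar.tar.gz' -> {'length': 101, 'hashes': {'sha256': '123EDF...'}}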
- roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) - - if filepath not in roleinfo['paths']: - logger.debug('Adding new target: ' + repr(filepath)) - - else: - logger.debug('Replacing target: ' + repr(filepath)) - - if fileinfo: - roleinfo['paths'].update({filepath: fileinfo}) - else: - roleinfo['paths'].update({filepath: {'custom': custom}}) - - tuf.roledb.update_roleinfo(self._rolename, roleinfo, - repository_name=self._repository_name) - - - - def add_targets(self, list_of_targets): - """ - - Add a list of target filepaths (all relative to 'self.targets_directory'). - This method does not actually create files on the file system. The - list of targets must already exist on disk. - - >>> - >>> - >>> - - - list_of_targets: - A list of target filepaths that are added to the paths of this Targets - object. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - tuf.exceptions.InvalidNameError, if any target in 'list_of_targets' - does not match pattern. - - - This Targets' roleinfo is updated with the paths in 'list_of_targets'. - - - None. - """ - - # Does 'list_of_targets' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) - - # Ensure the paths in 'list_of_targets' are relative and use forward slash - # as a separator or raise an exception. The paths of 'list_of_targets' - # will be verified as existing and allowed paths according to this Targets - # parent role when write() or writeall() is called. Not verifying - # filepaths here allows the freedom to add targets and parent restrictions - # in any order and minimize the number of times these checks are performed. - for target in list_of_targets: - self._check_path(target) - - # Update this Targets 'tuf.roledb.py' entry. - roleinfo = tuf.roledb.get_roleinfo(self._rolename, self._repository_name) - for relative_target in list_of_targets: - if relative_target not in roleinfo['paths']: - logger.debug('Adding new target: ' + repr(relative_target)) - else: - logger.debug('Replacing target: ' + repr(relative_target)) - roleinfo['paths'].update({relative_target: {}}) - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - def remove_target(self, filepath): - """ - - Remove the target 'filepath' from this Targets' 'paths' field. 'filepath' - is relative to the targets directory. - - >>> - >>> - >>> - - - filepath: - The target to remove from this Targets object, relative to the - repository's targets directory. - - - securesystemslib.exceptions.FormatError, if 'filepath' is improperly - formatted. - - securesystemslib.exceptions.Error, if 'filepath' is not located in the - repository's targets directory, or not found. - - - Modifies this Targets 'tuf.roledb.py' entry. - - - None. - """ - - # Does 'filepath' have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. Raise - # 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.RELPATH_SCHEMA.check_match(filepath) - - # Remove 'relative_filepath', if found, and update this Targets roleinfo. 
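-    # For illustration (hypothetical rolename and filepath): calling
-    # repository.targets('django').remove_target('django/baz.tar.gz') deletes
-    # the 'django/baz.tar.gz' key from the 'paths' dict fetched below.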
- fileinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - if filepath in fileinfo['paths']: - del fileinfo['paths'][filepath] - tuf.roledb.update_roleinfo(self.rolename, fileinfo, - repository_name=self._repository_name) - - else: - raise securesystemslib.exceptions.Error('Target file path not found.') - - - - def clear_targets(self): - """ - - Remove all the target filepaths in the "paths" field of this Targets. - - >>> - >>> - >>> - - - None - - - None. - - - Modifies this Targets' 'tuf.roledb.py' entry. - - - None. - """ - - roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - roleinfo['paths'] = {} - - tuf.roledb.update_roleinfo(self.rolename, roleinfo, - repository_name=self._repository_name) - - - - - - def get_delegated_rolenames(self): - """ - - Return all delegations of a role. If ['a/b/', 'a/b/c/', 'a/b/c/d'] have - been delegated by the delegated role 'django', - repository.targets('django').get_delegated_rolenames() returns: ['a/b', - 'a/b/c', 'a/b/c/d']. - - - None. - - - None. - - - None. - - - A list of rolenames. - """ - - return tuf.roledb.get_delegated_rolenames(self.rolename, self._repository_name) - - - - - - def _create_delegated_target(self, rolename, keyids, threshold, paths): - """ - Create a new Targets object for the 'rolename' delegation. An initial - expiration is set (3 months from the current time). - """ - - expiration = tuf.formats.unix_timestamp_to_datetime( - int(time.time() + TARGETS_EXPIRATION)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'name': rolename, 'keyids': keyids, 'signing_keyids': [], - 'threshold': threshold, 'version': 0, - 'expires': expiration, 'signatures': [], 'partial_loaded': False, - 'paths': paths, 'delegations': {'keys': {}, 'roles': []}} - - # The new targets object is added as an attribute to this Targets object. - new_targets_object = Targets(self._targets_directory, rolename, roleinfo, - parent_targets_object=self._parent_targets_object, - repository_name=self._repository_name) - - return new_targets_object - - - - - - def _update_roledb_delegations(self, keydict, delegations_roleinfo): - """ - Update the roledb to include delegations of the keys in keydict and the - roles in delegations_roleinfo - """ - - current_roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name) - current_roleinfo['delegations']['keys'].update(keydict) - - for roleinfo in delegations_roleinfo: - current_roleinfo['delegations']['roles'].append(roleinfo) - - tuf.roledb.update_roleinfo(self.rolename, current_roleinfo, - repository_name=self._repository_name) - - - - - - def delegate(self, rolename, public_keys, paths, threshold=1, - terminating=False, list_of_targets=None, path_hash_prefixes=None): - """ - - Create a new delegation, where 'rolename' is a child delegation of this - Targets object. The keys and roles database is updated, including the - delegations field of this Targets. The delegation of 'rolename' is added - and accessible (i.e., repository.targets(rolename)). - - Actual metadata files are not created, only when repository.writeall() or - repository.write() is called. - - >>> - >>> - >>> - - - rolename: - The name of the delegated role, as in 'django' or 'unclaimed'. - - public_keys: - A list of TUF key objects in 'ANYKEYLIST_SCHEMA' format. The list - may contain any of the supported key types: RSAKEY_SCHEMA, - ED25519KEY_SCHEMA, etc. - - paths: - The paths, or glob patterns, delegated to 'rolename'. 
Any targets - added to 'rolename', via add_targets() or 'list_of_targets', must - match one of the paths or glob patterns in 'paths'. Apart from the - public keys of 'rolename', the delegated 'paths' is often known and - specified when a delegation is first performed. If the delegator - is unsure of which 'paths' to delegate, 'paths' can be set to ['']. - - threshold: - The threshold number of keys of 'rolename'. - - terminating: - Boolean that indicates whether this role allows the updater client to - continue searching for targets (target files it is trusted to list but - has not yet specified) in other delegations. If 'terminating' is True - and 'updater.target()' does not find 'example_target.tar.gz' in this - role, a 'tuf.exceptions.UnknownTargetError' exception should be raised. - If 'terminating' is False (default), and 'target/other_role' is also - trusted with 'example_target.tar.gz' and has listed it, - updater.target() should backtrack and return the target file specified - by 'target/other_role'. - - list_of_targets: - A list of target filepaths that are added to 'rolename'. - 'list_of_targets' is a list of target filepaths, can be empty, and each - filepath must be located in the repository's targets directory. The - list of targets should also exist at the specified paths, otherwise - non-existent target paths might not be added when the targets file is - written to disk with writeall() or write(). - - path_hash_prefixes: - A list of hash prefixes in - 'tuf.formats.PATH_HASH_PREFIXES_SCHEMA' format, used in - hashed bin delegations. Targets may be located and stored in hashed - bins by calculating the target path's hash prefix. - - - securesystemslib.exceptions.FormatError, if any of the arguments are - improperly formatted. - - securesystemslib.exceptions.Error, if the delegated role already exists. - - tuf.exceptions.InvalidNameError, if any path in 'paths' or target in - 'list_of_targets' does not match pattern. - - - A new Target object is created for 'rolename' that is accessible to the - caller (i.e., targets.). The 'tuf.keydb.py' and - 'tuf.roledb.py' stores are updated with 'public_keys'. - - - None. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - tuf.formats.ROLENAME_SCHEMA.check_match(rolename) - securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(public_keys) - tuf.formats.RELPATHS_SCHEMA.check_match(paths) - tuf.formats.THRESHOLD_SCHEMA.check_match(threshold) - securesystemslib.formats.BOOLEAN_SCHEMA.check_match(terminating) - - if list_of_targets is not None: - tuf.formats.RELPATHS_SCHEMA.check_match(list_of_targets) - - if path_hash_prefixes is not None: - tuf.formats.PATH_HASH_PREFIXES_SCHEMA.check_match(path_hash_prefixes) - - # Keep track of the valid keyids (added to the new Targets object) and - # their keydicts (added to this Targets delegations). - keyids, keydict = repo_lib.keys_to_keydict(public_keys) - - # Ensure the paths of 'list_of_targets' are located in the repository's - # targets directory. - relative_targetpaths = {} - - if list_of_targets: - for target in list_of_targets: - # Check if the target path is relative or raise an exception. File's - # existence on the file system is not verified. If the file does not - # exist relative to the targets directory, later calls to write() - # will fail. 
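-        # For example (hypothetical paths): 'foo/bar.tar.gz' passes the
-        # check, while '/foo/bar.tar.gz' (absolute path) or 'foo\\bar.tar.gz'
-        # (backslash separator) raises tuf.exceptions.InvalidNameError in
-        # _check_path().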
-        self._check_path(target)
-        relative_targetpaths.update({target: {}})
-
-    for path in paths:
-      # Check if the delegated paths or glob patterns are relative or
-      # raise an exception. Paths' existence on the file system is not
-      # verified. If the path is incorrect, the targetfile won't be matched
-      # successfully during a client update.
-      self._check_path(path)
-
-    # The new targets object is added as an attribute to this Targets object.
-    new_targets_object = self._create_delegated_target(rolename, keyids,
-        threshold, relative_targetpaths)
-
-    # Update the roleinfo of this role. A ROLE_SCHEMA object requires only
-    # 'keyids', 'threshold', and 'paths'.
-    roleinfo = {'name': rolename,
-                'keyids': keyids,
-                'threshold': threshold,
-                'terminating': terminating,
-                'paths': list(relative_targetpaths.keys())}
-
-    if paths:
-      roleinfo['paths'] = paths
-
-    if path_hash_prefixes:
-      roleinfo['path_hash_prefixes'] = path_hash_prefixes
-      # A role in the delegations field must list either 'path_hash_prefixes'
-      # or 'paths', but not both.
-      del roleinfo['paths']
-
-    # Update the public keys of 'new_targets_object'.
-    for key in public_keys:
-      new_targets_object.add_verification_key(key)
-
-    # Add the new delegation to the top-level 'targets' role object (i.e.,
-    # 'repository.targets()'). For example, 'django', which was delegated by
-    # repository.targets('claimed'), is made accessible as
-    # repository.targets('django').
-    if self.rolename != 'targets':
-      self._parent_targets_object.add_delegated_role(rolename,
-          new_targets_object)
-
-    # Add 'new_targets_object' to the delegating role object (this object).
-    self.add_delegated_role(rolename, new_targets_object)
-
-    # Update the 'delegations' field of the current role.
-    self._update_roledb_delegations(keydict, [roleinfo])
-
-
-
-
-
-  def revoke(self, rolename):
-    """
-    <Purpose>
-      Revoke this Targets' 'rolename' delegation. Its 'rolename' attribute is
-      deleted, including the entries in its 'delegations' field and in
-      'tuf.roledb'.
-
-      Actual metadata files are not updated until repository.writeall() or
-      repository.write() is called.
-
-    <Usage>
-      >>>
-      >>>
-      >>>
-
-    <Arguments>
-      rolename:
-        The rolename (e.g., 'Django' in 'django') of the child delegation the
-        parent role (this role) wants to revoke.
-
-    <Exceptions>
-      securesystemslib.exceptions.FormatError, if 'rolename' is improperly
-      formatted.
-
-    <Side Effects>
-      The delegations dictionary of 'rolename' is modified, and its
-      'tuf.roledb' entry is updated. This Targets' 'rolename' delegation
-      attribute is also deleted.
-
-    <Returns>
-      None.
-    """
-
-    # Does 'rolename' have the correct format?
-    # Ensure the arguments have the appropriate number of objects and object
-    # types, and that all dict keys are properly named.
-    # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-    tuf.formats.ROLENAME_SCHEMA.check_match(rolename)
-
-    # Remove 'rolename' from this Targets' delegations dict.
-    roleinfo = tuf.roledb.get_roleinfo(self.rolename, self._repository_name)
-
-    for role in roleinfo['delegations']['roles']:
-      if role['name'] == rolename:
-        roleinfo['delegations']['roles'].remove(role)
-
-    tuf.roledb.update_roleinfo(self.rolename, roleinfo,
-        repository_name=self._repository_name)
-
-    # Remove 'rolename' from 'tuf.roledb.py'.
-    try:
-      tuf.roledb.remove_role(rolename, self._repository_name)
-      # Remove the rolename delegation from the current role. For example,
-      # the 'django' role is removed from repository.targets('django').
-      del self._delegated_roles[rolename]
-      self._parent_targets_object.remove_delegated_role(rolename)
-
-    except (tuf.exceptions.UnknownRoleError, KeyError):
-      pass
-
-
-
-  def delegate_hashed_bins(self, list_of_targets, keys_of_hashed_bins,
-      number_of_bins=DEFAULT_NUM_BINS):
-    """
-    <Purpose>
-      Distribute a large number of target files over multiple delegated roles
-      (hashed bins). The metadata files of the delegated roles will be nearly
-      equal in size: 'list_of_targets' is uniformly distributed by calculating
-      the target filepath's hash and determining which bin it should reside
-      in. The updater client will use "lazy bin walk" to find a target file's
-      hashed bin destination. The parent role lists a range of path hash
-      prefixes that each hashed bin contains. This method is intended for
-      repositories with a large number of target files: it provides a way of
-      easily distributing and managing the metadata that lists the targets,
-      and it minimizes the number of metadata files (and their size)
-      downloaded by the client. See tuf-spec.txt and the following link for
-      more information:
-      http://www.python.org/dev/peps/pep-0458/#metadata-scalability
-
-    <Usage>
-      >>>
-      >>>
-      >>>
-
-    <Arguments>
-      list_of_targets:
-        The target filepaths of the targets that should be stored in the
-        hashed bins created (i.e., delegated roles). A repository object's
-        get_filepaths_in_directory() can generate a list of valid target
-        paths.
-
-      keys_of_hashed_bins:
-        The initial public keys of the delegated roles. Public keys may be
-        later added or removed by calling the usual methods of the delegated
-        Targets object. For example:
-        repository.targets('000-003').add_verification_key()
-
-      number_of_bins:
-        The number of delegated roles, or hashed bins, that should be
-        generated and contain the target file attributes listed in
-        'list_of_targets'. 'number_of_bins' must be a power of 2. Each bin
-        may contain a range of path hash prefixes (e.g., target filepath
-        digests that range from [000]... - [003]..., where the series of
-        digits in brackets is considered the hash prefix).
-
-    <Exceptions>
-      securesystemslib.exceptions.FormatError, if the arguments are improperly
-      formatted.
-
-      securesystemslib.exceptions.Error, if 'number_of_bins' is not a power of
-      2, or one of the targets in 'list_of_targets' is not relative to the
-      repository's targets directory.
-
-      tuf.exceptions.InvalidNameError, if any target in 'list_of_targets'
-      does not match pattern.
-
-    <Side Effects>
-      Delegates multiple target roles from the current parent role.
-
-    <Returns>
-      None.
-    """
-
-    # Do the arguments have the correct format?
-    # Ensure the arguments have the appropriate number of objects and object
-    # types, and that all dict keys are properly named.
-    # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
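-    # For illustration (hypothetical count): with number_of_bins=16, the hash
-    # prefixes are the single hex digits '0' through 'f', so each bin name is
-    # one digit and holds every target whose path hash starts with that digit.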
- securesystemslib.formats.PATHS_SCHEMA.check_match(list_of_targets) - securesystemslib.formats.ANYKEYLIST_SCHEMA.check_match(keys_of_hashed_bins) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - prefix_length, prefix_count, bin_size = repo_lib.get_bin_numbers(number_of_bins) - - logger.info('Creating hashed bin delegations.\n' + - repr(len(list_of_targets)) + ' total targets.\n' + - repr(number_of_bins) + ' hashed bins.\n' + - repr(prefix_count) + ' total hash prefixes.\n' + - 'Each bin ranges over ' + repr(bin_size) + ' hash prefixes.') - - # Generate a list of bin names, the range of prefixes to be delegated to - # that bin, along with the corresponding full list of target prefixes - # to be delegated to that bin - ordered_roles = [] - for idx in range(0, prefix_count, bin_size): - high = idx + bin_size - 1 - name = repo_lib.create_bin_name(idx, high, prefix_length) - if bin_size == 1: - target_hash_prefixes = [name] - else: - target_hash_prefixes = [] - for idy in range(idx, idx+bin_size): - target_hash_prefixes.append("{prefix:0{len}x}".format(prefix=idy, - len=prefix_length)) - - role = {"name": name, - "target_paths": [], - "target_hash_prefixes": target_hash_prefixes} - ordered_roles.append(role) - - for target_path in list_of_targets: - # Check if the target path is relative or raise an exception. File's - # existence on the file system is not verified. If the file does not - # exist relative to the targets directory, later calls to write() and - # writeall() will fail. - self._check_path(target_path) - - # Determine the hash prefix of 'target_path' by computing the digest of - # its path relative to the targets directory. - # We must hash a target path as it appears in the metadata - hash_prefix = repo_lib.get_target_hash(target_path)[:prefix_length] - ordered_roles[int(hash_prefix, 16) // bin_size]["target_paths"].append(target_path) - - keyids, keydict = repo_lib.keys_to_keydict(keys_of_hashed_bins) - - # A queue of roleinfo's that need to be updated in the roledb - delegated_roleinfos = [] - - for bin_role in ordered_roles: - # TODO: originally we just called self.delegate() for each item in this - # iteration. However, this is *extremely* slow when creating a large - # number of hashed bins, i.e. 16k as is recommended for PyPI usage in - # PEP 458: https://www.python.org/dev/peps/pep-0458/ - # The source of the slowness is the interactions with the roledb, which - # causes several deep copies of roleinfo dictionaries: - # https://github.com/theupdateframework/tuf/issues/1005 - # Once the underlying issues in #1005 are resolved, i.e. some combination - # of the intermediate and long-term fixes, we may simplify here by - # switching back to just calling self.delegate(), but until that time we - # queue roledb interactions and perform all updates to the roledb in one - # operation at the end of the iteration. - - relative_paths = {} - for path in bin_role['target_paths']: - relative_paths.update({path: {}}) - - # Delegate from the "unclaimed" targets role to each 'bin_role' - target = self._create_delegated_target(bin_role['name'], keyids, 1, - relative_paths) - - roleinfo = {'name': bin_role['name'], - 'keyids': keyids, - 'threshold': 1, - 'terminating': False, - 'path_hash_prefixes': bin_role['target_hash_prefixes']} - delegated_roleinfos.append(roleinfo) - - for key in keys_of_hashed_bins: - target.add_verification_key(key) - - # Add the new delegation to the top-level 'targets' role object (i.e., - # 'repository.targets()'). 
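-      # For illustration (hypothetical names): if this role is 'unclaimed'
-      # and bin_role['name'] is '00-07', the new bin also becomes reachable
-      # as repository.targets('00-07').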
- if self.rolename != 'targets': - self._parent_targets_object.add_delegated_role(bin_role['name'], - target) - - # Add 'new_targets_object' to the 'targets' role object (this object). - self.add_delegated_role(bin_role['name'], target) - logger.debug('Delegated from ' + repr(self.rolename) + ' to ' + repr(bin_role)) - - - self._update_roledb_delegations(keydict, delegated_roleinfos) - - - - - def add_target_to_bin(self, target_filepath, number_of_bins=DEFAULT_NUM_BINS, - fileinfo=None): - """ - - Add the fileinfo of 'target_filepath' to the expected hashed bin, if the - bin is available. The hashed bin should have been created by - {targets_role}.delegate_hashed_bins(). Assuming the target filepath is - located in the repository's targets directory, determine the filepath's - hash prefix, locate the expected bin (if any), and then add the fileinfo - to the expected bin. Example: 'targets/foo.tar.gz' may be added to the - 'targets/unclaimed/58-5f.json' role's list of targets by calling this - method. - - - target_filepath: - The filepath of the target to be added to a hashed bin. The filepath - must be located in the repository's targets directory. - - number_of_bins: - The number of delegated roles, or hashed bins, in use by the repository. - Note: 'number_of_bins' must be a power of 2. - - fileinfo: - An optional fileinfo object, conforming to tuf.formats.TARGETS_FILEINFO_SCHEMA, - providing full information about the file. - - - securesystemslib.exceptions.FormatError, if 'target_filepath' is - improperly formatted. - - securesystemslib.exceptions.Error, if 'target_filepath' cannot be added to - a hashed bin (e.g., an invalid target filepath, or the expected hashed - bin does not exist.) - - - The fileinfo of 'target_filepath' is added to a hashed bin of this Targets - object. - - - The name of the hashed bin that the target was added to. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - # TODO: check target_filepath is sane - - path_hash = repo_lib.get_target_hash(target_filepath) - bin_name = repo_lib.find_bin_for_target_hash(path_hash, number_of_bins) - - # Ensure the Targets object has delegated to hashed bins - if not self._delegated_roles.get(bin_name, None): - raise securesystemslib.exceptions.Error(self.rolename + ' does not have' - ' a delegated role ' + bin_name) - - self._delegated_roles[bin_name].add_target(target_filepath, - fileinfo=fileinfo) - - return bin_name - - - - def remove_target_from_bin(self, target_filepath, - number_of_bins=DEFAULT_NUM_BINS): - """ - - Remove the fileinfo of 'target_filepath' from the expected hashed bin, if - the bin is available. The hashed bin should have been created by - {targets_role}.delegate_hashed_bins(). Assuming the target filepath is - located in the repository's targets directory, determine the filepath's - hash prefix, locate the expected bin (if any), and then remove the - fileinfo from the expected bin. Example: 'targets/foo.tar.gz' may be - removed from the '58-5f.json' role's list of targets by calling this - method. - - - target_filepath: - The filepath of the target to be added to a hashed bin. The filepath - must be located in the repository's targets directory. 
- - number_of_bins: - The number of delegated roles, or hashed bins, in use by the repository. - Note: 'number_of_bins' must be a power of 2. - - - securesystemslib.exceptions.FormatError, if 'target_filepath' is - improperly formatted. - - securesystemslib.exceptions.Error, if 'target_filepath' cannot be removed - from a hashed bin (e.g., an invalid target filepath, or the expected - hashed bin does not exist.) - - - The fileinfo of 'target_filepath' is removed from a hashed bin of this - Targets object. - - - The name of the hashed bin that the target was added to. - """ - - # Do the arguments have the correct format? - # Ensure the arguments have the appropriate number of objects and object - # types, and that all dict keys are properly named. - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(target_filepath) - tuf.formats.NUMBINS_SCHEMA.check_match(number_of_bins) - - # TODO: check target_filepath is sane? - - path_hash = repo_lib.get_target_hash(target_filepath) - bin_name = repo_lib.find_bin_for_target_hash(path_hash, number_of_bins) - - # Ensure the Targets object has delegated to hashed bins - if not self._delegated_roles.get(bin_name, None): - raise securesystemslib.exceptions.Error(self.rolename + ' does not have' - ' a delegated role ' + bin_name) - - self._delegated_roles[bin_name].remove_target(target_filepath) - - return bin_name - - - @property - def delegations(self): - """ - - A getter method that returns the delegations made by this Targets role. - - >>> - >>> - >>> - - - None. - - - tuf.exceptions.UnknownRoleError, if this Targets' rolename - does not exist in 'tuf.roledb'. - - - None. - - - A list containing the Targets objects of this Targets' delegations. - """ - - return list(self._delegated_roles.values()) - - - - - - def _check_path(self, pathname): - """ - - Check if a path matches the definition of a PATHPATTERN or a - TARGETPATH (uses the forward slash (/) as directory separator and - does not start with a directory separator). Checks are performed only - on the path string, without accessing the file system. - - - pathname: - A file path or a glob pattern. - - - securesystemslib.exceptions.FormatError, if 'pathname' is improperly - formatted. - - tuf.exceptions.InvalidNameError, if 'pathname' does not match pattern. - - - None. - """ - - tuf.formats.RELPATH_SCHEMA.check_match(pathname) - - if '\\' in pathname: - raise tuf.exceptions.InvalidNameError('Path ' + repr(pathname) - + ' does not use the forward slash (/) as directory separator.') - - if pathname.startswith('/'): - raise tuf.exceptions.InvalidNameError('Path ' + repr(pathname) - + ' starts with a directory separator. All paths should be relative' - ' to targets directory.') - - - - -def create_new_repository(repository_directory, repository_name='default', - storage_backend=None, use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=False, use_snapshot_hashes=False): - """ - - Create a new repository, instantiate barebones metadata for the top-level - roles, and return a Repository object. On disk, create_new_repository() - only creates the directories needed to hold the metadata and targets files. - The repository object returned may be modified to update the newly created - repository. The methods of the returned object may be called to create - actual repository files (e.g., repository.write()). 
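-
-   <Usage>
-     # A minimal usage sketch; the directory path below is hypothetical:
-     #   repository = create_new_repository('path/to/repository')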
-
-   <Arguments>
-     repository_directory:
-       The directory that will eventually hold the metadata and target files
-       of the TUF repository.
-
-     repository_name:
-       The name of the repository. If not supplied, 'default' is used as the
-       repository name.
-
-     storage_backend:
-       An object which implements
-       securesystemslib.storage.StorageBackendInterface. When no object is
-       passed a FilesystemBackend will be instantiated and used.
-
-     use_timestamp_length:
-       Whether to include the optional length attribute of the snapshot
-       metadata file in the timestamp metadata.
-       Default is True.
-
-     use_timestamp_hashes:
-       Whether to include the optional hashes attribute of the snapshot
-       metadata file in the timestamp metadata.
-       Default is True.
-
-     use_snapshot_length:
-       Whether to include the optional length attribute for targets
-       metadata files in the snapshot metadata.
-       Default is False to save bandwidth without losing protection against
-       rollback attacks.
-       Read more in section 5.6 of the Mercury paper:
-       https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy
-
-     use_snapshot_hashes:
-       Whether to include the optional hashes attribute for targets
-       metadata files in the snapshot metadata.
-       Default is False to save bandwidth without losing protection against
-       rollback attacks.
-       Read more in section 5.6 of the Mercury paper:
-       https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy
-
-   <Exceptions>
-     securesystemslib.exceptions.FormatError, if the arguments are improperly
-     formatted.
-
-   <Side Effects>
-     The 'repository_directory' is created if it does not exist, including
-     its metadata and targets sub-directories.
-
-   <Returns>
-     A 'tuf.repository_tool.Repository' object.
-   """
-
-   # Does 'repository_directory' have the correct format?
-   # Ensure the arguments have the appropriate number of objects and object
-   # types, and that all dict keys are properly named.
-   # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
-   securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory)
-   securesystemslib.formats.NAME_SCHEMA.check_match(repository_name)
-
-   if storage_backend is None:
-     storage_backend = securesystemslib.storage.FilesystemBackend()
-
-   # Set the repository, metadata, and targets directories. These
-   # directories are created if they do not exist.
-   repository_directory = os.path.abspath(repository_directory)
-   metadata_directory = None
-   targets_directory = None
-
-   # Ensure the 'repository_directory' exists.
-   logger.info('Creating ' + repr(repository_directory))
-   storage_backend.create_folder(repository_directory)
-
-   # Set the metadata and targets directories. The metadata directory is a
-   # staged one so that the "live" repository is not affected. The staged
-   # metadata changes may be moved over to "live" after all updates have
-   # been completed.
-   metadata_directory = os.path.join(repository_directory,
-       METADATA_STAGED_DIRECTORY_NAME)
-   targets_directory = os.path.join(repository_directory, TARGETS_DIRECTORY_NAME)
-
-   # Ensure the metadata directory exists.
-   logger.info('Creating ' + repr(metadata_directory))
-   storage_backend.create_folder(metadata_directory)
-
-   # Ensure the targets directory exists.
-   logger.info('Creating ' + repr(targets_directory))
-   storage_backend.create_folder(targets_directory)
-
-   # Create the bare bones repository object, where only the top-level roles
-   # have been set and contain default values (e.g., the Root role has a
-   # threshold of 1 and expires 1 year into the future).
- repository = Repository(repository_directory, metadata_directory, - targets_directory, storage_backend, repository_name, use_timestamp_length, - use_timestamp_hashes, use_snapshot_length, use_snapshot_hashes) - - return repository - - - - - -def load_repository(repository_directory, repository_name='default', - storage_backend=None, use_timestamp_length=True, use_timestamp_hashes=True, - use_snapshot_length=False, use_snapshot_hashes=False): - """ - - Return a repository object containing the contents of metadata files loaded - from the repository. - - - repository_directory: - The root folder of the repository that contains the metadata and targets - sub-directories. - - repository_name: - The name of the repository. If not supplied, 'default' is used as the - repository name. - - storage_backend: - An object which implements - securesystemslib.storage.StorageBackendInterface. When no object is - passed a FilesystemBackend will be instantiated and used. - - use_timestamp_length: - Whether to include the optional length attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_timestamp_hashes: - Whether to include the optional hashes attribute of the snapshot - metadata file in the timestamp metadata. - Default is True. - - use_snapshot_length: - Whether to include the optional length attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - use_snapshot_hashes: - Whether to include the optional hashes attribute for targets - metadata files in the snapshot metadata. - Default is False to save bandwidth but without losing security - from rollback attacks. - Read more at section 5.6 from the Mercury paper: - https://www.usenix.org/conference/atc17/technical-sessions/presentation/kuppusamy - - - securesystemslib.exceptions.FormatError, if 'repository_directory' or any of - the metadata files are improperly formatted. - - tuf.exceptions.RepositoryError, if the Root role cannot be - found. At a minimum, a repository must contain 'root.json' - - - All the metadata files found in the repository are loaded and their contents - stored in a repository_tool.Repository object. - - - repository_tool.Repository object. - """ - - # Does 'repository_directory' have the correct format? - # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch. - securesystemslib.formats.PATH_SCHEMA.check_match(repository_directory) - securesystemslib.formats.NAME_SCHEMA.check_match(repository_name) - - if storage_backend is None: - storage_backend = securesystemslib.storage.FilesystemBackend() - - repository_directory = os.path.abspath(repository_directory) - metadata_directory = os.path.join(repository_directory, - METADATA_STAGED_DIRECTORY_NAME) - targets_directory = os.path.join(repository_directory, TARGETS_DIRECTORY_NAME) - - # The Repository() object loaded (i.e., containing all the metadata roles - # found) and returned. - repository = Repository(repository_directory, metadata_directory, - targets_directory, storage_backend, repository_name, use_timestamp_length, - use_timestamp_hashes, use_snapshot_length, use_snapshot_hashes) - - filenames = repo_lib.get_top_level_metadata_filenames(metadata_directory) - - # The Root file is always available without a version number (a consistent - # snapshot) attached to the filename. 
Store the 'consistent_snapshot' value - # and read the loaded Root file so that other metadata files may be located. - consistent_snapshot = False - - # Load the metadata of the top-level roles (i.e., Root, Timestamp, Targets, - # and Snapshot). - repository, consistent_snapshot = repo_lib._load_top_level_metadata(repository, - filenames, repository_name) - - delegated_roles_filenames = repo_lib.get_delegated_roles_metadata_filenames( - metadata_directory, consistent_snapshot, storage_backend) - - # Load the delegated targets metadata and their fileinfo. - # The delegated targets roles form a tree/graph which is traversed in a - # breadth-first-search manner starting from 'targets' in order to correctly - # load the delegations hierarchy. - parent_targets_object = repository.targets - - # Keep the next delegations to be loaded in a deque structure which - # has the properties of a list but is designed to have fast appends - # and pops from both ends - delegations = deque() - # A set used to keep the already loaded delegations and avoid an infinite - # loop in case of cycles in the delegations graph - loaded_delegations = set() - - # Top-level roles are already loaded, fetch targets and get its delegations. - # Store the delegations in the form of delegated-delegating role tuples, - # starting from the top-level targets: - # [('role1', 'targets'), ('role2', 'targets'), ... ] - roleinfo = tuf.roledb.get_roleinfo('targets', repository_name) - for role in roleinfo['delegations']['roles']: - delegations.append((role, 'targets')) - - # Traverse the graph by appending the next delegation to the deque and - # 'pop'-ing and loading the left-most element. - while delegations: - delegation_info, delegating_role = delegations.popleft() - - rolename = delegation_info['name'] - if (rolename, delegating_role) in loaded_delegations: - logger.warning('Detected cycle in the delegation graph: ' + - repr(delegating_role) + ' -> ' + - repr(rolename) + - ' is reached more than once.') - continue - - # Instead of adding only rolename to the set, store the already loaded - # delegated-delegating role tuples. This way a delegated role is added - # to each of its delegating roles but when the role is reached twice - # from the same delegating role an infinite loop is avoided. - loaded_delegations.add((rolename, delegating_role)) - - metadata_path = delegated_roles_filenames[rolename] - signable = None - - try: - signable = securesystemslib.util.load_json_file(metadata_path) - - except (securesystemslib.exceptions.Error, ValueError, IOError): - logger.debug('Tried to load metadata with invalid JSON' - ' content: ' + repr(metadata_path)) - continue - - metadata_object = signable['signed'] - - # Extract the metadata attributes of 'metadata_object' and update its - # corresponding roleinfo. - roleinfo = {'name': rolename, - 'signing_keyids': [], - 'signatures': [], - 'partial_loaded': False - } - - roleinfo['signatures'].extend(signable['signatures']) - roleinfo['version'] = metadata_object['version'] - roleinfo['expires'] = metadata_object['expires'] - roleinfo['paths'] = metadata_object['targets'] - roleinfo['delegations'] = metadata_object['delegations'] - roleinfo['threshold'] = delegation_info['threshold'] - roleinfo['keyids'] = delegation_info['keyids'] - - # Generate the Targets object of the delegated role, - # add it to the top-level 'targets' object and to its - # direct delegating role object. 
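-    # For illustration of the traversal (hypothetical rolenames): if
-    # 'targets' delegates to 'django', and 'django' delegates to
-    # 'django-unstable', the deque is seeded with (django_info, 'targets')
-    # and later grows with (django_unstable_info, 'django') once 'django' is
-    # popped and loaded.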
- new_targets_object = Targets(targets_directory, rolename, - roleinfo, parent_targets_object=parent_targets_object, - repository_name=repository_name) - - parent_targets_object.add_delegated_role(rolename, - new_targets_object) - if delegating_role != 'targets': - parent_targets_object(delegating_role).add_delegated_role(rolename, - new_targets_object) - - # Append the next level delegations to the deque: - # the 'delegated' role becomes the 'delegating' - for delegation in metadata_object['delegations']['roles']: - delegations.append((delegation, rolename)) - - # Extract the keys specified in the delegations field of the Targets - # role. Add 'key_object' to the list of recognized keys. Keys may be - # shared, so do not raise an exception if 'key_object' has already been - # added. In contrast to the methods that may add duplicate keys, do not - # log a warning here as there may be many such duplicate key warnings. - # The repository maintainer should have also been made aware of the - # duplicate key when it was added. - for key_metadata in six.itervalues(metadata_object['delegations']['keys']): - - # The repo may have used hashing algorithms for the generated keyids - # that doesn't match the client's set of hash algorithms. Make sure - # to only used the repo's selected hashing algorithms. - key_object, keyids = securesystemslib.keys.format_metadata_to_key(key_metadata, - keyid_hash_algorithms=key_metadata['keyid_hash_algorithms']) - try: - for keyid in keyids: # pragma: no branch - key_object['keyid'] = keyid - tuf.keydb.add_key(key_object, keyid=None, - repository_name=repository_name) - - except tuf.exceptions.KeyAlreadyExistsError: - pass - - return repository - - - - - -def dump_signable_metadata(metadata_filepath): - """ - - Dump the "signed" portion of metadata. It is the portion that is normally - signed by the repository tool, which is in canonicalized JSON form. - This function is intended for external tools that wish to independently - sign metadata. - - The normal workflow for this use case is to: - (1) call dump_signable_metadata(metadata_filepath) - (2) sign the output with an external tool - (3) call append_signature(signature, metadata_filepath) - - - metadata_filepath: - The path to the metadata file. For example, - repository/metadata/root.json. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - IOError, if 'metadata_filepath' cannot be opened. - - - None. - - - Metadata content that is normally signed by the repository tool (i.e., the - "signed" portion of a metadata file). - """ - - # Are the argument properly formatted? - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) - - signable = securesystemslib.util.load_json_file(metadata_filepath) - - # Is 'signable' a valid metadata file? - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - - return securesystemslib.formats.encode_canonical(signable['signed']) - - - - - -def append_signature(signature, metadata_filepath): - """ - - Append 'signature' to the metadata at 'metadata_filepath'. The signature - is assumed to be valid, and externally generated by signing the output of - dump_signable_metadata(metadata_filepath). This function is intended for - external tools that wish to independently sign metadata. 
- - The normal workflow for this use case is to: - (1) call dump_signable_metadata(metadata_filepath) - (2) sign the output with an external tool - (3) call append_signature(signature, metadata_filepath) - - - signature: - A TUF signature structure that contains the KEYID, signing method, and - the signature. It conforms to securesystemslib.formats.SIGNATURE_SCHEMA. - - For example: - - { - "keyid": "a0a0f0cf08...", - "method": "ed25519", - "sig": "14f6e6566ec13..." - } - - metadata_filepath: - The path to the metadata file. For example, - repository/metadata/root.json. - - - securesystemslib.exceptions.FormatError, if the arguments are improperly - formatted. - - - 'metadata_filepath' is overwritten. - - - None. - """ - - # Are the arguments properly formatted? - securesystemslib.formats.SIGNATURE_SCHEMA.check_match(signature) - securesystemslib.formats.PATH_SCHEMA.check_match(metadata_filepath) - - signable = securesystemslib.util.load_json_file(metadata_filepath) - - # Is 'signable' a valid metadata file? - tuf.formats.SIGNABLE_SCHEMA.check_match(signable) - - signable['signatures'].append(signature) - - file_object = tempfile.TemporaryFile() - - written_metadata_content = json.dumps(signable, indent=1, - separators=(',', ': '), sort_keys=True).encode('utf-8') - - file_object.write(written_metadata_content) - securesystemslib.util.persist_temp_file(file_object, metadata_filepath) - - - - - -if __name__ == '__main__': - # The interactive sessions of the documentation strings can - # be tested by running repository_tool.py as a standalone module: - # $ python repository_tool.py. - import doctest - doctest.testmod() diff --git a/tuf/scripts/repo.py b/tuf/scripts/repo.py deleted file mode 100755 index da1664a86e..0000000000 --- a/tuf/scripts/repo.py +++ /dev/null @@ -1,1154 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2018, New York University and the TUF contributors -# SPDX-License-Identifier: MIT OR Apache-2.0 - -""" - - repo.py - - - Vladimir Diaz - - - January 2018. - - - See LICENSE-MIT OR LICENSE for licensing information. - - - Provide a command-line interface to create and modify TUF repositories. The - CLI removes the need to write Python code when creating or modifying - repositories, which is the case with repository_tool.py and - developer_tool.py. - - Note: - 'pip install securesystemslib[crypto,pynacl]' is required by the CLI, - which installs the 3rd-party dependencies: cryptography and pynacl. - - - Note: arguments within brackets are optional. - - $ repo.py --init - [--consistent, --bare, --path, --root_pw, --targets_pw, - --snapshot_pw, --timestamp_pw] - $ repo.py --add ... [--path, --recursive] - $ repo.py --remove - $ repo.py --distrust --pubkeys [--role] - $ repo.py --trust --pubkeys [--role] - $ repo.py --sign [--role ] - $ repo.py --key - [--filename - --path , --pw [my_password]] - $ repo.py --delegate --delegatee - --pubkeys - [--role --terminating --threshold - --sign ] - $ repo.py --revoke --delegatee - [--role --sign ] - $ repo.py --verbose <0-5> - $ repo.py --clean [--path] - - - --init: - Create new TUF repository in current working or specified directory. - - --consistent: - Enable consistent snapshots for newly created TUF repository. - - --bare: - Specify creation of bare TUF repository with no key created or set. - - --path: - Choose specified path location of a TUF repository or key(s). - - --role: - Specify top-level role(s) affected by the main command-line option. - - --pubkeys: - Indicate location of key(s) affected by the main command-line option. 
- - --root_pw: - Set password for encrypting top-level key file of root role. - - --targets_pw: - Set password for encrypting top-level key file of targets role. - - --snapshot_pw: - Set password for encrypting top-level key file of snapshot role. - - --timestamp_pw: - Set password for encrypting top-level key file of timestamp role. - - --add: - Add file specified by to the Targets metadata. - - --recursive: - Include files in subdirectories of specified directory . - - --remove: - Remove target files from Targets metadata matching . - - --distrust: - Discontinue trust of keys located in directory of a role. - - --trust: - Indicate trusted keys located in directory of a role. - - --sign: - Sign metadata of target role(s) with keys in specified directory. - - --key: - Generate cryptographic key of specified type (default: Ed25519). - - --filename: - Specify filename associated with generated top-level key. - - --pw: - Set password for the generated key of specified type . - - --delegate: - Delegate trust of target files from Targets role (or specified - in --role) to --delegatee role with specified . - - --delegatee: - Specify role that is targeted by delegator in --role to sign for - target files matching delegated or in revocation of trust. - - --terminating: - Mark delegation to --delegatee role from delegator as a terminating one. - - --threshold: - Specify signature threshold of --delegatee role as the value . - - --revoke: - Revoke trust of target files from delegated role (--delegatee) - - --verbose: - Set the verbosity level of logging messages. Accepts values 1-5. - - --clean: - Delete repo in current working or specified directory. -""" - -# Help with Python 2+3 compatibility, where the print statement is a function, -# an implicit relative import is invalid, and the '/' operator performs true -# division. Example: print 'hello world' raises a 'SyntaxError' exception. -from __future__ import print_function -from __future__ import absolute_import -from __future__ import division -from __future__ import unicode_literals - -import os -import sys -import logging -import argparse -import shutil -import time -import fnmatch - -import tuf -import tuf.log -import tuf.formats -import tuf.repository_tool as repo_tool - -# 'pip install securesystemslib[crypto,pynacl]' is required for the CLI, -# which installs the cryptography and pynacl. -import securesystemslib -from securesystemslib import interface -import six - - -# See 'log.py' to learn how logging is handled in TUF. -logger = logging.getLogger(__name__) - -repo_tool.disable_console_log_messages() - -PROG_NAME = 'repo.py' - -REPO_DIR = 'tufrepo' -CLIENT_DIR = 'tufclient' -KEYSTORE_DIR = 'tufkeystore' - -ROOT_KEY_NAME = 'root_key' -TARGETS_KEY_NAME = 'targets_key' -SNAPSHOT_KEY_NAME = 'snapshot_key' -TIMESTAMP_KEY_NAME = 'timestamp_key' - -STAGED_METADATA_DIR = 'metadata.staged' -METADATA_DIR = 'metadata' - -# The keytype strings, as expected on the command line. -ED25519_KEYTYPE = 'ed25519' -ECDSA_KEYTYPE = 'ecdsa' -RSA_KEYTYPE = 'rsa' -SUPPORTED_CLI_KEYTYPES = (ECDSA_KEYTYPE, ED25519_KEYTYPE, RSA_KEYTYPE) - -# The supported keytype strings (as they appear in metadata) are listed here -# because they won't necessarily match the key types supported by -# securesystemslib. -SUPPORTED_KEY_TYPES = ('ed25519', 'ecdsa-sha2-nistp256', 'rsa') - -# pylint: disable=protected-access -# ... 
to allow use of sslib _generate_and_write_*_keypair convenience methods - -def process_command_line_arguments(parsed_arguments): - """ - - Perform the relevant operations on the repo according to the chosen - command-line options. Which functions are executed depends on - 'parsed_arguments'. For instance, the --init and --clean options will - cause the init_repo() and clean_repo() functions to be called. - Multiple operations can be executed in one invocation of the CLI. - - - parsed_arguments: - The parsed arguments returned by argparse.parse_args(). - - - securesystemslib.exceptions.Error, if any of the arguments are - improperly formatted or if any of the argument could not be processed. - - - None. - - - None. - """ - - # Do we have a valid argparse Namespace? - if not isinstance(parsed_arguments, argparse.Namespace): - raise tuf.exceptions.Error('Invalid namespace: ' + repr(parsed_arguments)) - - else: - logger.debug('We have a valid argparse Namespace.') - - # TODO: Make sure the order that the arguments are processed allows for the - # most convenient use of multiple options in one invocation of the CLI. For - # instance, it might be best for --clean to be processed first before --init - # so that a user can do the following: repo.py --clean --init (that is, first - # clear the repo in the current working directory, and then initialize a new - # one. - if parsed_arguments.clean: - clean_repo(parsed_arguments) - - if parsed_arguments.init: - init_repo(parsed_arguments) - - if parsed_arguments.remove: - remove_targets(parsed_arguments) - - if parsed_arguments.add: - add_targets(parsed_arguments) - - if parsed_arguments.distrust: - remove_verification_key(parsed_arguments) - - if parsed_arguments.trust: - add_verification_key(parsed_arguments) - - if parsed_arguments.key: - gen_key(parsed_arguments) - - if parsed_arguments.revoke: - revoke(parsed_arguments) - - if parsed_arguments.delegate: - delegate(parsed_arguments) - - # --sign should be processed last, after the other options, so that metadata - # is signed last after potentially being modified by the other options. - if parsed_arguments.sign: - sign_role(parsed_arguments) - - - -def delegate(parsed_arguments): - - if not parsed_arguments.delegatee: - raise tuf.exceptions.Error( - '--delegatee must be set to perform the delegation.') - - if parsed_arguments.delegatee in ('root', 'snapshot', 'timestamp', 'targets'): - raise tuf.exceptions.Error( - 'Cannot delegate to the top-level role: ' + repr(parsed_arguments.delegatee)) - - if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error( - '--pubkeys must be set to perform the delegation.') - - public_keys = [] - for public_key in parsed_arguments.pubkeys: - imported_pubkey = import_publickey_from_file(public_key) - public_keys.append(imported_pubkey) - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - if parsed_arguments.role == 'targets': - repository.targets.delegate(parsed_arguments.delegatee, public_keys, - parsed_arguments.delegate, parsed_arguments.threshold, - parsed_arguments.terminating, list_of_targets=None, - path_hash_prefixes=None) - - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - - repository.targets.load_signing_key(targets_private) - - # A delegated (non-top-level-Targets) role. 
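-  # For example (hypothetical invocation):
-  #
-  #   $ repo.py --delegate 'django/*' --role django --delegatee django-beta \
-  #     --pubkeys ./django-beta_key.pub
-  #
-  # delegates matching targets from the existing 'django' role instead of
-  # the top-level Targets role handled above.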
- else: - repository.targets(parsed_arguments.role).delegate( - parsed_arguments.delegatee, public_keys, - parsed_arguments.delegate, parsed_arguments.threshold, - parsed_arguments.terminating, list_of_targets=None, - path_hash_prefixes=None) - - # Update the required top-level roles, Snapshot and Timestamp, to make a new - # release. Automatically making a new release can be disabled via - # --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def revoke(parsed_arguments): - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - if parsed_arguments.role == 'targets': - repository.targets.revoke(parsed_arguments.delegatee) - - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - - repository.targets.load_signing_key(targets_private) - - # A non-top-level role. - else: - repository.targets(parsed_arguments.role).revoke(parsed_arguments.delegatee) - - role_privatekey = import_privatekey_from_file(parsed_arguments.sign) - - repository.targets(parsed_arguments.role).load_signing_key(role_privatekey) - - # Update the required top-level roles, Snapshot and Timestamp, to make a new - # release. Automatically making a new release can be disabled via - # --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def gen_key(parsed_arguments): - - if parsed_arguments.filename: - parsed_arguments.filename = os.path.join(parsed_arguments.path, - KEYSTORE_DIR, parsed_arguments.filename) - - keypath = None - - keygen_kwargs = { - "password": parsed_arguments.pw, - "filepath": parsed_arguments.filename, - "prompt": (not parsed_arguments.pw) # prompt if no default or passed pw - } - - if parsed_arguments.key not in SUPPORTED_CLI_KEYTYPES: - tuf.exceptions.Error( - 'Invalid key type: ' + repr(parsed_arguments.key) + '. 
Supported' - ' key types: ' + repr(SUPPORTED_CLI_KEYTYPES)) - - elif parsed_arguments.key == ECDSA_KEYTYPE: - keypath = securesystemslib.interface._generate_and_write_ecdsa_keypair( - **keygen_kwargs) - - elif parsed_arguments.key == ED25519_KEYTYPE: - keypath = securesystemslib.interface._generate_and_write_ed25519_keypair( - **keygen_kwargs) - - # RSA key.. - else: - keypath = securesystemslib.interface._generate_and_write_rsa_keypair( - **keygen_kwargs) - - - # If a filename is not given, the generated keypair is saved to the current - # working directory. By default, the keypair is written to .pub - # and (private key). - if not parsed_arguments.filename: - privkey_repo_path = os.path.join(parsed_arguments.path, - KEYSTORE_DIR, os.path.basename(keypath)) - pubkey_repo_path = os.path.join(parsed_arguments.path, - KEYSTORE_DIR, os.path.basename(keypath + '.pub')) - - securesystemslib.util.ensure_parent_dir(privkey_repo_path) - securesystemslib.util.ensure_parent_dir(pubkey_repo_path) - - # Move them from the CWD to the repo's keystore. - shutil.move(keypath, privkey_repo_path) - shutil.move(keypath + '.pub', pubkey_repo_path) - - - -def import_privatekey_from_file(keypath, password=None): - # Note: should securesystemslib support this functionality (import any - # privatekey type)? - # If the caller does not provide a password argument, prompt for one. - # Password confirmation is disabled here, which should ideally happen only - # when creating encrypted key files. - if password is None: # pragma: no cover - - # It is safe to specify the full path of 'filepath' in the prompt and not - # worry about leaking sensitive information about the key's location. - # However, care should be taken when including the full path in exceptions - # and log files. - password = securesystemslib.interface.get_password('Enter a password for' - ' the encrypted key (' + interface.TERM_RED + repr(keypath) + interface.TERM_RED + '): ', - confirm=False) - - # Does 'password' have the correct format? - securesystemslib.formats.PASSWORD_SCHEMA.check_match(password) - - # Store the encrypted contents of 'filepath' prior to calling the decryption - # routine. - encrypted_key = None - - with open(keypath, 'rb') as file_object: - encrypted_key = file_object.read().decode('utf-8') - - # Decrypt the loaded key file, calling the 'cryptography' library to generate - # the derived encryption key from 'password'. Raise - # 'securesystemslib.exceptions.CryptoError' if the decryption fails. - try: - key_object = securesystemslib.keys.decrypt_key(encrypted_key, password) - - except securesystemslib.exceptions.CryptoError: - try: - logger.debug( - 'Decryption failed. Attempting to import a private PEM instead.') - key_object = securesystemslib.keys.import_rsakey_from_private_pem( - encrypted_key, 'rsassa-pss-sha256', password) - - except securesystemslib.exceptions.CryptoError as error: - six.raise_from(tuf.exceptions.Error(repr(keypath) + ' cannot be ' - ' imported, possibly because an invalid key file is given or ' - ' the decryption password is incorrect.'), error) - - if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise tuf.exceptions.Error('Trying to import an unsupported key' - ' type: ' + repr(key_object['keytype'] + '.' - ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES))) - - else: - # Add "keyid_hash_algorithms" so that equal keys with different keyids can - # be associated using supported keyid_hash_algorithms. 
- key_object['keyid_hash_algorithms'] = securesystemslib.settings.HASH_ALGORITHMS - - return key_object - - - -def import_publickey_from_file(keypath): - - try: - key_metadata = securesystemslib.util.load_json_file(keypath) - - # An RSA public key is saved to disk in PEM format (not JSON), so the - # load_json_file() call above can fail for this reason. Try to potentially - # load the PEM string in keypath if an exception is raised. - except securesystemslib.exceptions.Error: - key_metadata = securesystemslib.interface.import_rsa_publickey_from_file( - keypath) - - key_object, junk = securesystemslib.keys.format_metadata_to_key(key_metadata) - - if key_object['keytype'] not in SUPPORTED_KEY_TYPES: - raise tuf.exceptions.Error('Trying to import an unsupported key' - ' type: ' + repr(key_object['keytype'] + '.' - ' Supported key types: ' + repr(SUPPORTED_KEY_TYPES))) - - else: - return key_object - - - -def add_verification_key(parsed_arguments): - if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error('--pubkeys must be given with --trust.') - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - for keypath in parsed_arguments.pubkeys: - imported_pubkey = import_publickey_from_file(keypath) - - if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error('The given --role is not a top-level role.') - - elif parsed_arguments.role == 'root': - repository.root.add_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'targets': - repository.targets.add_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'snapshot': - repository.snapshot.add_verification_key(imported_pubkey) - - # The timestamp role.. - else: - repository.timestamp.add_verification_key(imported_pubkey) - - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.write('root', consistent_snapshot=consistent_snapshot, - increment_version_number=False) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def remove_verification_key(parsed_arguments): - if not parsed_arguments.pubkeys: - raise tuf.exceptions.Error('--pubkeys must be given with --distrust.') - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - for keypath in parsed_arguments.pubkeys: - imported_pubkey = import_publickey_from_file(keypath) - - try: - if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error('The given --role is not a top-level role.') - - elif parsed_arguments.role == 'root': - repository.root.remove_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'targets': - repository.targets.remove_verification_key(imported_pubkey) - - elif parsed_arguments.role == 'snapshot': - repository.snapshot.remove_verification_key(imported_pubkey) - - # The Timestamp key.. - else: - repository.timestamp.remove_verification_key(imported_pubkey) - - # It is assumed remove_verification_key() only raises - # securesystemslib.exceptions.Error and - # securesystemslib.exceptions.FormatError, and the latter is not raised - # because a valid key should have been returned by - # import_publickey_from_file(). - except securesystemslib.exceptions.Error: - print(repr(keypath) + ' is not a trusted key. 
Skipping.') - - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.write('root', consistent_snapshot=consistent_snapshot, - increment_version_number=False) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def sign_role(parsed_arguments): - - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - - for keypath in parsed_arguments.sign: - - role_privatekey = import_privatekey_from_file(keypath) - - if parsed_arguments.role == 'targets': - repository.targets.load_signing_key(role_privatekey) - - elif parsed_arguments.role == 'root': - repository.root.load_signing_key(role_privatekey) - - elif parsed_arguments.role == 'snapshot': - repository.snapshot.load_signing_key(role_privatekey) - - elif parsed_arguments.role == 'timestamp': - repository.timestamp.load_signing_key(role_privatekey) - - else: - # TODO: repository_tool.py will be refactored to clean up the following - # code, which adds and signs for a non-existent role. - if not tuf.roledb.role_exists(parsed_arguments.role): - - # Load the private key keydb and set the roleinfo in roledb so that - # metadata can be written with repository.write(). - tuf.keydb.remove_key(role_privatekey['keyid'], - repository_name = repository._repository_name) - tuf.keydb.add_key( - role_privatekey, repository_name = repository._repository_name) - - # Set the delegated metadata file to expire in 3 months. - expiration = tuf.formats.unix_timestamp_to_datetime( - int(time.time() + 7889230)) - expiration = expiration.isoformat() + 'Z' - - roleinfo = {'name': parsed_arguments.role, - 'keyids': [role_privatekey['keyid']], - 'signing_keyids': [role_privatekey['keyid']], - 'partial_loaded': False, 'paths': {}, - 'signatures': [], 'version': 1, 'expires': expiration, - 'delegations': {'keys': {}, 'roles': []}} - - tuf.roledb.add_role(parsed_arguments.role, roleinfo, - repository_name=repository._repository_name) - - # Generate the Targets object of --role, and add it to the top-level - # 'targets' object. - new_targets_object = repo_tool.Targets(repository._targets_directory, - parsed_arguments.role, roleinfo, - repository_name=repository._repository_name) - repository.targets._delegated_roles[parsed_arguments.role] = new_targets_object - - else: - repository.targets(parsed_arguments.role).load_signing_key(role_privatekey) - - # Write the Targets metadata now that it's been modified. Once write() is - # called on a role, it is no longer considered "dirty" and the role will not - # be written again if another write() or writeall() were subsequently made. - repository.write(parsed_arguments.role, - consistent_snapshot=consistent_snapshot, increment_version_number=False) - - # Write the updated top-level roles, if any. Also write Snapshot and - # Timestamp to make a new release. Automatically making a new release can be - # disabled via --no_release. 
- if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def clean_repo(parsed_arguments): - repo_dir = os.path.join(parsed_arguments.path, REPO_DIR) - client_dir = os.path.join(parsed_arguments.path, CLIENT_DIR) - keystore_dir = os.path.join(parsed_arguments.path, KEYSTORE_DIR) - - shutil.rmtree(repo_dir, ignore_errors=True) - shutil.rmtree(client_dir, ignore_errors=True) - shutil.rmtree(keystore_dir, ignore_errors=True) - - - -def write_to_live_repo(parsed_arguments): - staged_meta_directory = os.path.join( - parsed_arguments.path, REPO_DIR, STAGED_METADATA_DIR) - live_meta_directory = os.path.join( - parsed_arguments.path, REPO_DIR, METADATA_DIR) - - shutil.rmtree(live_meta_directory, ignore_errors=True) - shutil.copytree(staged_meta_directory, live_meta_directory) - - - -def add_target_to_repo(parsed_arguments, target_path, repo_targets_path, - repository, custom=None): - """ - (1) Copy 'target_path' to 'repo_targets_path'. - (2) Add 'target_path' to Targets metadata of 'repository'. - """ - - if custom is None: - custom = {} - - if not os.path.exists(target_path): - logger.debug(repr(target_path) + ' does not exist. Skipping.') - - else: - securesystemslib.util.ensure_parent_dir( - os.path.join(repo_targets_path, target_path)) - shutil.copy(target_path, os.path.join(repo_targets_path, target_path)) - - - roleinfo = tuf.roledb.get_roleinfo( - parsed_arguments.role, repository_name=repository._repository_name) - - # It is assumed we have a delegated role, and that the caller has made - # sure to reject top-level roles specified with --role. - if target_path not in roleinfo['paths']: - logger.debug('Adding new target: ' + repr(target_path)) - roleinfo['paths'].update({target_path: custom}) - - else: - logger.debug('Replacing target: ' + repr(target_path)) - roleinfo['paths'].update({target_path: custom}) - - tuf.roledb.update_roleinfo(parsed_arguments.role, roleinfo, - mark_role_as_dirty=True, repository_name=repository._repository_name) - - - -def remove_target_files_from_metadata(parsed_arguments, repository): - - if parsed_arguments.role in ('root', 'snapshot', 'timestamp'): - raise tuf.exceptions.Error( - 'Invalid rolename specified: ' + repr(parsed_arguments.role) + '.' - ' It must be "targets" or a delegated rolename.') - - else: - # NOTE: The following approach of using tuf.roledb to update the target - # files will be modified in the future when the repository tool's API is - # refactored. 
- roleinfo = tuf.roledb.get_roleinfo( - parsed_arguments.role, repository._repository_name) - - for glob_pattern in parsed_arguments.remove: - for path in list(six.iterkeys(roleinfo['paths'])): - if fnmatch.fnmatch(path, glob_pattern): - del roleinfo['paths'][path] - - else: - logger.debug('Delegated path ' + repr(path) + ' does not match' - ' given path/glob pattern ' + repr(glob_pattern)) - continue - - tuf.roledb.update_roleinfo( - parsed_arguments.role, roleinfo, mark_role_as_dirty=True, - repository_name=repository._repository_name) - - - -def add_targets(parsed_arguments): - repo_targets_path = os.path.join(parsed_arguments.path, REPO_DIR, 'targets') - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - # Copy the target files in --path to the repo directory, and - # add them to Targets metadata. Make sure to also copy & add files - # in directories (and subdirectories, if --recursive is True). - for target_path in parsed_arguments.add: - if os.path.isdir(target_path): - for sub_target_path in repository.get_filepaths_in_directory( - target_path, parsed_arguments.recursive): - add_target_to_repo(parsed_arguments, sub_target_path, - repo_targets_path, repository) - - else: - add_target_to_repo(parsed_arguments, target_path, - repo_targets_path, repository) - - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - - if parsed_arguments.role == 'targets': - # Load the top-level, non-root, keys to make a new release. - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - repository.targets.load_signing_key(targets_private) - - elif parsed_arguments.role not in ('root', 'snapshot', 'timestamp'): - repository.write(parsed_arguments.role, - consistent_snapshot=consistent_snapshot, increment_version_number=True) - return - - # Update the required top-level roles, Snapshot and Timestamp, to make a new - # release. Automatically making a new release can be disabled via - # --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def remove_targets(parsed_arguments): - repository = repo_tool.load_repository( - os.path.join(parsed_arguments.path, REPO_DIR)) - - # Remove target files from the Targets metadata (or the role specified in - # --role) that match the glob patterns specified in --remove. 
- remove_target_files_from_metadata(parsed_arguments, repository) - - # Examples of how the --pw command-line option is interpreted: - # repo.py --init': parsed_arguments.pw = 'pw' - # repo.py --init --pw my_password: parsed_arguments.pw = 'my_password' - # repo.py --init --pw: The user is prompted for a password, as follows: - if not parsed_arguments.pw: - parsed_arguments.pw = securesystemslib.interface.get_password( - prompt='Enter a password for the top-level role keys: ', confirm=True) - - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - parsed_arguments.targets_pw) - repository.targets.load_signing_key(targets_private) - - # Load the top-level keys for Snapshot and Timestamp to make a new release. - # Automatically making a new release can be disabled via --no_release. - if not parsed_arguments.no_release: - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - consistent_snapshot = tuf.roledb.get_roleinfo('root', - repository._repository_name)['consistent_snapshot'] - repository.writeall(consistent_snapshot=consistent_snapshot) - - # Move staged metadata directory to "live" metadata directory. - write_to_live_repo(parsed_arguments) - - - -def init_repo(parsed_arguments): - """ - Create a repo at the specified location in --path (the current working - directory, by default). Each top-level role has one key, if --bare' is False - (default). - """ - - repo_path = os.path.join(parsed_arguments.path, REPO_DIR) - repository = repo_tool.create_new_repository(repo_path) - - if not parsed_arguments.bare: - set_top_level_keys(repository, parsed_arguments) - repository.writeall(consistent_snapshot=parsed_arguments.consistent) - - else: - repository.write( - 'root', consistent_snapshot=parsed_arguments.consistent) - repository.write('targets', consistent_snapshot=parsed_arguments.consistent) - repository.write('snapshot', consistent_snapshot=parsed_arguments.consistent) - repository.write('timestamp', consistent_snapshot=parsed_arguments.consistent) - - write_to_live_repo(parsed_arguments) - - # Create the client files. The client directory contains the required - # directory structure and metadata files for clients to successfully perform - # an update. - repo_tool.create_tuf_client_directory( - os.path.join(parsed_arguments.path, REPO_DIR), - os.path.join(parsed_arguments.path, CLIENT_DIR, REPO_DIR)) - - - -def set_top_level_keys(repository, parsed_arguments): - """ - Generate, write, and set the top-level keys. 'repository' is modified. - """ - - # Examples of how the --*_pw command-line options are interpreted: - # repo.py --init': parsed_arguments.*_pw = 'pw' - # repo.py --init --*_pw my_pw: parsed_arguments.*_pw = 'my_pw' - # repo.py --init --*_pw: The user is prompted for a password. 
- - securesystemslib.interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.root_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, ROOT_KEY_NAME), - prompt=(not parsed_arguments.root_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.targets_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), - prompt=(not parsed_arguments.targets_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.snapshot_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), - prompt=(not parsed_arguments.snapshot_pw)) - securesystemslib.interface._generate_and_write_ed25519_keypair( - password=parsed_arguments.timestamp_pw, - filepath=os.path.join(parsed_arguments.path, KEYSTORE_DIR, TIMESTAMP_KEY_NAME), - prompt=(not parsed_arguments.timestamp_pw)) - - # Import the private keys. They are needed to generate the signatures - # included in metadata. - root_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - ROOT_KEY_NAME), parsed_arguments.root_pw) - targets_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TARGETS_KEY_NAME), parsed_arguments.targets_pw) - snapshot_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - SNAPSHOT_KEY_NAME), parsed_arguments.snapshot_pw) - timestamp_private = import_privatekey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) - - # Import the public keys. They are needed so that metadata roles are - # assigned verification keys, which clients need in order to verify the - # signatures created by the corresponding private keys. - root_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - ROOT_KEY_NAME) + '.pub') - targets_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TARGETS_KEY_NAME) + '.pub') - snapshot_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - SNAPSHOT_KEY_NAME) + '.pub') - timestamp_public = import_publickey_from_file( - os.path.join(parsed_arguments.path, KEYSTORE_DIR, - TIMESTAMP_KEY_NAME) + '.pub') - - # Add the verification keys to the top-level roles. - repository.root.add_verification_key(root_public) - repository.targets.add_verification_key(targets_public) - repository.snapshot.add_verification_key(snapshot_public) - repository.timestamp.add_verification_key(timestamp_public) - - # Load the previously imported signing keys for the top-level roles so that - # valid metadata can be written. - repository.root.load_signing_key(root_private) - repository.targets.load_signing_key(targets_private) - repository.snapshot.load_signing_key(snapshot_private) - repository.timestamp.load_signing_key(timestamp_private) - - - -def parse_arguments(): - """ - - Parse the command-line arguments. Also set the logging level, as specified - via the --verbose argument (2, by default). - - Example: - # Create a TUF repository in the current working directory. The - # top-level roles are created, each containing one key. - $ repo.py --init - - $ repo.py --init --bare --consistent --verbose 3 - - If a required argument is unset, a parser error is printed and the script - exits. - - - None. - - - None. - - - Sets the logging level for TUF logging. 
- - - A tuple ('options.REPOSITORY_PATH', command, command_arguments). 'command' - 'command_arguments' correspond to a repository tool fuction. - """ - - parser = argparse.ArgumentParser( - description='Create or modify a TUF repository.') - - parser.add_argument('-i', '--init', action='store_true', - help='Create a repository. The "tufrepo", "tufkeystore", and' - ' "tufclient" directories are created in the current working' - ' directory, unless --path is specified.') - - parser.add_argument('-p', '--path', nargs='?', default='.', - metavar='', help='Specify a repository path. If used' - ' with --init, the initialized repository is saved to the given' - ' path.') - - parser.add_argument('-b', '--bare', action='store_true', - help='If initializing a repository, neither create nor set keys' - ' for any of the top-level roles. False, by default.') - - parser.add_argument('--no_release', action='store_true', - help='Do not automatically sign Snapshot and Timestamp metadata.' - ' False, by default.') - - parser.add_argument('--consistent', action='store_true', - help='Set consistent snapshots for an initialized repository.' - ' Consistent snapshot is False by default.') - - parser.add_argument('-c', '--clean', type=str, nargs='?', const='.', - metavar='', help='Delete the repo files from the' - ' specified directory. If a directory is not specified, the current' - ' working directory is cleaned.') - - parser.add_argument('-a', '--add', type=str, nargs='+', - metavar='', help='Add one or more target files to the' - ' "targets" role (or the role specified in --role). If a directory' - ' is given, all files in the directory are added.') - - parser.add_argument('--remove', type=str, nargs='+', - metavar='', help='Remove one or more target files from the' - ' "targets" role (or the role specified in --role).') - - parser.add_argument('--role', nargs='?', type=str, const='targets', - default='targets', metavar='', help='Specify a rolename.' - ' The rolename "targets" is used by default.') - - parser.add_argument('-r', '--recursive', action='store_true', - help='By setting -r, any directory specified with --add is processed' - ' recursively. If unset, the default behavior is to not add target' - ' files in subdirectories.') - - parser.add_argument('-k', '--key', type=str, nargs='?', const=ED25519_KEYTYPE, - default=None, choices=[ECDSA_KEYTYPE, ED25519_KEYTYPE, RSA_KEYTYPE], - help='Generate an ECDSA, Ed25519, or RSA key. An Ed25519 key is' - ' created if the key type is unspecified.') - - parser.add_argument('--filename', nargs='?', default=None, const=None, - metavar='', help='Specify a filename. This option can' - ' be used to name a generated key file. The top-level keys should' - ' be named "root_key", "targets_key", "snapshot_key", "timestamp_key."') - - parser.add_argument('--trust', action='store_true', - help='Indicate the trusted key(s) (via --pubkeys) for the role in --role.' - ' This action modifies Root metadata with the trusted key(s).') - - parser.add_argument('--distrust', action='store_true', - help='Discontinue trust of key(s) (via --pubkeys) for the role in --role.' - ' This action modifies Root metadata by removing trusted key(s).') - - parser.add_argument('--sign', nargs='+', type=str, - metavar='', help='Sign the "targets"' - ' metadata (or the one for --role) with the specified key(s).') - - parser.add_argument('--pw', nargs='?', default='pw', metavar='', - help='Specify a password. 
"pw" is used if --pw is unset, or a' - ' password can be entered via a prompt by specifying --pw by itself.' - ' This option can be used with --sign and --key.') - - parser.add_argument('--root_pw', nargs='?', default='pw', metavar='', - help='Specify a Root password. "pw" is used if --pw is unset, or a' - ' password can be entered via a prompt by specifying --pw by itself.') - - parser.add_argument('--targets_pw', nargs='?', default='pw', metavar='', - help='Specify a Targets password. "pw" is used if --pw is unset, or a' - ' password can be entered via a prompt by specifying --pw by itself.') - - parser.add_argument('--snapshot_pw', nargs='?', default='pw', metavar='', - help='Specify a Snapshot password. "pw" is used if --pw is unset, or a' - ' password can be entered via a prompt by specifying --pw by itself.') - - parser.add_argument('--timestamp_pw', nargs='?', default='pw', metavar='', - help='Specify a Timestamp password. "pw" is used if --pw is unset, or a' - ' password can be entered via a prompt by specifying --pw by itself.') - - parser.add_argument('-d', '--delegate', type=str, nargs='+', - metavar='', help='Delegate trust of target files' - ' from the "targets" role (or --role) to some other role (--delegatee).' - ' The named delegatee is trusted to sign for the target files that' - ' match the glob pattern(s).') - - parser.add_argument('--delegatee', nargs='?', type=str, const=None, - default=None, metavar='', help='Specify the rolename' - ' of the delegated role. Can be used with --delegate.') - - parser.add_argument('-t', '--terminating', action='store_true', - help='Set the terminating flag to True. Can be used with --delegate.') - - parser.add_argument('--threshold', type=int, default=1, metavar='', - help='Set the threshold number of signatures' - ' needed to validate a metadata file. Can be used with --delegate.') - - parser.add_argument('--pubkeys', type=str, nargs='+', - metavar='', help='Specify one or more public keys' - ' for the delegated role. Can be used with --delegate.') - - parser.add_argument('--revoke', action='store_true', - help='Revoke trust of target files from a delegated role.') - - # Add the parser arguments supported by PROG_NAME. - parser.add_argument('-v', '--verbose', type=int, default=2, - choices=range(0, 6), help='Set the verbosity level of logging messages.' - ' The lower the setting, the greater the verbosity. Supported logging' - ' levels: 0=UNSET, 1=DEBUG, 2=INFO, 3=WARNING, 4=ERROR,' - ' 5=CRITICAL') - - # Should we include usage examples in the help output? - - parsed_args = parser.parse_args() - - # Set the logging level. - logging_levels = [logging.NOTSET, logging.DEBUG, - logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL] - - tuf.log.set_log_level(logging_levels[parsed_args.verbose]) - - return parsed_args - - - -if __name__ == '__main__': - - # Parse the arguments and set the logging level. - arguments = parse_arguments() - - # Create or modify the repository depending on the option specified on the - # command line. For example, the following adds the 'foo.bar.gz' to the - # default repository and updates the relevant metadata (i.e., Targets, - # Snapshot, and Timestamp metadata are updated): - # $ repo.py --add foo.bar.gz - - try: - process_command_line_arguments(arguments) - - except (tuf.exceptions.Error) as e: - sys.stderr.write('Error: ' + str(e) + '\n') - sys.exit(1) - - # Successfully created or updated the TUF repository. 
-  sys.exit(0)

From f9d20d90dbdf5ba54709f42d7b5b5ec752b09543 Mon Sep 17 00:00:00 2001
From: Teodora Sechkova 
Date: Tue, 24 Nov 2020 16:37:14 +0200
Subject: [PATCH 3/3] Add a dummy updater test

Create a dummy test that does not use repository_tool or repository_lib
functionality. test_api.py serves as the example for the test setUp and
tearDown.

Signed-off-by: Teodora Sechkova 
---
 tests/test_updater_no_repo.py | 254 ++++++++++++++++++++++++++++++++++
 1 file changed, 254 insertions(+)
 create mode 100644 tests/test_updater_no_repo.py

diff --git a/tests/test_updater_no_repo.py b/tests/test_updater_no_repo.py
new file mode 100644
index 0000000000..ee3f5a9997
--- /dev/null
+++ b/tests/test_updater_no_repo.py
@@ -0,0 +1,254 @@
+#!/usr/bin/env python
+
+# Copyright 2012 - 2017, New York University and the TUF contributors
+# SPDX-License-Identifier: MIT OR Apache-2.0
+
+"""
+<Program Name>
+  test_updater_no_repo.py
+
+<Author>
+  Konstantin Andrianov.
+
+<Started>
+  October 15, 2012.
+
+  March 11, 2014.
+    Refactored to remove mocked modules and old repository tool dependence, use
+    exact repositories, and add realistic retrieval of files. -vladimir.v.diaz
+
+<Copyright>
+  See LICENSE-MIT OR LICENSE for licensing information.
+
+<Purpose>
+  'test_updater_no_repo.py' provides a dummy test that exercises
+  'tuf.client.updater.py' without relying on the removed repository tools.
+
+  The 'unittest_toolbox.py' module was created to provide additional testing
+  tools, such as automatically deleting temporary files created in test cases.
+  For more information, see 'tests/unittest_toolbox.py'.
+
+<Methodology>
+  Test cases here should follow a specific order (i.e., independent methods are
+  tested before dependent methods). More accurately, least dependent methods
+  are tested before most dependent methods. There is no reason to rewrite or
+  construct other methods that replicate already-tested methods solely for
+  testing purposes. This is possible because the 'unittest.TestCase' class
+  guarantees the order of unit tests. The 'test_something_A' method would
+  be tested before 'test_something_B'. To ensure the expected order of tests,
+  a number is placed after 'test' and before the method's name like so:
+  'test_1_check_directory'. The number is a measure of dependence, where 1 is
+  less dependent than 2.
+"""

+# Help with Python 3 compatibility, where the print statement is a function, an
+# implicit relative import is invalid, and the '/' operator performs true
+# division. Example: print 'hello world' raises a 'SyntaxError' exception.
+from __future__ import print_function
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import unicode_literals
+
+import os
+import time
+import shutil
+import copy
+import tempfile
+import logging
+import errno
+import sys
+import unittest
+
+import tuf
+import tuf.exceptions
+import tuf.log
+import tuf.unittest_toolbox as unittest_toolbox
+import tuf.client.updater as updater
+
+import utils
+
+import securesystemslib
+import six
+
+from securesystemslib.interface import (
+  import_ed25519_publickey_from_file,
+  import_ed25519_privatekey_from_file,
+  import_rsa_publickey_from_file,
+  import_rsa_privatekey_from_file
+)
+
+logger = logging.getLogger(__name__)
+
+
+class TestUpdater(unittest_toolbox.Modified_TestCase):
+
+  @classmethod
+  def setUpClass(cls):
+    # Create a temporary directory to store the repository, metadata, and
+    # target files. 'temporary_directory' must be deleted in tearDownClass()
+    # so that temporary files are always removed, even when exceptions occur.
+    cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd())
+
+    # Needed because in some tests simple_server.py cannot be found, since
+    # the current working directory has been changed when executing a
+    # subprocess.
+    cls.SIMPLE_SERVER_PATH = os.path.join(os.getcwd(), 'simple_server.py')
+
+    # Launch a SimpleHTTPServer (serves files in the current directory).
+    # Test cases will request metadata and target files that have been
+    # pre-generated in 'tuf/tests/repository_data', which will be served
+    # by the SimpleHTTPServer launched here. The test cases here assume
+    # the pre-generated metadata files have a specific structure, such
+    # as a delegated role 'targets/role1', three target files, five key files,
+    # etc.
+    cls.server_process_handler = utils.TestServerProcess(log=logger,
+        server=cls.SIMPLE_SERVER_PATH)
+
+
+
+  @classmethod
+  def tearDownClass(cls):
+    # Kill the server subprocess and close the temp file used for logging.
+    cls.server_process_handler.clean()
+
+    # Remove the temporary repository directory, which should contain all the
+    # metadata, targets, and key files generated for the test cases.
+    shutil.rmtree(cls.temporary_directory)
+
+
+
+  def setUp(self):
+    # We are inheriting from a custom class.
+    unittest_toolbox.Modified_TestCase.setUp(self)
+    self.repository_name = 'test_repository1'
+
+    # Copy the original repository files provided in the test folder so that
+    # any modifications made to repository files are restricted to the copies.
+    # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
+    original_repository_files = os.path.join(os.getcwd(), 'repository_data')
+    temporary_repository_root = \
+        self.make_temp_directory(directory=self.temporary_directory)
+
+    # The original repository, keystore, and client directories will be copied
+    # for each test case.
+    original_repository = os.path.join(original_repository_files, 'repository')
+    original_keystore = os.path.join(original_repository_files, 'keystore')
+    original_client = os.path.join(original_repository_files, 'client')
+
+    # Save references to the often-needed client repository directories.
+    # Test cases need these references to access metadata and target files.
+    self.repository_directory = \
+        os.path.join(temporary_repository_root, 'repository')
+    self.keystore_directory = \
+        os.path.join(temporary_repository_root, 'keystore')
+
+    self.client_directory = os.path.join(temporary_repository_root,
+        'client')
+    self.client_metadata = os.path.join(self.client_directory,
+        self.repository_name, 'metadata', 'current')
+
+    # Copy the original 'repository', 'client', and 'keystore' directories
+    # to the temporary repository that the test cases can use.
+    shutil.copytree(original_repository, self.repository_directory)
+    shutil.copytree(original_client, self.client_directory)
+    shutil.copytree(original_keystore, self.keystore_directory)
+
+    # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
+    repository_basepath = self.repository_directory[len(os.getcwd()):]
+    url_prefix = 'http://localhost:' \
+        + str(self.server_process_handler.port) + repository_basepath
+
+    # Point 'tuf.settings.repositories_directory' to the temporary client
+    # directory copied from the original repository files.
+    tuf.settings.repositories_directory = self.client_directory
+
+    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
+        'metadata_path': 'metadata',
+        'targets_path': 'targets',
+        'confined_target_dirs': ['']}}
+
+    # Creating a repository instance. The test cases will use this client
+    # updater to refresh metadata, fetch target files, etc.
+    self.repository_updater = updater.Updater(self.repository_name,
+        self.repository_mirrors)
+
+    # Metadata role keys are needed by the test cases to make changes to the
+    # repository (e.g., adding a new target file to 'targets.json' and then
+    # requesting a refresh()).
+    self.role_keys = _load_role_keys(self.keystore_directory)
+
+
+
+  def tearDown(self):
+    # We are inheriting from a custom class.
+    unittest_toolbox.Modified_TestCase.tearDown(self)
+
+    # Logs stdout and stderr from the server subprocess.
+    self.server_process_handler.flush_log()
+
+
+  # UNIT TESTS.
+  def test_refresh(self):
+    self.repository_updater.refresh()
+
+
+
+
+def _load_role_keys(keystore_directory):
+
+  # Populating 'role_keys' by importing the required public and private
+  # keys of 'tuf/tests/repository_data/'. The role keys are needed when
+  # modifying the remote repository used by the test cases in this unit test.
+
+  # The pre-generated key files in 'repository_data/keystore' are all
+  # encrypted with a 'password' passphrase.
+  EXPECTED_KEYFILE_PASSWORD = 'password'
+
+  # Store and return the cryptography keys of the top-level roles, including
+  # one delegated role.
+  role_keys = {}
+
+  root_key_file = os.path.join(keystore_directory, 'root_key')
+  targets_key_file = os.path.join(keystore_directory, 'targets_key')
+  snapshot_key_file = os.path.join(keystore_directory, 'snapshot_key')
+  timestamp_key_file = os.path.join(keystore_directory, 'timestamp_key')
+  delegation_key_file = os.path.join(keystore_directory, 'delegation_key')
+
+  role_keys = {'root': {}, 'targets': {}, 'snapshot': {}, 'timestamp': {},
+      'role1': {}}
+
+  # Import the top-level and delegated role public keys.
+  role_keys['root']['public'] = \
+    import_rsa_publickey_from_file(root_key_file+'.pub')
+  role_keys['targets']['public'] = \
+    import_ed25519_publickey_from_file(targets_key_file+'.pub')
+  role_keys['snapshot']['public'] = \
+    import_ed25519_publickey_from_file(snapshot_key_file+'.pub')
+  role_keys['timestamp']['public'] = \
+    import_ed25519_publickey_from_file(timestamp_key_file+'.pub')
+  role_keys['role1']['public'] = \
+    import_ed25519_publickey_from_file(delegation_key_file+'.pub')
+
+  # Import the private keys of the top-level and delegated roles.
+  role_keys['root']['private'] = \
+    import_rsa_privatekey_from_file(root_key_file,
+        EXPECTED_KEYFILE_PASSWORD)
+  role_keys['targets']['private'] = \
+    import_ed25519_privatekey_from_file(targets_key_file,
+        EXPECTED_KEYFILE_PASSWORD)
+  role_keys['snapshot']['private'] = \
+    import_ed25519_privatekey_from_file(snapshot_key_file,
+        EXPECTED_KEYFILE_PASSWORD)
+  role_keys['timestamp']['private'] = \
+    import_ed25519_privatekey_from_file(timestamp_key_file,
+        EXPECTED_KEYFILE_PASSWORD)
+  role_keys['role1']['private'] = \
+    import_ed25519_privatekey_from_file(delegation_key_file,
+        EXPECTED_KEYFILE_PASSWORD)
+
+  return role_keys
+
+
+if __name__ == '__main__':
+  utils.configure_test_logging(sys.argv)
+  unittest.main()
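
As a usage sketch (not part of the patch): the client-only flow that the new
dummy test exercises boils down to the snippet below. The repository name,
port, and directory path are illustrative placeholders; the sketch assumes a
repository laid out like 'tests/repository_data/repository' is being served
over HTTP and that the client metadata was initialized as in setUp() above.

  import tuf.settings
  import tuf.client.updater as updater

  # Trusted client metadata lives in a per-repository subdirectory of
  # 'repositories_directory' (here: '/path/to/client/test_repository1').
  tuf.settings.repositories_directory = '/path/to/client'

  # Mirror configuration, mirroring the dictionary built in setUp() above.
  repository_mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
      'metadata_path': 'metadata',
      'targets_path': 'targets',
      'confined_target_dirs': ['']}}

  # Instantiate the client updater and refresh the top-level metadata; this
  # is exactly what test_refresh() verifies runs without the repository tools.
  repository_updater = updater.Updater('test_repository1', repository_mirrors)
  repository_updater.refresh()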