def remove_target_files_from_metadata(parsed_arguments, repository):
  """
  Remove target files from the metadata of the delegated role named by
  'parsed_arguments.role'.  Every path currently listed in the role's
  metadata is tested against each glob pattern in 'parsed_arguments.remove',
  and matching entries are deleted.  The modified roleinfo is saved back to
  roledb and the role is marked dirty so it is rewritten on the next write.

  Raises exceptions.Error if a top-level role other than "targets" is
  specified, since only "targets" or a delegated role can list target files.
  """

  # Guard clause: target files may only be removed from "targets" or a
  # delegated role, never from root/snapshot/timestamp.
  if parsed_arguments.role in ('root', 'snapshot', 'timestamp'):
    raise exceptions.Error(
        'Invalid rolename specified: ' + repr(parsed_arguments.role) + '.'
        ' It must be "targets" or a delegated rolename.')

  # NOTE: The following approach of using roledb to update the target
  # files will be modified in the future when the repository tool's API is
  # refactored.
  roleinfo = roledb.get_roleinfo(
      parsed_arguments.role, repository._repository_name)

  for glob_pattern in parsed_arguments.remove:
    # Iterate over a snapshot of the keys so entries can be deleted from
    # roleinfo['paths'] while looping.  (Replaces the former
    # list(six.iterkeys(...)) py2/py3 shim with the direct equivalent.)
    for path in list(roleinfo['paths']):
      if fnmatch.fnmatch(path, glob_pattern):
        del roleinfo['paths'][path]

      else:
        logger.debug('Delegated path ' + repr(path) + ' does not match'
            ' given path/glob pattern ' + repr(glob_pattern))

  roledb.update_roleinfo(parsed_arguments.role, roleinfo,
      mark_role_as_dirty=True, repository_name=repository._repository_name)
def add_target_to_repo(parsed_arguments, target_path, repo_targets_path,
    repository, custom=None):
  """
  (1) Copy 'target_path' to 'repo_targets_path'.
  (2) Add 'target_path' to Targets metadata of 'repository'.

  'custom' is an optional dict of custom fileinfo to record for the target;
  it defaults to an empty dict (a fresh dict per call, never a shared
  mutable default).  If 'target_path' does not exist on disk the call is a
  no-op apart from a debug log message.
  """

  if custom is None:
    custom = {}

  if not os.path.exists(target_path):
    logger.debug(repr(target_path) + ' does not exist. Skipping.')

  else:
    sslib_util.ensure_parent_dir(
        os.path.join(repo_targets_path, target_path))
    shutil.copy(target_path, os.path.join(repo_targets_path, target_path))

    roleinfo = roledb.get_roleinfo(
        parsed_arguments.role, repository_name=repository._repository_name)

    # It is assumed we have a delegated role, and that the caller has made
    # sure to reject top-level roles specified with --role.
    if target_path not in roleinfo['paths']:
      logger.debug('Adding new target: ' + repr(target_path))

    else:
      logger.debug('Replacing target: ' + repr(target_path))

    # Adding and replacing are the same dict operation; only the log
    # message above differs, so the update is hoisted out of the branches.
    roleinfo['paths'].update({target_path: custom})

    roledb.update_roleinfo(parsed_arguments.role, roleinfo,
        mark_role_as_dirty=True, repository_name=repository._repository_name)
def load_project(project_directory, prefix='', new_targets_location=None,
    repository_name='default'):
  """
  <Purpose>
    Return a Project object initialized with the contents of the metadata
    files loaded from 'project_directory'.

  <Arguments>
    project_directory:
      The path to the project's metadata and configuration file.

    prefix:
      The prefix for the metadata, if defined.  It will replace the current
      prefix, by first removing the existing one (saved).

    new_targets_location:
      For flat project configurations, project owner might want to reload the
      project with a new location for the target files.  This overwrites the
      previous path to search for the target files.

    repository_name:
      The name of the repository.  If not supplied, 'rolename' is added to
      the 'default' repository.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if 'project_directory' or any
    of the metadata files are improperly formatted.

  <Side Effects>
    All the metadata files found in the project are loaded and their
    contents stored in a libtuf.Repository object.  The global role and key
    databases for 'repository_name' are cleared and repopulated.

  <Returns>
    A tuf.developer_tool.Project object.
  """

  # Does 'repository_directory' have the correct format?
  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
  sslib_formats.PATH_SCHEMA.check_match(project_directory)
  sslib_formats.NAME_SCHEMA.check_match(repository_name)

  # Do the same for the prefix
  sslib_formats.ANY_STRING_SCHEMA.check_match(prefix)

  # Clear the role and key databases since we are loading in a new project.
  roledb.clear_roledb(clear_all=True)
  keydb.clear_keydb(clear_all=True)

  # Locate metadata filepaths and targets filepath.
  project_directory = os.path.abspath(project_directory)

  # Load the cfg file and the project.
  config_filename = os.path.join(project_directory, PROJECT_FILENAME)

  project_configuration = sslib_util.load_json_file(config_filename)
  formats.PROJECT_CFG_SCHEMA.check_match(project_configuration)

  targets_directory = os.path.join(project_directory,
      project_configuration['targets_location'])

  # In a "flat" layout the metadata lives beside (not above) the targets,
  # so the project root is the parent directory and the configured
  # targets_location is used as-is (optionally overridden by the caller).
  if project_configuration['layout_type'] == 'flat':
    project_directory, junk = os.path.split(project_directory)
    targets_directory = project_configuration['targets_location']

    if new_targets_location is not None:
      targets_directory = new_targets_location

  metadata_directory = os.path.join(project_directory,
      project_configuration['metadata_location'])

  # Save any caller-supplied prefix for later: the on-disk metadata was
  # written with the configuration file's prefix, so that one must be used
  # while parsing; the new prefix is applied to the project only at the end.
  new_prefix = None

  if prefix != '':
    new_prefix = prefix

  prefix = project_configuration['prefix']

  # Load the project's filename.
  project_name = project_configuration['project_name']
  project_filename = project_name + METADATA_EXTENSION

  # Create a blank project on the target directory.
  project = Project(project_name, metadata_directory, targets_directory,
      prefix, repository_name)

  project.threshold = project_configuration['threshold']
  project.prefix = project_configuration['prefix']
  project.layout_type = project_configuration['layout_type']

  # Traverse the public keys and add them to the project.
  keydict = project_configuration['public_keys']

  for keyid in keydict:
    key, junk = format_metadata_to_key(keydict[keyid])
    project.add_verification_key(key)

  # Load the project's metadata.
  targets_metadata_path = os.path.join(project_directory, metadata_directory,
      project_filename)
  signable = sslib_util.load_json_file(targets_metadata_path)

  try:
    formats.check_signable_object_format(signable)

  except exceptions.UnsignedMetadataError:
    # Downgrade the error to a warning because a use case exists where
    # metadata may be generated unsigned on one machine and signed on
    # another.
    logger.warning('Unsigned metadata object: ' + repr(signable))

  targets_metadata = signable['signed']

  # Remove the prefix from the metadata.
  targets_metadata = _strip_prefix_from_targets_metadata(targets_metadata,
      prefix)

  for signature in signable['signatures']:
    project.add_signature(signature)

  # Update roledb.py containing the loaded project attributes.
  roleinfo = roledb.get_roleinfo(project_name, repository_name)
  roleinfo['signatures'].extend(signable['signatures'])
  roleinfo['version'] = targets_metadata['version']
  roleinfo['paths'] = targets_metadata['targets']
  roleinfo['delegations'] = targets_metadata['delegations']
  roleinfo['partial_loaded'] = False

  # Check if the loaded metadata was partially written and update the
  # flag in 'roledb.py'.
  if _metadata_is_partially_loaded(project_name, signable,
      repository_name=repository_name):
    roleinfo['partial_loaded'] = True

  roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False,
      repository_name=repository_name)

  # Register the delegation keys and role entries of the top-level project
  # role so the delegated metadata loaded below can be verified.
  for key_metadata in targets_metadata['delegations']['keys'].values():
    key_object, junk = format_metadata_to_key(key_metadata)
    keydb.add_key(key_object, repository_name=repository_name)

  for role in targets_metadata['delegations']['roles']:
    rolename = role['name']
    roleinfo = {'name': role['name'], 'keyids': role['keyids'],
        'threshold': role['threshold'],
        'signing_keyids': [], 'signatures': [], 'partial_loaded': False,
        'delegations': {'keys': {}, 'roles': []}}
    roledb.add_role(rolename, roleinfo, repository_name=repository_name)

  # Load the delegated metadata and generate their fileinfo.
  targets_objects = {}
  loaded_metadata = [project_name]
  targets_objects[project_name] = project
  metadata_directory = os.path.join(project_directory, metadata_directory)

  if os.path.exists(metadata_directory) and \
      os.path.isdir(metadata_directory):
    for metadata_role in os.listdir(metadata_directory):
      metadata_path = os.path.join(metadata_directory, metadata_role)
      metadata_name = \
          metadata_path[len(metadata_directory):].lstrip(os.path.sep)

      # Strip the extension. The roledb does not include an appended '.json'
      # extension for each role.
      if metadata_name.endswith(METADATA_EXTENSION):
        extension_length = len(METADATA_EXTENSION)
        metadata_name = metadata_name[:-extension_length]

      else:
        # Skip files that are not metadata (wrong or missing extension).
        continue

      # Skip the top-level project metadata, which was loaded above.
      if metadata_name in loaded_metadata:
        continue

      signable = None
      signable = sslib_util.load_json_file(metadata_path)

      # Strip the prefix from the local working copy, it will be added again
      # when the targets metadata is written to disk.
      metadata_object = signable['signed']
      metadata_object = _strip_prefix_from_targets_metadata(metadata_object,
          prefix)

      roleinfo = roledb.get_roleinfo(metadata_name, repository_name)
      roleinfo['signatures'].extend(signable['signatures'])
      roleinfo['version'] = metadata_object['version']
      roleinfo['expires'] = metadata_object['expires']
      roleinfo['paths'] = {}

      # Rebuild the role's path dict, keeping only each target's optional
      # 'custom' fileinfo (empty dict when absent).
      for filepath, fileinfo in metadata_object['targets'].items():
        roleinfo['paths'].update({filepath: fileinfo.get('custom', {})})

      roleinfo['delegations'] = metadata_object['delegations']
      roleinfo['partial_loaded'] = False

      # If the metadata was partially loaded, update the roleinfo flag.
      if _metadata_is_partially_loaded(metadata_name, signable,
          repository_name=repository_name):
        roleinfo['partial_loaded'] = True

      roledb.update_roleinfo(metadata_name, roleinfo,
          mark_role_as_dirty=False, repository_name=repository_name)

      # Append to list of elements to avoid reloading repeated metadata.
      loaded_metadata.append(metadata_name)

      # Generate the Targets objects of the delegated roles.
      new_targets_object = Targets(targets_directory, metadata_name,
          roleinfo, repository_name=repository_name)
      targets_object = targets_objects[project_name]
      targets_object._delegated_roles[metadata_name] = new_targets_object

      # Add the keys specified in the delegations field of the Targets role.
      for key_metadata in metadata_object['delegations']['keys'].values():
        key_object, junk = format_metadata_to_key(key_metadata)

        try:
          keydb.add_key(key_object, repository_name=repository_name)

        except exceptions.KeyAlreadyExistsError:
          # A key shared across delegations was already registered above;
          # ignore the duplicate.
          pass

      for role in metadata_object['delegations']['roles']:
        rolename = role['name']
        roleinfo = {'name': role['name'], 'keyids': role['keyids'],
            'threshold': role['threshold'],
            'signing_keyids': [], 'signatures': [], 'partial_loaded': False,
            'delegations': {'keys': {}, 'roles': []}}
        roledb.add_role(rolename, roleinfo, repository_name=repository_name)

  # Now that loading with the on-disk prefix is complete, apply the
  # caller-supplied prefix saved earlier (if any).
  if new_prefix:
    project.prefix = new_prefix

  return project