def add_verification_key(parsed_arguments):
    """Add each public key given with --pubkeys as a verification key of the
    top-level role given with --role, then write updated root metadata (without
    bumping its version) and publish it to the "live" repository.

    Raises exceptions.Error if --pubkeys is not given or --role is not a
    top-level role.
    """
    if not parsed_arguments.pubkeys:
        raise exceptions.Error('--pubkeys must be given with --trust.')

    # Validate --role once, up front.  The original check lived inside the
    # per-key loop, needlessly importing the first key file before failing.
    if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'):
        raise exceptions.Error('The given --role is not a top-level role.')

    repository = repo_tool.load_repository(
        os.path.join(parsed_arguments.path, REPO_DIR))

    for keypath in parsed_arguments.pubkeys:
        imported_pubkey = import_publickey_from_file(keypath)

        if parsed_arguments.role == 'root':
            repository.root.add_verification_key(imported_pubkey)
        elif parsed_arguments.role == 'targets':
            repository.targets.add_verification_key(imported_pubkey)
        elif parsed_arguments.role == 'snapshot':
            repository.snapshot.add_verification_key(imported_pubkey)
        # The timestamp role..
        else:
            repository.timestamp.add_verification_key(imported_pubkey)

    consistent_snapshot = roledb.get_roleinfo(
        'root', repository._repository_name)['consistent_snapshot']
    # A trust change alone must not bump the root version number.
    repository.write('root', consistent_snapshot=consistent_snapshot,
        increment_version_number=False)

    # Move staged metadata directory to "live" metadata directory.
    write_to_live_repo(parsed_arguments)
def remove_target_files_from_metadata(parsed_arguments, repository):
    """Remove from the metadata of --role every target path matching one of
    the glob patterns given with --remove.

    --role must be 'targets' or a delegated rolename; the other top-level
    roles are rejected with exceptions.Error.  The updated roleinfo is saved
    back to roledb and the role is marked dirty.
    """
    if parsed_arguments.role in ('root', 'snapshot', 'timestamp'):
        raise exceptions.Error(
            'Invalid rolename specified: ' + repr(parsed_arguments.role) + '.'
            ' It must be "targets" or a delegated rolename.')

    # NOTE: The following approach of using roledb to update the target
    # files will be modified in the future when the repository tool's API is
    # refactored.
    roleinfo = roledb.get_roleinfo(
        parsed_arguments.role, repository._repository_name)

    for glob_pattern in parsed_arguments.remove:
        # Iterate over a snapshot of the keys since matching entries are
        # deleted from roleinfo['paths'] during the loop.
        for path in list(roleinfo['paths'].keys()):
            if fnmatch.fnmatch(path, glob_pattern):
                del roleinfo['paths'][path]
            else:
                logger.debug('Delegated path ' + repr(path) + ' does not match'
                    ' given path/glob pattern ' + repr(glob_pattern))

    roledb.update_roleinfo(parsed_arguments.role, roleinfo,
        mark_role_as_dirty=True, repository_name=repository._repository_name)
def add_targets(parsed_arguments):
    """Copy the files given with --add into the repository's targets
    directory, add them to Targets metadata (or to the delegated role given
    with --role), write the updated metadata, and publish it.
    """
    repo_targets_path = os.path.join(parsed_arguments.path, REPO_DIR, 'targets')
    repository = repo_tool.load_repository(
        os.path.join(parsed_arguments.path, REPO_DIR))

    # Copy the target files in --path to the repo directory, and
    # add them to Targets metadata.  Make sure to also copy & add files
    # in directories (and subdirectories, if --recursive is True).
    for target_path in parsed_arguments.add:
        if os.path.isdir(target_path):
            for sub_target_path in repository.get_filepaths_in_directory(
                    target_path, parsed_arguments.recursive):
                add_target_to_repo(parsed_arguments, sub_target_path,
                    repo_targets_path, repository)
        else:
            add_target_to_repo(parsed_arguments, target_path,
                repo_targets_path, repository)

    consistent_snapshot = roledb.get_roleinfo(
        'root', repository._repository_name)['consistent_snapshot']

    if parsed_arguments.role == 'targets':
        # Load the top-level, non-root, keys to make a new release.
        targets_private = import_privatekey_from_file(
            os.path.join(parsed_arguments.path, KEYSTORE_DIR,
            TARGETS_KEY_NAME), parsed_arguments.targets_pw)
        repository.targets.load_signing_key(targets_private)

    elif parsed_arguments.role not in ('root', 'snapshot', 'timestamp'):
        # A delegated role: write only that role's metadata and return without
        # making a Snapshot/Timestamp release.
        repository.write(parsed_arguments.role,
            consistent_snapshot=consistent_snapshot,
            increment_version_number=True)
        return

    # NOTE(review): if --role is 'root', 'snapshot', or 'timestamp', neither
    # branch above runs and execution falls through to the release below with
    # no Targets signing key loaded -- presumably callers reject those roles
    # earlier; confirm.

    # Update the required top-level roles, Snapshot and Timestamp, to make a
    # new release.  Automatically making a new release can be disabled via
    # --no_release.
    if not parsed_arguments.no_release:
        snapshot_private = import_privatekey_from_file(
            os.path.join(parsed_arguments.path, KEYSTORE_DIR,
            SNAPSHOT_KEY_NAME), parsed_arguments.snapshot_pw)
        timestamp_private = import_privatekey_from_file(
            os.path.join(parsed_arguments.path, KEYSTORE_DIR,
            TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw)

        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        repository.writeall(consistent_snapshot=consistent_snapshot)

    # Move staged metadata directory to "live" metadata directory.
    write_to_live_repo(parsed_arguments)
def remove_verification_key(parsed_arguments):
    """Remove each public key given with --pubkeys from the verification keys
    of the top-level role given with --role, then write updated root metadata
    (without bumping its version) and publish it.

    A key that is not currently trusted is skipped with a message rather than
    treated as an error.  Raises exceptions.Error if --pubkeys is missing or
    --role is not a top-level role.
    """
    if not parsed_arguments.pubkeys:
        raise exceptions.Error('--pubkeys must be given with --distrust.')

    # Validate --role once, up front.  The original performed this check
    # inside the per-key try block, where the configuration error risked being
    # conflated with the "untrusted key" skip path, and keys were imported
    # before the inevitable failure.
    if parsed_arguments.role not in ('root', 'targets', 'snapshot', 'timestamp'):
        raise exceptions.Error('The given --role is not a top-level role.')

    repository = repo_tool.load_repository(
        os.path.join(parsed_arguments.path, REPO_DIR))

    for keypath in parsed_arguments.pubkeys:
        imported_pubkey = import_publickey_from_file(keypath)

        try:
            if parsed_arguments.role == 'root':
                repository.root.remove_verification_key(imported_pubkey)
            elif parsed_arguments.role == 'targets':
                repository.targets.remove_verification_key(imported_pubkey)
            elif parsed_arguments.role == 'snapshot':
                repository.snapshot.remove_verification_key(imported_pubkey)
            # The Timestamp key..
            else:
                repository.timestamp.remove_verification_key(imported_pubkey)

        # It is assumed remove_verification_key() only raises
        # securesystemslib.exceptions.Error and
        # securesystemslib.exceptions.FormatError, and the latter is not
        # raised because a valid key should have been returned by
        # import_publickey_from_file().
        except sslib_exceptions.Error:
            print(repr(keypath) + ' is not a trusted key. Skipping.')

    consistent_snapshot = roledb.get_roleinfo(
        'root', repository._repository_name)['consistent_snapshot']
    repository.write('root', consistent_snapshot=consistent_snapshot,
        increment_version_number=False)

    # Move staged metadata directory to "live" metadata directory.
    write_to_live_repo(parsed_arguments)
def revoke(parsed_arguments):
    """Revoke the role named by --delegatee from --role (either the top-level
    Targets role or a delegated role), re-sign the affected role, optionally
    make a new Snapshot/Timestamp release, and publish.
    """
    repository = repo_tool.load_repository(
        os.path.join(parsed_arguments.path, REPO_DIR))

    if parsed_arguments.role == 'targets':
        # Revoke from the top-level Targets role and re-load its signing key
        # from the keystore.
        repository.targets.revoke(parsed_arguments.delegatee)

        targets_keystore_path = os.path.join(
            parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME)
        repository.targets.load_signing_key(import_privatekey_from_file(
            targets_keystore_path, parsed_arguments.targets_pw))

    # A non-top-level role.
    else:
        delegator = repository.targets(parsed_arguments.role)
        delegator.revoke(parsed_arguments.delegatee)
        delegator.load_signing_key(
            import_privatekey_from_file(parsed_arguments.sign))

    # Update the required top-level roles, Snapshot and Timestamp, to make a
    # new release.  Automatically making a new release can be disabled via
    # --no_release.
    if not parsed_arguments.no_release:
        keystore_dir = os.path.join(parsed_arguments.path, KEYSTORE_DIR)

        # Import both keys before loading either one.
        snapshot_key = import_privatekey_from_file(
            os.path.join(keystore_dir, SNAPSHOT_KEY_NAME),
            parsed_arguments.snapshot_pw)
        timestamp_key = import_privatekey_from_file(
            os.path.join(keystore_dir, TIMESTAMP_KEY_NAME),
            parsed_arguments.timestamp_pw)

        repository.snapshot.load_signing_key(snapshot_key)
        repository.timestamp.load_signing_key(timestamp_key)

        consistent_snapshot = roledb.get_roleinfo(
            'root', repository._repository_name)['consistent_snapshot']
        repository.writeall(consistent_snapshot=consistent_snapshot)

    # Move staged metadata directory to "live" metadata directory.
    write_to_live_repo(parsed_arguments)
def remove_targets(parsed_arguments): repository = repo_tool.load_repository( os.path.join(parsed_arguments.path, REPO_DIR)) # Remove target files from the Targets metadata (or the role specified in # --role) that match the glob patterns specified in --remove. remove_target_files_from_metadata(parsed_arguments, repository) # Examples of how the --pw command-line option is interpreted: # repo.py --init': parsed_arguments.pw = 'pw' # repo.py --init --pw my_password: parsed_arguments.pw = 'my_password' # repo.py --init --pw: The user is prompted for a password, as follows: if not parsed_arguments.pw: parsed_arguments.pw = sslib_interface.get_password( prompt='Enter a password for the top-level role keys: ', confirm=True) targets_private = import_privatekey_from_file( os.path.join(parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME), parsed_arguments.targets_pw) repository.targets.load_signing_key(targets_private) # Load the top-level keys for Snapshot and Timestamp to make a new release. # Automatically making a new release can be disabled via --no_release. if not parsed_arguments.no_release: snapshot_private = import_privatekey_from_file( os.path.join(parsed_arguments.path, KEYSTORE_DIR, SNAPSHOT_KEY_NAME), parsed_arguments.snapshot_pw) timestamp_private = import_privatekey_from_file( os.path.join(parsed_arguments.path, KEYSTORE_DIR, TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw) repository.snapshot.load_signing_key(snapshot_private) repository.timestamp.load_signing_key(timestamp_private) consistent_snapshot = roledb.get_roleinfo( 'root', repository._repository_name)['consistent_snapshot'] repository.writeall(consistent_snapshot=consistent_snapshot) # Move staged metadata directory to "live" metadata directory. write_to_live_repo(parsed_arguments)
def add_target_to_repo(parsed_arguments, target_path, repo_targets_path,
        repository, custom=None):
    """
    (1) Copy 'target_path' to 'repo_targets_path'.
    (2) Add 'target_path' to Targets metadata of 'repository'.

    A nonexistent 'target_path' is skipped with a debug message.  'custom' is
    the optional custom-metadata dict recorded alongside the target path.
    """
    if custom is None:
        custom = {}

    # Early return keeps the main path un-nested.
    if not os.path.exists(target_path):
        logger.debug(repr(target_path) + ' does not exist. Skipping.')
        return

    sslib_util.ensure_parent_dir(os.path.join(repo_targets_path, target_path))
    shutil.copy(target_path, os.path.join(repo_targets_path, target_path))

    roleinfo = roledb.get_roleinfo(
        parsed_arguments.role, repository_name=repository._repository_name)

    # It is assumed we have a delegated role, and that the caller has made
    # sure to reject top-level roles specified with --role.
    #
    # Both branches of the original performed the identical update; only the
    # log message distinguishes adding a new target from replacing one.
    if target_path not in roleinfo['paths']:
        logger.debug('Adding new target: ' + repr(target_path))
    else:
        logger.debug('Replacing target: ' + repr(target_path))
    roleinfo['paths'].update({target_path: custom})

    roledb.update_roleinfo(parsed_arguments.role, roleinfo,
        mark_role_as_dirty=True, repository_name=repository._repository_name)
def load_project(project_directory, prefix='', new_targets_location=None,
        repository_name='default'):
    """
    <Purpose>
      Return a Project object initialized with the contents of the metadata
      files loaded from 'project_directory'.

    <Arguments>
      project_directory:
        The path to the project's metadata and configuration file.

      prefix:
        The prefix for the metadata, if defined.  It will replace the current
        prefix, by first removing the existing one (saved).

      new_targets_location:
        For flat project configurations, project owner might want to reload
        the project with a new location for the target files.  This overwrites
        the previous path to search for the target files.

      repository_name:
        The name of the repository.  If not supplied, 'rolename' is added to
        the 'default' repository.

    <Exceptions>
      securesystemslib.exceptions.FormatError, if 'project_directory' or any
      of the metadata files are improperly formatted.

    <Side Effects>
      All the metadata files found in the project are loaded and their
      contents stored in a libtuf.Repository object.  The global role and key
      databases for 'repository_name' are cleared and repopulated.

    <Returns>
      A tuf.developer_tool.Project object.
    """

    # Does 'repository_directory' have the correct format?
    # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
    sslib_formats.PATH_SCHEMA.check_match(project_directory)
    sslib_formats.NAME_SCHEMA.check_match(repository_name)

    # Do the same for the prefix
    sslib_formats.ANY_STRING_SCHEMA.check_match(prefix)

    # Clear the role and key databases since we are loading in a new project.
    roledb.clear_roledb(clear_all=True)
    keydb.clear_keydb(clear_all=True)

    # Locate metadata filepaths and targets filepath.
    project_directory = os.path.abspath(project_directory)

    # Load the cfg file and the project.
    config_filename = os.path.join(project_directory, PROJECT_FILENAME)

    project_configuration = sslib_util.load_json_file(config_filename)
    formats.PROJECT_CFG_SCHEMA.check_match(project_configuration)

    targets_directory = os.path.join(project_directory,
        project_configuration['targets_location'])

    if project_configuration['layout_type'] == 'flat':
        # Flat layout: the project directory is the parent of the config
        # directory, and the configured targets location is used as-is
        # (optionally overridden by 'new_targets_location').
        project_directory, junk = os.path.split(project_directory)
        targets_directory = project_configuration['targets_location']

        if new_targets_location is not None:
            targets_directory = new_targets_location

    metadata_directory = os.path.join(project_directory,
        project_configuration['metadata_location'])

    new_prefix = None

    # A caller-supplied prefix only replaces the configured one after loading;
    # the configured prefix is needed first to strip it from loaded metadata.
    if prefix != '':
        new_prefix = prefix

    prefix = project_configuration['prefix']

    # Load the project's filename.
    project_name = project_configuration['project_name']
    project_filename = project_name + METADATA_EXTENSION

    # Create a blank project on the target directory.
    project = Project(project_name, metadata_directory, targets_directory,
        prefix, repository_name)

    project.threshold = project_configuration['threshold']
    project.prefix = project_configuration['prefix']
    project.layout_type = project_configuration['layout_type']

    # Traverse the public keys and add them to the project.
    keydict = project_configuration['public_keys']

    for keyid in keydict:
        key, junk = format_metadata_to_key(keydict[keyid])
        project.add_verification_key(key)

    # Load the project's metadata.
    targets_metadata_path = os.path.join(project_directory,
        metadata_directory, project_filename)
    signable = sslib_util.load_json_file(targets_metadata_path)
    try:
        formats.check_signable_object_format(signable)
    except exceptions.UnsignedMetadataError:
        # Downgrade the error to a warning because a use case exists where
        # metadata may be generated unsigned on one machine and signed on
        # another.
        logger.warning('Unsigned metadata object: ' + repr(signable))
    targets_metadata = signable['signed']

    # Remove the prefix from the metadata.
    targets_metadata = _strip_prefix_from_targets_metadata(targets_metadata,
        prefix)
    for signature in signable['signatures']:
        project.add_signature(signature)

    # Update roledb.py containing the loaded project attributes.
    roleinfo = roledb.get_roleinfo(project_name, repository_name)
    roleinfo['signatures'].extend(signable['signatures'])
    roleinfo['version'] = targets_metadata['version']
    roleinfo['paths'] = targets_metadata['targets']
    roleinfo['delegations'] = targets_metadata['delegations']
    roleinfo['partial_loaded'] = False

    # Check if the loaded metadata was partially written and update the
    # flag in 'roledb.py'.
    if _metadata_is_partially_loaded(project_name, signable,
            repository_name=repository_name):
        roleinfo['partial_loaded'] = True

    roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False,
        repository_name=repository_name)

    # Register the keys trusted by the project's delegations.
    for key_metadata in targets_metadata['delegations']['keys'].values():
        key_object, junk = format_metadata_to_key(key_metadata)
        keydb.add_key(key_object, repository_name=repository_name)

    # Register a blank roleinfo for each delegated role; their contents are
    # filled in when the delegated metadata files are loaded below.
    for role in targets_metadata['delegations']['roles']:
        rolename = role['name']
        roleinfo = {'name': role['name'], 'keyids': role['keyids'],
                    'threshold': role['threshold'], 'signing_keyids': [],
                    'signatures': [], 'partial_loaded': False,
                    'delegations': {'keys': {}, 'roles': []}}
        roledb.add_role(rolename, roleinfo, repository_name=repository_name)

    # Load the delegated metadata and generate their fileinfo.
    targets_objects = {}
    loaded_metadata = [project_name]
    targets_objects[project_name] = project
    metadata_directory = os.path.join(project_directory, metadata_directory)

    if os.path.exists(metadata_directory) and \
            os.path.isdir(metadata_directory):
        for metadata_role in os.listdir(metadata_directory):
            metadata_path = os.path.join(metadata_directory, metadata_role)
            metadata_name = \
                metadata_path[len(metadata_directory):].lstrip(os.path.sep)

            # Strip the extension.  The roledb does not include an appended
            # '.json' extension for each role.
            if metadata_name.endswith(METADATA_EXTENSION):
                extension_length = len(METADATA_EXTENSION)
                metadata_name = metadata_name[:-extension_length]
            else:
                # Not a metadata file; skip it.
                continue

            if metadata_name in loaded_metadata:
                continue

            signable = None
            signable = sslib_util.load_json_file(metadata_path)

            # Strip the prefix from the local working copy, it will be added
            # again when the targets metadata is written to disk.
            metadata_object = signable['signed']
            metadata_object = _strip_prefix_from_targets_metadata(
                metadata_object, prefix)

            roleinfo = roledb.get_roleinfo(metadata_name, repository_name)
            roleinfo['signatures'].extend(signable['signatures'])
            roleinfo['version'] = metadata_object['version']
            roleinfo['expires'] = metadata_object['expires']
            roleinfo['paths'] = {}

            for filepath, fileinfo in metadata_object['targets'].items():
                roleinfo['paths'].update(
                    {filepath: fileinfo.get('custom', {})})
            roleinfo['delegations'] = metadata_object['delegations']
            roleinfo['partial_loaded'] = False

            # If the metadata was partially loaded, update the roleinfo flag.
            if _metadata_is_partially_loaded(metadata_name, signable,
                    repository_name=repository_name):
                roleinfo['partial_loaded'] = True

            roledb.update_roleinfo(metadata_name, roleinfo,
                mark_role_as_dirty=False, repository_name=repository_name)

            # Append to list of elements to avoid reloading repeated metadata.
            loaded_metadata.append(metadata_name)

            # Generate the Targets objects of the delegated roles.
            new_targets_object = Targets(targets_directory, metadata_name,
                roleinfo, repository_name=repository_name)
            targets_object = targets_objects[project_name]

            targets_object._delegated_roles[metadata_name] = \
                new_targets_object

            # Add the keys specified in the delegations field of the Targets
            # role.
            for key_metadata in \
                    metadata_object['delegations']['keys'].values():
                key_object, junk = format_metadata_to_key(key_metadata)
                try:
                    keydb.add_key(key_object, repository_name=repository_name)
                except exceptions.KeyAlreadyExistsError:
                    # A key may be shared by multiple delegations.
                    pass

            for role in metadata_object['delegations']['roles']:
                rolename = role['name']
                roleinfo = {'name': role['name'], 'keyids': role['keyids'],
                            'threshold': role['threshold'],
                            'signing_keyids': [], 'signatures': [],
                            'partial_loaded': False,
                            'delegations': {'keys': {}, 'roles': []}}
                roledb.add_role(rolename, roleinfo,
                    repository_name=repository_name)

    if new_prefix:
        project.prefix = new_prefix

    return project
def _generate_and_write_metadata(rolename, metadata_filename, write_partial,
        targets_directory, prefix='', repository_name='default'):
    """
    Non-public function that can generate and write the metadata of the
    specified 'rolename'.  It also increments version numbers if:

    1.  write_partial==True and the metadata is the first to be written.

    2.  write_partial=False (i.e., write()), the metadata was not loaded as
        partially written, and a write_partial is not needed.

    Returns the tuple (signable, filename) of the written metadata, and
    raises securesystemslib.exceptions.Error when the signable does not reach
    its signature threshold (and write_partial is False).
    """

    metadata = None

    # Retrieve the roleinfo of 'rolename' to extract the needed metadata
    # attributes, such as version number, expiration, etc.
    roleinfo = roledb.get_roleinfo(rolename, repository_name)

    metadata = generate_targets_metadata(targets_directory, roleinfo['paths'],
        roleinfo['version'], roleinfo['expires'], roleinfo['delegations'],
        False)

    # Prepend the prefix to the project's filepath to avoid signature errors
    # in upstream.
    # NOTE(review): each target path is reduced to its basename before the
    # prefix is joined, and when prefix == '' the original entry is not
    # deleted -- a path with directory components would then gain a flattened
    # duplicate entry.  Confirm this is the intended behavior.
    for element in list(metadata['targets']):
        junk, relative_target = os.path.split(element)
        prefixed_path = os.path.join(prefix, relative_target)
        metadata['targets'][prefixed_path] = metadata['targets'][element]
        if prefix != '':
            del (metadata['targets'][element])

    signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'],
        metadata_filename, repository_name)

    # Check if the version number of 'rolename' may be automatically
    # incremented, depending on whether if partial metadata is loaded or if
    # the metadata is written with write() / write_partial().

    # Increment the version number if this is the first partial write.
    if write_partial:
        temp_signable = repo_lib.sign_metadata(metadata, [],
            metadata_filename, repository_name)
        temp_signable['signatures'].extend(roleinfo['signatures'])
        status = sig.get_signature_status(temp_signable, rolename,
            repository_name)

        # No good signatures recorded yet: first partial write, so bump the
        # version and re-sign with the available keys.
        if len(status['good_sigs']) == 0:
            metadata['version'] = metadata['version'] + 1
            signable = repo_lib.sign_metadata(metadata,
                roleinfo['signing_keyids'], metadata_filename,
                repository_name)

    # non-partial write()
    else:
        if sig.verify(signable, rolename, repository_name):
            metadata['version'] = metadata['version'] + 1
            signable = repo_lib.sign_metadata(metadata,
                roleinfo['signing_keyids'], metadata_filename,
                repository_name)

    # Write the metadata to file if contains a threshold of signatures.
    signable['signatures'].extend(roleinfo['signatures'])

    if sig.verify(signable, rolename, repository_name) or write_partial:
        repo_lib._remove_invalid_and_duplicate_signatures(signable,
            repository_name)
        storage_backend = sslib_storage.FilesystemBackend()
        filename = repo_lib.write_metadata_file(signable, metadata_filename,
            metadata['version'], False, storage_backend)

    # 'signable' contains an invalid threshold of signatures.
    else:
        message = 'Not enough signatures for ' + repr(metadata_filename)
        raise sslib_exceptions.Error(message, signable)

    return signable, filename
def sign_role(parsed_arguments):
    """Sign the metadata of --role with each private key given via --sign,
    write the role's metadata, optionally make a new Snapshot/Timestamp
    release, and publish to the "live" repository.
    """
    repository = repo_tool.load_repository(
        os.path.join(parsed_arguments.path, REPO_DIR))
    consistent_snapshot = roledb.get_roleinfo(
        'root', repository._repository_name)['consistent_snapshot']

    for keypath in parsed_arguments.sign:
        role_privatekey = import_privatekey_from_file(keypath)

        if parsed_arguments.role == 'targets':
            repository.targets.load_signing_key(role_privatekey)
        elif parsed_arguments.role == 'root':
            repository.root.load_signing_key(role_privatekey)
        elif parsed_arguments.role == 'snapshot':
            repository.snapshot.load_signing_key(role_privatekey)
        elif parsed_arguments.role == 'timestamp':
            repository.timestamp.load_signing_key(role_privatekey)
        else:
            # TODO: repository_tool.py will be refactored to clean up the
            # following code, which adds and signs for a non-existent role.
            if not roledb.role_exists(parsed_arguments.role):
                # Load the private key keydb and set the roleinfo in roledb
                # so that metadata can be written with repository.write().
                # Remove-then-add replaces any existing entry for this keyid.
                keydb.remove_key(role_privatekey['keyid'],
                    repository_name=repository._repository_name)
                keydb.add_key(role_privatekey,
                    repository_name=repository._repository_name)

                # Set the delegated metadata file to expire in 3 months.
                expiration = formats.unix_timestamp_to_datetime(
                    int(time.time() + 7889230))
                expiration = expiration.isoformat() + 'Z'

                roleinfo = {'name': parsed_arguments.role,
                            'keyids': [role_privatekey['keyid']],
                            'signing_keyids': [role_privatekey['keyid']],
                            'partial_loaded': False, 'paths': {},
                            'signatures': [], 'version': 1,
                            'expires': expiration,
                            'delegations': {'keys': {}, 'roles': []}}

                roledb.add_role(parsed_arguments.role, roleinfo,
                    repository_name=repository._repository_name)

                # Generate the Targets object of --role, and add it to the
                # top-level 'targets' object.
                new_targets_object = repo_tool.Targets(
                    repository._targets_directory, parsed_arguments.role,
                    roleinfo, repository_name=repository._repository_name)
                repository.targets._delegated_roles[
                    parsed_arguments.role] = new_targets_object

            else:
                repository.targets(
                    parsed_arguments.role).load_signing_key(role_privatekey)

    # Write the Targets metadata now that it's been modified.  Once write()
    # is called on a role, it is no longer considered "dirty" and the role
    # will not be written again if another write() or writeall() were
    # subsequently made.
    repository.write(parsed_arguments.role,
        consistent_snapshot=consistent_snapshot,
        increment_version_number=False)

    # Write the updated top-level roles, if any.  Also write Snapshot and
    # Timestamp to make a new release.  Automatically making a new release
    # can be disabled via --no_release.
    if not parsed_arguments.no_release:
        snapshot_private = import_privatekey_from_file(
            os.path.join(parsed_arguments.path, KEYSTORE_DIR,
            SNAPSHOT_KEY_NAME), parsed_arguments.snapshot_pw)
        timestamp_private = import_privatekey_from_file(
            os.path.join(parsed_arguments.path, KEYSTORE_DIR,
            TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw)

        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        repository.writeall(consistent_snapshot=consistent_snapshot)

    # Move staged metadata directory to "live" metadata directory.
    write_to_live_repo(parsed_arguments)
def delegate(parsed_arguments):
    """Delegate trust of the --delegate glob patterns from --role to the role
    named by --delegatee, trusting the --pubkeys public keys, then optionally
    make a new Snapshot/Timestamp release and publish.
    """
    if not parsed_arguments.delegatee:
        raise exceptions.Error(
            '--delegatee must be set to perform the delegation.')

    if parsed_arguments.delegatee in ('root', 'snapshot', 'timestamp',
            'targets'):
        raise exceptions.Error('Cannot delegate to the top-level role: ' +
            repr(parsed_arguments.delegatee))

    if not parsed_arguments.pubkeys:
        raise exceptions.Error(
            '--pubkeys must be set to perform the delegation.')

    # Import every public key of the delegatee before touching the repo.
    public_keys = [import_publickey_from_file(pubkey_path)
        for pubkey_path in parsed_arguments.pubkeys]

    repository = repo_tool.load_repository(
        os.path.join(parsed_arguments.path, REPO_DIR))

    if parsed_arguments.role == 'targets':
        repository.targets.delegate(parsed_arguments.delegatee, public_keys,
            parsed_arguments.delegate, parsed_arguments.threshold,
            parsed_arguments.terminating, list_of_targets=None,
            path_hash_prefixes=None)

        targets_keystore_path = os.path.join(
            parsed_arguments.path, KEYSTORE_DIR, TARGETS_KEY_NAME)
        repository.targets.load_signing_key(import_privatekey_from_file(
            targets_keystore_path, parsed_arguments.targets_pw))

    # A delegated (non-top-level-Targets) role.
    else:
        repository.targets(parsed_arguments.role).delegate(
            parsed_arguments.delegatee, public_keys,
            parsed_arguments.delegate, parsed_arguments.threshold,
            parsed_arguments.terminating, list_of_targets=None,
            path_hash_prefixes=None)

    # Update the required top-level roles, Snapshot and Timestamp, to make a
    # new release.  Automatically making a new release can be disabled via
    # --no_release.
    if not parsed_arguments.no_release:
        keystore_dir = os.path.join(parsed_arguments.path, KEYSTORE_DIR)

        snapshot_private = import_privatekey_from_file(
            os.path.join(keystore_dir, SNAPSHOT_KEY_NAME),
            parsed_arguments.snapshot_pw)
        timestamp_private = import_privatekey_from_file(
            os.path.join(keystore_dir, TIMESTAMP_KEY_NAME),
            parsed_arguments.timestamp_pw)

        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        consistent_snapshot = roledb.get_roleinfo(
            'root', repository._repository_name)['consistent_snapshot']
        repository.writeall(consistent_snapshot=consistent_snapshot)

    # Move staged metadata directory to "live" metadata directory.
    write_to_live_repo(parsed_arguments)