def import_ecdsa_publickey_from_file(filepath):
  """Imports custom JSON-formatted ecdsa public key from disk.

  NOTE: The signing scheme is set at key generation (see generate function).

  Arguments:
    filepath: The path to read the file from.

  Raises:
    FormatError: Argument is malformed.
    StorageError: Key file cannot be read.
    Error: Public key is malformed.

  Returns:
    An ecdsa public key object conformant with 'ECDSAKEY_SCHEMA'.

  """
  formats.PATH_SCHEMA.check_match(filepath)

  # Read the custom on-disk JSON representation of the key and convert it
  # into the in-memory dict format used throughout the library.  The private
  # portion of the returned tuple is unused for a public key.
  key_metadata = util.load_json_file(filepath)
  public_key, _ = keys.format_metadata_to_key(key_metadata)

  return public_key
def import_ed25519_publickey_from_file(filepath):
  """Imports custom JSON-formatted ed25519 public key from disk.

  NOTE: The signing scheme is set at key generation (see generate function).

  Arguments:
    filepath: The path to read the file from.

  Raises:
    FormatError: Argument is malformed.
    StorageError: Key file cannot be read.
    Error: Public key is malformed.

  Returns:
    An ed25519 public key object conformant with 'ED25519KEY_SCHEMA'.

  """
  formats.PATH_SCHEMA.check_match(filepath)

  # Read the custom on-disk JSON representation of the key and convert it
  # into the in-memory dict format used throughout the library.
  key_metadata = util.load_json_file(filepath)
  public_key, _ = keys.format_metadata_to_key(key_metadata)

  # Guard against the generic loader having produced some other key type.
  if public_key['keytype'] != 'ed25519':
    raise exceptions.FormatError(
        'Invalid key type loaded: ' + repr(public_key['keytype']))

  return public_key
def import_publickey_from_file(keypath):
  """Imports a public key from a file on disk.

  Keys are normally stored in the custom JSON format; RSA public keys are
  the exception and are stored as PEM, so PEM import is attempted as a
  fallback when JSON parsing fails.

  Arguments:
    keypath: The path to read the public key file from.

  Raises:
    securesystemslib.exceptions.StorageError: The file cannot be read.
    securesystemslib.exceptions.Error: The key file cannot be parsed.
    exceptions.Error: The key type is not in SUPPORTED_KEY_TYPES.

  Returns:
    A public key object in the in-memory dict format produced by
    securesystemslib.keys.format_metadata_to_key().
  """
  try:
    key_metadata = sslib_util.load_json_file(keypath)

  # An RSA public key is saved to disk in PEM format (not JSON), so the
  # load_json_file() call above can fail for this reason. Try to potentially
  # load the PEM string in keypath if an exception is raised.
  except sslib_exceptions.Error:
    key_metadata = sslib_interface.import_rsa_publickey_from_file(keypath)

  key_object, _ = sslib_keys.format_metadata_to_key(key_metadata)

  if key_object['keytype'] not in SUPPORTED_KEY_TYPES:
    # Bug fix: a misplaced closing parenthesis previously applied repr() to
    # the entire concatenated message (including the nested repr of
    # SUPPORTED_KEY_TYPES) instead of just the offending keytype.
    raise exceptions.Error(
        'Trying to import an unsupported key type: '
        + repr(key_object['keytype']) + '. Supported key types: '
        + repr(SUPPORTED_KEY_TYPES))

  return key_object
def from_json_file(
        cls,
        filename: str,
        storage_backend: Optional[StorageBackendInterface] = None
        ) -> 'Metadata':
    """Loads JSON-formatted TUF metadata from file storage.

    Arguments:
        filename: The path to read the file from.
        storage_backend: An object that implements
            securesystemslib.storage.StorageBackendInterface. Per default
            a (local) FilesystemBackend is used.

    Raises:
        securesystemslib.exceptions.StorageError: The file cannot be read.
        securesystemslib.exceptions.Error, ValueError, KeyError: The
            metadata cannot be parsed.

    Returns:
        A TUF Metadata object.

    """
    # Deserialize the file contents first, then build the Metadata object
    # from the resulting dict.
    raw_metadata = load_json_file(filename, storage_backend)
    return cls.from_dict(raw_metadata)
def load_project(project_directory, prefix='', new_targets_location=None,
    repository_name='default'):
  """
  <Purpose>
    Return a Project object initialized with the contents of the metadata
    files loaded from 'project_directory'.

  <Arguments>
    project_directory:
      The path to the project's metadata and configuration file.

    prefix:
      The prefix for the metadata, if defined.  It will replace the current
      prefix, by first removing the existing one (saved).

    new_targets_location:
      For flat project configurations, project owner might want to reload the
      project with a new location for the target files.  This overwrites the
      previous path to search for the target files.

    repository_name:
      The name of the repository.  If not supplied, 'rolename' is added to
      the 'default' repository.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if 'project_directory' or any
    of the metadata files are improperly formatted.

  <Side Effects>
    All the metadata files found in the project are loaded and their
    contents stored in a libtuf.Repository object.  The global role and key
    databases (roledb, keydb) are cleared and repopulated.

  <Returns>
    A tuf.developer_tool.Project object.
  """

  # Does 'repository_directory' have the correct format?
  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
  sslib_formats.PATH_SCHEMA.check_match(project_directory)
  sslib_formats.NAME_SCHEMA.check_match(repository_name)

  # Do the same for the prefix
  sslib_formats.ANY_STRING_SCHEMA.check_match(prefix)

  # Clear the role and key databases since we are loading in a new project.
  roledb.clear_roledb(clear_all=True)
  keydb.clear_keydb(clear_all=True)

  # Locate metadata filepaths and targets filepath.
  project_directory = os.path.abspath(project_directory)

  # Load the cfg file and the project.
  config_filename = os.path.join(project_directory, PROJECT_FILENAME)

  project_configuration = sslib_util.load_json_file(config_filename)
  formats.PROJECT_CFG_SCHEMA.check_match(project_configuration)

  targets_directory = os.path.join(project_directory,
      project_configuration['targets_location'])

  # For a 'flat' layout the metadata lives next to (not inside) the project
  # directory, so rebind 'project_directory' to its parent and take the
  # targets location as given in the configuration.
  if project_configuration['layout_type'] == 'flat':
    project_directory, junk = os.path.split(project_directory)
    targets_directory = project_configuration['targets_location']

    if new_targets_location is not None:
      targets_directory = new_targets_location

  metadata_directory = os.path.join(project_directory,
      project_configuration['metadata_location'])

  # A caller-supplied non-empty prefix is saved in 'new_prefix' and applied
  # only at the very end; until then the configured prefix is used so that
  # the stored metadata can be stripped correctly.
  new_prefix = None

  if prefix != '':
    new_prefix = prefix

  prefix = project_configuration['prefix']

  # Load the project's filename.
  project_name = project_configuration['project_name']
  project_filename = project_name + METADATA_EXTENSION

  # Create a blank project on the target directory.
  project = Project(project_name, metadata_directory, targets_directory,
      prefix, repository_name)

  project.threshold = project_configuration['threshold']
  project.prefix = project_configuration['prefix']
  project.layout_type = project_configuration['layout_type']

  # Traverse the public keys and add them to the project.
  keydict = project_configuration['public_keys']

  for keyid in keydict:
    key, junk = format_metadata_to_key(keydict[keyid])
    project.add_verification_key(key)

  # Load the project's metadata.
  targets_metadata_path = os.path.join(project_directory, metadata_directory,
      project_filename)
  signable = sslib_util.load_json_file(targets_metadata_path)

  try:
    formats.check_signable_object_format(signable)

  except exceptions.UnsignedMetadataError:
    # Downgrade the error to a warning because a use case exists where
    # metadata may be generated unsigned on one machine and signed on
    # another.
    logger.warning('Unsigned metadata object: ' + repr(signable))

  targets_metadata = signable['signed']

  # Remove the prefix from the metadata.
  targets_metadata = _strip_prefix_from_targets_metadata(targets_metadata,
      prefix)

  for signature in signable['signatures']:
    project.add_signature(signature)

  # Update roledb.py containing the loaded project attributes.
  roleinfo = roledb.get_roleinfo(project_name, repository_name)
  roleinfo['signatures'].extend(signable['signatures'])
  roleinfo['version'] = targets_metadata['version']
  roleinfo['paths'] = targets_metadata['targets']
  roleinfo['delegations'] = targets_metadata['delegations']
  roleinfo['partial_loaded'] = False

  # Check if the loaded metadata was partially written and update the
  # flag in 'roledb.py'.
  if _metadata_is_partially_loaded(project_name, signable,
      repository_name=repository_name):
    roleinfo['partial_loaded'] = True

  roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False,
      repository_name=repository_name)

  # Register the keys named in the top-level role's delegations.
  for key_metadata in targets_metadata['delegations']['keys'].values():
    key_object, junk = format_metadata_to_key(key_metadata)
    keydb.add_key(key_object, repository_name=repository_name)

  # Register each delegated role with a fresh, empty roleinfo; its real
  # contents are filled in below when the delegated metadata file is read.
  for role in targets_metadata['delegations']['roles']:
    rolename = role['name']
    roleinfo = {'name': role['name'], 'keyids': role['keyids'],
        'threshold': role['threshold'], 'signing_keyids': [],
        'signatures': [], 'partial_loaded': False,
        'delegations': {'keys': {}, 'roles': []}}
    roledb.add_role(rolename, roleinfo, repository_name=repository_name)

  # Load the delegated metadata and generate their fileinfo.
  targets_objects = {}
  loaded_metadata = [project_name]
  targets_objects[project_name] = project
  metadata_directory = os.path.join(project_directory, metadata_directory)

  if os.path.exists(metadata_directory) and \
      os.path.isdir(metadata_directory):
    for metadata_role in os.listdir(metadata_directory):
      metadata_path = os.path.join(metadata_directory, metadata_role)
      metadata_name = \
          metadata_path[len(metadata_directory):].lstrip(os.path.sep)

      # Strip the extension.  The roledb does not include an appended
      # '.json' extension for each role.
      if metadata_name.endswith(METADATA_EXTENSION):
        extension_length = len(METADATA_EXTENSION)
        metadata_name = metadata_name[:-extension_length]

      else:
        continue

      # Skip the top-level project metadata, which was loaded above.
      if metadata_name in loaded_metadata:
        continue

      signable = None
      signable = sslib_util.load_json_file(metadata_path)

      # Strip the prefix from the local working copy, it will be added
      # again when the targets metadata is written to disk.
      metadata_object = signable['signed']
      metadata_object = _strip_prefix_from_targets_metadata(metadata_object,
          prefix)

      roleinfo = roledb.get_roleinfo(metadata_name, repository_name)
      roleinfo['signatures'].extend(signable['signatures'])
      roleinfo['version'] = metadata_object['version']
      roleinfo['expires'] = metadata_object['expires']
      roleinfo['paths'] = {}

      # Only the 'custom' portion of each target's fileinfo is kept here;
      # hashes and length are regenerated when metadata is written.
      for filepath, fileinfo in metadata_object['targets'].items():
        roleinfo['paths'].update({filepath: fileinfo.get('custom', {})})
      roleinfo['delegations'] = metadata_object['delegations']
      roleinfo['partial_loaded'] = False

      # If the metadata was partially loaded, update the roleinfo flag.
      if _metadata_is_partially_loaded(metadata_name, signable,
          repository_name=repository_name):
        roleinfo['partial_loaded'] = True

      roledb.update_roleinfo(metadata_name, roleinfo,
          mark_role_as_dirty=False, repository_name=repository_name)

      # Append to list of elements to avoid reloading repeated metadata.
      loaded_metadata.append(metadata_name)

      # Generate the Targets objects of the delegated roles.
      new_targets_object = Targets(targets_directory, metadata_name,
          roleinfo, repository_name=repository_name)
      targets_object = targets_objects[project_name]

      targets_object._delegated_roles[metadata_name] = new_targets_object

      # Add the keys specified in the delegations field of the Targets
      # role.
      for key_metadata in metadata_object['delegations']['keys'].values():
        key_object, junk = format_metadata_to_key(key_metadata)

        # A key may already be registered by an earlier role's delegation;
        # that is expected, so the duplicate is deliberately ignored.
        try:
          keydb.add_key(key_object, repository_name=repository_name)

        except exceptions.KeyAlreadyExistsError:
          pass

      for role in metadata_object['delegations']['roles']:
        rolename = role['name']
        roleinfo = {'name': role['name'], 'keyids': role['keyids'],
            'threshold': role['threshold'], 'signing_keyids': [],
            'signatures': [], 'partial_loaded': False,
            'delegations': {'keys': {}, 'roles': []}}
        roledb.add_role(rolename, roleinfo, repository_name=repository_name)

  # Apply the caller-supplied prefix, if any, now that the stored prefix is
  # no longer needed for stripping.
  if new_prefix:
    project.prefix = new_prefix

  return project
def read_from_file(self, path: Filepath) -> Metadata:
    """Load the JSON file at 'path' and return it parsed as a Metadata object.

    (The previous docstring — "Return the expected filename based on the
    rolename." — described a different function.)
    """
    return JSONParser.parse(load_json_file(path))