Example #1
0
def import_ecdsa_publickey_from_file(filepath):
    """Read a custom JSON-formatted ecdsa public key from disk.

    NOTE: The signing scheme is fixed at key-generation time (see the
    corresponding generate function).

    Arguments:
      filepath: Path of the key file to read.

    Raises:
      FormatError: Argument is malformed.
      StorageError: Key file cannot be read.
      Error: Public key is malformed.

    Returns:
      An ecdsa public key object conformant with 'ECDSAKEY_SCHEMA'.
    """
    # Validate the argument before touching the filesystem; raises
    # FormatError on a mismatch.
    formats.PATH_SCHEMA.check_match(filepath)

    # The key is stored on disk in a custom JSON layout.  Convert it to the
    # in-memory dict representation; the second tuple element (additional
    # keyids) is not needed here.
    key_metadata = util.load_json_file(filepath)
    public_key, _ = keys.format_metadata_to_key(key_metadata)

    return public_key
Example #2
0
def import_ed25519_publickey_from_file(filepath):
    """Read a custom JSON-formatted ed25519 public key from disk.

    NOTE: The signing scheme is fixed at key-generation time (see the
    corresponding generate function).

    Arguments:
      filepath: Path of the key file to read.

    Raises:
      FormatError: Argument is malformed.
      StorageError: Key file cannot be read.
      Error: Public key is malformed.

    Returns:
      An ed25519 public key object conformant with 'ED25519KEY_SCHEMA'.
    """
    # Validate the argument before touching the filesystem; raises
    # FormatError on a mismatch.
    formats.PATH_SCHEMA.check_match(filepath)

    # The key is stored on disk in a custom JSON layout.  Convert it to the
    # in-memory dict representation; extra keyids are not needed here.
    key_metadata = util.load_json_file(filepath)
    public_key, _ = keys.format_metadata_to_key(key_metadata)

    # The conversion above is generic -- verify that what we loaded really
    # is an ed25519 key.
    if public_key['keytype'] != 'ed25519':
        raise exceptions.FormatError(
            'Invalid key type loaded: ' + repr(public_key['keytype']))

    return public_key
Example #3
0
def import_publickey_from_file(keypath):
    """Import a public key of any supported type from 'keypath'.

    The file is first parsed as custom JSON key metadata; if that fails,
    it is re-tried as a PEM-encoded RSA public key.

    Arguments:
      keypath: Path of the public key file to read.

    Raises:
      securesystemslib.exceptions.Error / FormatError: The file cannot be
        parsed as either format, or the loaded key type is unsupported.

    Returns:
      A public key object in securesystemslib's in-memory dict format.
    """
    try:
        key_metadata = sslib_util.load_json_file(keypath)

    # An RSA public key is saved to disk in PEM format (not JSON), so the
    # load_json_file() call above can fail for this reason.  Try to potentially
    # load the PEM string in keypath if an exception is raised.
    except sslib_exceptions.Error:
        key_metadata = sslib_interface.import_rsa_publickey_from_file(keypath)

    key_object, _ = sslib_keys.format_metadata_to_key(key_metadata)

    if key_object['keytype'] not in SUPPORTED_KEY_TYPES:
        # BUG FIX: the original code closed the repr() call at the very end of
        # the expression, so the trailing '.  Supported key types: ' text and
        # the repr of SUPPORTED_KEY_TYPES were concatenated *inside* repr(),
        # garbling the error message.  Apply repr() to each value separately.
        raise exceptions.Error('Trying to import an unsupported key'
                               ' type: ' + repr(key_object['keytype']) + '.'
                               '  Supported key types: ' +
                               repr(SUPPORTED_KEY_TYPES))

    return key_object
Example #4
0
File: keydb.py  Project: suryatmodulus/tuf
def create_keydb_from_root_metadata(root_metadata, repository_name='default'):
    """
  <Purpose>
    Populate the key database with the unique keys found in 'root_metadata'.
    The database dictionary will conform to
    'tuf.formats.KEYDB_SCHEMA' and have the form: {keyid: key,
    ...}.  The 'keyid' conforms to 'securesystemslib.formats.KEYID_SCHEMA' and
    'key' to its respective type.  In the case of RSA keys, this object would
    match 'RSAKEY_SCHEMA'.

  <Arguments>
    root_metadata:
      A dictionary conformant to 'tuf.formats.ROOT_SCHEMA'.  The keys found
      in the 'keys' field of 'root_metadata' are needed by this function.

    repository_name:
      The name of the repository to store the key information.  If not supplied,
      the key database is populated for the 'default' repository.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if 'root_metadata' does not have
    the correct format.

    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
    not exist in the key database.

  <Side Effects>
    A function to add the key to the database is called.  In the case of RSA
    keys, this function is add_key().

    The old keydb key database is replaced.

  <Returns>
    None.
  """

    # Schema-check both arguments up front; either check raises
    # 'securesystemslib.exceptions.FormatError' on a mismatch.
    formats.ROOT_SCHEMA.check_match(root_metadata)
    sslib_formats.NAME_SCHEMA.check_match(repository_name)

    # Reset the per-repository key database, creating it on first use.
    if repository_name not in _keydb_dict:
        create_keydb(repository_name)

    else:
        _keydb_dict[repository_name].clear()

    # Walk the keys listed in root metadata and register each supported one
    # under the keyid provided by the metadata.
    for keyid, key_metadata in six.iteritems(root_metadata['keys']):
        if key_metadata['keytype'] not in _SUPPORTED_KEY_TYPES:
            logger.warning(
                'Root Metadata file contains a key with an invalid keytype.')
            continue

        # 'key_metadata' is stored in 'KEY_SCHEMA' format.  Convert it to the
        # key-object format expected by add_key() (e.g. 'RSAKEY_SCHEMA' for
        # RSA keys).  The provided keyid is used as the default keyid; any
        # other keyids returned are ignored.
        key_dict, _ = sslib_keys.format_metadata_to_key(
            key_metadata, keyid)

        try:
            add_key(key_dict, repository_name=repository_name)

        # Keyid duplicates should *not* occur (dict keys are unique), but if
        # one does -- e.g. the key was already added elsewhere -- log a
        # warning and keep going.
        except exceptions.KeyAlreadyExistsError as e:  # pragma: no cover
            logger.warning(e)
            continue
Example #5
0
def load_project(project_directory,
                 prefix='',
                 new_targets_location=None,
                 repository_name='default'):
    """
  <Purpose>
    Return a Project object initialized with the contents of the metadata
    files loaded from 'project_directory'.

  <Arguments>
    project_directory:
      The path to the project's metadata and configuration file.

    prefix:
      The prefix for the metadata, if defined.  It will replace the current
      prefix, by first removing the existing one (saved).

    new_targets_location:
      For flat project configurations, project owner might want to reload the
      project with a new location for the target files. This overwrites the
      previous path to search for the target files.

    repository_name:
      The name of the repository.  If not supplied, 'rolename' is added to the
      'default' repository.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if 'project_directory' or any of
    the metadata files are improperly formatted.

  <Side Effects>
    All the metadata files found in the project are loaded and their contents
    stored in a libtuf.Repository object.

  <Returns>
    A tuf.developer_tool.Project object.
  """

    # Does 'repository_directory' have the correct format?
    # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
    sslib_formats.PATH_SCHEMA.check_match(project_directory)
    sslib_formats.NAME_SCHEMA.check_match(repository_name)

    # Do the same for the prefix
    sslib_formats.ANY_STRING_SCHEMA.check_match(prefix)

    # Clear the role and key databases since we are loading in a new project.
    # NOTE(review): this wipes ALL repositories' state (clear_all=True), not
    # just 'repository_name' -- callers holding other loaded projects lose
    # their databases.
    roledb.clear_roledb(clear_all=True)
    keydb.clear_keydb(clear_all=True)

    # Locate metadata filepaths and targets filepath.
    project_directory = os.path.abspath(project_directory)

    # Load the cfg file and the project.
    config_filename = os.path.join(project_directory, PROJECT_FILENAME)

    project_configuration = sslib_util.load_json_file(config_filename)
    formats.PROJECT_CFG_SCHEMA.check_match(project_configuration)

    targets_directory = os.path.join(project_directory,
                                     project_configuration['targets_location'])

    # In a 'flat' layout the targets location recorded in the configuration
    # is used as-is (not joined under the project directory), and the project
    # directory itself is stepped up one level.
    if project_configuration['layout_type'] == 'flat':
        project_directory, junk = os.path.split(project_directory)
        targets_directory = project_configuration['targets_location']

        # The caller may override the configured targets location.
        if new_targets_location is not None:
            targets_directory = new_targets_location

    metadata_directory = os.path.join(
        project_directory, project_configuration['metadata_location'])

    # Remember a caller-supplied prefix; it is applied only at the very end,
    # so the project is first loaded using the prefix recorded in the
    # configuration file.
    new_prefix = None

    if prefix != '':
        new_prefix = prefix

    prefix = project_configuration['prefix']

    # Load the project's filename.
    project_name = project_configuration['project_name']
    project_filename = project_name + METADATA_EXTENSION

    # Create a blank project on the target directory.
    project = Project(project_name, metadata_directory, targets_directory,
                      prefix, repository_name)

    project.threshold = project_configuration['threshold']
    project.prefix = project_configuration['prefix']
    project.layout_type = project_configuration['layout_type']

    # Traverse the public keys and add them to the project.
    keydict = project_configuration['public_keys']

    for keyid in keydict:
        key, junk = format_metadata_to_key(keydict[keyid])
        project.add_verification_key(key)

    # Load the project's metadata.
    targets_metadata_path = os.path.join(project_directory, metadata_directory,
                                         project_filename)
    signable = sslib_util.load_json_file(targets_metadata_path)
    try:
        formats.check_signable_object_format(signable)
    except exceptions.UnsignedMetadataError:
        # Downgrade the error to a warning because a use case exists where
        # metadata may be generated unsigned on one machine and signed on another.
        logger.warning('Unsigned metadata object: ' + repr(signable))
    targets_metadata = signable['signed']

    # Remove the prefix from the metadata.
    targets_metadata = _strip_prefix_from_targets_metadata(
        targets_metadata, prefix)
    for signature in signable['signatures']:
        project.add_signature(signature)

    # Update roledb.py containing the loaded project attributes.
    roleinfo = roledb.get_roleinfo(project_name, repository_name)
    roleinfo['signatures'].extend(signable['signatures'])
    roleinfo['version'] = targets_metadata['version']
    roleinfo['paths'] = targets_metadata['targets']
    roleinfo['delegations'] = targets_metadata['delegations']
    roleinfo['partial_loaded'] = False

    # Check if the loaded metadata was partially written and update the
    # flag in 'roledb.py'.
    if _metadata_is_partially_loaded(project_name,
                                     signable,
                                     repository_name=repository_name):
        roleinfo['partial_loaded'] = True

    roledb.update_roleinfo(project_name,
                           roleinfo,
                           mark_role_as_dirty=False,
                           repository_name=repository_name)

    # Register the delegation keys of the top-level project role.
    for key_metadata in targets_metadata['delegations']['keys'].values():
        key_object, junk = format_metadata_to_key(key_metadata)
        keydb.add_key(key_object, repository_name=repository_name)

    # Register each delegated role with an initially-empty roleinfo; the real
    # contents are filled in when the role's own metadata file is read below.
    for role in targets_metadata['delegations']['roles']:
        rolename = role['name']
        roleinfo = {
            'name': role['name'],
            'keyids': role['keyids'],
            'threshold': role['threshold'],
            'signing_keyids': [],
            'signatures': [],
            'partial_loaded': False,
            'delegations': {
                'keys': {},
                'roles': []
            }
        }
        roledb.add_role(rolename, roleinfo, repository_name=repository_name)

    # Load the delegated metadata and generate their fileinfo.
    targets_objects = {}
    loaded_metadata = [project_name]
    targets_objects[project_name] = project
    metadata_directory = os.path.join(project_directory, metadata_directory)

    if os.path.exists(metadata_directory) and \
                      os.path.isdir(metadata_directory):
        for metadata_role in os.listdir(metadata_directory):
            metadata_path = os.path.join(metadata_directory, metadata_role)
            metadata_name = \
              metadata_path[len(metadata_directory):].lstrip(os.path.sep)

            # Strip the extension.  The roledb does not include an appended '.json'
            # extension for each role.
            if metadata_name.endswith(METADATA_EXTENSION):
                extension_length = len(METADATA_EXTENSION)
                metadata_name = metadata_name[:-extension_length]

            else:
                continue

            # Skip files already processed (e.g. the top-level project file).
            if metadata_name in loaded_metadata:
                continue

            signable = None
            signable = sslib_util.load_json_file(metadata_path)

            # Strip the prefix from the local working copy, it will be added again
            # when the targets metadata is written to disk.
            metadata_object = signable['signed']
            metadata_object = _strip_prefix_from_targets_metadata(
                metadata_object, prefix)

            # Fill in the placeholder roleinfo registered above with the
            # contents of the role's metadata file.
            roleinfo = roledb.get_roleinfo(metadata_name, repository_name)
            roleinfo['signatures'].extend(signable['signatures'])
            roleinfo['version'] = metadata_object['version']
            roleinfo['expires'] = metadata_object['expires']
            roleinfo['paths'] = {}

            for filepath, fileinfo in metadata_object['targets'].items():
                roleinfo['paths'].update(
                    {filepath: fileinfo.get('custom', {})})
            roleinfo['delegations'] = metadata_object['delegations']
            roleinfo['partial_loaded'] = False

            # If the metadata was partially loaded, update the roleinfo flag.
            if _metadata_is_partially_loaded(metadata_name,
                                             signable,
                                             repository_name=repository_name):
                roleinfo['partial_loaded'] = True

            roledb.update_roleinfo(metadata_name,
                                   roleinfo,
                                   mark_role_as_dirty=False,
                                   repository_name=repository_name)

            # Append to list of elements to avoid reloading repeated metadata.
            loaded_metadata.append(metadata_name)

            # Generate the Targets objects of the delegated roles.
            new_targets_object = Targets(targets_directory,
                                         metadata_name,
                                         roleinfo,
                                         repository_name=repository_name)
            targets_object = targets_objects[project_name]

            targets_object._delegated_roles[metadata_name] = new_targets_object

            # Add the keys specified in the delegations field of the Targets role.
            for key_metadata in metadata_object['delegations']['keys'].values(
            ):
                key_object, junk = format_metadata_to_key(key_metadata)

                # Unlike the top-level loop above, duplicates are tolerated
                # here: a delegated key may already have been registered.
                try:
                    keydb.add_key(key_object, repository_name=repository_name)

                except exceptions.KeyAlreadyExistsError:
                    pass

            for role in metadata_object['delegations']['roles']:
                rolename = role['name']
                roleinfo = {
                    'name': role['name'],
                    'keyids': role['keyids'],
                    'threshold': role['threshold'],
                    'signing_keyids': [],
                    'signatures': [],
                    'partial_loaded': False,
                    'delegations': {
                        'keys': {},
                        'roles': []
                    }
                }
                roledb.add_role(rolename,
                                roleinfo,
                                repository_name=repository_name)

    # Apply the caller-supplied prefix, if any, now that loading is complete.
    if new_prefix:
        project.prefix = new_prefix

    return project