Code Example #1
def load_json_string(data):
  """
  <Purpose>
    Deserialize 'data' (JSON string) to a Python object.

  <Arguments>
    data:
      A JSON string.

  <Exceptions>
    securesystemslib.exceptions.Error, if 'data' cannot be deserialized to a
    Python object.

  <Side Effects>
    None.

  <Returns>
    Deserialized object.  For example, a dictionary.
  """

  deserialized_object = None

  try:
    deserialized_object = json.loads(data)

  except TypeError:
    message = 'Invalid JSON string: ' + repr(data)
    raise exceptions.Error(message)

  except ValueError:
    message = 'Cannot deserialize to a Python object: ' + repr(data)
    raise exceptions.Error(message)

  else:
    return deserialized_object
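
A minimal usage sketch for the function above (assuming it is exposed as securesystemslib.util.load_json_string, as in older securesystemslib releases):

# Usage sketch -- the module path securesystemslib.util is an assumption.
import securesystemslib.exceptions
import securesystemslib.util

try:
  metadata = securesystemslib.util.load_json_string('{"keytype": "rsa"}')
  print(metadata['keytype'])   # prints 'rsa'

except securesystemslib.exceptions.Error as error:
  print('Cannot deserialize: ' + str(error))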
Code Example #2
File: developer_tool.py  Project: avelichka/tuf
    def add_verification_key(self, key, expires=None):
        """
      <Purpose>
        Act as a thin wrapper around the project._targets method of the same
        name.  This wrapper exists purely for usability purposes.

      <Arguments>
        key:
          The role key to be added, conformant to
          'securesystemslib.formats.ANYKEY_SCHEMA'.  Adding a public key to a
          role means that its corresponding private key must be used to
          generate and add its signature to the role.

      <Exceptions>
        securesystemslib.exceptions.FormatError, if the 'key' argument is
        improperly formatted.

        securesystemslib.exceptions.Error, if the project already contains a key.

      <Side Effects>
        The role's entries in 'keydb' and 'roledb' are updated.

      <Returns>
        None
    """

        # Verify that this role does not already contain a key.  The parent project
        # role is restricted to one key.  Any of its delegated roles may have
        # more than one key.
        # TODO: Add condition check for the requirement stated above.
        if len(self.keys) > 0:
            raise sslib_exceptions.Error(
                "This project already contains a key.")

        super(Project, self).add_verification_key(key, expires)
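
A hypothetical usage sketch (paths are placeholders; assumes tuf.developer_tool's create_new_project() and the RSA key import helper from securesystemslib.interface):

from securesystemslib.interface import import_rsa_publickey_from_file
from tuf import developer_tool

# Hypothetical paths -- replace with real key and metadata locations.
public_key = import_rsa_publickey_from_file('/path/to/project_key.pub')
project = developer_tool.create_new_project('my-project', '/path/to/metadata')

# The parent project role is restricted to one key; calling this a second
# time raises securesystemslib.exceptions.Error.
project.add_verification_key(public_key)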
Code Example #3
File: log.py  Project: menendezjaume/tuf
def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL):
    """
  <Purpose>
    Allow the default log level for console messages to be overridden.  If
    'log_level' is not provided, the log level defaults to 'logging.INFO'.

  <Arguments>
    log_level:
      The log level to set for the console handler.
      'log_level' examples: logging.INFO; logging.CRITICAL.

  <Exceptions>
    securesystemslib.exceptions.Error, if the 'log.py' console handler has not
    been set yet with add_console_handler().

  <Side Effects>
    Overrides the logging level for the console handler.

  <Returns>
    None.
  """

    # Does 'log_level' have the correct format?
    # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
    sslib_formats.LOGLEVEL_SCHEMA.check_match(log_level)

    # Assign to the global console_handler object.
    global console_handler

    if console_handler is not None:
        console_handler.setLevel(log_level)

    else:
        message = 'The console handler has not been set with add_console_handler().'
        raise sslib_exceptions.Error(message)
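
A short usage sketch (assumes the module is importable as tuf.log and that a console handler is attached first with add_console_handler()):

import logging

import tuf.log

# Attach the console handler, then lower its level from the default
# (logging.INFO) to logging.DEBUG.
tuf.log.add_console_handler()
tuf.log.set_console_log_level(logging.DEBUG)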
Code Example #4
def load_json_file(filepath, storage_backend=None):
  """
  <Purpose>
    Deserialize a JSON object from a file containing the object.

  <Arguments>
    filepath:
      Absolute path of JSON file.

    storage_backend:
      An object which implements
      securesystemslib.storage.StorageBackendInterface. When no object is
      passed a FilesystemBackend will be instantiated and used.

  <Exceptions>
    securesystemslib.exceptions.FormatError: If 'filepath' is improperly
    formatted.

    securesystemslib.exceptions.Error: If 'filepath' cannot be deserialized to
    a Python object.

    IOError in case of runtime IO exceptions.

  <Side Effects>
    None.

  <Returns>
    Deserialized object.  For example, a dictionary.
  """

  # Make sure that 'filepath' is a properly formatted path string.
  # securesystemslib.exceptions.FormatError is raised on incorrect format.
  formats.PATH_SCHEMA.check_match(filepath)

  if storage_backend is None:
    storage_backend = FilesystemBackend()

  deserialized_object = None
  with storage_backend.get(filepath) as file_obj:
    raw_data = file_obj.read().decode('utf-8')

    try:
      deserialized_object = json.loads(raw_data)

    except (ValueError, TypeError):
      raise exceptions.Error('Cannot deserialize to a'
          ' Python object: ' + filepath)

    else:
      return deserialized_object
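
A minimal usage sketch (hypothetical file path; assumes the function is exposed as securesystemslib.util.load_json_file, as in older securesystemslib releases):

import securesystemslib.util

# '/path/to/root.json' is a placeholder for an actual JSON file on disk.
root_metadata = securesystemslib.util.load_json_file('/path/to/root.json')
print(list(root_metadata.keys()))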
Code Example #5
def import_rsa_publickey_from_file(filepath,
                                   scheme='rsassa-pss-sha256',
                                   storage_backend=None):
    """Imports PEM-encoded RSA public key from file storage.

  The expected key format is X.509 SubjectPublicKeyInfo.

  Arguments:
    filepath: The path to read the file from.
    scheme (optional): The signing scheme assigned to the returned key object.
        See RSA_SCHEME_SCHEMA for available signing schemes.
    storage_backend (optional): An object implementing StorageBackendInterface.
        If not passed a default FilesystemBackend will be used.

  Raises:
    UnsupportedLibraryError: pyca/cryptography is not available.
    FormatError: Arguments are malformed.
    StorageError: Key file cannot be read.
    Error: Public key is malformed.

  Returns:
    An RSA public key object conformant with 'RSAKEY_SCHEMA'.

  """
    formats.PATH_SCHEMA.check_match(filepath)
    formats.RSA_SCHEME_SCHEMA.check_match(scheme)

    if storage_backend is None:
        storage_backend = FilesystemBackend()

    with storage_backend.get(filepath) as file_object:
        rsa_pubkey_pem = file_object.read().decode('utf-8')

    # Convert PEM-encoded key to 'RSAKEY_SCHEMA' format
    try:
        rsakey_dict = keys.import_rsakey_from_public_pem(
            rsa_pubkey_pem, scheme)

    except exceptions.FormatError as e:
        raise exceptions.Error('Cannot import improperly formatted'
                               ' PEM file. ' + repr(str(e)))

    return rsakey_dict
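
A minimal usage sketch (hypothetical key path; assumes the function lives in securesystemslib.interface):

from securesystemslib.interface import import_rsa_publickey_from_file

# '/path/to/rsa_key.pub' is a placeholder for a PEM-encoded public key file.
rsa_pubkey = import_rsa_publickey_from_file('/path/to/rsa_key.pub')
print(rsa_pubkey['keyid'])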
Code Example #6
File: keydb.py  Project: suryatmodulus/tuf
def add_key(key_dict, keyid=None, repository_name='default'):
    """
  <Purpose>
    Add 'key_dict' to the key database while avoiding duplicates.
    If keyid is provided, verify it is the correct keyid for 'key_dict'
    and raise an exception if it is not.

  <Arguments>
    key_dict:
      A dictionary conformant to 'securesystemslib.formats.ANYKEY_SCHEMA'.
      It has the form:

      {'keytype': 'rsa',
       'keyid': keyid,
       'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
                  'private': '-----BEGIN RSA PRIVATE KEY----- ...'}}

    keyid:
      An object conformant to 'KEYID_SCHEMA'.  It is used as an identifier
      for RSA keys.

    repository_name:
      The name of the repository to add the key to.  If not supplied, the key is
      added to the 'default' repository.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if the arguments do not have the
    correct format.

    securesystemslib.exceptions.Error, if 'keyid' does not match the keyid
    for 'key_dict'.

    tuf.exceptions.KeyAlreadyExistsError, if 'key_dict' is found in the key
    database.

    securesystemslib.exceptions.InvalidNameError, if 'repository_name' does
    not exist in the key database.

  <Side Effects>
    The keydb key database is modified.

  <Returns>
    None.
  """

    # Does 'key_dict' have the correct format?
    # This check will ensure 'key_dict' has the appropriate number of objects
    # and object types, and that all dict keys are properly named.
    # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
    sslib_formats.ANYKEY_SCHEMA.check_match(key_dict)

    # Does 'repository_name' have the correct format?
    sslib_formats.NAME_SCHEMA.check_match(repository_name)

    # Does 'keyid' have the correct format?
    if keyid is not None:
        # Raise 'securesystemslib.exceptions.FormatError' if the check fails.
        sslib_formats.KEYID_SCHEMA.check_match(keyid)

        # Check if each keyid found in 'key_dict' matches 'keyid'.
        if keyid != key_dict['keyid']:
            raise sslib_exceptions.Error('Incorrect keyid.  Got ' +
                                         key_dict['keyid'] + ' but expected ' +
                                         keyid)

    # Ensure 'repository_name' is actually set in the key database.
    if repository_name not in _keydb_dict:
        raise sslib_exceptions.InvalidNameError(
            'Repository name does not exist:'
            ' ' + repr(repository_name))

    # Ensure the keyid belonging to 'key_dict' is not already in the key
    # database before adding it.
    keyid = key_dict['keyid']
    if keyid in _keydb_dict[repository_name]:
        raise exceptions.KeyAlreadyExistsError('Key: ' + keyid)

    _keydb_dict[repository_name][keyid] = copy.deepcopy(key_dict)
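
A short usage sketch (assumes tuf.keydb and securesystemslib.keys are importable; the key is freshly generated rather than loaded from disk):

from securesystemslib import keys
from tuf import keydb

# Generate an in-memory RSA key conformant to ANYKEY_SCHEMA.
rsa_key = keys.generate_rsa_key()

# Add the key to the 'default' repository's key database.  A second call
# with the same keyid raises tuf.exceptions.KeyAlreadyExistsError.
keydb.add_key(rsa_key)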
Code Example #7
File: developer_tool.py  Project: avelichka/tuf
def _generate_and_write_metadata(rolename,
                                 metadata_filename,
                                 write_partial,
                                 targets_directory,
                                 prefix='',
                                 repository_name='default'):
    """
    Non-public function that can generate and write the metadata of the
    specified 'rolename'.  It also increments version numbers if:

    1.  write_partial==True and the metadata is the first to be written.

    2.  write_partial==False (i.e., write()), the metadata was not loaded as
        partially written, and a write_partial is not needed.
  """

    metadata = None

    # Retrieve the roleinfo of 'rolename' to extract the needed metadata
    # attributes, such as version number, expiration, etc.
    roleinfo = roledb.get_roleinfo(rolename, repository_name)

    metadata = generate_targets_metadata(targets_directory, roleinfo['paths'],
                                         roleinfo['version'],
                                         roleinfo['expires'],
                                         roleinfo['delegations'], False)

    # Prepend the prefix to the project's filepaths to avoid signature errors
    # upstream.
    for element in list(metadata['targets']):
        junk, relative_target = os.path.split(element)
        prefixed_path = os.path.join(prefix, relative_target)
        metadata['targets'][prefixed_path] = metadata['targets'][element]
        if prefix != '':
            del (metadata['targets'][element])

    signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'],
                                      metadata_filename, repository_name)

    # Check if the version number of 'rolename' may be automatically
    # incremented, depending on whether partial metadata is loaded and whether
    # the metadata is written with write() or write_partial().
    # Increment the version number if this is the first partial write.
    if write_partial:
        temp_signable = repo_lib.sign_metadata(metadata, [], metadata_filename,
                                               repository_name)
        temp_signable['signatures'].extend(roleinfo['signatures'])
        status = sig.get_signature_status(temp_signable, rolename,
                                          repository_name)
        if len(status['good_sigs']) == 0:
            metadata['version'] = metadata['version'] + 1
            signable = repo_lib.sign_metadata(metadata,
                                              roleinfo['signing_keyids'],
                                              metadata_filename,
                                              repository_name)

    # non-partial write()
    else:
        if sig.verify(signable, rolename, repository_name):
            metadata['version'] = metadata['version'] + 1
            signable = repo_lib.sign_metadata(metadata,
                                              roleinfo['signing_keyids'],
                                              metadata_filename,
                                              repository_name)

    # Write the metadata to file if it contains a threshold of signatures.
    signable['signatures'].extend(roleinfo['signatures'])

    if sig.verify(signable, rolename, repository_name) or write_partial:
        repo_lib._remove_invalid_and_duplicate_signatures(
            signable, repository_name)
        storage_backend = sslib_storage.FilesystemBackend()
        filename = repo_lib.write_metadata_file(signable, metadata_filename,
                                                metadata['version'], False,
                                                storage_backend)

    # 'signable' contains an invalid threshold of signatures.
    else:
        message = 'Not enough signatures for ' + repr(metadata_filename)
        raise sslib_exceptions.Error(message, signable)

    return signable, filename
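
The version-increment rule above can be distilled into a small standalone sketch (a hypothetical helper that only mirrors the control flow; the real function also signs and writes the metadata):

def _should_increment_version(write_partial, has_good_sigs):
    # Hypothetical distillation: a partial write bumps the version only when
    # no good signatures exist yet (first partial write); a full write bumps
    # it only when the metadata already verifies against the role's threshold.
    if write_partial:
        return not has_good_sigs
    return has_good_sigs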
Code Example #8
def verify(signable,
           role,
           repository_name='default',
           threshold=None,
           keyids=None):
    """
  <Purpose>
    Verify that 'signable' has a valid threshold of authorized signatures
    identified by unique keyids. The threshold and whether a keyid is
    authorized is determined by querying the 'threshold' and 'keyids' info for
    the passed 'role' in 'roledb'. Both values can be overwritten by
    passing the 'threshold' or 'keyids' arguments.

    NOTE:
    - Signatures with identical authorized keyids only count towards the
      threshold once.
    - Signatures with the same key only count toward the threshold once.

  <Arguments>
    signable:
      A dictionary containing a list of signatures and a 'signed' identifier
      that conforms to SIGNABLE_SCHEMA, e.g.:
      signable = {'signed':, 'signatures': [{'keyid':, 'method':, 'sig':}]}

    role:
      TUF role string (e.g. 'root', 'targets', 'snapshot' or 'timestamp').

    threshold:
      Rather than reference the role's threshold as set in roledb, use
      the given 'threshold' to calculate the signature status of 'signable'.
      'threshold' is an integer value that sets the role's threshold value, or
      the minimum number of signatures needed for metadata to be considered
      fully signed.

    keyids:
      Similar to the 'threshold' argument, use the supplied list of 'keyids'
      to calculate the signature status, instead of referencing the keyids
      in roledb for 'role'.

  <Exceptions>
    tuf.exceptions.UnknownRoleError, if 'role' is not recognized.

    securesystemslib.exceptions.FormatError, if 'signable' is not formatted
    correctly.

    securesystemslib.exceptions.Error, if an invalid threshold is encountered.

  <Side Effects>
    tuf.sig.get_signature_status() called.  Any exceptions thrown by
    get_signature_status() will be caught here and re-raised.

  <Returns>
    Boolean.  True if the number of good unique (by keyid) signatures >= the
    role's threshold, False otherwise.
  """

    formats.SIGNABLE_SCHEMA.check_match(signable)
    formats.ROLENAME_SCHEMA.check_match(role)
    sslib_formats.NAME_SCHEMA.check_match(repository_name)

    # Retrieve the signature status.  tuf.sig.get_signature_status() raises:
    # tuf.exceptions.UnknownRoleError
    # securesystemslib.exceptions.FormatError.  'threshold' and 'keyids' are also
    # validated.
    status = get_signature_status(signable, role, repository_name, threshold,
                                  keyids)

    # Retrieve the role's threshold and the authorized keys of 'status'.
    threshold = status['threshold']
    good_sigs = status['good_sigs']

    # Does 'status' have the required threshold of signatures?
    # First check for invalid threshold values before returning result.
    # Note: get_signature_status() is expected to verify that 'threshold' is
    # not None or <= 0.
    if threshold is None or threshold <= 0:  #pragma: no cover
        raise sslib_exceptions.Error("Invalid threshold: " + repr(threshold))

    unique_keys = set()
    for keyid in good_sigs:
        key = keydb.get_key(keyid, repository_name)
        unique_keys.add(key['keyval']['public'])

    return len(unique_keys) >= threshold
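
A usage sketch with hypothetical in-memory metadata (assumes tuf.keydb, tuf.sig and securesystemslib.keys are importable; the 'threshold' and 'keyids' overrides documented above are used instead of a pre-populated roledb):

from securesystemslib import formats as sslib_formats
from securesystemslib import keys
from tuf import keydb, sig

# Register a freshly generated key so get_signature_status() can find it.
key = keys.generate_ed25519_key()
keydb.add_key(key)

# Sign placeholder metadata over its canonical JSON encoding.
signed = {'_type': 'targets', 'version': 1}
data = sslib_formats.encode_canonical(signed).encode('utf-8')
signable = {'signed': signed,
            'signatures': [keys.create_signature(key, data)]}

# True: one good, unique signature satisfies the overridden threshold of 1.
print(sig.verify(signable, 'targets', threshold=1, keyids=[key['keyid']]))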
Code Example #9
def get_list_of_mirrors(file_type, file_path, mirrors_dict):
    """
  <Purpose>
    Get a list of mirror urls from a mirrors dictionary, given the type of
    the file and its path relative to the base url.

  <Arguments>
    file_type:
      Type of data needed for download, must correspond to one of the strings
      in the list ['meta', 'target'].  'meta' for metadata file type or
      'target' for target file type.  It should correspond to
      NAME_SCHEMA format.

    file_path:
      A relative path to the file that corresponds to RELPATH_SCHEMA format.
      Ex: 'http://url_prefix/targets_path/file_path'

    mirrors_dict:
      A mirrors_dict object that corresponds to MIRRORDICT_SCHEMA, where
      keys are strings and values are MIRROR_SCHEMA. An example format
      of MIRROR_SCHEMA:

      {'url_prefix': 'http://localhost:8001',
       'metadata_path': 'metadata/',
       'targets_path': 'targets/',
       'confined_target_dirs': ['targets/snapshot1/', ...],
       'custom': {...}}

      The 'custom' field is optional.

  <Exceptions>
    securesystemslib.exceptions.Error, on unsupported 'file_type'.

    securesystemslib.exceptions.FormatError, on bad argument.

  <Return>
    List of mirror urls corresponding to the file_type and file_path.  If no
    match is found, empty list is returned.
  """

    # Check that all the arguments have the appropriate format.
    formats.RELPATH_SCHEMA.check_match(file_path)
    formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict)
    sslib_formats.NAME_SCHEMA.check_match(file_type)

    # Verify 'file_type' is supported.
    if file_type not in _SUPPORTED_FILE_TYPES:
        raise sslib_exceptions.Error('Invalid file_type argument.'
                                     '  Supported file types: ' +
                                     repr(_SUPPORTED_FILE_TYPES))
    path_key = 'metadata_path' if file_type == 'meta' else 'targets_path'

    list_of_mirrors = []
    for junk, mirror_info in mirrors_dict.items():
        # Does mirror serve this file type at all?
        path = mirror_info.get(path_key)
        if path is None:
            continue

        # For target files, ensure directory confinement.
        if path_key == 'targets_path':
            full_filepath = os.path.join(path, file_path)
            confined_target_dirs = mirror_info.get('confined_target_dirs')
            # confined_target_dirs is optional and can be used to confine the client to
            # certain paths on a repository mirror when fetching target files.
            if confined_target_dirs and not file_in_confined_directories(
                    full_filepath, confined_target_dirs):
                continue

        # parse.quote(string) replaces special characters in string using the %xx
        # escape.  This is done to avoid parsing issues of the URL on the server
        # side. Do *NOT* pass URLs with Unicode characters without first encoding
        # the URL as UTF-8. We need a long-term solution with #61.
        # http://bugs.python.org/issue1712522
        # Quote into a separate variable so that later loop iterations do not
        # re-quote an already escaped path.
        quoted_path = parse.quote(file_path)
        url = os.path.join(mirror_info['url_prefix'], path, quoted_path)

        # The above os.path.join() result as well as input file_path may be
        # invalid on windows (might contain both separator types), see #1077.
        # Make sure the URL doesn't contain backward slashes on Windows.
        list_of_mirrors.append(url.replace('\\', '/'))

    return list_of_mirrors
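
A usage sketch with a hypothetical mirror configuration (assumes the function is the one in tuf's mirrors.py, importable as tuf.mirrors):

from tuf import mirrors

mirror_config = {
    'mirror1': {'url_prefix': 'http://localhost:8001',
                'metadata_path': 'metadata',
                'targets_path': 'targets',
                'confined_target_dirs': ['']},
}

# -> ['http://localhost:8001/metadata/root.json']
print(mirrors.get_list_of_mirrors('meta', 'root.json', mirror_config))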