Example #1
    def test_with_tuf_mode_1(self):
        # Simulate a slow retrieval attack.
        # 'mode_1': When the download begins, the server blocks the download for a long
        # time by doing nothing before it sends the first byte of data.

        server_process = self._start_slow_server('mode_1')

        # Verify that the TUF client detects the slow retrieval attack and
        # refuses to continue the update process.
        client_filepath = os.path.join(self.client_directory, 'file1.txt')
        try:
            file1_target = self.repository_updater.target('file1.txt')
            self.repository_updater.download_target(file1_target,
                                                    self.client_directory)

        # Verify that a download error is raised by each mirror.  The stalled
        # download returns fewer bytes than expected, so the client raises
        # 'tuf.DownloadLengthMismatchError'.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'targets', 'file1.txt')

                # Verify that 'file1.txt' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(
                    isinstance(mirror_error, tuf.DownloadLengthMismatchError))

        else:
            self.fail('TUF did not prevent a slow retrieval attack.')

        finally:
            self._stop_slow_server(server_process)
Example #2
  def check_match(self, object):
    if not isinstance(object, dict): 
      raise tuf.FormatError('Expected a dict but got '+repr(object))

    for key, value in six.iteritems(object):
      self._key_schema.check_match(key)
      self._value_schema.check_match(value)
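
A hedged usage sketch of the method above, which matches tuf.schema.DictOf; the constructor pairing of a key schema with a value schema is inferred from the attributes the method uses, and AnyString/Integer are assumed sibling schema classes:

import tuf
import tuf.schema

# A dict schema whose keys must be strings and whose values must be integers.
VERSIONS_SCHEMA = tuf.schema.DictOf(
    key_schema=tuf.schema.AnyString(),
    value_schema=tuf.schema.Integer())

VERSIONS_SCHEMA.check_match({'timestamp.json': 1})  # Passes silently.

try:
  VERSIONS_SCHEMA.check_match({'timestamp.json': 'not-an-integer'})

except tuf.FormatError as e:
  print('Rejected as expected: ' + repr(e))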
Example #3
def create_roledb_from_root_metadata(root_metadata):
    """
  <Purpose>
    Create a role database containing all of the unique roles found in
    'root_metadata'.

  <Arguments>
    root_metadata:
      A dictionary conformant to 'tuf.formats.ROOT_SCHEMA'.  The roles found
      in the 'roles' field of 'root_metadata' are needed by this function.

  <Exceptions>
    tuf.FormatError, if 'root_metadata' does not have the correct object format.

    tuf.Error, if one of the roles found in 'root_metadata' contains an invalid
    delegation (i.e., a nonexistent parent role).

  <Side Effects>
    Calls add_role().
    
    The old role database is replaced.

  <Returns>
    None.
  """

    # Does 'root_metadata' have the correct object format?
    # This check will ensure 'root_metadata' has the appropriate number of objects
    # and object types, and that all dict keys are properly named.
    # Raises tuf.FormatError.
    tuf.formats.ROOT_SCHEMA.check_match(root_metadata)

    # Clear the role database.
    _roledb_dict.clear()

    # Do not modify the contents of the 'root_metadata' argument.
    root_metadata = copy.deepcopy(root_metadata)

    # Iterate through the roles found in 'root_metadata'
    # and add them to '_roledb_dict'.  Duplicates are avoided.
    for rolename, roleinfo in six.iteritems(root_metadata['roles']):
        if rolename == 'root':
            roleinfo['version'] = root_metadata['version']
            roleinfo['expires'] = root_metadata['expires']

        roleinfo['signatures'] = []
        roleinfo['signing_keyids'] = []
        roleinfo['compressions'] = ['']
        roleinfo['partial_loaded'] = False
        if rolename.startswith('targets'):
            roleinfo['paths'] = {}
            roleinfo['delegations'] = {'keys': {}, 'roles': []}

        try:
            add_role(rolename, roleinfo)
        # tuf.Error raised if the parent role of 'rolename' does not exist.
        except tuf.Error as e:
            logger.error(e)
            raise
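
A minimal usage sketch; the metadata path is illustrative and assumes a signed root.json produced by the repository tools:

import tuf.util
import tuf.roledb

# Load the signable and rebuild the role database from its 'signed' portion.
signable = tuf.util.load_json_file('metadata/root.json')
root_metadata = signable['signed']  # Must match tuf.formats.ROOT_SCHEMA.

tuf.roledb.create_roledb_from_root_metadata(root_metadata)
print(tuf.roledb.get_rolenames())  # e.g., ['root', 'targets', 'snapshot', ...]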
Example #4
 def test_get_target_hash(self):
   # Test normal case. 
   expected_target_hashes = {
     '/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
     '/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0',
     '/packages/file2.txt': 'c9c4a5cdd84858dd6a23d98d7e6e6b2aec45034946c16b2200bc317c75415e92'  
   }
   for filepath, target_hash in six.iteritems(expected_target_hashes):
     self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
     self.assertTrue(tuf.formats.HASH_SCHEMA.matches(target_hash))
     self.assertEqual(repo_lib.get_target_hash(filepath), target_hash)
  
   # Test for improperly formatted argument.
   self.assertRaises(tuf.FormatError, repo_lib.get_target_hash, 8)
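
The expected hashes above are consistent with get_target_hash() computing the SHA-256 digest of the target path string; a sketch of that assumption:

import hashlib

# Hash the target path itself (not the file contents), assuming the default
# 'sha256' algorithm.
print(hashlib.sha256('/file1.txt'.encode('utf-8')).hexdigest())
# Should print the expected hash listed above for '/file1.txt'.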
Example #5
 def test_C1_get_target_hash(self):
   # Test normal case. 
   expected_target_hashes = {
     '/file1.txt': 'e3a3d89eb3b70ce3fbce6017d7b8c12d4abd5635427a0e8a238f53157df85b3d',
     '/README.txt': '8faee106f1bb69f34aaf1df1e3c2e87d763c4d878cb96b91db13495e32ceb0b0',
     '/warehouse/file2.txt': 'd543a573a2cec67026eff06e75702303559e64e705eba06f65799baaf0424417'
   }
   for filepath, target_hash in six.iteritems(expected_target_hashes):
     self.assertTrue(tuf.formats.RELPATH_SCHEMA.matches(filepath))
     self.assertTrue(tuf.formats.HASH_SCHEMA.matches(target_hash))
     self.assertEqual(tuf.util.get_target_hash(filepath), target_hash)
  
   # Test for improperly formatted argument.
   self.assertRaises(tuf.FormatError, tuf.util.get_target_hash, 8)
Example #6
    def test_get_list_of_mirrors(self):
        # Test: Normal case.
        mirror_list = mirrors.get_list_of_mirrors('meta', 'release.txt',
                                                  self.mirrors)
        self.assertEqual(len(mirror_list), 3)
        for mirror, mirror_info in six.iteritems(self.mirrors):
            url = mirror_info['url_prefix'] + '/metadata/release.txt'
            self.assertTrue(url in mirror_list)

        mirror_list = mirrors.get_list_of_mirrors('target', 'a.txt',
                                                  self.mirrors)
        self.assertEqual(len(mirror_list), 1)
        self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a.txt' in \
                        mirror_list)

        mirror_list = mirrors.get_list_of_mirrors('target', 'a/b',
                                                  self.mirrors)
        self.assertEqual(len(mirror_list), 1)
        self.assertTrue(self.mirrors['mirror1']['url_prefix']+'/targets/a/b' in \
                        mirror_list)

        mirror1 = self.mirrors['mirror1']
        del self.mirrors['mirror1']
        mirror_list = mirrors.get_list_of_mirrors('target', 'a/b',
                                                  self.mirrors)
        self.assertFalse(mirror_list)
        self.mirrors['mirror1'] = mirror1

        # Test: Invalid 'file_type'.
        self.assertRaises(tuf.Error, mirrors.get_list_of_mirrors,
                          self.random_string(), 'a', self.mirrors)

        self.assertRaises(tuf.Error, mirrors.get_list_of_mirrors, 12345, 'a',
                          self.mirrors)

        # Test: Improperly formatted 'file_path'.
        self.assertRaises(tuf.FormatError, mirrors.get_list_of_mirrors, 'meta',
                          12345, self.mirrors)

        # Test: Improperly formatted 'mirrors_dict' object.
        self.assertRaises(tuf.FormatError, mirrors.get_list_of_mirrors, 'meta',
                          'a', 12345)

        self.assertRaises(tuf.FormatError, mirrors.get_list_of_mirrors, 'meta',
                          'a', ['a'])

        self.assertRaises(tuf.FormatError, mirrors.get_list_of_mirrors, 'meta',
                          'a', {'a': 'b'})
Example #7
  def __init__(self, object_name='object', **required):
    """
    <Purpose> 
      Create a new Object schema.

    <Arguments>
      object_name: A string identifier for the object argument.
      
      A variable number of keyword arguments is accepted.
    """
  
    # Ensure valid arguments. 
    for key, schema in six.iteritems(required):
      if not isinstance(schema, Schema):
        raise tuf.FormatError('Expected Schema but got '+repr(schema))

    self._object_name = object_name
    self._required = list(required.items())
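
A hedged declaration sketch: each keyword argument names a required key and maps it to a sub-schema (AnyString and Integer are assumed to be sibling Schema classes in the same module, as in tuf.schema):

# Declare an Object schema requiring a 'name' string and a 'threshold' integer.
ROLE_SCHEMA = Object(
    object_name='ROLE_SCHEMA',
    name=AnyString(),
    threshold=Integer())

# Passes: all required keys are present and match their sub-schemas.
ROLE_SCHEMA.check_match({'name': 'targets', 'threshold': 1})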
Example #8
  def test_with_tuf(self):
    # The same scenario outlined in test_without_tuf() is followed here, except
    # with a TUF client.  The TUF client performs a refresh of top-level
    # metadata, which also includes 'timestamp.json'.
    
    timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                  'timestamp.json')
    
    # Modify the timestamp file on the remote repository.  'timestamp.json'
    # must be properly updated and signed with 'repository_tool.py', otherwise
    # the client will reject it as invalid metadata.  The resulting
    # 'timestamp.json' should be valid metadata, but expired (as intended).
    repository = repo_tool.load_repository(self.repository_directory)
 
    key_file = os.path.join(self.keystore_directory, 'timestamp_key') 
    timestamp_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')

    repository.timestamp.load_signing_key(timestamp_private)
    
    # Set the timestamp to expire in 1 second.
    datetime_object = tuf.formats.unix_timestamp_to_datetime(int(time.time() + 1))
    repository.timestamp.expiration = datetime_object
    repository.write()
    
    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))
    
    # Verify that the TUF client detects outdated metadata and refuses to
    # continue the update process.  Sleep for at least 2 seconds to ensure
    # 'repository.timestamp.expiration' is reached.
    time.sleep(2)
    try:
      self.repository_updater.refresh()
    
    except tuf.NoWorkingMirrorError as e:
      for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
        self.assertTrue(isinstance(mirror_error, tuf.ExpiredMetadataError))
Example #9
def _encode_canonical(object, output_function):
    # Helper for encode_canonical.  Older versions of json.encoder don't
    # even let us replace the separators.

    if isinstance(object, six.string_types):
        output_function(_canonical_string_encoder(object))
    elif object is True:
        output_function("true")
    elif object is False:
        output_function("false")
    elif object is None:
        output_function("null")
    elif isinstance(object, six.integer_types):
        output_function(str(object))
    elif isinstance(object, (tuple, list)):
        output_function("[")
        if len(object):
            for item in object[:-1]:
                _encode_canonical(item, output_function)
                output_function(",")
            _encode_canonical(object[-1], output_function)
        output_function("]")
    elif isinstance(object, dict):
        output_function("{")
        if len(object):
            items = sorted(six.iteritems(object))
            for key, value in items[:-1]:
                output_function(_canonical_string_encoder(key))
                output_function(":")
                _encode_canonical(value, output_function)
                output_function(",")
            key, value = items[-1]
            output_function(_canonical_string_encoder(key))
            output_function(":")
            _encode_canonical(value, output_function)
        output_function("}")
    else:
        raise tuf.FormatError('I cannot encode ' + repr(object))
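
A usage sketch: collect the output chunks with list.append, the same callback pattern the public encode_canonical() wrapper uses internally:

# Keys are emitted in sorted order with no whitespace, which is what makes
# the encoding canonical (byte-for-byte reproducible for signing).
parts = []
_encode_canonical({'b': 2, 'a': [True, None, 'x']}, parts.append)
print(''.join(parts))  # -> {"a":[true,null,"x"],"b":2}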
Example #10
    def test_with_tuf_mode_2(self):
        # Simulate a slow retrieval attack.
        # 'mode_2': During the download process, the server stalls the download
        # by sending only a few characters every few seconds.

        server_process = self._start_slow_server('mode_2')
        client_filepath = os.path.join(self.client_directory, 'file1.txt')
        original_average_download_speed = tuf.conf.MIN_AVERAGE_DOWNLOAD_SPEED
        tuf.conf.MIN_AVERAGE_DOWNLOAD_SPEED = 1

        try:
            file1_target = self.repository_updater.target('file1.txt')
            self.repository_updater.download_target(file1_target,
                                                    self.client_directory)

        # Verify that a download error is raised by each mirror.  'file1.txt'
        # should be large enough to trigger the slow retrieval protection,
        # otherwise the expected exception ('tuf.DownloadLengthMismatchError'
        # here) may not be consistently raised.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'targets', 'file1.txt')

                # Verify that 'file1.txt' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(
                    isinstance(mirror_error, tuf.DownloadLengthMismatchError))

        else:
            # Another possibility is to check for a successfully downloaded
            # 'file1.txt' at this point.
            self.fail('TUF did not prevent a slow retrieval attack.')

        finally:
            self._stop_slow_server(server_process)
            tuf.conf.MIN_AVERAGE_DOWNLOAD_SPEED = original_average_download_speed
Example #11
    def test_with_tuf(self):
        # The same scenario outlined in test_without_tuf() is followed here,
        # except with a TUF client (the scenario is described in the opening
        # comment block of that test case).  The TUF client performs a refresh
        # of top-level metadata, which also includes 'timestamp.json'.

        # Backup the current version of 'timestamp'.  It will be used as the
        # outdated version returned to the client.  The repository tool removes
        # obsolete metadata, so do *not* save the backup version in the
        # repository's metadata directory.
        timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                      'timestamp.json')
        backup_timestamp = os.path.join(self.repository_directory,
                                        'timestamp.json.backup')
        shutil.copy(timestamp_path, backup_timestamp)

        # Save the fileinfo of the previous version of 'timestamp.json'.
        length, hashes = tuf.util.get_file_details(backup_timestamp)
        previous_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Modify the timestamp file on the remote repository.
        repository = repo_tool.load_repository(self.repository_directory)
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)

        # Set an arbitrary expiration so that the repository tool generates a new
        # version.
        repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Save the fileinfo of the new version generated to verify that it is
        # saved by the client.
        length, hashes = tuf.util.get_file_details(timestamp_path)
        new_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Refresh top-level metadata, including 'timestamp.json'.  Installation of
        # new version of 'timestamp.json' is expected.
        self.repository_updater.refresh()

        client_timestamp_path = os.path.join(self.client_directory, 'metadata',
                                             'current', 'timestamp.json')
        length, hashes = tuf.util.get_file_details(client_timestamp_path)
        download_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the new version.
        self.assertEqual(download_fileinfo, new_fileinfo)

        # Restore the previous version of 'timestamp.json' on the remote
        # repository so that the client is served replayed (outdated) metadata.
        shutil.move(backup_timestamp, timestamp_path)

        # Verify that the TUF client detects replayed metadata and refuses to
        # continue the update process.
        try:
            self.repository_updater.refresh()

        # Verify that the specific 'tuf.ReplayedMetadataError' is raised by each
        # mirror.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata',
                                        'timestamp.json')

                # Verify that 'timestamp.json' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(
                    isinstance(mirror_error, tuf.ReplayedMetadataError))

        else:
            self.fail('TUF did not prevent a replay attack.')
Example #12
def get_list_of_mirrors(file_type, file_path, mirrors_dict):
    """
  <Purpose>
    Get a list of mirror urls from a mirrors dictionary, provided the type
    and the path of the file with respect to the base url.

  <Arguments>
    file_type:
      Type of data needed for download, must correspond to one of the strings
      in the list ['meta', 'target'].  'meta' for metadata file type or
      'target' for target file type.  It should correspond to
      NAME_SCHEMA format.

    file_path:
      A relative path to the file that corresponds to RELPATH_SCHEMA format.
      Ex: 'http://url_prefix/targets_path/file_path'

    mirrors_dict:
      A mirrors_dict object that corresponds to MIRRORDICT_SCHEMA, where
      keys are strings and values are MIRROR_SCHEMA. An example format
      of MIRROR_SCHEMA:

      {'url_prefix': 'http://localhost:8001',
       'metadata_path': 'metadata/',
       'targets_path': 'targets/',
       'confined_target_dirs': ['targets/snapshot1/', ...],
       'custom': {...}}

      The 'custom' field is optional.

  <Exceptions>
    tuf.Error, on unsupported 'file_type'.
    
    tuf.FormatError, on bad argument.

  <Returns>
    A list of mirror URLs corresponding to 'file_type' and 'file_path'.  If
    no match is found, an empty list is returned.
  """

    # Check that all the arguments have the appropriate format.
    tuf.formats.RELPATH_SCHEMA.check_match(file_path)
    tuf.formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict)
    tuf.formats.NAME_SCHEMA.check_match(file_type)

    # Verify 'file_type' is supported.
    if file_type not in _SUPPORTED_FILE_TYPES:
        message = 'Invalid file_type argument.  '+ \
          'Supported file types: '+repr(_SUPPORTED_FILE_TYPES)
        raise tuf.Error(message)

    # Reference to 'tuf.util.file_in_confined_directories()' (for readability).
    # This function checks whether a mirror should serve a file to the client.
    # A client may be confined to certain paths on a repository mirror
    # when fetching target files.  This field may be set by the client when
    # the repository mirror is added to the 'tuf.client.updater.Updater' object.
    in_confined_directory = tuf.util.file_in_confined_directories

    list_of_mirrors = []
    for mirror_name, mirror_info in six.iteritems(mirrors_dict):
        if file_type == 'meta':
            base = mirror_info['url_prefix'] + '/' + mirror_info[
                'metadata_path']

        # 'file_type' == 'target'.  'file_type' should have been verified to contain
        # a supported string value above (either 'meta' or 'target').
        else:
            targets_path = mirror_info['targets_path']
            full_filepath = os.path.join(targets_path, file_path)
            if not in_confined_directory(full_filepath,
                                         mirror_info['confined_target_dirs']):
                continue
            base = mirror_info['url_prefix'] + '/' + mirror_info['targets_path']

        # urllib.quote(string) replaces special characters in string using the %xx
        # escape.  This is done to avoid parsing issues of the URL on the server
        # side.  Do *NOT* pass URLs with Unicode characters without first encoding
        # the URL as UTF-8.  We need a long-term solution with #61.
        # http://bugs.python.org/issue1712522
        # Quote into a local variable so that 'file_path' is not quoted again
        # (and thus double-escaped) on later loop iterations.
        quoted_path = six.moves.urllib.parse.quote(file_path)
        url = base + '/' + quoted_path.lstrip(os.sep)
        list_of_mirrors.append(url)

    return list_of_mirrors
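
A hedged usage sketch; the mirror definition follows the MIRROR_SCHEMA example from the docstring above:

# One mirror confined to the repository root ('' confines nothing away).
mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                       'metadata_path': 'metadata',
                       'targets_path': 'targets',
                       'confined_target_dirs': ['']}}

print(get_list_of_mirrors('meta', 'snapshot.json', mirrors))
# -> ['http://localhost:8001/metadata/snapshot.json']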
Example #13
    def test_with_tuf(self):
        # An attacker tries to trick a client into installing an extraneous target
        # file (a valid file on the repository, in this case) by listing it in the
        # project's metadata file.  For the purposes of test_with_tuf(),
        # 'targets/role1.json' is treated as the metadata file that indicates all
        # the files needed to install/update the 'role1' project.  The attacker
        # simply adds the extraneous target file to 'role1.json', which the TUF
        # client should reject as untrusted.
        role1_filepath = os.path.join(self.repository_directory, 'metadata',
                                      'targets', 'role1.json')
        file1_filepath = os.path.join(self.repository_directory, 'targets',
                                      'file1.txt')
        length, hashes = tuf.util.get_file_details(file1_filepath)

        role1_metadata = tuf.util.load_json_file(role1_filepath)
        role1_metadata['signed']['targets']['/file2.txt'] = {}
        role1_metadata['signed']['targets']['/file2.txt']['hashes'] = hashes
        role1_metadata['signed']['targets']['/file2.txt']['length'] = length

        tuf.formats.check_signable_object_format(role1_metadata)

        with open(role1_filepath, 'wt') as file_object:
            json.dump(role1_metadata, file_object, indent=1, sort_keys=True)

        # Un-install the metadata of the top-level roles so that the client can
        # download and detect the invalid 'role1.json'.
        os.remove(
            os.path.join(self.client_directory, 'metadata', 'current',
                         'snapshot.json'))
        os.remove(
            os.path.join(self.client_directory, 'metadata', 'current',
                         'targets.json'))
        os.remove(
            os.path.join(self.client_directory, 'metadata', 'current',
                         'timestamp.json'))
        os.remove(
            os.path.join(self.client_directory, 'metadata', 'current',
                         'targets', 'role1.json'))

        # Verify that the TUF client rejects the invalid metadata and refuses to
        # continue the update process.
        self.repository_updater.refresh()

        try:
            self.repository_updater.targets_of_role('targets/role1')

        # Verify that the specific 'tuf.BadHashError' exception is raised by each
        # mirror.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata', 'targets',
                                        'role1.json')

                # Verify that 'role1.json' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(isinstance(mirror_error, tuf.BadHashError))

        else:
            self.fail('TUF did not prevent an extraneous dependencies attack.')
Example #14
def load_project(project_directory, prefix='', new_targets_location=None):
    """
  <Purpose>
    Return a Project object initialized with the contents of the metadata 
    files loaded from 'project_directory'.

  <Arguments>
    project_directory: 
      The path to the project's metadata and configuration file.

    prefix:
      The prefix for the metadata, if defined.  It replaces the project's
      current prefix; the existing prefix is saved and used to strip the
      loaded metadata.

    new_targets_location:
      For flat project configurations, the project owner might want to reload
      the project with a new location for the target files.  This overrides
      the previous path used to search for the target files.

  <Exceptions>
    tuf.FormatError, if 'project_directory' or any of the metadata files
    are improperly formatted. 

  <Side Effects>
    All the metadata files found in the project are loaded and their contents
    stored in a libtuf.Repository object.

  <Returns>
    A tuf.developer_tool.Project object.
  """

    # Does 'project_directory' have the correct format?
    # Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.PATH_SCHEMA.check_match(project_directory)

    # Do the same for the prefix.
    tuf.formats.PATH_SCHEMA.check_match(prefix)

    # Clear the role and key databases since we are loading in a new project.
    tuf.roledb.clear_roledb()
    tuf.keydb.clear_keydb()

    # Locate metadata filepaths and targets filepath.
    project_directory = os.path.abspath(project_directory)

    # Load the cfg file and the project.
    config_filename = os.path.join(project_directory, PROJECT_FILENAME)

    try:
        project_configuration = tuf.util.load_json_file(config_filename)
        tuf.formats.PROJECT_CFG_SCHEMA.check_match(project_configuration)

    except (OSError, IOError):
        raise

    targets_directory = os.path.join(project_directory,
                                     project_configuration['targets_location'])

    if project_configuration['layout_type'] == 'flat':
        project_directory, relative_junk = os.path.split(project_directory)
        targets_directory = project_configuration['targets_location']
        if new_targets_location is not None:
            targets_directory = new_targets_location

    metadata_directory = os.path.join(
        project_directory, project_configuration['metadata_location'])

    new_prefix = None
    if prefix != '':
        new_prefix = prefix

    prefix = project_configuration['prefix']

    # Load the project's filename.
    project_name = project_configuration['project_name']
    project_filename = project_name + METADATA_EXTENSION

    # Create a blank project on the target directory.
    project = Project(project_name, metadata_directory, targets_directory,
                      prefix)

    project.threshold = project_configuration['threshold']
    project._prefix = project_configuration['prefix']
    project.layout_type = project_configuration['layout_type']

    # Traverse the public keys and add them to the project.
    keydict = project_configuration['public_keys']
    for keyid in keydict:
        key = format_metadata_to_key(keydict[keyid])
        project.add_verification_key(key)

    # Load the project's metadata.
    targets_metadata_path = os.path.join(project_directory, metadata_directory,
                                         project_filename)
    signable = tuf.util.load_json_file(targets_metadata_path)
    tuf.formats.check_signable_object_format(signable)
    targets_metadata = signable['signed']

    # Remove the prefix from the metadata.
    targets_metadata = _strip_prefix_from_targets_metadata(
        targets_metadata, prefix)
    for signature in signable['signatures']:
        project.add_signature(signature)

    # Update roledb.py containing the loaded project attributes.
    roleinfo = tuf.roledb.get_roleinfo(project_name)
    roleinfo['signatures'].extend(signable['signatures'])
    roleinfo['version'] = targets_metadata['version']
    roleinfo['paths'] = targets_metadata['targets']
    roleinfo['delegations'] = targets_metadata['delegations']
    roleinfo['partial_loaded'] = False

    # Check if the loaded metadata was partially written and update the
    # flag in 'roledb.py'.
    if _metadata_is_partially_loaded(project_name, signable, roleinfo):
        roleinfo['partial_loaded'] = True

    tuf.roledb.update_roleinfo(project_name, roleinfo)

    for key_metadata in targets_metadata['delegations']['keys'].values():
        key_object = tuf.keys.format_metadata_to_key(key_metadata)
        tuf.keydb.add_key(key_object)

    for role in targets_metadata['delegations']['roles']:
        rolename = role['name']
        roleinfo = {
            'name': role['name'],
            'keyids': role['keyids'],
            'threshold': role['threshold'],
            'compressions': [''],
            'signing_keyids': [],
            'signatures': [],
            'partial_loaded': False,
            'delegations': {
                'keys': {},
                'roles': []
            }
        }
        tuf.roledb.add_role(rolename, roleinfo)

    # Load delegated targets metadata.
    # Walk the 'targets/' directory and generate the fileinfo of all the files
    # listed.  This information is stored in the 'meta' field of the release
    # metadata object.
    targets_objects = {}
    loaded_metadata = []
    targets_objects[project_name] = project
    metadata_directory = os.path.join(project_directory, metadata_directory)
    targets_metadata_directory = os.path.join(metadata_directory, project_name)
    if os.path.exists(targets_metadata_directory) and \
                      os.path.isdir(targets_metadata_directory):
        for root, directories, files in os.walk(targets_metadata_directory):

            # 'files' here is a list of target file names.
            for basename in files:
                metadata_path = os.path.join(root, basename)
                metadata_name = \
                  metadata_path[len(metadata_directory):].lstrip(os.path.sep)

                # Strip the extension.  The roledb does not include an appended
                # '.json' extension for each role.
                if metadata_name.endswith(METADATA_EXTENSION):
                    extension_length = len(METADATA_EXTENSION)
                    metadata_name = metadata_name[:-extension_length]

                else:
                    continue

                signable = None
                try:
                    signable = tuf.util.load_json_file(metadata_path)

                except (ValueError, IOError, tuf.Error):
                    raise

                # Strip the prefix from the local working copy; it will be
                # added again when the targets metadata is written to disk.
                metadata_object = signable['signed']
                metadata_object = _strip_prefix_from_targets_metadata(
                    metadata_object, prefix)

                roleinfo = tuf.roledb.get_roleinfo(metadata_name)
                roleinfo['signatures'].extend(signable['signatures'])
                roleinfo['version'] = metadata_object['version']
                roleinfo['expires'] = metadata_object['expires']
                roleinfo['paths'] = {}
                for filepath, fileinfo in six.iteritems(
                        metadata_object['targets']):
                    roleinfo['paths'].update(
                        {filepath: fileinfo.get('custom', {})})
                roleinfo['delegations'] = metadata_object['delegations']
                roleinfo['partial_loaded'] = False

                if os.path.exists(metadata_path + '.gz'):
                    roleinfo['compressions'].append('gz')

                # If the metadata was partially loaded, update the roleinfo flag.
                if _metadata_is_partially_loaded(metadata_name, signable,
                                                 roleinfo):
                    roleinfo['partial_loaded'] = True

                tuf.roledb.update_roleinfo(metadata_name, roleinfo)

                # Append to list of elements to avoid reloading repeated metadata.
                loaded_metadata.append(metadata_name)

                # Add the delegation.
                new_targets_object = Targets(targets_directory, metadata_name,
                                             roleinfo)
                targets_object = \
                  targets_objects[tuf.roledb.get_parent_rolename(metadata_name)]
                targets_objects[metadata_name] = new_targets_object

                targets_object._delegated_roles[(os.path.basename(metadata_name))] = \
                                      new_targets_object

                # Add the keys specified in the delegations field of the Targets role.
                for key_metadata in metadata_object['delegations'][
                        'keys'].values():
                    key_object = tuf.keys.format_metadata_to_key(key_metadata)
                    try:
                        tuf.keydb.add_key(key_object)

                    except tuf.KeyAlreadyExistsError:
                        pass

                for role in metadata_object['delegations']['roles']:
                    rolename = role['name']
                    roleinfo = {
                        'name': role['name'],
                        'keyids': role['keyids'],
                        'threshold': role['threshold'],
                        'compressions': [''],
                        'signing_keyids': [],
                        'signatures': [],
                        'partial_loaded': False,
                        'delegations': {
                            'keys': {},
                            'roles': []
                        }
                    }
                    tuf.roledb.add_role(rolename, roleinfo)

    if new_prefix:
        project._prefix = new_prefix

    return project
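
A hedged usage sketch; the project path and prefix are illustrative:

from tuf.developer_tool import load_project

# Reload a previously created project, replacing its metadata prefix.
project = load_project('path/to/project', prefix='http://example.com/project')
print(project.layout_type, project.threshold)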
Example #15
    def test_with_tuf(self):
        # Verify that a target file (on the remote repository) modified by an
        # attacker, to contain a large amount of extra data, is not downloaded by
        # the TUF client.  First test that the valid target file is successfully
        # downloaded.
        file1_fileinfo = self.repository_updater.target('file1.txt')
        destination = os.path.join(self.client_directory)
        self.repository_updater.download_target(file1_fileinfo, destination)
        client_target_path = os.path.join(destination, 'file1.txt')
        self.assertTrue(os.path.exists(client_target_path))

        # Verify the client's downloaded file matches the repository's.
        target_path = os.path.join(self.repository_directory, 'targets',
                                   'file1.txt')
        length, hashes = tuf.util.get_file_details(target_path)
        fileinfo = tuf.formats.make_fileinfo(length, hashes)

        length, hashes = tuf.util.get_file_details(client_target_path)
        download_fileinfo = tuf.formats.make_fileinfo(length, hashes)
        self.assertEqual(fileinfo, download_fileinfo)

        # Modify 'file1.txt' and confirm that the TUF client only downloads up to
        # the expected file length.
        with open(target_path, 'r+t') as file_object:
            original_content = file_object.read()
            file_object.write(original_content +
                              ('append large amount of data' * 10000))

        # Is the modified file actually larger?
        large_length, hashes = tuf.util.get_file_details(target_path)
        self.assertTrue(large_length > length)

        os.remove(client_target_path)
        self.repository_updater.download_target(file1_fileinfo, destination)

        # A large amount of data has been appended to the original content.  The
        # extra data appended should be discarded by the client, so the downloaded
        # file size and hash should not have changed.
        length, hashes = tuf.util.get_file_details(client_target_path)
        download_fileinfo = tuf.formats.make_fileinfo(length, hashes)
        self.assertEqual(fileinfo, download_fileinfo)

        # Also test that the TUF client does not download large metadata files.
        timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                      'timestamp.json')

        original_length, hashes = tuf.util.get_file_details(timestamp_path)

        with open(timestamp_path, 'r+') as file_object:
            timestamp_content = tuf.util.load_json_file(timestamp_path)
            large_data = 'LargeTimestamp' * 10000
            timestamp_content['signed']['_type'] = large_data
            json.dump(timestamp_content, file_object, indent=1, sort_keys=True)

        modified_length, hashes = tuf.util.get_file_details(timestamp_path)
        self.assertTrue(modified_length > original_length)

        # Does the TUF client download the upper limit of an unsafely fetched
        # 'timestamp.json'?  'timestamp.json' must not be greater than
        # 'tuf.conf.DEFAULT_TIMESTAMP_REQUIRED_LENGTH'.
        try:
            self.repository_updater.refresh()

        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                self.assertTrue(
                    isinstance(mirror_error, tuf.InvalidMetadataJSONError))

        else:
            self.fail('TUF did not prevent an endless data attack.')
Example #16
    def test_with_tuf(self):
        # Scenario:
        # An attacker tries to trick the client into installing files indicated
        # by a previous release of the corresponding metadata.  The outdated
        # metadata is properly named and was previously valid, but is no longer
        # current according to the latest 'snapshot.json' role.  Generate a new
        # snapshot of the repository after modifying a target file of
        # 'role1.json'.
        # Backup 'role1.json' (the delegated role to be updated, and then
        # inserted again for the mix-and-match attack).
        role1_path = os.path.join(self.repository_directory, 'metadata',
                                  'targets', 'role1.json')
        backup_role1 = os.path.join(self.repository_directory,
                                    'role1.json.backup')
        shutil.copy(role1_path, backup_role1)

        # Backup 'file3.txt', specified by 'role1.json'.
        file3_path = os.path.join(self.repository_directory, 'targets',
                                  'file3.txt')
        shutil.copy(file3_path, file3_path + '.backup')

        # Re-generate the required metadata on the remote repository.  The affected
        # metadata must be properly updated and signed with 'repository_tool.py',
        # otherwise the client will reject them as invalid metadata.  The resulting
        # metadata should be valid metadata.
        repository = repo_tool.load_repository(self.repository_directory)

        # Load the signing keys so that newly generated metadata is properly signed.
        timestamp_keyfile = os.path.join(self.keystore_directory,
                                         'timestamp_key')
        role1_keyfile = os.path.join(self.keystore_directory, 'delegation_key')
        snapshot_keyfile = os.path.join(self.keystore_directory,
                                        'snapshot_key')
        timestamp_private = \
          repo_tool.import_rsa_privatekey_from_file(timestamp_keyfile, 'password')
        role1_private = \
          repo_tool.import_rsa_privatekey_from_file(role1_keyfile, 'password')
        snapshot_private = \
          repo_tool.import_rsa_privatekey_from_file(snapshot_keyfile, 'password')

        repository.targets('role1').load_signing_key(role1_private)
        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        # Modify a 'role1.json' target file, and add it to its metadata so that a
        # new version is generated.
        with open(file3_path, 'wt') as file_object:
            file_object.write('This is role2\'s target file.')
        repository.targets('role1').add_target(file3_path)

        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Insert the previously valid 'role1.json'.  The TUF client should reject it.
        shutil.move(backup_role1, role1_path)

        # Verify that the TUF client detects unexpected metadata (previously valid,
        # but not up-to-date with the latest snapshot of the repository) and refuses
        # to continue the update process.
        # Refresh top-level metadata so that the client is aware of the latest
        # snapshot of the repository.
        self.repository_updater.refresh()

        try:
            self.repository_updater.targets_of_role('targets/role1')

        # Verify that the specific 'tuf.BadHashError' exception is raised by each
        # mirror.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata', 'targets',
                                        'role1.json')

                # Verify that 'role1.json' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(isinstance(mirror_error, tuf.BadHashError))

        else:
            self.fail('TUF did not prevent a mix-and-match attack.')
Example #17
def create_keydb_from_root_metadata(root_metadata):
    """
  <Purpose>
    Populate the key database with the unique keys found in 'root_metadata'.
    The database dictionary will conform to 'tuf.formats.KEYDB_SCHEMA' and
    have the form: {keyid: key, ...}.  
    The 'keyid' conforms to 'tuf.formats.KEYID_SCHEMA' and 'key' to its
    respective type.  In the case of RSA keys, this object would match
    'RSAKEY_SCHEMA'.

  <Arguments>
    root_metadata:
      A dictionary conformant to 'tuf.formats.ROOT_SCHEMA'.  The keys found
      in the 'keys' field of 'root_metadata' are needed by this function.

  <Exceptions>
    tuf.FormatError, if 'root_metadata' does not have the correct format.

  <Side Effects>
    A function to add the key to the database is called.  In the case of RSA
    keys, this function is add_key().
    
    The old keydb key database is replaced.

  <Returns>
    None.
  """

    # Does 'root_metadata' have the correct format?
    # This check will ensure 'root_metadata' has the appropriate number of objects
    # and object types, and that all dict keys are properly named.
    # Raise 'tuf.FormatError' if the check fails.
    tuf.formats.ROOT_SCHEMA.check_match(root_metadata)

    # Clear the key database.
    _keydb_dict.clear()

    # Iterate through the keys found in 'root_metadata', converting them to
    # 'RSAKEY_SCHEMA' if their type is 'rsa' and then adding them to the
    # database.
    for keyid, key_metadata in six.iteritems(root_metadata['keys']):
        if key_metadata['keytype'] in _SUPPORTED_KEY_TYPES:
            # 'key_metadata' is stored in 'KEY_SCHEMA' format.  Call
            # create_from_metadata_format() to get the key in 'RSAKEY_SCHEMA'
            # format, which is the format expected by 'add_key()'.
            key_dict = tuf.keys.format_metadata_to_key(key_metadata)
            try:
                add_key(key_dict, keyid)

            # Although keyid duplicates should *not* occur (unique dict keys), log a
            # warning and continue.
            except tuf.KeyAlreadyExistsError as e:  # pragma: no cover
                logger.warning(e)
                continue

            # 'tuf.Error' raised if keyid does not match the keyid for 'rsakey_dict'.
            except tuf.Error as e:
                logger.error(e)
                continue

        else:
            logger.warning(
                'Root metadata file contains a key with an unsupported keytype.')
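
A hedged usage sketch, parallel to create_roledb_from_root_metadata(); the metadata path is illustrative:

import tuf.util
import tuf.keydb

# Populate the key database from the 'signed' portion of a loaded root.json.
signable = tuf.util.load_json_file('metadata/root.json')
tuf.keydb.create_keydb_from_root_metadata(signable['signed'])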
Example #18
def __read_configuration(configuration_handler,
                         filename="tuf.interposition.json",
                         parent_repository_directory=None,
                         parent_ssl_certificates_directory=None):
    """
  A generic function to read TUF interposition configurations from a file and
  then handle those configurations with a given function.
  configuration_handler must be a function which accepts a
  tuf.interposition.Configuration instance.

  Returns the parsed configurations as a dictionary of configurations indexed
  by hostnames.
  """

    INVALID_TUF_CONFIGURATION = "Invalid configuration for {network_location}!"
    INVALID_TUF_INTERPOSITION_JSON = "Invalid configuration in {filename}!"
    NO_CONFIGURATIONS = "No configurations found in {filename}!"

    # Configurations indexed by hostnames.
    parsed_configurations = {}

    try:
        with open(filename) as tuf_interposition_json:
            tuf_interpositions = json.load(tuf_interposition_json)
            configurations = tuf_interpositions.get("configurations", {})

            if len(configurations) == 0:
                raise tuf.InvalidConfigurationError(
                    NO_CONFIGURATIONS.format(filename=filename))

            else:
                for network_location, configuration in six.iteritems(
                        configurations):
                    try:
                        configuration_parser = ConfigurationParser(
                            network_location,
                            configuration,
                            parent_repository_directory=
                            parent_repository_directory,
                            parent_ssl_certificates_directory=
                            parent_ssl_certificates_directory)

                        # configuration_parser.parse() returns a
                        # 'tuf.interposition.Configuration' object, which interposition
                        # uses to determine which URLs should be interposed.
                        configuration = configuration_parser.parse()
                        configuration_handler(configuration)
                        parsed_configurations[
                            configuration.hostname] = configuration

                    except Exception:
                        logger.exception(
                            INVALID_TUF_CONFIGURATION.format(
                                network_location=network_location))
                        raise

    except Exception:
        logger.exception(
            INVALID_TUF_INTERPOSITION_JSON.format(filename=filename))
        raise

    else:
        return parsed_configurations
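
A hedged sketch of the file this function parses.  Only the top-level 'configurations' mapping keyed by network location is taken from the code above; the per-configuration fields shown here are assumptions about what ConfigurationParser accepts:

import json

# Illustrative contents for 'tuf.interposition.json'.
example_configuration = {
  "configurations": {
    "localhost:8001": {
      "repository_directory": "client/",
      "repository_mirrors": {
        "mirror1": {
          "url_prefix": "http://localhost:8001",
          "metadata_path": "metadata",
          "targets_path": "targets",
          "confined_target_dirs": [""]}}}}}

with open('tuf.interposition.json', 'w') as config_file:
  json.dump(example_configuration, config_file, indent=2)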