Ejemplo n.º 1
0
    def ready(self):
        """Prepare the Director: ensure its repository directory exists,
        import its signing/verification keys, and store a Director instance
        on the global settings object."""
        import director.director as director

        if not os.path.exists(settings.DIRECTOR_REPO):
            os.makedirs(settings.DIRECTOR_REPO)

        private_keys = {}
        public_keys = {}
        for role in ['root', 'timestamp', 'snapshot']:
            key_base = os.path.join(settings.KEY_PATH, 'director' + role)
            private_keys[role] = rt.import_ed25519_privatekey_from_file(
                key_base, password='******')
            public_keys[role] = rt.import_ed25519_publickey_from_file(
                key_base + '.pub')

        # Because the demo's Director targets key is not named correctly....
        # TODO: Remove this and add 'targets' back to the role list above when
        #       the key is correctly renamed.
        targets_base = os.path.join(settings.KEY_PATH, 'director')
        private_keys['targets'] = rt.import_ed25519_privatekey_from_file(
            targets_base, password='******')
        public_keys['targets'] = rt.import_ed25519_publickey_from_file(
            targets_base + '.pub')

        settings.DIRECTOR = director.Director(
            settings.DIRECTOR_REPO,
            private_keys['root'], public_keys['root'],
            private_keys['timestamp'], public_keys['timestamp'],
            private_keys['snapshot'], public_keys['snapshot'],
            private_keys['targets'], public_keys['targets'])
Ejemplo n.º 2
0
def _load_role_keys(keystore_directory):
    """Import the public and private keys of the top-level roles plus one
    delegated role ('role1') from *keystore_directory*.

    Returns a dict mapping role name -> {'public': key, 'private': key}.
    The role keys are needed when modifying the remote repository used by
    the test cases in this unit test.
    """
    # Every pre-generated key file in 'repository_data/keystore' is
    # encrypted with the same passphrase.
    passphrase = '******'

    key_files = {
        'root': os.path.join(keystore_directory, 'root_key'),
        'targets': os.path.join(keystore_directory, 'targets_key'),
        'snapshot': os.path.join(keystore_directory, 'snapshot_key'),
        'timestamp': os.path.join(keystore_directory, 'timestamp_key'),
        'role1': os.path.join(keystore_directory, 'delegation_key'),
    }

    role_keys = {}

    # 'root' uses an RSA keypair; all other roles use ed25519 keypairs.
    role_keys['root'] = {
        'public': repo_tool.import_rsa_publickey_from_file(
            key_files['root'] + '.pub'),
        'private': repo_tool.import_rsa_privatekey_from_file(
            key_files['root'], passphrase),
    }

    for role in ('targets', 'snapshot', 'timestamp', 'role1'):
        role_keys[role] = {
            'public': repo_tool.import_ed25519_publickey_from_file(
                key_files[role] + '.pub'),
            'private': repo_tool.import_ed25519_privatekey_from_file(
                key_files[role], passphrase),
        }

    return role_keys
Ejemplo n.º 3
0
    def ready(self):
        """Create the image repository, load its signing/verification keys,
        re-register any targets recorded in existing metadata, and publish
        fresh metadata to the live metadata directory."""
        settings.REPO = rt.create_new_repository(settings.IMAGE_REPO)

        keys_pri = {}
        keys_pub = {}
        for role in ['root', 'timestamp', 'snapshot']:
            keys_pri[role] = rt.import_ed25519_privatekey_from_file(
                os.path.join(settings.KEY_PATH, 'director' + role),
                password='******')
            keys_pub[role] = rt.import_ed25519_publickey_from_file(
                os.path.join(settings.KEY_PATH, 'director' + role + '.pub'))

        # Because the demo's Director targets key is not named correctly....
        # TODO: Remove this and add 'targets' back to the role list above when
        #       the key is correctly renamed.
        keys_pri['targets'] = rt.import_ed25519_privatekey_from_file(
            os.path.join(settings.KEY_PATH, 'director'), password='******')
        keys_pub['targets'] = rt.import_ed25519_publickey_from_file(
            os.path.join(settings.KEY_PATH, 'director.pub'))

        # Register the verification and signing keys for each role.
        for role in ['root', 'timestamp', 'snapshot', 'targets']:
            role_obj = getattr(settings.REPO, role)
            role_obj.add_verification_key(keys_pub[role])
            role_obj.load_signing_key(keys_pri[role])

        # If a previous targets.json exists, re-register the target files it
        # lists so the freshly created repository still serves them.
        repo_dir = settings.IMAGE_REPO
        targets_json = os.path.join(repo_dir, 'metadata', 'targets.json')
        if os.path.exists(targets_json):
            # Context manager guarantees the file is closed even if the JSON
            # is malformed (the original open/read/close leaked on error).
            with open(targets_json) as f:
                targets_meta = json.load(f)
            targets = targets_meta['signed']['targets']
            for key in targets.keys():
                # Metadata target paths carry a leading separator; strip it
                # before joining — presumably paths are stored as '/name'
                # (TODO confirm against the metadata writer).
                filepath = os.path.join(repo_dir, 'targets', key[1:])
                if os.path.exists(filepath):
                    settings.REPO.targets.add_target(filepath)

        settings.REPO.mark_dirty(['timestamp', 'snapshot'])
        settings.REPO.write()  # will be writeall() in most recent TUF branch

        # Move staged metadata (from the write above) to live metadata directory.
        live_metadata = os.path.join(settings.IMAGE_REPO, 'metadata')
        if os.path.exists(live_metadata):
            shutil.rmtree(live_metadata)

        shutil.copytree(
            os.path.join(settings.IMAGE_REPO, 'metadata.staged'),
            live_metadata)
Ejemplo n.º 4
0
def write_and_import_keypair(filename):
    """Generate a new ed25519 keypair under 'tufkeystore/' and return it.

    Returns a (public_key, private_key) tuple imported via the repo tool.
    """
    priv_path = 'tufkeystore/{}_key'.format(filename)
    pub_path = '{}.pub'.format(priv_path)
    rt.generate_and_write_ed25519_keypair(password='******', filepath=priv_path)
    return (rt.import_ed25519_publickey_from_file(pub_path),
            rt.import_ed25519_privatekey_from_file(password='******',
                                                   filepath=priv_path))
Ejemplo n.º 5
0
def loadkey(filename):
    """Import an existing ed25519 keypair named *filename* from 'tufkeystore/'.

    Returns a (public_key, private_key) tuple.
    """
    priv_path = 'tufkeystore/{}_key'.format(filename)
    pub_path = '{}.pub'.format(priv_path)
    pub = rt.import_ed25519_publickey_from_file(pub_path)
    priv = rt.import_ed25519_privatekey_from_file(password='******',
                                                  filepath=priv_path)
    return (pub, priv)
Ejemplo n.º 6
0
    def setUpClass(cls):
        """Import the shared ed25519 targets signing key for the whole class."""
        key_path = os.path.join(
            os.getcwd(), 'repository_data', 'keystore', 'targets_key')

        cls.test_signing_key = repo_tool.import_ed25519_privatekey_from_file(
            key_path, 'password')
Ejemplo n.º 7
0
def import_keypair(name):
    """Import the ed25519 keypair named *name* from the './keys' directory.

    Returns a (public_key, private_key) tuple.
    """
    # Renamed from 'dir' to avoid shadowing the builtin dir().
    key_dir = path.join(os.getcwd(), 'keys')
    private_path = path.join(key_dir, name)
    public_path = private_path + '.pub'

    # Load the keys into TUF.
    public = rt.import_ed25519_publickey_from_file(public_path)
    private = rt.import_ed25519_privatekey_from_file(private_path,
                                                     password='******')

    return (public, private)
Ejemplo n.º 8
0
def import_private_key(keyname):
    """Import a private key using the demo's current default key config.

    Passphrase: 'pw'
    Key type: ed25519
    Key location: DEMO_KEYS_DIR
    """
    key_path = os.path.join(DEMO_KEYS_DIR, keyname)
    return rt.import_ed25519_privatekey_from_file(key_path, password='******')
Ejemplo n.º 9
0
    def write_and_import_keypair(self, name_dst):
        """Consume the next pre-generated fixture keypair and import it.

        Increments self.next_keypair_index so each call hands out a
        different fixture key. Returns (public_key, private_key).
        """
        # Identify the paths for the next pre-generated keypair.
        fixture_dir = os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'fixture_keys')
        priv_src = os.path.join(
            fixture_dir, '{}_key'.format(self.next_keypair_index))
        pub_src = '{}.pub'.format(priv_src)
        self.next_keypair_index += 1

        print('Using key {} for {}'.format(priv_src, name_dst))

        # Load the keys into TUF.
        public_key = rt.import_ed25519_publickey_from_file(pub_src)
        private_key = rt.import_ed25519_privatekey_from_file(
            priv_src, password='******')
        return (public_key, private_key)
Ejemplo n.º 10
0
def setUpModule():
    """Run once for the full module, before all tests.

    Prepares some globals, including a single Primary ECU client instance.
    When finished, it will also start up an OEM Repository Server,
    Director Server, and Time Server. Currently, it requires them to be
    already running.
    """
    global primary_ecu_key
    global test_signing_key

    destroy_temp_dir()

    key_fname = os.path.join(os.getcwd(), 'demo', 'keys', 'director')
    test_signing_key = repo_tool.import_ed25519_privatekey_from_file(
        key_fname, 'pw')
Ejemplo n.º 11
0
def add_target(parsed_arguments):
    """Add a target file to a role's metadata and re-sign the repository.

    parsed_arguments.add holds [role, target filepath, signing key path].
    Prompts interactively for the key's passphrase.
    """
    registry = repo_tool.load_repository(REPO_DIR)

    target_role = parsed_arguments.add[0]
    target_path = parsed_arguments.add[1]
    key_path = parsed_arguments.add[2]

    registry.targets(target_role).add_target(target_path)

    # Ask the operator for the passphrase protecting the encrypted role key.
    password = securesystemslib.interface.get_password(
        'Enter a password for the encrypted key (' + repr(key_path) + '): ',
        confirm=False)
    signing_key = repo_tool.import_ed25519_privatekey_from_file(
        key_path, password=password)

    registry.targets(target_role).load_signing_key(signing_key)

    registry.writeall(snapshot_merkle=True)
Ejemplo n.º 12
0
def import_private_key(keyname):
    """Import a private key using the demo's current default key config.

    Passphrase: 'pw'
    Key type: ed25519
    Key location: DEMO_KEYS_DIR
    """
    # The temporary debug print (explicitly marked "TODO: Print to be
    # deleted" in the original) has been removed as its own TODO requested.
    return rt.import_ed25519_privatekey_from_file(
        os.path.join(DEMO_KEYS_DIR, keyname), password='******')
Ejemplo n.º 13
0
    def test_with_tuf(self):
        """Verify that a TUF client detects a replayed (older) 'timestamp.json'
        and refuses to continue the update, raising ReplayedMetadataError."""
        # The same scenario outlined in test_without_tuf() is followed here, except
        # with a TUF client (scenario description provided in the opening comment
        # block of that test case.) The TUF client performs a refresh of top-level
        # metadata, which also includes 'timestamp.json'.

        # Backup the current version of 'timestamp'.  It will be used as the
        # outdated version returned to the client.  The repository tool removes
        # obsolete metadata, so do *not* save the backup version in the
        # repository's metadata directory.
        timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                      'timestamp.json')
        backup_timestamp = os.path.join(self.repository_directory,
                                        'timestamp.json.backup')
        shutil.copy(timestamp_path, backup_timestamp)

        # The fileinfo (length and hashes) of the previous version is saved to
        # verify later that it is indeed the replayed version.
        length, hashes = securesystemslib.util.get_file_details(
            backup_timestamp)
        previous_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        # Modify the timestamp file on the remote repository: load the
        # timestamp signing key so a new, validly signed version can be written.
        repository = repo_tool.load_repository(self.repository_directory)
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)

        # Set an arbitrary expiration so that the repository tool generates a new
        # version.
        repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
        repository.writeall()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Save the fileinfo of the new version generated to verify that it is
        # saved by the client.
        length, hashes = securesystemslib.util.get_file_details(timestamp_path)
        new_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        # Refresh top-level metadata, including 'timestamp.json'.  Installation of
        # the new version of 'timestamp.json' is expected.
        self.repository_updater.refresh()

        client_timestamp_path = os.path.join(self.client_directory,
                                             self.repository_name, 'metadata',
                                             'current', 'timestamp.json')
        length, hashes = securesystemslib.util.get_file_details(
            client_timestamp_path)
        download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the new version.
        self.assertEqual(download_fileinfo, new_fileinfo)

        # Restore the previous version of 'timestamp.json' on the remote
        # repository, simulating a replay attack against the client.
        shutil.move(backup_timestamp, timestamp_path)
        logger.info('Moving the timestamp.json backup to the current version.')

        # Verify that the TUF client detects replayed metadata and refuses to
        # continue the update process.
        try:
            self.repository_updater.refresh()

        # Verify that the specific 'tuf.exceptions.ReplayedMetadataError' is
        # raised by each mirror (wrapped inside NoWorkingMirrorError).
        except tuf.exceptions.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata',
                                        'timestamp.json')

                # Verify that 'timestamp.json' is the culprit.
                self.assertEqual(url_file.replace('\\', '/'), mirror_url)
                self.assertTrue(
                    isinstance(mirror_error,
                               tuf.exceptions.ReplayedMetadataError))

        else:
            self.fail('TUF did not prevent a replay attack.')
Ejemplo n.º 14
0
    def test_without_tuf(self):
        """Verify that a non-TUF client silently accepts a replayed (older)
        'timestamp.json' — the insecure baseline behavior TUF prevents."""
        # Scenario:
        # 'timestamp.json' specifies the latest version of the repository files.
        # A client should only accept the same version number (specified in the
        # file) of the metadata, or greater.  A version number less than the one
        # currently trusted should be rejected.  A non-TUF client may use a
        # different mechanism for determining versions of metadata, but version
        # numbers are used in this integration test because that is what TUF
        # uses.
        #
        # Modify the repository's 'timestamp.json' so that a new version is
        # generated and accepted by the client, and backup the previous version.
        # The previous version is then returned the next time the client requests
        # an update.  A non-TUF client (without a way to detect older versions of
        # metadata, and thus updates) is expected to download older metadata and
        # outdated files.  Verify that the older version of 'timestamp.json' is
        # downloaded by the non-TUF client.

        # Backup the current version of 'timestamp'.  It will be used as the
        # outdated version returned to the client.  The repository tool removes
        # obsolete metadata, so do *not* save the backup version in the
        # repository's metadata directory.
        timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                      'timestamp.json')
        backup_timestamp = os.path.join(self.repository_directory,
                                        'timestamp.json.backup')
        shutil.copy(timestamp_path, backup_timestamp)

        # The fileinfo of the previous version is saved to verify that it is indeed
        # accepted by the non-TUF client.
        length, hashes = securesystemslib.util.get_file_details(
            backup_timestamp)
        previous_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        # Modify the timestamp file on the remote repository: load the
        # timestamp signing key so a new, validly signed version can be written.
        repository = repo_tool.load_repository(self.repository_directory)
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)

        # Set an arbitrary expiration so that the repository tool generates a new
        # version.
        repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
        repository.writeall()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Save the fileinfo of the new version generated to verify that it is
        # saved by the client.
        length, hashes = securesystemslib.util.get_file_details(timestamp_path)
        new_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        url_prefix = self.repository_mirrors['mirror1']['url_prefix']
        url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')
        client_timestamp_path = os.path.join(self.client_directory,
                                             self.repository_name, 'metadata',
                                             'current', 'timestamp.json')

        # On Windows, the URL portion should not contain back slashes.
        six.moves.urllib.request.urlretrieve(url_file.replace('\\', '/'),
                                             client_timestamp_path)

        length, hashes = securesystemslib.util.get_file_details(
            client_timestamp_path)
        download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the new version.
        self.assertEqual(download_fileinfo, new_fileinfo)

        # Restore the previous version of 'timestamp.json' on the remote repository
        # and verify that the non-TUF client downloads it (expected, but not ideal).
        shutil.move(backup_timestamp, timestamp_path)

        # On Windows, the URL portion should not contain back slashes.
        six.moves.urllib.request.urlretrieve(url_file.replace('\\', '/'),
                                             client_timestamp_path)

        length, hashes = securesystemslib.util.get_file_details(
            client_timestamp_path)
        download_fileinfo = tuf.formats.make_targets_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the previous (replayed) version,
        # demonstrating that the non-TUF client accepted the rollback.
        self.assertEqual(download_fileinfo, previous_fileinfo)
        self.assertNotEqual(download_fileinfo, new_fileinfo)
Ejemplo n.º 15
0
  def test_with_tuf(self):
    """Exercise three expiration (freeze-attack) scenarios against a TUF client:
    an expired local snapshot, an expired downloaded timestamp, and an expired
    downloaded snapshot followed by recovery with a valid snapshot."""
    # Three tests are conducted here.
    #
    # Test 1: If we find that the timestamp acquired from a mirror indicates
    #         that there is no new snapshot file, and our current snapshot
    #         file is expired, is it recognized as such?
    # Test 2: If an expired timestamp is downloaded, is it recognized as such?
    # Test 3: If an expired Snapshot is downloaded, is it (1) rejected? (2) the
    # local Snapshot file deleted? (3) and is the client able to recover when
    # given a new, valid Snapshot?


    # Test 1 Begin:
    #
    # Addresses this issue: https://github.com/theupdateframework/tuf/issues/322
    #
    # If time has passed and our snapshot or targets role is expired, and
    # the mirror whose timestamp we fetched doesn't indicate the existence of a
    # new snapshot version, we still need to check that it's expired and notify
    # the software update system / application / user. This test creates that
    # scenario. The correct behavior is to raise an exception.
    #
    # Background: Expiration checks (updater._ensure_not_expired) were
    # previously conducted when the metadata file was downloaded. If no new
    # metadata file was downloaded, no expiry check would occur. In particular,
    # while root was checked for expiration at the beginning of each
    # updater.refresh() cycle, and timestamp was always checked because it was
    # always fetched, snapshot and targets were never checked if the user did
    # not receive evidence that they had changed. This bug allowed a class of
    # freeze attacks.
    # That bug was fixed and this test tests that fix going forward.

    # Modify the timestamp file on the remote repository.  'timestamp.json'
    # must be properly updated and signed with 'repository_tool.py', otherwise
    # the client will reject it as invalid metadata.

    # Load the repository
    repository = repo_tool.load_repository(self.repository_directory)

    # Load the snapshot and timestamp keys
    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
                                                                  'password')
    repository.timestamp.load_signing_key(timestamp_private)
    key_file = os.path.join(self.keystore_directory, 'snapshot_key')
    snapshot_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
                                                                  'password')
    repository.snapshot.load_signing_key(snapshot_private)

    # sign snapshot with expiry in near future (earlier than e.g. timestamp)
    expiry = int(time.time() + 60*60)
    repository.snapshot.expiration = tuf.formats.unix_timestamp_to_datetime(
        expiry)
    repository.mark_dirty(['snapshot', 'timestamp'])
    repository.writeall()

    # And move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Refresh metadata on the client. For this refresh, all data is not expired.
    logger.info('Test: Refreshing #1 - Initial metadata refresh occurring.')
    self.repository_updater.refresh()

    logger.info('Test: Refreshing #2 - refresh after local snapshot expiry.')

    # mock current time to one second after snapshot expiry
    mock_time = mock.Mock()
    mock_time.return_value = expiry + 1
    with mock.patch('time.time', mock_time):
      try:
        self.repository_updater.refresh() # We expect this to fail!

      except tuf.exceptions.ExpiredMetadataError:
        logger.info('Test: Refresh #2 - failed as expected. Expired local'
                    ' snapshot case generated a tuf.exceptions.ExpiredMetadataError'
                    ' exception as expected. Test pass.')

      else:
        self.fail('TUF failed to detect expired stale snapshot metadata. Freeze'
          ' attack successful.')




    # Test 2 Begin:
    #
    # 'timestamp.json' specifies the latest version of the repository files.
    # A client should only accept the same version of this file up to a certain
    # point, or else it cannot detect that new files are available for download.
    # Modify the repository's 'timestamp.json' so that it is about to expire,
    # copy it over the to client, wait a moment until it expires, and attempt to
    # re-fetch the same expired version.

    # The same scenario as in test_without_tuf() is followed here, except with
    # a TUF client. The TUF client performs a refresh of top-level metadata,
    # which includes 'timestamp.json', and should detect a freeze attack if
    # the repository serves an outdated 'timestamp.json'.

    # Modify the timestamp file on the remote repository.  'timestamp.json'
    # must be properly updated and signed with 'repository_tool.py', otherwise
    # the client will reject it as invalid metadata.  The resulting
    # 'timestamp.json' should be valid metadata, but expired (as intended).
    repository = repo_tool.load_repository(self.repository_directory)

    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
                                                                  'password')

    repository.timestamp.load_signing_key(timestamp_private)

    # Set timestamp metadata to expire soon.
    # We cannot set the timestamp expiration with
    # 'repository.timestamp.expiration = ...' with already-expired timestamp
    # metadata because of consistency checks that occur during that assignment.
    expiry_time = time.time() + 60*60
    datetime_object = tuf.formats.unix_timestamp_to_datetime(int(expiry_time))
    repository.timestamp.expiration = datetime_object
    repository.writeall()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # mock current time to one second after timestamp expiry
    mock_time = mock.Mock()
    mock_time.return_value = expiry_time + 1
    with mock.patch('time.time', mock_time):
      try:
        self.repository_updater.refresh() # We expect NoWorkingMirrorError.

      except tuf.exceptions.NoWorkingMirrorError as e:
        # Make sure the contained error is ExpiredMetadataError
        for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
          self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError))

      else:
        self.fail('TUF failed to detect expired, stale timestamp metadata.'
          ' Freeze attack successful.')




    # Test 3 Begin:
    #
    # Serve the client expired Snapshot.  The client should reject the given,
    # expired Snapshot and the locally trusted one, which should now be out of
    # date.
    # After the attack, attempt to re-issue a valid Snapshot to verify that
    # the client is still able to update. A bug previously caused snapshot
    # expiration or replay to result in an indefinite freeze; see
    # github.com/theupdateframework/tuf/issues/736
    repository = repo_tool.load_repository(self.repository_directory)

    ts_key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    snapshot_key_file = os.path.join(self.keystore_directory, 'snapshot_key')
    timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
        ts_key_file, 'password')
    snapshot_private = repo_tool.import_ed25519_privatekey_from_file(
        snapshot_key_file, 'password')

    repository.timestamp.load_signing_key(timestamp_private)
    repository.snapshot.load_signing_key(snapshot_private)

    # Set ts to expire in 1 month (2630000 seconds).
    ts_expiry_time = time.time() + 2630000

    # Set snapshot to expire in 1 hour.
    snapshot_expiry_time = time.time() + 60*60

    ts_datetime_object = tuf.formats.unix_timestamp_to_datetime(
        int(ts_expiry_time))
    snapshot_datetime_object = tuf.formats.unix_timestamp_to_datetime(
        int(snapshot_expiry_time))
    repository.timestamp.expiration = ts_datetime_object
    repository.snapshot.expiration = snapshot_datetime_object
    repository.writeall()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # mock current time to one second after snapshot expiry
    mock_time = mock.Mock()
    mock_time.return_value = snapshot_expiry_time + 1
    with mock.patch('time.time', mock_time):
      try:
        # We expect the following refresh() to raise a NoWorkingMirrorError.
        self.repository_updater.refresh()

      except tuf.exceptions.NoWorkingMirrorError as e:
        # Make sure the contained error is ExpiredMetadataError
        for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
          self.assertTrue(isinstance(mirror_error, tuf.exceptions.ExpiredMetadataError))
          self.assertTrue(mirror_url.endswith('snapshot.json'))

      else:
        self.fail('TUF failed to detect expired, stale Snapshot metadata.'
          ' Freeze attack successful.')

    # The client should have rejected the malicious Snapshot metadata, and
    # distrusted the local snapshot file that is no longer valid.
    self.assertTrue('snapshot' not in self.repository_updater.metadata['current'])
    self.assertEqual(sorted(['root', 'targets', 'timestamp']),
        sorted(self.repository_updater.metadata['current']))

    # Verify that the client is able to recover from the malicious Snapshot.
    # Re-sign a valid Snapshot file that the client should accept.
    repository = repo_tool.load_repository(self.repository_directory)

    repository.timestamp.load_signing_key(timestamp_private)
    repository.snapshot.load_signing_key(snapshot_private)

    # Set snapshot to expire in 1 month.
    snapshot_expiry_time = time.time() + 2630000

    snapshot_datetime_object = tuf.formats.unix_timestamp_to_datetime(
        int(snapshot_expiry_time))
    repository.snapshot.expiration = snapshot_datetime_object
    repository.writeall()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Verify that the client accepts the valid metadata file.
    self.repository_updater.refresh()
    self.assertTrue('snapshot' in self.repository_updater.metadata['current'])
    self.assertEqual(sorted(['root', 'targets', 'timestamp', 'snapshot']),
        sorted(self.repository_updater.metadata['current']))
Ejemplo n.º 16
0
    def test_repository_tool(self):
        """Exercise repository_tool and MultiRepoUpdater against two
        independent test repositories served on separate ports."""

        # str() of an updater is expected to be its repository name.
        self.assertEqual(self.repository_name, str(self.repository_updater))
        self.assertEqual(self.repository_name2, str(self.repository_updater2))

        repo1 = repo_tool.load_repository(self.repository_directory,
                                          self.repository_name)
        repo2 = repo_tool.load_repository(self.repository_directory2,
                                          self.repository_name2)

        # Dirtying one repository's timestamp must not mark the other
        # repository's roledb dirty.
        repo1.timestamp.version = 88
        self.assertEqual(['timestamp'],
                         tuf.roledb.get_dirty_roles(self.repository_name))
        self.assertEqual([], tuf.roledb.get_dirty_roles(self.repository_name2))

        repo2.timestamp.version = 100
        self.assertEqual(['timestamp'],
                         tuf.roledb.get_dirty_roles(self.repository_name2))

        # Both repositories sign timestamp with the same fixture key.
        timestamp_keyfile = os.path.join(self.keystore_directory,
                                         'timestamp_key')
        timestamp_key = repo_tool.import_ed25519_privatekey_from_file(
            timestamp_keyfile, "password")

        repo1.timestamp.load_signing_key(timestamp_key)
        repo2.timestamp.load_signing_key(timestamp_key)

        # Write timestamp without bumping the versions assigned above.
        repo1.write('timestamp', increment_version_number=False)
        repo2.write('timestamp', increment_version_number=False)

        # And move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.rmtree(os.path.join(self.repository_directory2, 'metadata'))

        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory2, 'metadata.staged'),
            os.path.join(self.repository_directory2, 'metadata'))

        # Each client should see the timestamp version published by its own
        # repository.
        logger.info('Downloading timestamp from server 1.')
        self.repository_updater.refresh()
        self.assertEqual(
            88,
            self.repository_updater.metadata['current']['timestamp']['version'])

        logger.info('Downloading timestamp from server 2.')
        self.repository_updater2.refresh()
        self.assertEqual(
            100,
            self.repository_updater2.metadata['current']['timestamp']['version'])

        # Test the behavior of the multi-repository updater: point the map
        # file's repository entries at the two local test servers.
        map_doc = securesystemslib.util.load_json_file(self.map_file)
        map_doc['repositories'][self.repository_name] = [
            'http://localhost:' + str(self.SERVER_PORT)]
        map_doc['repositories'][self.repository_name2] = [
            'http://localhost:' + str(self.SERVER_PORT2)]
        with open(self.map_file, 'w') as fileobj:
            fileobj.write(json.dumps(map_doc))

        # Try to load a non-existent map file.
        self.assertRaises(tuf.exceptions.Error, updater.MultiRepoUpdater,
                          'bad_path')

        multi_repo_updater = updater.MultiRepoUpdater(self.map_file)
        valid_targetinfo = multi_repo_updater.get_valid_targetinfo('file3.txt')

        # Every updater that reported valid target info must be able to
        # actually download the target.
        for valid_updater, targetinfo in six.iteritems(valid_targetinfo):
            valid_updater.download_target(targetinfo,
                                          self.temporary_directory)
            self.assertTrue(
                os.path.exists(
                    os.path.join(self.temporary_directory, 'file3.txt')))
Ejemplo n.º 17
0
    def test_with_tuf(self):
        """Verify that a TUF client detects freeze attacks via expired
        metadata: an expired local snapshot (Test 1) and an expired
        timestamp served by the repository (Test 2)."""
        # Two tests are conducted here.
        #
        # Test 1: If we find that the timestamp acquired from a mirror indicates
        #         that there is no new snapshot file, and our current snapshot
        #         file is expired, is it recognized as such?
        # Test 2: If an expired timestamp is downloaded, is it recognized as such?

        # Test 1 Begin:
        #
        # Addresses this issue: https://github.com/theupdateframework/tuf/issues/322
        #
        # If time has passed and our snapshot or targets role is expired, and
        # the mirror whose timestamp we fetched doesn't indicate the existence of a
        # new snapshot version, we still need to check that it's expired and notify
        # the software update system / application / user. This test creates that
        # scenario. The correct behavior is to raise an exception.
        #
        # Background: Expiration checks (updater._ensure_not_expired) were
        # previously conducted when the metadata file was downloaded. If no new
        # metadata file was downloaded, no expiry check would occur. In particular,
        # while root was checked for expiration at the beginning of each
        # updater.refresh() cycle, and timestamp was always checked because it was
        # always fetched, snapshot and targets were never checked if the user did
        # not receive evidence that they had changed. This bug allowed a class of
        # freeze attacks.
        # That bug was fixed and this test tests that fix going forward.

        # Modify the timestamp file on the remote repository.  'timestamp.json'
        # must be properly updated and signed with 'repository_tool.py', otherwise
        # the client will reject it as invalid metadata.

        # Load the repository
        repository = repo_tool.load_repository(self.repository_directory)

        # Load the timestamp and snapshot keys, since we will be signing a new
        # timestamp and a new snapshot file.  The fixture keys are encrypted
        # with the passphrase 'password'.
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)
        key_file = os.path.join(self.keystore_directory, 'snapshot_key')
        snapshot_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')
        repository.snapshot.load_signing_key(snapshot_private)

        # Expire snapshot in 8s. This should be far enough into the future that we
        # haven't reached it before the first refresh validates timestamp expiry.
        # We want a successful refresh before expiry, then a second refresh after
        # expiry (which we then expect to raise an exception due to expired
        # metadata).
        expiry_time = time.time() + 8
        datetime_object = tuf.formats.unix_timestamp_to_datetime(
            int(expiry_time))

        repository.snapshot.expiration = datetime_object

        # Now write to the repository.
        repository.write()

        # And move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Refresh metadata on the client. For this refresh, all data is not expired.
        logger.info(
            'Test: Refreshing #1 - Initial metadata refresh occurring.')
        self.repository_updater.refresh()
        logger.info(
            'Test: Refreshed #1 - Initial metadata refresh completed '
            'successfully. Now sleeping until snapshot metadata expires.')

        # Sleep until expiry_time ('repository.snapshot.expiration')
        time.sleep(max(0, expiry_time - time.time()))

        logger.info(
            'Test: Refreshing #2 - Now trying to refresh again after local'
            ' snapshot expiry.')
        try:
            self.repository_updater.refresh()  # We expect this to fail!

        # NOTE(review): 'tuf.ExpiredMetadataError' is the older top-level
        # spelling (newer releases use 'tuf.exceptions.ExpiredMetadataError');
        # confirm the tuf version under test still exports it.
        except tuf.ExpiredMetadataError:
            logger.info('Test: Refresh #2 - failed as expected. Expired local'
                        ' snapshot case generated a tuf.ExpiredMetadataError'
                        ' exception as expected. Test pass.')

        # I think that I only expect tuf.ExpiredMetadata error here. A
        # NoWorkingMirrorError indicates something else in this case - unavailable
        # repo, for example.
        else:
            self.fail(
                'TUF failed to detect expired stale snapshot metadata. Freeze'
                ' attack successful.')

        # Test 2 Begin:
        #
        # 'timestamp.json' specifies the latest version of the repository files.
        # A client should only accept the same version of this file up to a certain
        # point, or else it cannot detect that new files are available for download.
        # Modify the repository's 'timestamp.json' so that it is about to expire,
        # copy it over the to client, wait a moment until it expires, and attempt to
        # re-fetch the same expired version.

        # The same scenario as in test_without_tuf() is followed here, except with
        # a TUF client. The TUF client performs a refresh of top-level metadata,
        # which includes 'timestamp.json', and should detect a freeze attack if
        # the repository serves an outdated 'timestamp.json'.

        # Modify the timestamp file on the remote repository.  'timestamp.json'
        # must be properly updated and signed with 'repository_tool.py', otherwise
        # the client will reject it as invalid metadata.  The resulting
        # 'timestamp.json' should be valid metadata, but expired (as intended).
        repository = repo_tool.load_repository(self.repository_directory)

        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')

        repository.timestamp.load_signing_key(timestamp_private)

        # Set timestamp metadata to expire soon.
        # We cannot set the timestamp expiration with
        # 'repository.timestamp.expiration = ...' with already-expired timestamp
        # metadata because of consistency checks that occur during that assignment.
        expiry_time = time.time() + 1
        datetime_object = tuf.formats.unix_timestamp_to_datetime(
            int(expiry_time))
        repository.timestamp.expiration = datetime_object
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Wait just long enough for the timestamp metadata (which is now both on
        # the repository and on the client) to expire.
        time.sleep(max(0, expiry_time - time.time()))

        # Try to refresh top-level metadata on the client. Since we're already past
        # 'repository.timestamp.expiration', the TUF client is expected to detect
        # that timestamp metadata is outdated and refuse to continue the update
        # process.
        try:
            self.repository_updater.refresh(
            )  # We expect NoWorkingMirrorError.

        # NOTE(review): 'tuf.NoWorkingMirrorError' is likewise the older
        # top-level spelling of 'tuf.exceptions.NoWorkingMirrorError'.
        except tuf.NoWorkingMirrorError as e:
            # NoWorkingMirrorError indicates that we did not find valid, unexpired
            # metadata at any mirror. That exception class preserves the errors from
            # each mirror. We now assert that for each mirror, the particular error
            # detected was that metadata was expired (the timestamp we manually
            # expired).
            for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
                self.assertTrue(
                    isinstance(mirror_error, tuf.ExpiredMetadataError))

        else:
            self.fail('TUF failed to detect expired, stale timestamp metadata.'
                      ' Freeze attack successful.')
Ejemplo n.º 18
0
    def test_with_tuf(self):
        """Verify that a TUF client rejects previously valid but outdated
        delegated metadata ('role1.json'), i.e. a mix-and-match attack."""
        # Scenario:
        # An attacker tries to trick the client into installing files indicated by
        # a previous release of its corresponding metadata.  The outdated metadata
        # is properly named and was previously valid, but is no longer current
        # according to the latest 'snapshot.json' role.  Generate a new snapshot of
        # the repository after modifying a target file of 'role1.json'.
        # Backup 'role1.json' (the delegated role to be updated, and then inserted
        # again for the mix-and-match attack.)
        role1_path = os.path.join(self.repository_directory, 'metadata',
                                  'role1.json')
        backup_role1 = os.path.join(self.repository_directory,
                                    'role1.json.backup')
        shutil.copy(role1_path, backup_role1)

        # Backup 'file3.txt', specified by 'role1.json'.
        file3_path = os.path.join(self.repository_directory, 'targets',
                                  'file3.txt')
        shutil.copy(file3_path, file3_path + '.backup')

        # Re-generate the required metadata on the remote repository.  The affected
        # metadata must be properly updated and signed with 'repository_tool.py',
        # otherwise the client will reject them as invalid metadata.  The resulting
        # metadata should be valid metadata.
        repository = repo_tool.load_repository(self.repository_directory)

        # Load the signing keys so that newly generated metadata is properly
        # signed.  All fixture keys are encrypted with the passphrase 'password'.
        timestamp_keyfile = os.path.join(self.keystore_directory,
                                         'timestamp_key')
        role1_keyfile = os.path.join(self.keystore_directory, 'delegation_key')
        snapshot_keyfile = os.path.join(self.keystore_directory,
                                        'snapshot_key')
        timestamp_private = \
          repo_tool.import_ed25519_privatekey_from_file(timestamp_keyfile, 'password')
        role1_private = \
          repo_tool.import_ed25519_privatekey_from_file(role1_keyfile, 'password')
        snapshot_private = \
          repo_tool.import_ed25519_privatekey_from_file(snapshot_keyfile, 'password')

        repository.targets('role1').load_signing_key(role1_private)
        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        # Modify a 'role1.json' target file, and add it to its metadata so that a
        # new version is generated.
        with open(file3_path, 'wt') as file_object:
            file_object.write('This is role2\'s target file.')
        repository.targets('role1').add_target(os.path.basename(file3_path))

        repository.writeall()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Insert the previously valid 'role1.json'.  The TUF client should reject it.
        shutil.move(backup_role1, role1_path)

        # Verify that the TUF client detects unexpected metadata (previously valid,
        # but not up-to-date with the latest snapshot of the repository) and
        # refuses to continue the update process.  Refresh top-level metadata so
        # that the client is aware of the latest snapshot of the repository.
        self.repository_updater.refresh()

        try:
            with utils.ignore_deprecation_warnings('tuf.client.updater'):
                self.repository_updater.targets_of_role('role1')

        # Verify that the specific
        # 'tuf.exceptions.BadVersionNumberError' exception is raised by
        # each mirror.
        except tuf.exceptions.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata', 'role1.json')

                # Verify that 'role1.json' is the culprit.  Backslashes are
                # normalized so the comparison also holds on Windows paths.
                self.assertEqual(url_file.replace('\\', '/'), mirror_url)
                self.assertTrue(
                    isinstance(mirror_error,
                               tuf.exceptions.BadVersionNumberError))

        else:
            self.fail('TUF did not prevent a mix-and-match attack.')
  def test_root_role_versioning(self):
    """Build a repository from scratch and verify that writeall() produces
    versioned root metadata ('1.root.json', '2.root.json') matching the
    current 'root.json' at each version, and that write('root') allows a
    partially signed root while writeall() refuses it."""
    # Test root role versioning
    #
    # 1. Import public and private keys.
    # 2. Add verification keys.
    # 3. Load signing keys.
    # 4. Add target files.
    # 5. Perform delegation.
    # 6. writeall()
    #
    # Copy the target files from 'tuf/tests/repository_data' so that writeall()
    # has target fileinfo to include in metadata.
    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
    targets_directory = os.path.join(temporary_directory, 'repository',
                                     repo_tool.TARGETS_DIRECTORY_NAME)
    original_targets_directory = os.path.join('repository_data',
                                              'repository', 'targets')
    shutil.copytree(original_targets_directory, targets_directory)

    # In this case, create_new_repository() creates the 'repository/'
    # sub-directory in 'temporary_directory' if it does not exist.
    repository_directory = os.path.join(temporary_directory, 'repository')
    metadata_directory = os.path.join(repository_directory,
                                      repo_tool.METADATA_STAGED_DIRECTORY_NAME)
    repository = repo_tool.create_new_repository(repository_directory)




    # (1) Load the public and private keys of the top-level roles, and one
    # delegated role.
    keystore_directory = os.path.join('repository_data', 'keystore')

    # Load the public keys.  Note: root uses an RSA key; the other roles use
    # ed25519 keys.
    root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub')
    targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub')
    snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub')
    timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub')
    role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub')

    root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path)
    targets_pubkey = repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path)
    snapshot_pubkey = \
      repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path)
    timestamp_pubkey = \
      repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path)
    role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path)

    # Load the private keys.  All fixture keys are encrypted with the
    # passphrase 'password'.
    root_privkey_path = os.path.join(keystore_directory, 'root_key')
    targets_privkey_path = os.path.join(keystore_directory, 'targets_key')
    snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key')
    timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key')
    role1_privkey_path = os.path.join(keystore_directory, 'delegation_key')

    root_privkey = \
      repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password')
    targets_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, 'password')
    snapshot_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path,
                                                'password')
    timestamp_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path,
                                                'password')
    role1_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path,
                                                'password')


    # (2) Add top-level verification keys.
    repository.root.add_verification_key(root_pubkey)
    repository.targets.add_verification_key(targets_pubkey)
    repository.snapshot.add_verification_key(snapshot_pubkey)
    repository.timestamp.add_verification_key(timestamp_pubkey)


    # (3) Load top-level signing keys.
    repository.root.load_signing_key(root_privkey)
    repository.targets.load_signing_key(targets_privkey)
    repository.snapshot.load_signing_key(snapshot_privkey)
    repository.timestamp.load_signing_key(timestamp_privkey)

    # (4) Add target files.  'target3' is not added here; it is delegated to
    # 'role1' in step (5).
    target1 = 'file1.txt'
    target2 = 'file2.txt'
    target3 = 'file3.txt'
    repository.targets.add_target(target1)
    repository.targets.add_target(target2)


    # (5) Perform delegation.
    repository.targets.delegate('role1', [role1_pubkey], [target3])
    repository.targets('role1').load_signing_key(role1_privkey)

    # (6) Write repository.
    repository.writeall()

    self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json')))
    self.assertTrue(os.path.exists(os.path.join(metadata_directory, '1.root.json')))


    # Verify that the expected metadata is written.
    root_filepath = os.path.join(metadata_directory, 'root.json')
    root_1_filepath = os.path.join(metadata_directory, '1.root.json')
    root_2_filepath = os.path.join(metadata_directory, '2.root.json')
    old_root_signable = securesystemslib.util.load_json_file(root_filepath)
    root_1_signable = securesystemslib.util.load_json_file(root_1_filepath)

    # Make a change to the root keys so that a second root version is written.
    repository.root.add_verification_key(targets_pubkey)
    repository.root.load_signing_key(targets_privkey)
    repository.root.threshold = 2
    repository.writeall()

    new_root_signable = securesystemslib.util.load_json_file(root_filepath)
    root_2_signable = securesystemslib.util.load_json_file(root_2_filepath)

    for role_signable in [old_root_signable, new_root_signable, root_1_signable, root_2_signable]:
      # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is an
      # invalid signable.
      tuf.formats.check_signable_object_format(role_signable)

    # Verify contents of versioned roots: each numbered root must match the
    # 'root.json' snapshot taken at that version.
    self.assertEqual(old_root_signable, root_1_signable)
    self.assertEqual(new_root_signable, root_2_signable)

    self.assertEqual(root_1_signable['signed']['version'], 1)
    self.assertEqual(root_2_signable['signed']['version'], 2)

    # Drop one of the two keys while keeping threshold = 2, leaving root
    # under-signed.
    repository.root.remove_verification_key(root_pubkey)
    repository.root.unload_signing_key(root_privkey)
    repository.root.threshold = 2

    # Errors, not enough signing keys to satisfy old threshold
    self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall)

    # No error, write() ignore's root's threshold and allows it to be written
    # to disk partially signed.
    repository.write('root')
Ejemplo n.º 20
0
# Build the ASN.1 'Signature' structure over the DER-encoded 'signed' portion
# of the metadata, then attach it and write the result to 'root.cer'.
# NOTE(review): 'Signature', 'SignatureMethod', 'Hash', 'HashFunction',
# 'BinaryData', 'tag', 'encoder', 'signed', 'signatures', 'metadata' and
# 'rootPublicKey' are defined earlier in the original script (pyasn1-based
# ASN.1 definitions) -- confirm against the full source.
signature = Signature()
signature['keyid'] = rootPublicKey['publicKeyid']
signature['method'] = int(SignatureMethod('ed25519'))
# NOTE(review): 'hash' shadows the Python builtin of the same name; harmless
# in this script, but consider renaming.
hash = Hash().subtype(
    implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))
hash['function'] = int(HashFunction('sha256'))
digest = BinaryData().subtype(
    explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))
# SHA-256 digest (as a hex string) of the DER encoding of 'signed'.
hexString = hashlib.sha256(encoder.encode(signed)).hexdigest()
digest['hexString'] = hexString
hash['digest'] = digest
signature['hash'] = hash

import tuf.repository_tool as rt
# NOTE(review): the password literal below appears redacted ('******') by the
# source scraper; the original passphrase is not recoverable from this file.
key_root_pub = rt.import_ed25519_publickey_from_file('mainroot.pub')
key_root_pri = rt.import_ed25519_privatekey_from_file('mainroot',
                                                      password='******')

import tuf.keys
# Sign the hex digest string (not the raw DER bytes) with the root private key.
signed_hash = tuf.keys.create_signature(key_root_pri, hexString)

signature['value'] = signed_hash['sig']
signatures[0] = signature
metadata['signatures'] = signatures

print(metadata.prettyPrint())
# DER-encode the complete metadata and persist it.
before = encoder.encode(metadata)
filename = 'root.cer'
with open(filename, 'wb') as a:
    a.write(before)

# Decode
Ejemplo n.º 21
0
  def setUp(self):
    """Prepare a fresh repository and client for a slow-retrieval test.

    Copies the 'repository_data' fixtures into a temporary directory,
    lengthens 'file1.txt' so a byte-per-second server takes measurably long,
    re-signs and publishes the affected metadata, and syncs the client's
    local metadata so it already expects the new, longer target.
    """
    # We are inheriting from custom class.
    unittest_toolbox.Modified_TestCase.setUp(self)

    self.repository_name = 'test_repository1'

    # Copy the original repository files provided in the test folder so that
    # any modifications made to repository files are restricted to the copies.
    # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
    original_repository_files = os.path.join(os.getcwd(), 'repository_data')
    temporary_repository_root = \
      self.make_temp_directory(directory=self.temporary_directory)

    # The original repository, keystore, and client directories will be copied
    # for each test case.
    original_repository = os.path.join(original_repository_files, 'repository')
    original_client = os.path.join(original_repository_files, 'client')
    original_keystore = os.path.join(original_repository_files, 'keystore')

    # Save references to the often-needed client repository directories.
    # Test cases need these references to access metadata and target files.
    self.repository_directory = \
      os.path.join(temporary_repository_root, 'repository')
    self.client_directory = os.path.join(temporary_repository_root, 'client')
    self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')

    # Copy the original 'repository', 'client', and 'keystore' directories
    # to the temporary repository the test cases can use.
    shutil.copytree(original_repository, self.repository_directory)
    shutil.copytree(original_client, self.client_directory)
    shutil.copytree(original_keystore, self.keystore_directory)


    # Produce a longer target file than exists in the other test repository
    # data, to provide for a long-duration slow attack. Then we'll write new
    # top-level metadata that includes a hash over that file, and provide that
    # metadata to the client as well.

    # The slow retrieval server, in mode 2 (1 byte per second), will only
    # sleep for a total of (target file size) seconds.  Add a target file
    # that contains a sufficient number of bytes to trigger a slow retrieval
    # error. A transfer should not be permitted to take 1 second per byte
    # transferred. Because this test is currently expected to fail, I'm
    # limiting the size to 10 bytes (10 seconds) to avoid expected testing
    # delays.... Consider increasing again after fix, to, e.g. 400.
    total_bytes = 10

    repository = repo_tool.load_repository(self.repository_directory)
    file1_filepath = os.path.join(self.repository_directory, 'targets',
                                  'file1.txt')
    with open(file1_filepath, 'wb') as file_object:
      # 'total_bytes' is already an int; the previous int(round(...))
      # conversion was redundant.
      data = 'a' * total_bytes
      file_object.write(data.encode('utf-8'))

    # Load the signing keys (all fixture keys are encrypted with the
    # passphrase 'password') so the regenerated metadata validates.
    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
                                                                  'password')
    key_file = os.path.join(self.keystore_directory, 'snapshot_key')
    snapshot_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
                                                                  'password')
    key_file = os.path.join(self.keystore_directory, 'targets_key')
    targets_private = repo_tool.import_ed25519_privatekey_from_file(key_file,
                                                                  'password')

    repository.targets.load_signing_key(targets_private)
    repository.snapshot.load_signing_key(snapshot_private)
    repository.timestamp.load_signing_key(timestamp_private)

    repository.writeall()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Since we've changed the repository metadata in this setup (by lengthening
    # a target file and then writing new metadata), we also have to update the
    # client metadata to get to the expected initial state, where the client
    # knows the right target info (and so expects the right, longer target
    # length).
    # We'll skip using updater.refresh since we don't have a server running,
    # and we'll update the metadata locally, manually.
    shutil.rmtree(os.path.join(
        self.client_directory, self.repository_name, 'metadata', 'current'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata'),
        os.path.join(self.client_directory, self.repository_name, 'metadata',
        'current'))

    # Set the url prefix required by the 'tuf/client/updater.py' updater.
    # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
    repository_basepath = self.repository_directory[len(os.getcwd()):]
    url_prefix = \
      'http://localhost:' + str(self.SERVER_PORT) + repository_basepath

    # Setting 'tuf.settings.repositories_directory' with the temporary client
    # directory copied from the original repository files.
    tuf.settings.repositories_directory = self.client_directory
    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
                                           'metadata_path': 'metadata',
                                           'targets_path': 'targets',
                                           'confined_target_dirs': ['']}}

    # Create the repository instance.  The test cases will use this client
    # updater to refresh metadata, fetch target files, etc.
    self.repository_updater = updater.Updater(self.repository_name,
                                              self.repository_mirrors)
Ejemplo n.º 22
0
    def setUp(self):
        # We are inheriting from custom class.
        unittest_toolbox.Modified_TestCase.setUp(self)

        # Copy the original repository files provided in the test folder so that
        # any modifications made to repository files are restricted to the copies.
        # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
        original_repository_files = os.path.join(os.getcwd(),
                                                 'repository_data')
        temporary_repository_root = \
          self.make_temp_directory(directory=self.temporary_directory)

        # The original repository, keystore, and client directories will be copied
        # for each test case.
        original_repository = os.path.join(original_repository_files,
                                           'repository')
        original_client = os.path.join(original_repository_files, 'client')
        original_keystore = os.path.join(original_repository_files, 'keystore')

        # Save references to the often-needed client repository directories.
        # Test cases need these references to access metadata and target files.
        self.repository_directory = \
          os.path.join(temporary_repository_root, 'repository')
        self.client_directory = os.path.join(temporary_repository_root,
                                             'client')
        self.keystore_directory = os.path.join(temporary_repository_root,
                                               'keystore')

        # Copy the original 'repository', 'client', and 'keystore' directories
        # to the temporary repository the test cases can use.  Working on
        # copies keeps the pristine fixtures untouched between test runs.
        shutil.copytree(original_repository, self.repository_directory)
        shutil.copytree(original_client, self.client_directory)
        shutil.copytree(original_keystore, self.keystore_directory)

        # The slow retrieval server, in mode 2 (1 byte per second), will only
        # sleep for a  total of (target file size) seconds.  Add a target file
        # that contains sufficient number of bytes to trigger a slow retrieval
        # error.  "sufficient number of bytes" assumed to be
        # >> 'tuf.conf.SLOW_START_GRACE_PERIOD' bytes.
        extra_bytes = 8
        total_bytes = tuf.conf.SLOW_START_GRACE_PERIOD + extra_bytes

        # Overwrite 'file1.txt' with 'total_bytes' ASCII 'a' characters so the
        # slow-retrieval download is guaranteed to exceed the grace period.
        repository = repo_tool.load_repository(self.repository_directory)
        file1_filepath = os.path.join(self.repository_directory, 'targets',
                                      'file1.txt')
        with open(file1_filepath, 'wb') as file_object:
            data = 'a' * total_bytes
            file_object.write(data.encode('utf-8'))

        # Import the private signing keys for the timestamp, snapshot, and
        # targets roles.  The pre-generated keystore files are all encrypted
        # with the same 'password' passphrase.
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')
        key_file = os.path.join(self.keystore_directory, 'snapshot_key')
        snapshot_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')
        key_file = os.path.join(self.keystore_directory, 'targets_key')
        targets_private = repo_tool.import_ed25519_privatekey_from_file(
            key_file, 'password')

        repository.targets.load_signing_key(targets_private)
        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        # Re-sign and write metadata to 'metadata.staged' so it reflects the
        # modified 'file1.txt' target.
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Set the url prefix required by the 'tuf/client/updater.py' updater.
        # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
        # NOTE(review): the slice assumes 'self.repository_directory' is
        # located under the current working directory — confirm for this
        # test harness.
        repository_basepath = self.repository_directory[len(os.getcwd()):]
        url_prefix = \
          'http://localhost:' + str(self.SERVER_PORT) + repository_basepath

        # Setting 'tuf.conf.repository_directory' with the temporary client
        # directory copied from the original repository files.
        tuf.conf.repository_directory = self.client_directory

        # Creating a repository instance.  The test cases will use this client
        # updater to refresh metadata, fetch target files, etc.
        self.repository_updater = updater.Updater('testupdater')

        # Need to override pinned.json mirrors for testing. /:
        # Point it to the right URL with the randomly selected port generated in
        # this test setup.
        mirrors = self.repository_updater.pinned_metadata['repositories'][
            'defaultrepo']['mirrors']

        # Substitute the placeholder token in each mirror URL with the actual
        # server URL prefix (host plus randomly selected port) for this run.
        for i in range(0, len(mirrors)):
            if '<DETERMINED_IN_TEST_SETUP>' in mirrors[i]:
                mirrors[i] = mirrors[i].replace('<DETERMINED_IN_TEST_SETUP>',
                                                str(url_prefix))

        self.repository_updater.pinned_metadata['repositories']['defaultrepo'][
            'mirrors'] = mirrors