Beispiel #1
0
    def _add_metadata(self,
                      repository,
                      root_key_password=None,
                      targets_key_password=None,
                      snapshot_key_password=None,
                      timestamp_key_password=None):
        """
        Add the top-level TUF role keys to ``repository`` and write out the
        signed metadata.

        For each of the root, targets, snapshot and timestamp roles, the
        public key is attached as a verification key and the private key is
        loaded for signing.  Passing ``None`` as a password makes the tuf
        import routine prompt for it interactively, hence the informational
        print beforehand.

        :param repository: tuf repository object to populate
        :param root_key_password: password for the root private key
        :param targets_key_password: password for the targets private key
        :param snapshot_key_password: password for the snapshot private key
        :param timestamp_key_password: password for the timestamp private key
        """
        from tuf.repository_tool import import_rsa_publickey_from_file, \
                                        import_rsa_privatekey_from_file
        # Add root key to repository
        public_root_key = import_rsa_publickey_from_file(
            self._root_key_file + '.pub')
        if root_key_password is None:
            print('importing root key...')
        private_root_key = import_rsa_privatekey_from_file(
            self._root_key_file,
            root_key_password)
        repository.root.add_verification_key(public_root_key)
        repository.root.load_signing_key(private_root_key)
        # TUF metadata expirations are expressed in UTC, so derive the root
        # expiration from utcnow() rather than the local-time now().
        repository.root.expiration = datetime.utcnow() + self._root_lifetime

        # Add targets key to repository
        public_targets_key = import_rsa_publickey_from_file(
            self._targets_key_file + '.pub')
        if targets_key_password is None:
            print('importing targets key...')
        private_targets_key = import_rsa_privatekey_from_file(
            self._targets_key_file,
            targets_key_password)
        repository.targets.add_verification_key(public_targets_key)
        repository.targets.load_signing_key(private_targets_key)

        # Add snapshot key to repository
        public_snapshot_key = import_rsa_publickey_from_file(
            self._snapshot_key_file + '.pub')
        if snapshot_key_password is None:
            print('importing snapshot key...')
        private_snapshot_key = import_rsa_privatekey_from_file(
            self._snapshot_key_file,
            snapshot_key_password)
        repository.snapshot.add_verification_key(public_snapshot_key)
        repository.snapshot.load_signing_key(private_snapshot_key)

        # Add timestamp key to repository
        public_timestamp_key = import_rsa_publickey_from_file(
            self._timestamp_key_file + '.pub')
        if timestamp_key_password is None:
            print('importing timestamp key...')
        private_timestamp_key = import_rsa_privatekey_from_file(
            self._timestamp_key_file,
            timestamp_key_password)
        repository.timestamp.add_verification_key(public_timestamp_key)
        repository.timestamp.load_signing_key(private_timestamp_key)

        # Write out metadata
        # NOTE(review): newer tuf releases renamed write() to writeall() for
        # writing all dirty roles -- confirm which API this tuf version uses.
        repository.write(consistent_snapshot=True)
Beispiel #2
0
    def _add_metadata(self,
                      repository,
                      root_key_password=None,
                      targets_key_password=None,
                      snapshot_key_password=None,
                      timestamp_key_password=None):
        """
        Attach the verification (public) and signing (private) keys for each
        top-level TUF role to ``repository``, then write out all metadata.

        A ``None`` password causes tuf's key-import routine to prompt for the
        passphrase interactively, so a short notice is printed first.

        :param repository: tuf repository object to populate
        :param root_key_password: password for the root private key
        :param targets_key_password: password for the targets private key
        :param snapshot_key_password: password for the snapshot private key
        :param timestamp_key_password: password for the timestamp private key
        """
        from tuf.repository_tool import import_rsa_publickey_from_file, \
                                        import_rsa_privatekey_from_file

        passwords = {
            'root': root_key_password,
            'targets': targets_key_password,
            'snapshot': snapshot_key_password,
            'timestamp': timestamp_key_password,
        }

        # Process the roles in the canonical top-level order.
        for role_name in ('root', 'targets', 'snapshot', 'timestamp'):
            key_file = getattr(self, '_%s_key_file' % role_name)
            public_key = import_rsa_publickey_from_file(key_file + '.pub')
            if passwords[role_name] is None:
                print('importing %s key...' % role_name)
            private_key = import_rsa_privatekey_from_file(
                key_file, passwords[role_name])
            role = getattr(repository, role_name)
            role.add_verification_key(public_key)
            role.load_signing_key(private_key)
            # Only the root role gets an explicit expiration here; TUF
            # expirations are in UTC.
            if role_name == 'root':
                role.expiration = datetime.utcnow() + self._root_lifetime

        # Write out metadata for all dirty roles.
        repository.writeall(consistent_snapshot=True)
Beispiel #3
0
def _load_role_keys(keystore_directory):
    """
    Import and return the public and private keys of the top-level roles and
    one delegated role ('role1') from 'keystore_directory'.

    The role keys are needed when modifying the remote repository used by the
    test cases in this unit test.

    :param keystore_directory: directory holding the pre-generated key files
        from 'tuf/tests/repository_data/keystore'
    :returns: dict mapping role name ('root', 'targets', 'snapshot',
        'timestamp', 'role1') to {'public': key, 'private': key}
    """
    # The pre-generated key files in 'repository_data/keystore' are all
    # encrypted with the same passphrase.
    EXPECTED_KEYFILE_PASSWORD = '******'

    # Map each role to its on-disk key file; the delegated 'role1' is backed
    # by 'delegation_key'.
    key_files = {
        'root': os.path.join(keystore_directory, 'root_key'),
        'targets': os.path.join(keystore_directory, 'targets_key'),
        'snapshot': os.path.join(keystore_directory, 'snapshot_key'),
        'timestamp': os.path.join(keystore_directory, 'timestamp_key'),
        'role1': os.path.join(keystore_directory, 'delegation_key'),
    }

    role_keys = {}
    for role, key_file in key_files.items():
        # Only the root key is an RSA key; all other roles use ed25519 keys.
        if role == 'root':
            public = repo_tool.import_rsa_publickey_from_file(
                key_file + '.pub')
            private = repo_tool.import_rsa_privatekey_from_file(
                key_file, EXPECTED_KEYFILE_PASSWORD)
        else:
            public = repo_tool.import_ed25519_publickey_from_file(
                key_file + '.pub')
            private = repo_tool.import_ed25519_privatekey_from_file(
                key_file, EXPECTED_KEYFILE_PASSWORD)
        role_keys[role] = {'public': public, 'private': private}

    return role_keys
Beispiel #4
0
    def __init__(self, repo_path, key_path):
        """Initialize with a repository location and its targets signing key.

        Args:
            repo_path (str): path where the repo lives.
            key_path (str): path where the private targets key lives.
        """
        self._repo_path = repo_path
        # Importing with no password assumes an unencrypted key file --
        # TODO confirm the targets key is stored unencrypted.
        self._key = import_rsa_privatekey_from_file(key_path)
    def test_with_tuf(self):
        """Verify that a TUF client detects and rejects expired metadata."""
        # The same scenario outlined in test_without_tuf() is followed here,
        # except with a TUF client.  The TUF client performs a refresh of
        # top-level metadata, which also includes 'timestamp.json'.

        # Modify the timestamp file on the remote repository.  'timestamp.json'
        # must be properly updated and signed with 'repository_tool.py', otherwise
        # the client will reject it as invalid metadata.  The resulting
        # 'timestamp.json' should be valid metadata, but expired (as intended).
        repository = repo_tool.load_repository(self.repository_directory)

        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')

        repository.timestamp.load_signing_key(timestamp_private)

        # expire in 1 second.
        datetime_object = tuf.formats.unix_timestamp_to_datetime(
            int(time.time() + 1))
        repository.timestamp.expiration = datetime_object
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Verify that the TUF client detects outdated metadata and refuses to
        # continue the update process.  Sleep for at least 2 seconds to ensure
        # 'repository.timestamp.expiration' is reached.
        time.sleep(2)
        try:
            self.repository_updater.refresh()

        except tuf.NoWorkingMirrorError as e:
            for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
                self.assertTrue(
                    isinstance(mirror_error, tuf.ExpiredMetadataError))

        else:
            # Bug fix: previously the test silently passed when refresh()
            # accepted the expired metadata; fail explicitly in that case.
            self.fail('TUF did not detect expired metadata.')
Beispiel #6
0
  def test_with_tuf(self):
    """Verify that a TUF client detects and rejects replayed 'timestamp.json'."""
    # The same scenario outlined in test_without_tuf() is followed here, except
    # with a TUF client (scenario description provided in the opening comment
    # block of that test case.) The TUF client performs a refresh of top-level
    # metadata, which also includes 'timestamp.json'.
    
    # Backup the current version of 'timestamp'.  It will be used as the
    # outdated version returned to the client.  The repository tool removes
    # obsolete metadata, so do *not* save the backup version in the
    # repository's metadata directory.
    timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                  'timestamp.json')
    backup_timestamp = os.path.join(self.repository_directory,
                                    'timestamp.json.backup')
    shutil.copy(timestamp_path, backup_timestamp)
    
    # The fileinfo of the previous version is saved to verify that it is indeed
    # accepted by the non-TUF client.
    # NOTE(review): previous_fileinfo is not referenced again in this test --
    # retained from the non-TUF variant; confirm whether it can be dropped.
    length, hashes = tuf.util.get_file_details(backup_timestamp)
    previous_fileinfo = tuf.formats.make_fileinfo(length, hashes)
    
    # Modify the timestamp file on the remote repository.
    repository = repo_tool.load_repository(self.repository_directory)
    key_file = os.path.join(self.keystore_directory, 'timestamp_key') 
    timestamp_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')
    repository.timestamp.load_signing_key(timestamp_private)
    
    # Set an arbitrary expiration so that the repository tool generates a new
    # version.
    repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
    repository.write()
   
    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Save the fileinfo of the new version generated to verify that it is
    # saved by the client. 
    length, hashes = tuf.util.get_file_details(timestamp_path)
    new_fileinfo = tuf.formats.make_fileinfo(length, hashes)

    # Refresh top-level metadata, including 'timestamp.json'.  Installation of
    # new version of 'timestamp.json' is expected.
    self.repository_updater.refresh()

    client_timestamp_path = os.path.join(self.client_directory, 'metadata',
                                         'current', 'timestamp.json')
    length, hashes = tuf.util.get_file_details(client_timestamp_path)
    download_fileinfo = tuf.formats.make_fileinfo(length, hashes)
    
    # Verify 'download_fileinfo' is equal to the new version.
    self.assertEqual(download_fileinfo, new_fileinfo)

    # Restore the previous version of 'timestamp.json' on the remote repository
    # and verify that the non-TUF client downloads it (expected, but not ideal).
    shutil.move(backup_timestamp, timestamp_path)
    logger.info('Moving the timestamp.json backup to the current version.')
    
    # Verify that the TUF client detects replayed metadata and refuses to
    # continue the update process.
    try:
      self.repository_updater.refresh()
   
    # Verify that the specific 'tuf.ReplayedMetadataError' is raised by each
    # mirror.
    except tuf.NoWorkingMirrorError as exception:
      for mirror_url, mirror_error in six.iteritems(exception.mirror_errors):
        url_prefix = self.repository_mirrors['mirror1']['url_prefix']
        url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')
       
        # Verify that 'timestamp.json' is the culprit.
        self.assertEqual(url_file, mirror_url)
        self.assertTrue(isinstance(mirror_error, tuf.ReplayedMetadataError))

    else:
      self.fail('TUF did not prevent a replay attack.')
Beispiel #7
0
  def test_without_tuf(self):
    """Show that a non-TUF client accepts a replayed (older) 'timestamp.json'."""
    # Scenario:
    # 'timestamp.json' specifies the latest version of the repository files.
    # A client should only accept the same version number (specified in the
    # file) of the metadata, or greater.  A version number less than the one
    # currently trusted should be rejected.  A non-TUF client may use a
    # different mechanism for determining versions of metadata, but version
    # numbers are used in this integration test because that is what TUF uses.
    # 
    # Modify the repository's 'timestamp.json' so that a new version is generated
    # and accepted by the client, and backup the previous version.  The previous
    # is then returned the next time the client requests an update.  A non-TUF
    # client (without a way to detect older versions of metadata, and thus
    # updates) is expected to download older metadata and outdated files.
    # Verify that the older version of 'timestamp.json' is downloaded by the
    # non-TUF client.

    # Backup the current version of 'timestamp'.  It will be used as the
    # outdated version returned to the client.  The repository tool removes
    # obsolete metadata, so do *not* save the backup version in the
    # repository's metadata directory.
    timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                  'timestamp.json')
    backup_timestamp = os.path.join(self.repository_directory,
                                    'timestamp.json.backup')
    shutil.copy(timestamp_path, backup_timestamp)
    
    # The fileinfo of the previous version is saved to verify that it is indeed
    # accepted by the non-TUF client.
    length, hashes = tuf.util.get_file_details(backup_timestamp)
    previous_fileinfo = tuf.formats.make_fileinfo(length, hashes)
    
    # Modify the timestamp file on the remote repository.
    repository = repo_tool.load_repository(self.repository_directory)
    key_file = os.path.join(self.keystore_directory, 'timestamp_key') 
    timestamp_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')
    repository.timestamp.load_signing_key(timestamp_private)
    
    # Set an arbitrary expiration so that the repository tool generates a new
    # version.
    repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
    repository.write()
    
    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Save the fileinfo of the new version generated to verify that it is
    # saved by the client. 
    length, hashes = tuf.util.get_file_details(timestamp_path)
    new_fileinfo = tuf.formats.make_fileinfo(length, hashes)

    url_prefix = self.repository_mirrors['mirror1']['url_prefix']
    url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')
    client_timestamp_path = os.path.join(self.client_directory, 'metadata',
                                         'current', 'timestamp.json')
   
    # Fetch the new timestamp directly, as a non-TUF client would.
    six.moves.urllib.request.urlretrieve(url_file, client_timestamp_path)
   
    length, hashes = tuf.util.get_file_details(client_timestamp_path)
    download_fileinfo = tuf.formats.make_fileinfo(length, hashes)
    
    # Verify 'download_fileinfo' is equal to the new version.
    self.assertEqual(download_fileinfo, new_fileinfo)

    # Restore the previous version of 'timestamp.json' on the remote repository
    # and verify that the non-TUF client downloads it (expected, but not ideal).
    shutil.move(backup_timestamp, timestamp_path)
    
    six.moves.urllib.request.urlretrieve(url_file, client_timestamp_path)
   
    length, hashes = tuf.util.get_file_details(client_timestamp_path)
    download_fileinfo = tuf.formats.make_fileinfo(length, hashes)
    
    # Verify 'download_fileinfo' is equal to the previous version.
    self.assertEqual(download_fileinfo, previous_fileinfo)
    self.assertNotEqual(download_fileinfo, new_fileinfo)
Beispiel #8
0
 def ready(self):
     """Load the timeserver's private RSA key and register it globally."""
     key_path = os.path.join(settings.KEY_PATH, 'timeserver')
     timeserver_key = rt.import_rsa_privatekey_from_file(key_path,
                                                         password='******')
     timeserver.set_timeserver_key(timeserver_key)
  def test_with_tuf(self):
    """Verify the TUF client detects a mix-and-match attack on 'role1.json'."""
    # Scenario:
    # An attacker tries to trick the client into installing files indicated by
    # a previous release of its corresponding metadata.  The outdated metadata
    # is properly named and was previously valid, but is no longer current
    # according to the latest 'snapshot.json' role.  Generate a new snapshot of
    # the repository after modifying a target file of 'role1.json'.
    # Backup 'role1.json' (the delegated role to be updated, and then inserted
    # again for the mix-and-match attack.)
    role1_path = os.path.join(self.repository_directory, 'metadata', 'targets',
                                  'role1.json')
    backup_role1 = os.path.join(self.repository_directory, 'role1.json.backup') 
    shutil.copy(role1_path, backup_role1)

    # Backup 'file3.txt', specified by 'role1.json'.
    file3_path = os.path.join(self.repository_directory, 'targets', 'file3.txt')
    shutil.copy(file3_path, file3_path + '.backup')
    
    # Re-generate the required metadata on the remote repository.  The affected
    # metadata must be properly updated and signed with 'repository_tool.py',
    # otherwise the client will reject them as invalid metadata.  The resulting
    # metadata should be valid metadata.
    repository = repo_tool.load_repository(self.repository_directory)

    # Load the signing keys so that newly generated metadata is properly signed.
    timestamp_keyfile = os.path.join(self.keystore_directory, 'timestamp_key') 
    role1_keyfile = os.path.join(self.keystore_directory, 'delegation_key') 
    snapshot_keyfile = os.path.join(self.keystore_directory, 'snapshot_key') 
    timestamp_private = \
      repo_tool.import_rsa_privatekey_from_file(timestamp_keyfile, 'password')
    role1_private = \
      repo_tool.import_rsa_privatekey_from_file(role1_keyfile, 'password')
    snapshot_private = \
      repo_tool.import_rsa_privatekey_from_file(snapshot_keyfile, 'password')

    repository.targets('role1').load_signing_key(role1_private)
    repository.snapshot.load_signing_key(snapshot_private)
    repository.timestamp.load_signing_key(timestamp_private)
  
    # Modify a 'role1.json' target file, and add it to its metadata so that a
    # new version is generated.
    with open(file3_path, 'wt') as file_object:
      file_object.write('This is role2\'s target file.')
    repository.targets('role1').add_target(file3_path)

    repository.write()
    
    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))
  
    # Insert the previously valid 'role1.json'.  The TUF client should reject it.
    shutil.move(backup_role1, role1_path)
    
    # Verify that the TUF client detects unexpected metadata (previously valid,
    # but not up-to-date with the latest snapshot of the repository) and refuses
    # to continue the update process.
    # Refresh top-level metadata so that the client is aware of the latest
    # snapshot of the repository.
    self.repository_updater.refresh()

    try:
      self.repository_updater.targets_of_role('targets/role1')
   
    # Verify that the specific 'tuf.BadVersionNumberError' exception is raised
    # by each mirror.
    except tuf.NoWorkingMirrorError as exception:
      for mirror_url, mirror_error in six.iteritems(exception.mirror_errors):
        url_prefix = self.repository_mirrors['mirror1']['url_prefix']
        url_file = os.path.join(url_prefix, 'metadata', 'targets', 'role1.json')
       
        # Verify that 'role1.json' is the culprit.
        self.assertEqual(url_file, mirror_url)
        self.assertTrue(isinstance(mirror_error, tuf.BadVersionNumberError))

    else:
      self.fail('TUF did not prevent a mix-and-match attack.')
Beispiel #10
0
    def test_without_tuf(self):
        """Show that a non-TUF client accepts a replayed (older) 'timestamp.json'."""
        # Scenario:
        # 'timestamp.json' specifies the latest version of the repository files.
        # A client should only accept the same version number (specified in the
        # file) of the metadata, or greater.  A version number less than the one
        # currently trusted should be rejected.  A non-TUF client may use a
        # different mechanism for determining versions of metadata, but version
        # numbers are used in this integration test because that is what TUF uses.
        #
        # Modify the repository's 'timestamp.json' so that a new version is generated
        # and accepted by the client, and backup the previous version.  The previous
        # is then returned the next time the client requests an update.  A non-TUF
        # client (without a way to detect older versions of metadata, and thus
        # updates) is expected to download older metadata and outdated files.
        # Verify that the older version of 'timestamp.json' is downloaded by the
        # non-TUF client.

        # Backup the current version of 'timestamp'.  It will be used as the
        # outdated version returned to the client.  The repository tool removes
        # obsolete metadata, so do *not* save the backup version in the
        # repository's metadata directory.
        timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                      'timestamp.json')
        backup_timestamp = os.path.join(self.repository_directory,
                                        'timestamp.json.backup')
        shutil.copy(timestamp_path, backup_timestamp)

        # The fileinfo of the previous version is saved to verify that it is indeed
        # accepted by the non-TUF client.
        length, hashes = tuf.util.get_file_details(backup_timestamp)
        previous_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Modify the timestamp file on the remote repository.
        repository = repo_tool.load_repository(self.repository_directory)
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)

        # Set an arbitrary expiration so that the repository tool generates a new
        # version.
        repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Save the fileinfo of the new version generated to verify that it is
        # saved by the client.
        length, hashes = tuf.util.get_file_details(timestamp_path)
        new_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        url_prefix = self.repository_mirrors['mirror1']['url_prefix']
        url_file = os.path.join(url_prefix, 'metadata', 'timestamp.json')
        client_timestamp_path = os.path.join(self.client_directory, 'metadata',
                                             'current', 'timestamp.json')

        # Fetch the new timestamp directly, as a non-TUF client would.
        six.moves.urllib.request.urlretrieve(url_file, client_timestamp_path)

        length, hashes = tuf.util.get_file_details(client_timestamp_path)
        download_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the new version.
        self.assertEqual(download_fileinfo, new_fileinfo)

        # Restore the previous version of 'timestamp.json' on the remote repository
        # and verify that the non-TUF client downloads it (expected, but not ideal).
        shutil.move(backup_timestamp, timestamp_path)

        six.moves.urllib.request.urlretrieve(url_file, client_timestamp_path)

        length, hashes = tuf.util.get_file_details(client_timestamp_path)
        download_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the previous version.
        self.assertEqual(download_fileinfo, previous_fileinfo)
        self.assertNotEqual(download_fileinfo, new_fileinfo)
Beispiel #11
0
    def push_metadata(self,
                      targets_key_password=None,
                      snapshot_key_password=None,
                      timestamp_key_password=None,
                      progress=None):
        """
        Upload local TUF metadata to the repository.

        The TUF metadata consists of a list of targets (which were uploaded by
        :meth:`push_target`), a snapshot of the state of the metadata (list of
        hashes), a timestamp and a list of public keys.

        This function signs the metadata except for the list of public keys,
        so you'll need to supply the password to the respective private keys.

        The list of public keys was signed (along with the rest of the metadata)
        with the root private key when you called :meth:`create_metadata`
        (or :meth:`reset_keys`).

        :param targets_key_password: Password to use for decrypting the TUF targets private key. You'll be prompted for one if you don't supply it.
        :type password: str

        :param snapshot_key_password: Password to use for decrypting the TUF snapshot private key. You'll be prompted for one if you don't supply it.
        :type password: str

        :param timestamp_key_password: Password to use for decrypting the TUF timestamp private key. You'll be prompted for one if you don't supply it.
        :type password: str

        :param progress: Optional function to call as the upload progresses. The function will be called with the hash of the content of the file currently being uploaded, the blob just read from the file and the total size of the file.
        :type progress: function(dgst, chunk, total)
        """
        from tuf.repository_tool import load_repository, \
                                        Repository, \
                                        import_rsa_privatekey_from_file
        # Load repository object
        repository = load_repository(self._master_repo_dir)
        #  pylint: disable=no-member

        # Update targets
        repository.targets.clear_targets()
        repository.targets.add_targets([
            _strip_consistent_target_digest(f) for f in
            Repository.get_filepaths_in_directory(self._master_targets_dir)
        ])

        # Update expirations.  TUF metadata expirations are expressed in UTC,
        # so derive them from utcnow() rather than the local-time now().
        repository.targets.expiration = datetime.utcnow(
        ) + self._targets_lifetime
        repository.snapshot.expiration = datetime.utcnow(
        ) + self._snapshot_lifetime
        repository.timestamp.expiration = datetime.utcnow(
        ) + self._timestamp_lifetime

        # Load targets key.  A None password makes the import routine prompt
        # interactively, hence the notice printed first.
        if targets_key_password is None:
            print('importing targets key...')
        private_targets_key = import_rsa_privatekey_from_file(
            self._targets_key_file, targets_key_password)
        repository.targets.load_signing_key(private_targets_key)

        # Load snapshot key
        if snapshot_key_password is None:
            print('importing snapshot key...')
        private_snapshot_key = import_rsa_privatekey_from_file(
            self._snapshot_key_file, snapshot_key_password)
        repository.snapshot.load_signing_key(private_snapshot_key)

        # Load timestamp key
        if timestamp_key_password is None:
            print('importing timestamp key...')
        private_timestamp_key = import_rsa_privatekey_from_file(
            self._timestamp_key_file, timestamp_key_password)
        repository.timestamp.load_signing_key(private_timestamp_key)

        # Update metadata
        repository.write('targets', consistent_snapshot=True)
        repository.write('snapshot', consistent_snapshot=True)
        repository.write('timestamp', consistent_snapshot=True)

        # Upload root.json and timestamp.json without version prefix
        for f in ['root.json', 'timestamp.json']:
            dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                       progress)
            self._dxf.set_alias(f, dgst)

        # Upload consistent snapshot versions of current metadata files...
        # first load timestamp.json
        with open(path.join(self._master_staged_dir, 'timestamp.json'),
                  'rb') as f:
            timestamp = json.loads(f.read().decode('utf-8'))
        # get timestamp prefix
        files = ['{}.timestamp.json'.format(timestamp['signed']['version'])]
        # get snapshot prefix
        snapshot_cs = '{}.snapshot.json'.format(
            timestamp['signed']['meta']['snapshot.json']['version'])
        files.append(snapshot_cs)
        # load prefixed snapshot.json
        with open(path.join(self._master_staged_dir, snapshot_cs), 'rb') as f:
            snapshot = json.loads(f.read().decode('utf-8'))
        # get targets and root prefixes
        files.append('{}.targets.json'.format(
            snapshot['signed']['meta']['targets.json']['version']))
        files.append('{}.root.json'.format(
            snapshot['signed']['meta']['root.json']['version']))
        # Upload metadata
        for f in files:
            dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                       progress)
            self._dxf.set_alias(f, dgst)
    def setUp(self):
        """Prepare a fresh scratch copy of the test repository fixtures.

        Creates per-test copies of the 'repository', 'client', and 'keystore'
        directories, enlarges 'file1.txt' so the slow-retrieval server (mode 2,
        1 byte/second) triggers a slow-retrieval error, re-signs and publishes
        the metadata, and builds the client updater instance.
        """
        # We inherit from a custom TestCase; its setUp creates
        # self.temporary_directory and the make_temp_directory helper.
        unittest_toolbox.Modified_TestCase.setUp(self)

        # Work only on copies of the pristine fixtures in 'repository_data'
        # so that no test case can mutate the originals.
        source_root = os.path.join(os.getcwd(), 'repository_data')
        scratch_root = self.make_temp_directory(
            directory=self.temporary_directory)

        # References the test cases use to reach metadata and target files.
        self.repository_directory = os.path.join(scratch_root, 'repository')
        self.client_directory = os.path.join(scratch_root, 'client')
        self.keystore_directory = os.path.join(scratch_root, 'keystore')

        # Copy each fixture directory into the scratch area.
        for fixture_name, destination in (
                ('repository', self.repository_directory),
                ('client', self.client_directory),
                ('keystore', self.keystore_directory)):
            shutil.copytree(os.path.join(source_root, fixture_name),
                            destination)

        # The slow retrieval server in mode 2 (1 byte per second) sleeps for
        # roughly (target file size) seconds in total.  Make the target file
        # comfortably larger than 'tuf.conf.SLOW_START_GRACE_PERIOD' bytes so
        # a slow-retrieval error is actually triggered.
        total_bytes = tuf.conf.SLOW_START_GRACE_PERIOD + 8

        repository = repo_tool.load_repository(self.repository_directory)
        file1_filepath = os.path.join(self.repository_directory, 'targets',
                                      'file1.txt')
        with open(file1_filepath, 'wb') as file_object:
            file_object.write(('a' * total_bytes).encode('utf-8'))

        # Load the signing keys for the roles whose metadata changes when the
        # enlarged target file is written.
        for key_name, role in (('targets_key', repository.targets),
                               ('snapshot_key', repository.snapshot),
                               ('timestamp_key', repository.timestamp)):
            private_key = repo_tool.import_rsa_privatekey_from_file(
                os.path.join(self.keystore_directory, key_name), 'password')
            role.load_signing_key(private_key)

        repository.write()

        # Publish: replace the "live" metadata with the freshly staged copy.
        live_metadata = os.path.join(self.repository_directory, 'metadata')
        shutil.rmtree(live_metadata)
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            live_metadata)

        # Build the url prefix required by 'tuf/client/updater.py', e.g.
        # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
        repository_basepath = self.repository_directory[len(os.getcwd()):]
        url_prefix = ('http://localhost:' + str(self.SERVER_PORT)
                      + repository_basepath)

        # Point the client at the temporary client directory copied above.
        tuf.conf.repository_directory = self.client_directory
        self.repository_mirrors = {
            'mirror1': {
                'url_prefix': url_prefix,
                'metadata_path': 'metadata',
                'targets_path': 'targets',
                'confined_target_dirs': ['']
            }
        }

        # The client updater the test cases use to refresh metadata, fetch
        # target files, etc.
        self.repository_updater = updater.Updater('test_repository',
                                                  self.repository_mirrors)
# --- Beispiel #13 (scraped example marker; commented out so the file parses) ---
    def push_metadata(self,
                      targets_key_password=None,
                      snapshot_key_password=None,
                      timestamp_key_password=None,
                      progress=None):
        """
        Upload local TUF metadata to the repository.

        The TUF metadata consists of a list of targets (which were uploaded by
        :meth:`push_target`), a snapshot of the state of the metadata (list of
        hashes), a timestamp and a list of public keys.

        This function signs the metadata except for the list of public keys,
        so you'll need to supply the password to the respective private keys.

        The list of public keys was signed (along with the rest of the metadata)
        with the root private key when you called :meth:`create_metadata`
        (or :meth:`reset_keys`).

        :param targets_key_password: Password to use for decrypting the TUF targets private key. You'll be prompted for one if you don't supply it.
        :type targets_key_password: str

        :param snapshot_key_password: Password to use for decrypting the TUF snapshot private key. You'll be prompted for one if you don't supply it.
        :type snapshot_key_password: str

        :param timestamp_key_password: Password to use for decrypting the TUF timestamp private key. You'll be prompted for one if you don't supply it.
        :type timestamp_key_password: str

        :param progress: Optional function to call as the upload progresses. The function will be called with the hash of the content of the file currently being uploaded, the blob just read from the file and the total size of the file.
        :type progress: function(dgst, chunk, total)
        """
        from tuf.repository_tool import load_repository, \
                                        Repository, \
                                        import_rsa_privatekey_from_file
        # Load repository object
        repository = load_repository(self._master_repo_dir)
        #  pylint: disable=no-member

        # Replace the target list with the current contents of the targets
        # directory, stripping any consistent-snapshot digest prefixes.
        repository.targets.clear_targets()
        repository.targets.add_targets([
            _strip_consistent_target_digest(f)
            for f in Repository.get_filepaths_in_directory(self._master_targets_dir)])

        # Update expirations
        repository.targets.expiration = datetime.now() + self._targets_lifetime
        repository.snapshot.expiration = datetime.now() + self._snapshot_lifetime
        repository.timestamp.expiration = datetime.now() + self._timestamp_lifetime

        # Load targets key.  A None password makes the import prompt the user.
        if targets_key_password is None:
            print('importing targets key...')
        private_targets_key = import_rsa_privatekey_from_file(
            self._targets_key_file,
            targets_key_password)
        repository.targets.load_signing_key(private_targets_key)

        # Load snapshot key
        if snapshot_key_password is None:
            print('importing snapshot key...')
        private_snapshot_key = import_rsa_privatekey_from_file(
            self._snapshot_key_file,
            snapshot_key_password)
        repository.snapshot.load_signing_key(private_snapshot_key)

        # Load timestamp key
        if timestamp_key_password is None:
            print('importing timestamp key...')
        private_timestamp_key = import_rsa_privatekey_from_file(
            self._timestamp_key_file,
            timestamp_key_password)
        repository.timestamp.load_signing_key(private_timestamp_key)

        # Update metadata
        repository.write(consistent_snapshot=True)

        # Upload root.json and timestamp.json without hash prefix
        for f in ['root.json', 'timestamp.json']:
            dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                       progress)
            self._dxf.set_alias(f, dgst)

        # Upload consistent snapshot versions of current metadata files...
        # first load timestamp.json
        with open(path.join(self._master_staged_dir, 'timestamp.json'), 'rb') as f:
            timestamp_data = f.read()
        # hash of content is timestamp prefix
        timestamp_cs = hash_bytes(timestamp_data) + '.timestamp.json'
        files = [timestamp_cs]
        # parse timestamp data
        timestamp = json.loads(timestamp_data.decode('utf-8'))
        # get snapshot prefix
        snapshot_cs = timestamp['signed']['meta']['snapshot.json']['hashes']['sha256'] + '.snapshot.json'
        files.append(snapshot_cs)
        # load prefixed snapshot.json
        with open(path.join(self._master_staged_dir, snapshot_cs), 'rb') as f:
            snapshot_data = f.read()
        # parse snapshot data
        snapshot = json.loads(snapshot_data.decode('utf-8'))
        # get targets and root prefixes
        targets_cs = snapshot['signed']['meta']['targets.json']['hashes']['sha256'] + '.targets.json'
        files.append(targets_cs)
        root_cs = snapshot['signed']['meta']['root.json']['hashes']['sha256'] + '.root.json'
        files.append(root_cs)
        # Upload metadata.  Fix: register the alias for each pushed blob,
        # matching the root/timestamp upload loop above — previously 'dgst'
        # was computed but never used, so the aliases were never set.
        for f in files:
            dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                       progress)
            self._dxf.set_alias(f, dgst)
  def test_with_tuf(self):
    """Verify that the client detects expired snapshot and timestamp metadata.

    Two tests are conducted here:

    Test 1: If the timestamp acquired from a mirror indicates that there is
            no new snapshot file, and our current snapshot file is expired,
            is it recognized as such?
    Test 2: If an expired timestamp is downloaded, is it recognized as such?
    """

    # Test 1 Begin:
    #
    # Addresses this issue: https://github.com/theupdateframework/tuf/issues/322
    #
    # If time has passed and our snapshot or targets role is expired, and
    # the mirror whose timestamp we fetched doesn't indicate the existence of a
    # new snapshot version, we still need to check that it's expired and notify
    # the software update system / application / user. This test creates that
    # scenario. The correct behavior is to raise an exception.
    #
    # Background: Expiration checks (updater._ensure_not_expired) were
    # previously conducted when the metadata file was downloaded. If no new
    # metadata file was downloaded, no expiry check would occur. In particular,
    # while root was checked for expiration at the beginning of each
    # updater.refresh() cycle, and timestamp was always checked because it was
    # always fetched, snapshot and targets were never checked if the user did
    # not receive evidence that they had changed. This bug allowed a class of
    # freeze attacks.
    # That bug was fixed and this test tests that fix going forward.

    # Modify the timestamp file on the remote repository.  'timestamp.json'
    # must be properly updated and signed with 'repository_tool.py', otherwise
    # the client will reject it as invalid metadata.

    # Load the repository
    repository = repo_tool.load_repository(self.repository_directory)

    # Load the timestamp and snapshot keys, since we will be signing a new
    # timestamp and a new snapshot file.
    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')
    repository.timestamp.load_signing_key(timestamp_private)
    key_file = os.path.join(self.keystore_directory, 'snapshot_key')
    snapshot_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')
    repository.snapshot.load_signing_key(snapshot_private)

    # Expire snapshot in 8s. This should be far enough into the future that we
    # haven't reached it before the first refresh validates timestamp expiry.
    # We want a successful refresh before expiry, then a second refresh after
    # expiry (which we then expect to raise an exception due to expired
    # metadata).
    expiry_time = time.time() + 8
    datetime_object = tuf.formats.unix_timestamp_to_datetime(int(expiry_time))

    repository.snapshot.expiration = datetime_object

    # Now write to the repository.
    repository.write()

    # And move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Refresh metadata on the client. For this refresh, all data is not expired.
    logger.info('Test: Refreshing #1 - Initial metadata refresh occurring.')
    self.repository_updater.refresh()
    logger.info('Test: Refreshed #1 - Initial metadata refresh completed '
                'successfully. Now sleeping until snapshot metadata expires.')

    # Sleep until expiry_time ('repository.snapshot.expiration')
    time.sleep(max(0, expiry_time - time.time()))

    logger.info('Test: Refreshing #2 - Now trying to refresh again after local'
      ' snapshot expiry.')
    try:
      self.repository_updater.refresh() # We expect this to fail!

    except tuf.ExpiredMetadataError:
      logger.info('Test: Refresh #2 - failed as expected. Expired local'
                  ' snapshot case generated a tuf.ExpiredMetadataError'
                  ' exception as expected. Test pass.')

    # NOTE(review): only tuf.ExpiredMetadataError is expected here.  A
    # NoWorkingMirrorError would indicate something else in this case — an
    # unavailable repository, for example — and would propagate out of this
    # try block, failing the test.
    else:
      self.fail('TUF failed to detect expired stale snapshot metadata. Freeze'
        ' attack successful.')




    # Test 2 Begin:
    #
    # 'timestamp.json' specifies the latest version of the repository files.
    # A client should only accept the same version of this file up to a certain
    # point, or else it cannot detect that new files are available for download.
    # Modify the repository's 'timestamp.json' so that it is about to expire,
    # copy it over the to client, wait a moment until it expires, and attempt to
    # re-fetch the same expired version.

    # The same scenario as in test_without_tuf() is followed here, except with
    # a TUF client. The TUF client performs a refresh of top-level metadata,
    # which includes 'timestamp.json', and should detect a freeze attack if
    # the repository serves an outdated 'timestamp.json'.

    # Modify the timestamp file on the remote repository.  'timestamp.json'
    # must be properly updated and signed with 'repository_tool.py', otherwise
    # the client will reject it as invalid metadata.  The resulting
    # 'timestamp.json' should be valid metadata, but expired (as intended).
    repository = repo_tool.load_repository(self.repository_directory)

    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')

    repository.timestamp.load_signing_key(timestamp_private)

    # Set timestamp metadata to expire soon.
    # We cannot set the timestamp expiration with
    # 'repository.timestamp.expiration = ...' with already-expired timestamp
    # metadata because of consistency checks that occur during that assignment.
    expiry_time = time.time() + 1
    datetime_object = tuf.formats.unix_timestamp_to_datetime(int(expiry_time))
    repository.timestamp.expiration = datetime_object
    repository.write()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Wait just long enough for the timestamp metadata (which is now both on
    # the repository and on the client) to expire.
    time.sleep(max(0, expiry_time - time.time()))

    # Try to refresh top-level metadata on the client. Since we're already past
    # 'repository.timestamp.expiration', the TUF client is expected to detect
    # that timestamp metadata is outdated and refuse to continue the update
    # process.
    try:
      self.repository_updater.refresh() # We expect NoWorkingMirrorError.

    except tuf.NoWorkingMirrorError as e:
      # NoWorkingMirrorError indicates that we did not find valid, unexpired
      # metadata at any mirror. That exception class preserves the errors from
      # each mirror. We now assert that for each mirror, the particular error
      # detected was that metadata was expired (the timestamp we manually
      # expired).
      for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
        self.assertTrue(isinstance(mirror_error, tuf.ExpiredMetadataError))

    else:
      self.fail('TUF failed to detect expired, stale timestamp metadata.'
        ' Freeze attack successful.')
  def test_root_role_versioning(self):
    """Verify that writeall() emits correctly versioned root metadata.

    Steps exercised:
    1. Import public and private keys.
    2. Add verification keys.
    3. Load signing keys.
    4. Add target files.
    5. Perform delegation.
    6. writeall()

    Then verify that '1.root.json' / '2.root.json' match the 'root.json'
    written before/after a root key change, and that write('root') permits a
    partially signed root while writeall() raises when the signing threshold
    cannot be met.
    """
    # Copy the target files from 'tuf/tests/repository_data' so that writeall()
    # has target fileinfo to include in metadata.
    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
    targets_directory = os.path.join(temporary_directory, 'repository',
                                     repo_tool.TARGETS_DIRECTORY_NAME)
    original_targets_directory = os.path.join('repository_data',
                                              'repository', 'targets')
    shutil.copytree(original_targets_directory, targets_directory)

    # In this case, create_new_repository() creates the 'repository/'
    # sub-directory in 'temporary_directory' if it does not exist.
    repository_directory = os.path.join(temporary_directory, 'repository')
    metadata_directory = os.path.join(repository_directory,
                                      repo_tool.METADATA_STAGED_DIRECTORY_NAME)
    repository = repo_tool.create_new_repository(repository_directory)




    # (1) Load the public and private keys of the top-level roles, and one
    # delegated role.
    keystore_directory = os.path.join('repository_data', 'keystore')

    # Load the public keys.  Note: root uses an RSA key; the other roles use
    # ed25519 keys.
    root_pubkey_path = os.path.join(keystore_directory, 'root_key.pub')
    targets_pubkey_path = os.path.join(keystore_directory, 'targets_key.pub')
    snapshot_pubkey_path = os.path.join(keystore_directory, 'snapshot_key.pub')
    timestamp_pubkey_path = os.path.join(keystore_directory, 'timestamp_key.pub')
    role1_pubkey_path = os.path.join(keystore_directory, 'delegation_key.pub')

    root_pubkey = repo_tool.import_rsa_publickey_from_file(root_pubkey_path)
    targets_pubkey = repo_tool.import_ed25519_publickey_from_file(targets_pubkey_path)
    snapshot_pubkey = \
      repo_tool.import_ed25519_publickey_from_file(snapshot_pubkey_path)
    timestamp_pubkey = \
      repo_tool.import_ed25519_publickey_from_file(timestamp_pubkey_path)
    role1_pubkey = repo_tool.import_ed25519_publickey_from_file(role1_pubkey_path)

    # Load the private keys.
    root_privkey_path = os.path.join(keystore_directory, 'root_key')
    targets_privkey_path = os.path.join(keystore_directory, 'targets_key')
    snapshot_privkey_path = os.path.join(keystore_directory, 'snapshot_key')
    timestamp_privkey_path = os.path.join(keystore_directory, 'timestamp_key')
    role1_privkey_path = os.path.join(keystore_directory, 'delegation_key')

    root_privkey = \
      repo_tool.import_rsa_privatekey_from_file(root_privkey_path, 'password')
    targets_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(targets_privkey_path, 'password')
    snapshot_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(snapshot_privkey_path,
                                                'password')
    timestamp_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(timestamp_privkey_path,
                                                'password')
    role1_privkey = \
      repo_tool.import_ed25519_privatekey_from_file(role1_privkey_path,
                                                'password')


    # (2) Add top-level verification keys.
    repository.root.add_verification_key(root_pubkey)
    repository.targets.add_verification_key(targets_pubkey)
    repository.snapshot.add_verification_key(snapshot_pubkey)
    repository.timestamp.add_verification_key(timestamp_pubkey)


    # (3) Load top-level signing keys.
    repository.root.load_signing_key(root_privkey)
    repository.targets.load_signing_key(targets_privkey)
    repository.snapshot.load_signing_key(snapshot_privkey)
    repository.timestamp.load_signing_key(timestamp_privkey)

    # (4) Add target files.  target3 is only delegated, not added to the
    # top-level targets role.
    target1 = 'file1.txt'
    target2 = 'file2.txt'
    target3 = 'file3.txt'
    repository.targets.add_target(target1)
    repository.targets.add_target(target2)


    # (5) Perform delegation.
    repository.targets.delegate('role1', [role1_pubkey], [target3])
    repository.targets('role1').load_signing_key(role1_privkey)

    # (6) Write repository.
    repository.writeall()

    self.assertTrue(os.path.exists(os.path.join(metadata_directory, 'root.json')))
    self.assertTrue(os.path.exists(os.path.join(metadata_directory, '1.root.json')))


    # Verify that the expected metadata is written.
    root_filepath = os.path.join(metadata_directory, 'root.json')
    root_1_filepath = os.path.join(metadata_directory, '1.root.json')
    root_2_filepath = os.path.join(metadata_directory, '2.root.json')
    old_root_signable = securesystemslib.util.load_json_file(root_filepath)
    root_1_signable = securesystemslib.util.load_json_file(root_1_filepath)

    # Make a change to the root keys so that a new root version is produced.
    repository.root.add_verification_key(targets_pubkey)
    repository.root.load_signing_key(targets_privkey)
    repository.root.threshold = 2
    repository.writeall()

    new_root_signable = securesystemslib.util.load_json_file(root_filepath)
    root_2_signable = securesystemslib.util.load_json_file(root_2_filepath)

    for role_signable in [old_root_signable, new_root_signable, root_1_signable, root_2_signable]:
      # Raise 'securesystemslib.exceptions.FormatError' if 'role_signable' is an
      # invalid signable.
      tuf.formats.check_signable_object_format(role_signable)

    # Verify contents of versioned roots: the versioned copies must match the
    # 'root.json' snapshots taken before and after the key change.
    self.assertEqual(old_root_signable, root_1_signable)
    self.assertEqual(new_root_signable, root_2_signable)

    self.assertEqual(root_1_signable['signed']['version'], 1)
    self.assertEqual(root_2_signable['signed']['version'], 2)

    repository.root.remove_verification_key(root_pubkey)
    repository.root.unload_signing_key(root_privkey)
    repository.root.threshold = 2

    # Errors, not enough signing keys to satisfy old threshold.
    # NOTE(review): this uses 'tuf.exceptions.UnsignedMetadataError' while
    # other tests in this file reference exceptions directly on 'tuf' (e.g.
    # 'tuf.ExpiredMetadataError') — looks like mixed tuf versions; confirm.
    self.assertRaises(tuf.exceptions.UnsignedMetadataError, repository.writeall)

    # No error, write() ignore's root's threshold and allows it to be written
    # to disk partially signed.
    repository.write('root')
# --- Beispiel #16 (scraped example marker; commented out so the file parses) ---
def rotate_key(parsed_arguments):
    """
    Rotate a role's verification key in the registry repository.

    Expects ``parsed_arguments.rotate`` to be a 3-element sequence of
    ``[role, old_public_keyfile, new_public_keyfile]``.  The old key is
    removed from the role's verification keys and the new key is added.
    For the top-level roles ('root', 'targets', 'snapshot', 'timestamp')
    this changes root metadata, which is then re-signed with both root keys
    (passwords are prompted for) and written with an incremented version
    number.

    :param parsed_arguments: argparse namespace carrying the ``rotate`` list.
    """
    registry = repo_tool.load_repository(REPO_DIR)

    role = parsed_arguments.rotate[0]
    old_keypath = parsed_arguments.rotate[1]
    new_keypath = parsed_arguments.rotate[2]

    # Load the old and new public keys from their on-disk key metadata.
    key_metadata = securesystemslib.util.load_json_file(old_keypath)
    old_keyobject, junk = securesystemslib.keys.format_metadata_to_key(
        key_metadata)

    key_metadata = securesystemslib.util.load_json_file(new_keypath)
    new_keyobject, junk = securesystemslib.keys.format_metadata_to_key(
        key_metadata)

    if role == 'root':
        registry.root.remove_verification_key(old_keyobject)
        registry.root.add_verification_key(new_keyobject)

    elif role == 'targets':
        registry.targets.remove_verification_key(old_keyobject)
        registry.targets.add_verification_key(new_keyobject)
        # NOTE(review): binds a *local* name only; if the intent was to update
        # a module-level constant, a 'global' statement is missing — confirm.
        TARGETS_KEY_NAME = os.path.basename(new_keypath[:-len('.pub')])

    elif role == 'snapshot':
        registry.snapshot.remove_verification_key(old_keyobject)
        registry.snapshot.add_verification_key(new_keyobject)
        # NOTE(review): local-only binding, see note above for 'targets'.
        SNAPSHOT_KEY_NAME = os.path.basename(new_keypath[:-len('.pub')])

    elif role == 'timestamp':
        registry.timestamp.remove_verification_key(old_keyobject)
        registry.timestamp.add_verification_key(new_keyobject)
        # NOTE(review): local-only binding, see note above for 'targets'.
        TIMESTAMP_KEY_NAME = os.path.basename(new_keypath[:-len('.pub')])
        print(TIMESTAMP_KEY_NAME)

    else:
        # Delegated role: rotate the key on its parent targets object.
        registry.targets(role)._parent_targets_object.remove_verification_key(
            old_keyobject)
        registry.targets(role)._parent_targets_object.add_verification_key(
            new_keyobject)

    if role in ('root', 'targets', 'snapshot', 'timestamp'):
        # A top-level key change modifies root metadata, which must be
        # re-signed by both root keys.
        root_keypath = os.path.join(KEYSTORE_DIR, ROOT_KEY_NAME)
        password = securesystemslib.interface.get_password(
            'Enter a password for'
            ' the encrypted key (' + repr(root_keypath) + '): ',
            confirm=False)
        # Fix: import the key from the path shown in the prompt instead of
        # the hard-coded 'keystore/root_key', which could disagree with
        # KEYSTORE_DIR / ROOT_KEY_NAME.
        root_private = repo_tool.import_rsa_privatekey_from_file(
            root_keypath, password=password)
        root_keypath = os.path.join(KEYSTORE_DIR, ROOT2_KEY_NAME)
        password = securesystemslib.interface.get_password(
            'Enter a password for'
            ' the encrypted key (' + repr(root_keypath) + '): ',
            confirm=False)

        # Fix: likewise use the computed path rather than 'keystore/root_key2'.
        root_private2 = repo_tool.import_rsa_privatekey_from_file(
            root_keypath, password=password)

        registry.root.load_signing_key(root_private)
        registry.root.load_signing_key(root_private2)

        registry.write('root', increment_version_number=True)
# --- Beispiel #17 (scraped example marker; commented out so the file parses) ---
    def test_with_tuf(self):
        """Verify that the client rejects outdated-but-once-valid metadata.

        Scenario:
        An attacker tries to trick the client into installing files indicated
        by a previous release of its corresponding metadata (a mix-and-match
        attack).  The outdated metadata is properly named and was previously
        valid, but is no longer current according to the latest
        'snapshot.json' role.
        """
        # Generate a new snapshot of the repository after modifying a target
        # file of 'role1.json'.
        # Backup 'role1.json' (the delegated role to be updated, and then inserted
        # again for the mix-and-match attack.)
        role1_path = os.path.join(self.repository_directory, 'metadata',
                                  'targets', 'role1.json')
        backup_role1 = os.path.join(self.repository_directory,
                                    'role1.json.backup')
        shutil.copy(role1_path, backup_role1)

        # Backup 'file3.txt', specified by 'role1.json'.
        file3_path = os.path.join(self.repository_directory, 'targets',
                                  'file3.txt')
        shutil.copy(file3_path, file3_path + '.backup')

        # Re-generate the required metadata on the remote repository.  The affected
        # metadata must be properly updated and signed with 'repository_tool.py',
        # otherwise the client will reject them as invalid metadata.  The resulting
        # metadata should be valid metadata.
        repository = repo_tool.load_repository(self.repository_directory)

        # Load the signing keys so that newly generated metadata is properly signed.
        timestamp_keyfile = os.path.join(self.keystore_directory,
                                         'timestamp_key')
        role1_keyfile = os.path.join(self.keystore_directory, 'delegation_key')
        snapshot_keyfile = os.path.join(self.keystore_directory,
                                        'snapshot_key')
        timestamp_private = \
          repo_tool.import_rsa_privatekey_from_file(timestamp_keyfile, 'password')
        role1_private = \
          repo_tool.import_rsa_privatekey_from_file(role1_keyfile, 'password')
        snapshot_private = \
          repo_tool.import_rsa_privatekey_from_file(snapshot_keyfile, 'password')

        repository.targets('role1').load_signing_key(role1_private)
        repository.snapshot.load_signing_key(snapshot_private)
        repository.timestamp.load_signing_key(timestamp_private)

        # Modify a 'role1.json' target file, and add it to its metadata so that a
        # new version is generated.
        with open(file3_path, 'wt') as file_object:
            file_object.write('This is role2\'s target file.')
        repository.targets('role1').add_target(file3_path)

        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Insert the previously valid 'role1.json'.  The TUF client should reject it.
        shutil.move(backup_role1, role1_path)

        # Verify that the TUF client detects unexpected metadata (previously valid,
        # but not up-to-date with the latest snapshot of the repository) and refuses
        # to continue the update process.
        # Refresh top-level metadata so that the client is aware of the latest
        # snapshot of the repository.
        self.repository_updater.refresh()

        try:
            self.repository_updater.targets_of_role('targets/role1')

        # Verify that the specific 'tuf.BadHashError' exception is raised by each
        # mirror.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata', 'targets',
                                        'role1.json')

                # Verify that the stale 'role1.json' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(isinstance(mirror_error, tuf.BadHashError))

        else:
            self.fail('TUF did not prevent a mix-and-match attack.')
    def test_with_tuf(self):
        """Run two freeze-attack tests against a TUF client.

        Test 1: if the timestamp acquired from a mirror indicates that there
        is no new snapshot file, and our currently trusted snapshot file is
        expired, the client must recognize this and raise
        tuf.ExpiredMetadataError (regression test for
        https://github.com/theupdateframework/tuf/issues/322).

        Test 2: if an expired timestamp is downloaded, the client must
        recognize it as expired and refuse to continue the update.
        """
        # Test 1 Begin:
        #
        # If time has passed and our snapshot or targets role is expired, and
        # the mirror whose timestamp we fetched doesn't indicate the existence
        # of a new snapshot version, we still need to check that it's expired
        # and notify the software update system / application / user. This
        # test creates that scenario. The correct behavior is to raise an
        # exception.
        #
        # Background: Expiration checks (updater._ensure_not_expired) were
        # previously conducted when the metadata file was downloaded. If no new
        # metadata file was downloaded, no expiry check would occur. In
        # particular, while root was checked for expiration at the beginning of
        # each updater.refresh() cycle, and timestamp was always checked
        # because it was always fetched, snapshot and targets were never
        # checked if the user did not receive evidence that they had changed.
        # This bug allowed a class of freeze attacks.
        # That bug was fixed and this test tests that fix going forward.

        # Modify the timestamp file on the remote repository.  'timestamp.json'
        # must be properly updated and signed with 'repository_tool.py',
        # otherwise the client will reject it as invalid metadata.

        # Load the repository.
        repository = repo_tool.load_repository(self.repository_directory)

        # Load the timestamp and snapshot keys, since we will be signing a new
        # timestamp and a new snapshot file.
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)
        key_file = os.path.join(self.keystore_directory, 'snapshot_key')
        snapshot_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')
        repository.snapshot.load_signing_key(snapshot_private)

        # Expire snapshot in 8s. This should be far enough into the future
        # that we haven't reached it before the first refresh validates
        # timestamp expiry. We want a successful refresh before expiry, then a
        # second refresh after expiry (which we then expect to raise an
        # exception due to expired metadata).
        expiry_time = time.time() + 8
        datetime_object = tuf.formats.unix_timestamp_to_datetime(
            int(expiry_time))

        repository.snapshot.expiration = datetime_object

        # Now write to the repository.
        repository.write()

        # And move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Refresh metadata on the client. For this refresh, no metadata is
        # expired yet, so the refresh is expected to succeed.
        logger.info(
            'Test: Refreshing #1 - Initial metadata refresh occurring.')
        self.repository_updater.refresh()
        logger.info(
            'Test: Refreshed #1 - Initial metadata refresh completed '
            'successfully. Now sleeping until snapshot metadata expires.')

        # Sleep until expiry_time ('repository.snapshot.expiration').
        time.sleep(max(0, expiry_time - time.time()))

        logger.info(
            'Test: Refreshing #2 - Now trying to refresh again after local'
            ' snapshot expiry.')
        try:
            self.repository_updater.refresh()  # We expect this to fail!

        except tuf.ExpiredMetadataError:
            logger.info('Test: Refresh #2 - failed as expected. Expired local'
                        ' snapshot case generated a tuf.ExpiredMetadataError'
                        ' exception as expected. Test pass.')

        # Only tuf.ExpiredMetadataError is expected here. A
        # NoWorkingMirrorError would indicate something else in this case -
        # an unavailable repo, for example.
        else:
            self.fail(
                'TUF failed to detect expired stale snapshot metadata. Freeze'
                ' attack successful.')

        # Test 2 Begin:
        #
        # 'timestamp.json' specifies the latest version of the repository
        # files. A client should only accept the same version of this file up
        # to a certain point, or else it cannot detect that new files are
        # available for download. Modify the repository's 'timestamp.json' so
        # that it is about to expire, copy it over to the client, wait a
        # moment until it expires, and attempt to re-fetch the same expired
        # version.

        # The same scenario as in test_without_tuf() is followed here, except
        # with a TUF client. The TUF client performs a refresh of top-level
        # metadata, which includes 'timestamp.json', and should detect a
        # freeze attack if the repository serves an outdated 'timestamp.json'.

        # Modify the timestamp file on the remote repository.  'timestamp.json'
        # must be properly updated and signed with 'repository_tool.py',
        # otherwise the client will reject it as invalid metadata.  The
        # resulting 'timestamp.json' should be valid metadata, but expired (as
        # intended).
        repository = repo_tool.load_repository(self.repository_directory)

        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')

        repository.timestamp.load_signing_key(timestamp_private)

        # Set timestamp metadata to expire soon.
        # We cannot set the timestamp expiration with
        # 'repository.timestamp.expiration = ...' on already-expired timestamp
        # metadata because of consistency checks that occur during that
        # assignment.
        expiry_time = time.time() + 1
        datetime_object = tuf.formats.unix_timestamp_to_datetime(
            int(expiry_time))
        repository.timestamp.expiration = datetime_object
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Wait just long enough for the timestamp metadata (which is now both
        # on the repository and on the client) to expire.
        time.sleep(max(0, expiry_time - time.time()))

        # Try to refresh top-level metadata on the client. Since we're already
        # past 'repository.timestamp.expiration', the TUF client is expected
        # to detect that timestamp metadata is outdated and refuse to continue
        # the update process.
        try:
            self.repository_updater.refresh(
            )  # We expect NoWorkingMirrorError.

        except tuf.NoWorkingMirrorError as e:
            # NoWorkingMirrorError indicates that we did not find valid,
            # unexpired metadata at any mirror. That exception class preserves
            # the errors from each mirror. We now assert that for each mirror,
            # the particular error detected was that metadata was expired (the
            # timestamp we manually expired).
            for mirror_url, mirror_error in six.iteritems(e.mirror_errors):
                self.assertTrue(
                    isinstance(mirror_error, tuf.ExpiredMetadataError))

        else:
            self.fail('TUF failed to detect expired, stale timestamp metadata.'
                      ' Freeze attack successful.')
# ---------------------------------------------------------------------------
# Example #19 (separator from the code-sample collection; original marker:
# "Beispiel #19", rating 0)
# ---------------------------------------------------------------------------
    def test_with_tuf(self):
        """Verify that a TUF client rejects replayed (rolled-back) timestamp
        metadata.

        The current 'timestamp.json' is backed up, a newer signed version is
        generated and served, and the client refreshes successfully.  The old
        version is then restored on the repository; the client's subsequent
        refresh must fail with tuf.ReplayedMetadataError for every mirror.
        """
        # Back up the current version of 'timestamp'.  It will later be
        # restored as the outdated (replayed) version served to the client.
        # The repository tool removes obsolete metadata, so do *not* save the
        # backup version in the repository's metadata directory.
        timestamp_path = os.path.join(self.repository_directory, 'metadata',
                                      'timestamp.json')
        backup_timestamp = os.path.join(self.repository_directory,
                                        'timestamp.json.backup')
        shutil.copy(timestamp_path, backup_timestamp)

        # Generate a new version of 'timestamp.json' on the remote repository,
        # signed with the timestamp key so that the client accepts it.
        repository = repo_tool.load_repository(self.repository_directory)
        key_file = os.path.join(self.keystore_directory, 'timestamp_key')
        timestamp_private = repo_tool.import_rsa_privatekey_from_file(
            key_file, 'password')
        repository.timestamp.load_signing_key(timestamp_private)

        # Set an arbitrary (far-future) expiration so that the repository tool
        # generates a new version.
        repository.timestamp.expiration = datetime.datetime(2030, 1, 1, 12, 12)
        repository.write()

        # Move the staged metadata to the "live" metadata.
        shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
        shutil.copytree(
            os.path.join(self.repository_directory, 'metadata.staged'),
            os.path.join(self.repository_directory, 'metadata'))

        # Save the fileinfo of the new version generated to verify that it is
        # saved by the client.
        length, hashes = tuf.util.get_file_details(timestamp_path)
        new_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Refresh top-level metadata, including 'timestamp.json'.  Installation
        # of the new version of 'timestamp.json' is expected.
        self.repository_updater.refresh()

        client_timestamp_path = os.path.join(self.client_directory, 'metadata',
                                             'current', 'timestamp.json')
        length, hashes = tuf.util.get_file_details(client_timestamp_path)
        download_fileinfo = tuf.formats.make_fileinfo(length, hashes)

        # Verify 'download_fileinfo' is equal to the new version.
        self.assertEqual(download_fileinfo, new_fileinfo)

        # Restore the previous version of 'timestamp.json' on the remote
        # repository, simulating a replay attack.
        shutil.move(backup_timestamp, timestamp_path)
        logger.info('Moving the timestamp.json backup to the current version.')

        # Verify that the TUF client detects replayed metadata and refuses to
        # continue the update process.
        try:
            self.repository_updater.refresh()

        # Verify that the specific 'tuf.ReplayedMetadataError' is raised by
        # each mirror.
        except tuf.NoWorkingMirrorError as exception:
            for mirror_url, mirror_error in six.iteritems(
                    exception.mirror_errors):
                url_prefix = self.repository_mirrors['mirror1']['url_prefix']
                url_file = os.path.join(url_prefix, 'metadata',
                                        'timestamp.json')

                # Verify that 'timestamp.json' is the culprit.
                self.assertEqual(url_file, mirror_url)
                self.assertTrue(
                    isinstance(mirror_error, tuf.ReplayedMetadataError))

        else:
            self.fail('TUF did not prevent a replay attack.')
  def setUp(self):
    """Create per-test copies of the repository, keystore, and client
    directories, add a target file large enough to trigger a slow-retrieval
    error, re-sign and publish the metadata, and build the client updater."""
    # We are inheriting from a custom class.
    unittest_toolbox.Modified_TestCase.setUp(self)

    # Copy the original repository files provided in the test folder so that
    # any modifications made to repository files are restricted to the copies.
    # The 'repository_data' directory is expected to exist in 'tuf/tests/'.
    original_repository_files = os.path.join(os.getcwd(), 'repository_data')
    temporary_repository_root = \
      self.make_temp_directory(directory=self.temporary_directory)

    # The original repository, keystore, and client directories will be copied
    # for each test case.
    original_repository = os.path.join(original_repository_files, 'repository')
    original_client = os.path.join(original_repository_files, 'client')
    original_keystore = os.path.join(original_repository_files, 'keystore')

    # Save references to the often-needed client repository directories.
    # Test cases need these references to access metadata and target files.
    self.repository_directory = \
      os.path.join(temporary_repository_root, 'repository')
    self.client_directory = os.path.join(temporary_repository_root, 'client')
    self.keystore_directory = os.path.join(temporary_repository_root, 'keystore')

    # Copy the original 'repository', 'client', and 'keystore' directories
    # to the temporary directory the test cases can use.
    shutil.copytree(original_repository, self.repository_directory)
    shutil.copytree(original_client, self.client_directory)
    shutil.copytree(original_keystore, self.keystore_directory)

    # The slow retrieval server, in mode 2 (1 byte per second), will only
    # sleep for a total of (target file size) seconds.  Add a target file
    # that contains a sufficient number of bytes to trigger a slow retrieval
    # error.  "Sufficient number of bytes" is assumed to be
    # >> 'tuf.conf.SLOW_START_GRACE_PERIOD' bytes.
    extra_bytes = 8
    total_bytes = tuf.conf.SLOW_START_GRACE_PERIOD + extra_bytes

    # Overwrite 'file1.txt' with a payload large enough to outlast the grace
    # period when served at 1 byte per second.
    repository = repo_tool.load_repository(self.repository_directory)
    file1_filepath = os.path.join(self.repository_directory, 'targets',
                                  'file1.txt')
    with open(file1_filepath, 'wb') as file_object:
      data = 'a' * total_bytes
      file_object.write(data.encode('utf-8'))

    # Load the timestamp, snapshot, and targets signing keys so that the
    # regenerated metadata is accepted as valid by the client.
    key_file = os.path.join(self.keystore_directory, 'timestamp_key')
    timestamp_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')
    key_file = os.path.join(self.keystore_directory, 'snapshot_key')
    snapshot_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')
    key_file = os.path.join(self.keystore_directory, 'targets_key')
    targets_private = repo_tool.import_rsa_privatekey_from_file(key_file,
                                                                  'password')

    repository.targets.load_signing_key(targets_private)
    repository.snapshot.load_signing_key(snapshot_private)
    repository.timestamp.load_signing_key(timestamp_private)

    repository.write()

    # Move the staged metadata to the "live" metadata.
    shutil.rmtree(os.path.join(self.repository_directory, 'metadata'))
    shutil.copytree(os.path.join(self.repository_directory, 'metadata.staged'),
                    os.path.join(self.repository_directory, 'metadata'))

    # Set the url prefix required by the 'tuf/client/updater.py' updater.
    # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'.
    repository_basepath = self.repository_directory[len(os.getcwd()):]
    url_prefix = \
      'http://localhost:' + str(self.SERVER_PORT) + repository_basepath

    # Setting 'tuf.conf.repository_directory' with the temporary client
    # directory copied from the original repository files.
    tuf.conf.repository_directory = self.client_directory
    self.repository_mirrors = {'mirror1': {'url_prefix': url_prefix,
                                           'metadata_path': 'metadata',
                                           'targets_path': 'targets',
                                           'confined_target_dirs': ['']}}

    # Create the repository updater instance.  The test cases will use this
    # client updater to refresh metadata, fetch target files, etc.
    self.repository_updater = updater.Updater('test_repository',
                                              self.repository_mirrors)