def test_write_metadata_file(self):
    """Verify write_metadata_file() writes the metadata file and its
    gzip-compressed companion, and raises tuf.FormatError for each
    improperly formatted argument."""

    # Normal case: write 'root.json' (and 'root.json.gz') into a fresh
    # scratch directory.
    scratch_dir = tempfile.mkdtemp(dir=self.temporary_directory)
    fixture_root = os.path.join('repository_data', 'repository', 'metadata',
                                'root.json')
    root_signable = tuf.util.load_json_file(fixture_root)

    written_path = os.path.join(scratch_dir, 'root.json')
    compression_algorithms = ['gz']
    # Bump the version so the written metadata differs from the fixture.
    next_version = root_signable['signed']['version'] + 1

    self.assertFalse(os.path.exists(written_path))
    repo_lib.write_metadata_file(root_signable, written_path, next_version,
        compression_algorithms, consistent_snapshot=False)
    self.assertTrue(os.path.exists(written_path))
    self.assertTrue(os.path.exists(written_path + '.gz'))

    # Improperly formatted arguments: each tuple corrupts exactly one
    # positional argument and must trigger tuf.FormatError.
    for bad_args in (
        (3, written_path, next_version, compression_algorithms, False),
        (root_signable, 3, next_version, compression_algorithms, False),
        (root_signable, written_path, '3', compression_algorithms, False),
        (root_signable, written_path, next_version,
         compression_algorithms, 3)):
        self.assertRaises(tuf.FormatError, repo_lib.write_metadata_file,
                          *bad_args)
# ---- Example 2 ----
  def test_write_metadata_file(self):
    """Verify write_metadata_file() writes metadata to disk, tolerates an
    over-write of an existing file, and raises
    securesystemslib.exceptions.FormatError for improperly formatted
    arguments.
    """
    # Test normal case.
    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
    metadata_directory = os.path.join('repository_data', 'repository', 'metadata')
    root_filename = os.path.join(metadata_directory, 'root.json')
    root_signable = securesystemslib.util.load_json_file(root_filename)

    output_filename = os.path.join(temporary_directory, 'root.json')
    # Bump the version so the written metadata differs from the fixture.
    version_number = root_signable['signed']['version'] + 1

    self.assertFalse(os.path.exists(output_filename))
    storage_backend = securesystemslib.storage.FilesystemBackend()
    repo_lib.write_metadata_file(root_signable, output_filename, version_number,
        consistent_snapshot=False, storage_backend=storage_backend)
    self.assertTrue(os.path.exists(output_filename))

    # Attempt to over-write the previously written metadata file.  An exception
    # is not raised in this case, only a debug message is logged.
    repo_lib.write_metadata_file(root_signable, output_filename, version_number,
        consistent_snapshot=False, storage_backend=storage_backend)

    # Test improperly formatted arguments.  Each call corrupts exactly one
    # positional argument of write_metadata_file(metadata, filename,
    # version_number, consistent_snapshot, storage_backend).
    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
        3, output_filename, version_number, False, storage_backend)
    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
        root_signable, 3, version_number, False, storage_backend)
    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
        root_signable, output_filename, '3', False, storage_backend)
    # BUG FIX: the arguments were previously passed out of signature order
    # (storage_backend where version_number belongs), so the invalid
    # consistent_snapshot value (3) was never what triggered the error.
    self.assertRaises(securesystemslib.exceptions.FormatError, repo_lib.write_metadata_file,
        root_signable, output_filename, version_number, 3, storage_backend)
# ---- Example 3 ----
  def test__load_top_level_metadata(self):
    """Exercise repo_lib._load_top_level_metadata() against a scratch copy of
    the 'repository_data' fixture: a clean load, a reload, a load of
    partially written roles, a load with missing non-root role files, and a
    load with root.json removed (expected to raise
    tuf.exceptions.RepositoryError).
    """
    repository_name = 'test_repository'

    # Copy the fixture repository (metadata + targets) into a scratch
    # directory so files can be re-written and deleted freely.
    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
    repository_directory = os.path.join(temporary_directory, 'repository')
    metadata_directory = os.path.join(repository_directory,
        repo_lib.METADATA_STAGED_DIRECTORY_NAME)
    targets_directory = os.path.join(repository_directory,
        repo_lib.TARGETS_DIRECTORY_NAME)
    shutil.copytree(os.path.join('repository_data', 'repository', 'metadata'),
        metadata_directory)
    shutil.copytree(os.path.join('repository_data', 'repository', 'targets'),
        targets_directory)

    # Add a duplicate signature to the Root file for testing purposes.
    root_file = os.path.join(metadata_directory, 'root.json')
    signable = securesystemslib.util.load_json_file(os.path.join(metadata_directory, 'root.json'))
    signable['signatures'].append(signable['signatures'][0])

    # NOTE(review): called positionally as (signable, filename, version=8,
    # consistent_snapshot=False) -- confirm this matches the
    # write_metadata_file() signature used elsewhere, which also takes a
    # storage_backend.
    repo_lib.write_metadata_file(signable, root_file, 8, False)

    # Attempt to load a repository that contains a compressed Root file.
    repository = repo_tool.create_new_repository(repository_directory, repository_name)
    filenames = repo_lib.get_metadata_filenames(metadata_directory)
    repo_lib._load_top_level_metadata(repository, filenames, repository_name)

    # Load again with freshly computed filenames and a fresh repository.
    filenames = repo_lib.get_metadata_filenames(metadata_directory)
    repository = repo_tool.create_new_repository(repository_directory, repository_name)
    repo_lib._load_top_level_metadata(repository, filenames, repository_name)

    # Partially write all top-level roles (we increase the threshold of each
    # top-level role so that they are flagged as partially written).
    repository.root.threshold = repository.root.threshold + 1
    repository.snapshot.threshold = repository.snapshot.threshold + 1
    repository.targets.threshold = repository.targets.threshold + 1
    repository.timestamp.threshold = repository.timestamp.threshold + 1
    repository.write('root', )
    repository.write('snapshot')
    repository.write('targets')
    repository.write('timestamp')

    repo_lib._load_top_level_metadata(repository, filenames, repository_name)

    # Attempt to load a repository with missing top-level metadata (delete
    # every role file except root).
    for role_file in os.listdir(metadata_directory):
      if role_file.endswith('.json') and not role_file.startswith('root'):
        role_filename = os.path.join(metadata_directory, role_file)
        os.remove(role_filename)
    repo_lib._load_top_level_metadata(repository, filenames, repository_name)

    # Remove the required Root file and verify that an exception is raised.
    os.remove(os.path.join(metadata_directory, 'root.json'))
    self.assertRaises(tuf.exceptions.RepositoryError,
        repo_lib._load_top_level_metadata, repository, filenames,
        repository_name)
    def test_write_metadata_file(self):
        """write_metadata_file() writes the metadata file plus its '.gz'
        companion, and raises tuf.FormatError for each malformed argument."""
        # Stage a scratch directory and load the fixture root metadata.
        scratch_dir = tempfile.mkdtemp(dir=self.temporary_directory)
        fixture_root = os.path.join(
            "repository_data", "repository", "metadata", "root.json"
        )
        root_signable = tuf.util.load_json_file(fixture_root)

        output_filename = os.path.join(scratch_dir, "root.json")
        compression_algorithms = ["gz"]
        # Bump the version so the written metadata differs from the fixture.
        version_number = root_signable["signed"]["version"] + 1

        # Normal case: both the plain and the gzip-compressed file appear.
        self.assertFalse(os.path.exists(output_filename))
        repo_lib.write_metadata_file(
            root_signable,
            output_filename,
            version_number,
            compression_algorithms,
            consistent_snapshot=False,
        )
        self.assertTrue(os.path.exists(output_filename))
        self.assertTrue(os.path.exists(output_filename + ".gz"))

        # Improperly formatted arguments: each tuple corrupts exactly one
        # positional argument and must trigger tuf.FormatError.
        bad_argument_sets = (
            (3, output_filename, version_number, compression_algorithms, False),
            (root_signable, 3, version_number, compression_algorithms, False),
            (root_signable, output_filename, "3", compression_algorithms, False),
            (root_signable, output_filename, version_number, compression_algorithms, 3),
        )
        for bad_args in bad_argument_sets:
            self.assertRaises(
                tuf.FormatError, repo_lib.write_metadata_file, *bad_args
            )
# ---- Example 5 ----
    def test_write_metadata_file(self):
        """Exercise repo_lib.write_metadata_file(): the normal write, a
        harmless over-write, consistent-snapshot writes via the 'hard_link'
        CONSISTENT_METHOD setting, an invalid CONSISTENT_METHOD value, a
        hard link to a missing file, and improperly formatted arguments.
        """
        # Test normal case.
        temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
        metadata_directory = os.path.join('repository_data', 'repository',
                                          'metadata')
        root_filename = os.path.join(metadata_directory, 'root.json')
        root_signable = securesystemslib.util.load_json_file(root_filename)

        output_filename = os.path.join(temporary_directory, 'root.json')
        # Bump the version so the written metadata differs from the fixture.
        version_number = root_signable['signed']['version'] + 1

        self.assertFalse(os.path.exists(output_filename))
        repo_lib.write_metadata_file(root_signable,
                                     output_filename,
                                     version_number,
                                     consistent_snapshot=False)
        self.assertTrue(os.path.exists(output_filename))

        # Attempt to over-write the previously written metadata file.  An exception
        # is not raised in this case, only a debug message is logged.
        repo_lib.write_metadata_file(root_signable,
                                     output_filename,
                                     version_number,
                                     consistent_snapshot=False)

        # Try to write a consistent metadata file.  An exception is not raised in
        # this case.  For testing purposes, root.json should be a hard link to the
        # consistent metadata file.  We should verify that root.json points to
        # the latest consistent files.
        tuf.settings.CONSISTENT_METHOD = 'hard_link'
        repo_lib.write_metadata_file(root_signable,
                                     output_filename,
                                     version_number,
                                     consistent_snapshot=True)

        # Test if the consistent files are properly named.
        # Filename format of a consistent file: <version number>.rolename.json
        version_and_filename = str(version_number) + '.' + 'root.json'
        first_version_output_file = os.path.join(temporary_directory,
                                                 version_and_filename)
        self.assertTrue(os.path.exists(first_version_output_file))

        # Verify that the consistent file content is equal to 'output_filename'.
        self.assertEqual(
            securesystemslib.util.get_file_details(output_filename),
            securesystemslib.util.get_file_details(first_version_output_file))

        # Try to add more consistent metadata files.
        version_number += 1
        root_signable['signed']['version'] = version_number
        repo_lib.write_metadata_file(root_signable,
                                     output_filename,
                                     version_number,
                                     consistent_snapshot=True)

        # Test if the latest root.json points to the expected consistent file
        # and consistent metadata do not all point to the same root.json.
        version_and_filename = str(version_number) + '.' + 'root.json'
        second_version_output_file = os.path.join(temporary_directory,
                                                  version_and_filename)
        self.assertTrue(os.path.exists(second_version_output_file))

        # Verify that the second version is equal to the second output file, and
        # that the second output filename differs from the first.
        self.assertEqual(
            securesystemslib.util.get_file_details(output_filename),
            securesystemslib.util.get_file_details(second_version_output_file))
        self.assertNotEqual(
            securesystemslib.util.get_file_details(output_filename),
            securesystemslib.util.get_file_details(first_version_output_file))

        # Test for an improper settings.CONSISTENT_METHOD string value.
        tuf.settings.CONSISTENT_METHOD = 'somebadidea'

        # Test for invalid consistent methods on systems other than Windows,
        # which always uses the copy method.
        if platform.system() == 'Windows':
            pass

        else:
            self.assertRaises(
                securesystemslib.exceptions.InvalidConfigurationError,
                repo_lib.write_metadata_file,
                root_signable,
                output_filename,
                version_number,
                consistent_snapshot=True)

        # Try to create a link to root.json when root.json doesn't exist locally.
        # repository_lib should log a message if this is the case.
        tuf.settings.CONSISTENT_METHOD = 'hard_link'
        os.remove(output_filename)
        repo_lib.write_metadata_file(root_signable,
                                     output_filename,
                                     version_number,
                                     consistent_snapshot=True)

        # Reset CONSISTENT_METHOD so that subsequent tests work as expected.
        tuf.settings.CONSISTENT_METHOD = 'copy'

        # Test improperly formatted arguments.  Each call corrupts exactly one
        # positional argument and must raise a FormatError.
        self.assertRaises(securesystemslib.exceptions.FormatError,
                          repo_lib.write_metadata_file, 3, output_filename,
                          version_number, False)
        self.assertRaises(securesystemslib.exceptions.FormatError,
                          repo_lib.write_metadata_file, root_signable, 3,
                          version_number, False)
        self.assertRaises(securesystemslib.exceptions.FormatError,
                          repo_lib.write_metadata_file, root_signable,
                          output_filename, '3', False)
        self.assertRaises(securesystemslib.exceptions.FormatError,
                          repo_lib.write_metadata_file, root_signable,
                          output_filename, version_number, 3)
# ---- Example 6 ----
def _generate_and_write_metadata(rolename,
                                 metadata_filename,
                                 write_partial,
                                 targets_directory,
                                 prefix='',
                                 repository_name='default'):
    """
    Non-public helper that generates, signs, and writes the metadata of the
    specified 'rolename'.  The version number is incremented if:

    1.  write_partial==True and the metadata is the first to be written.

    2.  write_partial=False (i.e., write()), the metadata was not loaded as
        partially written, and a write_partial is not needed.
    """

    # Pull the role attributes (version, expiration, delegations, ...) needed
    # to build the targets metadata.
    role_info = roledb.get_roleinfo(rolename, repository_name)

    role_metadata = generate_targets_metadata(targets_directory,
                                              role_info['paths'],
                                              role_info['version'],
                                              role_info['expires'],
                                              role_info['delegations'],
                                              False)

    # Re-key every target under 'prefix' so the filepaths signed here match
    # what upstream expects; drop the un-prefixed entry when a prefix is set.
    for target_path in list(role_metadata['targets']):
        _, target_basename = os.path.split(target_path)
        renamed_path = os.path.join(prefix, target_basename)
        role_metadata['targets'][renamed_path] = \
            role_metadata['targets'][target_path]
        if prefix != '':
            del role_metadata['targets'][target_path]

    signable = repo_lib.sign_metadata(role_metadata,
                                      role_info['signing_keyids'],
                                      metadata_filename, repository_name)

    # Decide whether the version number should be incremented automatically,
    # depending on whether partial metadata is loaded or the metadata is
    # written with write() / write_partial().
    if write_partial:
        # Increment the version only if this is the first partial write
        # (i.e., no good signatures exist for the role yet).
        probe_signable = repo_lib.sign_metadata(role_metadata, [],
                                                metadata_filename,
                                                repository_name)
        probe_signable['signatures'].extend(role_info['signatures'])
        sig_status = sig.get_signature_status(probe_signable, rolename,
                                              repository_name)
        if len(sig_status['good_sigs']) == 0:
            role_metadata['version'] = role_metadata['version'] + 1
            signable = repo_lib.sign_metadata(role_metadata,
                                              role_info['signing_keyids'],
                                              metadata_filename,
                                              repository_name)

    # Non-partial write(): increment only when the signable already verifies.
    elif sig.verify(signable, rolename, repository_name):
        role_metadata['version'] = role_metadata['version'] + 1
        signable = repo_lib.sign_metadata(role_metadata,
                                          role_info['signing_keyids'],
                                          metadata_filename,
                                          repository_name)

    # Attach the previously collected signatures before the threshold check.
    signable['signatures'].extend(role_info['signatures'])

    # 'signable' contains an invalid threshold of signatures.
    if not (sig.verify(signable, rolename, repository_name) or write_partial):
        message = 'Not enough signatures for ' + repr(metadata_filename)
        raise sslib_exceptions.Error(message, signable)

    # Write the metadata to file: it contains a threshold of signatures (or a
    # partial write was explicitly requested).
    repo_lib._remove_invalid_and_duplicate_signatures(
        signable, repository_name)
    written_filename = repo_lib.write_metadata_file(
        signable, metadata_filename, role_metadata['version'], False,
        sslib_storage.FilesystemBackend())

    return signable, written_filename
# ---- Example 7 ----
def _generate_and_write_metadata(rolename, metadata_filename, write_partial,
                                 targets_directory, metadata_directory,
                                 filenames=None,
                                 prefix=''):
  """
    Non-public function that can generate and write the metadata of the
    specified 'rolename'.  It also increments version numbers if:

    1.  write_partial==True and the metadata is the first to be written.

    2.  write_partial=False (i.e., write()), the metadata was not loaded as
        partially written, and a write_partial is not needed.
  """

  # Retrieve the roleinfo of 'rolename' to extract the needed metadata
  # attributes, such as version number, expiration, etc.
  roleinfo = tuf.roledb.get_roleinfo(rolename)

  metadata = generate_targets_metadata(targets_directory,
                                       roleinfo['paths'],
                                       roleinfo['version'],
                                       roleinfo['expires'],
                                       roleinfo['delegations'],
                                       False)

  # Prepend the prefix to the project's filepath to avoid signature errors in
  # upstream.  When a prefix is set, the un-prefixed entry is removed.
  # (Removed an unused 'target_filepaths' local that materialized
  # metadata['targets'].items() without ever reading it.)
  for element in list(metadata['targets']):
    junk_path, relative_target = os.path.split(element)
    prefixed_path = os.path.join(prefix, relative_target)
    metadata['targets'][prefixed_path] = metadata['targets'][element]
    if prefix != '':
      del metadata['targets'][element]

  signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                           metadata_filename)

  # Check if the version number of 'rolename' may be automatically
  # incremented, depending on whether partial metadata is loaded or the
  # metadata is written with write() / write_partial().
  # Increment the version number if this is the first partial write (i.e.,
  # no good signatures exist for the role yet).
  if write_partial:
    temp_signable = sign_metadata(metadata, [], metadata_filename)
    temp_signable['signatures'].extend(roleinfo['signatures'])
    status = tuf.sig.get_signature_status(temp_signable, rolename)
    if len(status['good_sigs']) == 0:
      metadata['version'] = metadata['version'] + 1
      signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                               metadata_filename)

  # Non-partial write(): increment only when the signable already verifies.
  else:
    if tuf.sig.verify(signable, rolename):
      metadata['version'] = metadata['version'] + 1
      signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                               metadata_filename)

  # Write the metadata to file if it contains a threshold of signatures.
  signable['signatures'].extend(roleinfo['signatures'])

  if tuf.sig.verify(signable, rolename) or write_partial:
    _remove_invalid_and_duplicate_signatures(signable)
    compressions = roleinfo['compressions']
    filename = write_metadata_file(signable, metadata_filename,
                                   metadata['version'], compressions,
                                   False)

  # 'signable' contains an invalid threshold of signatures.
  else:
    message = 'Not enough signatures for ' + repr(metadata_filename)
    raise tuf.Error(message, signable)

  return signable, filename