def test_generate_targets_metadata(self):
    """Exercise repo_lib.generate_targets_metadata() with valid arguments,
    delegations=None, write_consistent_targets=True, improperly formatted
    arguments, and a non-existent target file."""

    # Test normal case.  Create a targets directory containing a single
    # target file for the generated metadata to describe.
    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
    targets_directory = os.path.join(temporary_directory, 'targets')
    file1_path = os.path.join(targets_directory, 'file.txt')
    securesystemslib.util.ensure_parent_dir(file1_path)

    with open(file1_path, 'wt') as file_object:
        file_object.write('test file.')

    # Set valid generate_targets_metadata() arguments.  Add a custom field for
    # the 'target_files' target set below.
    version = 1
    datetime_object = datetime.datetime(2030, 1, 1, 12, 0)
    expiration_date = datetime_object.isoformat() + 'Z'
    file_permissions = oct(os.stat(file1_path).st_mode)[4:]
    target_files = {'file.txt': {'file_permission': file_permissions}}

    delegations = {
        "keys": {
            "a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf": {
                "keytype": "ed25519",
                "keyval": {
                    "public":
                        "3eb81026ded5af2c61fb3d4b272ac53cd1049a810ee88f4df1fc35cdaf918157"
                }
            }
        },
        "roles": [{
            "keyids": [
                "a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf"
            ],
            "name": "targets/warehouse",
            "paths": ["/file1.txt", "/README.txt", '/warehouse/'],
            "threshold": 1
        }]
    }

    targets_metadata = repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        delegations, False)
    self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))

    # Valid arguments with 'delegations' set to None.
    targets_metadata = repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        None, False)
    self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))

    # Verify that 'digest.filename' file is saved to 'targets_directory' if
    # the 'write_consistent_targets' argument is True.
    list_targets_directory = os.listdir(targets_directory)
    targets_metadata = repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        delegations, write_consistent_targets=True)
    new_list_targets_directory = os.listdir(targets_directory)

    # Verify that 'targets_directory' contains only one extra item.
    # NOTE(fix): the original called assertTrue(a, b), which treats the second
    # argument as the failure message and therefore always passed; assertEqual
    # performs the intended comparison.
    self.assertEqual(len(list_targets_directory) + 1,
                     len(new_list_targets_directory))

    # Verify that an exception is not raised if the target files already exist.
    repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        delegations, write_consistent_targets=True)

    # Verify that 'targets_metadata' contains a 'custom' entry (optional)
    # for 'file.txt'.
    self.assertTrue('custom' in targets_metadata['targets']['file.txt'])

    # Test improperly formatted arguments.
    self.assertRaises(securesystemslib.exceptions.FormatError,
        repo_lib.generate_targets_metadata, 3, target_files, version,
        expiration_date)
    self.assertRaises(securesystemslib.exceptions.FormatError,
        repo_lib.generate_targets_metadata, targets_directory, 3, version,
        expiration_date)
    self.assertRaises(securesystemslib.exceptions.FormatError,
        repo_lib.generate_targets_metadata, targets_directory, target_files,
        '3', expiration_date)
    self.assertRaises(securesystemslib.exceptions.FormatError,
        repo_lib.generate_targets_metadata, targets_directory, target_files,
        version, '3')

    # Improperly formatted 'delegations' and 'write_consistent_targets'
    self.assertRaises(securesystemslib.exceptions.FormatError,
        repo_lib.generate_targets_metadata, targets_directory, target_files,
        version, expiration_date, 3, False)
    self.assertRaises(securesystemslib.exceptions.FormatError,
        repo_lib.generate_targets_metadata, targets_directory, target_files,
        version, expiration_date, delegations, 3)

    # Test non-existent target file.
    bad_target_file = \
        {'non-existent.txt': {'file_permission': file_permissions}}

    self.assertRaises(securesystemslib.exceptions.Error,
        repo_lib.generate_targets_metadata, targets_directory,
        bad_target_file, version, expiration_date)
def _generate_and_write_metadata(rolename, metadata_filename, write_partial,
                                 targets_directory, metadata_directory,
                                 filenames=None, prefix=''):
    """
    Non-public function that can generate and write the metadata of the
    specified 'rolename'.  It also increments version numbers if:

    1. write_partial==True and the metadata is the first to be written.

    2. write_partial=False (i.e., write()), the metadata was not loaded as
       partially written, and a write_partial is not needed.

    Returns a (signable, filename) tuple: the signed metadata dict and the
    path the metadata file was written to.  Raises tuf.Error if 'signable'
    does not carry a threshold of signatures and write_partial is False.
    """
    # NOTE: 'metadata_directory' and 'filenames' are accepted for interface
    # compatibility with callers but are not used by this implementation.

    # Retrieve the roleinfo of 'rolename' to extract the needed metadata
    # attributes, such as version number, expiration, etc.
    roleinfo = tuf.roledb.get_roleinfo(rolename)

    metadata = generate_targets_metadata(targets_directory, roleinfo['paths'],
        roleinfo['version'], roleinfo['expires'], roleinfo['delegations'],
        False)

    # Prepend the prefix to the project's filepath to avoid signature errors in
    # upstream.  Iterate over a snapshot of the keys, because entries are
    # added (and possibly deleted) while looping.
    # NOTE(fix): removed the unused 'target_filepaths' local present in the
    # original; it was computed and never read.
    for element in list(metadata['targets']):
        junk_path, relative_target = os.path.split(element)
        prefixed_path = os.path.join(prefix, relative_target)
        metadata['targets'][prefixed_path] = metadata['targets'][element]
        if prefix != '':
            del metadata['targets'][element]

    signable = sign_metadata(metadata, roleinfo['signing_keyids'],
        metadata_filename)

    # Check if the version number of 'rolename' may be automatically
    # incremented, depending on whether partial metadata is loaded or if the
    # metadata is written with write() / write_partial().

    # Increment the version number if this is the first partial write.
    if write_partial:
        temp_signable = sign_metadata(metadata, [], metadata_filename)
        temp_signable['signatures'].extend(roleinfo['signatures'])
        status = tuf.sig.get_signature_status(temp_signable, rolename)

        # No good signatures were previously loaded, so this is the first
        # partial write: bump the version and re-sign.
        if len(status['good_sigs']) == 0:
            metadata['version'] = metadata['version'] + 1
            signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                metadata_filename)

    # non-partial write()
    else:
        if tuf.sig.verify(signable, rolename):  # and not roleinfo['partial_loaded']:
            metadata['version'] = metadata['version'] + 1
            signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                metadata_filename)

    # Write the metadata to file if it contains a threshold of signatures.
    signable['signatures'].extend(roleinfo['signatures'])

    if tuf.sig.verify(signable, rolename) or write_partial:
        _remove_invalid_and_duplicate_signatures(signable)
        compressions = roleinfo['compressions']
        filename = write_metadata_file(signable, metadata_filename,
            metadata['version'], compressions, False)

    # 'signable' contains an invalid threshold of signatures.
    else:
        message = 'Not enough signatures for ' + repr(metadata_filename)
        raise tuf.Error(message, signable)

    return signable, filename
def _generate_and_write_metadata(rolename, metadata_filename, write_partial,
                                 targets_directory, prefix='',
                                 repository_name='default'):
    """
    Non-public function that can generate and write the metadata of the
    specified 'rolename'.  It also increments version numbers if:

    1. write_partial==True and the metadata is the first to be written.

    2. write_partial=False (i.e., write()), the metadata was not loaded as
       partially written, and a write_partial is not needed.

    Returns a (signable, filename) tuple: the signed metadata dict and the
    path the metadata file was written to.  Raises
    sslib_exceptions.Error if 'signable' lacks a threshold of signatures
    and write_partial is False.
    """

    metadata = None

    # Retrieve the roleinfo of 'rolename' to extract the needed metadata
    # attributes, such as version number, expiration, etc.
    roleinfo = roledb.get_roleinfo(rolename, repository_name)

    metadata = generate_targets_metadata(targets_directory, roleinfo['paths'],
        roleinfo['version'], roleinfo['expires'], roleinfo['delegations'],
        False)

    # Prepend the prefix to the project's filepath to avoid signature errors in
    # upstream.
    # NOTE: iterate over a snapshot of the keys (list(...)) because entries
    # are inserted and possibly deleted while looping.
    for element in list(metadata['targets']):
        junk, relative_target = os.path.split(element)
        prefixed_path = os.path.join(prefix, relative_target)
        metadata['targets'][prefixed_path] = metadata['targets'][element]
        # With an empty prefix the prefixed path equals the basename; only
        # remove the original entry when a real prefix was applied.
        if prefix != '':
            del (metadata['targets'][element])

    signable = repo_lib.sign_metadata(metadata, roleinfo['signing_keyids'],
        metadata_filename, repository_name)

    # Check if the version number of 'rolename' may be automatically
    # incremented, depending on whether if partial metadata is loaded or if
    # the metadata is written with write() / write_partial().

    # Increment the version number if this is the first partial write.
    if write_partial:
        # Sign with no keys to get a signable carrying only the previously
        # loaded signatures, then check whether any of them are good.
        temp_signable = repo_lib.sign_metadata(metadata, [], metadata_filename,
            repository_name)
        temp_signable['signatures'].extend(roleinfo['signatures'])
        status = sig.get_signature_status(temp_signable, rolename,
            repository_name)

        # No good signatures yet: first partial write, so bump the version
        # and re-sign with the role's signing keys.
        if len(status['good_sigs']) == 0:
            metadata['version'] = metadata['version'] + 1
            signable = repo_lib.sign_metadata(metadata,
                roleinfo['signing_keyids'], metadata_filename,
                repository_name)

    # non-partial write()
    else:
        if sig.verify(signable, rolename, repository_name):
            metadata['version'] = metadata['version'] + 1
            signable = repo_lib.sign_metadata(metadata,
                roleinfo['signing_keyids'], metadata_filename,
                repository_name)

    # Write the metadata to file if contains a threshold of signatures.
    signable['signatures'].extend(roleinfo['signatures'])

    if sig.verify(signable, rolename, repository_name) or write_partial:
        repo_lib._remove_invalid_and_duplicate_signatures(signable,
            repository_name)
        storage_backend = sslib_storage.FilesystemBackend()
        filename = repo_lib.write_metadata_file(signable, metadata_filename,
            metadata['version'], False, storage_backend)

    # 'signable' contains an invalid threshold of signatures.
    else:
        message = 'Not enough signatures for ' + repr(metadata_filename)
        raise sslib_exceptions.Error(message, signable)

    return signable, filename
def test_generate_targets_metadata(self):
    """Exercise repo_lib.generate_targets_metadata() with valid arguments,
    delegations=None, write_consistent_targets=True, improperly formatted
    arguments, and a non-existent target file."""

    # Test normal case.  Create a targets directory containing a single
    # target file for the generated metadata to describe.
    temporary_directory = tempfile.mkdtemp(dir=self.temporary_directory)
    targets_directory = os.path.join(temporary_directory, "targets")
    file1_path = os.path.join(targets_directory, "file.txt")
    tuf.util.ensure_parent_dir(file1_path)

    with open(file1_path, "wt") as file_object:
        file_object.write("test file.")

    # Set valid generate_targets_metadata() arguments.  Add a custom field for
    # the 'target_files' target set below.
    version = 1
    datetime_object = datetime.datetime(2030, 1, 1, 12, 0)
    expiration_date = datetime_object.isoformat() + "Z"
    file_permissions = oct(os.stat(file1_path).st_mode)[4:]
    target_files = {"file.txt": {"file_permission": file_permissions}}

    delegations = {
        "keys": {
            "a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf": {
                "keytype": "ed25519",
                "keyval": {
                    "public":
                        "3eb81026ded5af2c61fb3d4b272ac53cd1049a810ee88f4df1fc35cdaf918157"
                },
            }
        },
        "roles": [
            {
                "keyids": [
                    "a394c28384648328b16731f81440d72243c77bb44c07c040be99347f0df7d7bf"
                ],
                "name": "targets/warehouse",
                "paths": ["/file1.txt", "/README.txt", "/warehouse/"],
                "threshold": 1,
            }
        ],
    }

    targets_metadata = repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        delegations, False
    )
    self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))

    # Valid arguments with 'delegations' set to None.
    targets_metadata = repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        None, False
    )
    self.assertTrue(tuf.formats.TARGETS_SCHEMA.matches(targets_metadata))

    # Verify that 'digest.filename' file is saved to 'targets_directory' if
    # the 'write_consistent_targets' argument is True.
    list_targets_directory = os.listdir(targets_directory)
    targets_metadata = repo_lib.generate_targets_metadata(
        targets_directory, target_files, version, expiration_date,
        delegations, write_consistent_targets=True
    )
    new_list_targets_directory = os.listdir(targets_directory)

    # Verify that 'targets_directory' contains only one extra item.
    # NOTE(fix): the original called assertTrue(a, b), which treats the second
    # argument as the failure message and therefore always passed; assertEqual
    # performs the intended comparison.
    self.assertEqual(len(list_targets_directory) + 1,
                     len(new_list_targets_directory))

    # Verify that 'targets_metadata' contains a 'custom' entry (optional)
    # for 'file.txt'.
    self.assertTrue("custom" in targets_metadata["targets"]["file.txt"])

    # Test improperly formatted arguments.
    self.assertRaises(
        tuf.FormatError, repo_lib.generate_targets_metadata, 3,
        target_files, version, expiration_date
    )
    self.assertRaises(
        tuf.FormatError, repo_lib.generate_targets_metadata,
        targets_directory, 3, version, expiration_date
    )
    self.assertRaises(
        tuf.FormatError, repo_lib.generate_targets_metadata,
        targets_directory, target_files, "3", expiration_date
    )
    self.assertRaises(
        tuf.FormatError, repo_lib.generate_targets_metadata,
        targets_directory, target_files, version, "3"
    )

    # Improperly formatted 'delegations' and 'write_consistent_targets'
    self.assertRaises(
        tuf.FormatError,
        repo_lib.generate_targets_metadata,
        targets_directory,
        target_files,
        version,
        expiration_date,
        3,
        False,
    )
    self.assertRaises(
        tuf.FormatError,
        repo_lib.generate_targets_metadata,
        targets_directory,
        target_files,
        version,
        expiration_date,
        delegations,
        3,
    )

    # Test non-existent target file.
    bad_target_file = {"non-existent.txt": {"file_permission": file_permissions}}
    self.assertRaises(
        tuf.Error, repo_lib.generate_targets_metadata, targets_directory,
        bad_target_file, version, expiration_date
    )