def test_get_signature_status_below_threshold_unauthorized_sigs(self):
  """A signature from a key trusted only for a different role must be
  classified as unauthorized, leaving 'Root' below its threshold."""
  metadata = {'signatures' : [], 'signed' : 'test'}

  # Both KEYS[0] and KEYS[1] sign the metadata, but 'Root' only trusts
  # KEYS[0]; KEYS[1] is trusted for 'Release' instead.
  for signing_key in (KEYS[0], KEYS[1]):
    tuf.sig.add_signature(metadata, signing_key)

  key_db = tuf.keydb.KeyDB()
  for known_key in (KEYS[0], KEYS[1]):
    key_db.add_key(known_key)

  threshold = 2
  root_role = tuf.formats.make_role_meta(
      [KEYS[0].get_key_id(), KEYS[2].get_key_id()], threshold)
  key_db.add_role('Root', root_role)
  release_role = tuf.formats.make_role_meta(
      [KEYS[1].get_key_id(), KEYS[2].get_key_id()], threshold)
  key_db.add_role('Release', release_role)

  sig_status = tuf.sig.get_signature_status(metadata, key_db, 'Root')

  self.assertEqual(2, sig_status.threshold)
  self.assertEqual([KEYS[0].get_key_id()], sig_status.good)
  self.assertEqual([], sig_status.bad)
  self.assertEqual([], sig_status.unrecognized)
  self.assertEqual([KEYS[1].get_key_id()], sig_status.unauthorized)
  self.assertEqual([], sig_status.uknown_method)
  # One good signature out of a required two: the status is not valid.
  self.assertFalse(sig_status.is_valid())
def test_check_signatures_no_role(self):
  """check_signatures() must refuse to run without a role, because it
  evaluates trust in addition to raw signature validity."""
  metadata = {'signatures' : [], 'signed' : 'test'}
  tuf.sig.add_signature(metadata, KEYS[0])

  key_db = tuf.keydb.KeyDB()
  key_db.add_key(KEYS[0])

  # Passing None as the role is invalid: trust cannot be assessed
  # without knowing which role the signatures are being checked for.
  self.assertRaises(
      tuf.Error, tuf.sig.check_signatures, metadata, key_db, None)
def test_check_signatures_single_key(self):
  """A single trusted signature satisfying a threshold of 1 passes
  check_signatures() without raising."""
  metadata = {'signatures' : [], 'signed' : 'test'}
  tuf.sig.add_signature(metadata, KEYS[0])

  key_db = tuf.keydb.KeyDB()
  key_db.add_key(KEYS[0])

  threshold = 1
  role_meta = tuf.formats.make_role_meta([KEYS[0].get_key_id()], threshold)
  key_db.add_role('Root', role_meta)

  # check_signatures() calls is_valid() internally and raises if the
  # status is invalid, so merely returning means the check succeeded.
  sig_status = tuf.sig.check_signatures(metadata, key_db, 'Root')
  self.assertTrue(sig_status.is_valid())
def test_check_signatures_unrecognized_sig(self):
  """check_signatures() must raise tuf.BadSignature when a signature
  comes from a key the key database does not recognize."""
  metadata = {'signatures' : [], 'signed' : 'test'}

  # Both KEYS[0] and KEYS[2] sign, but KEYS[2] is never added to the
  # keydb, so its signature cannot be recognized.
  for signing_key in (KEYS[0], KEYS[2]):
    tuf.sig.add_signature(metadata, signing_key)

  key_db = tuf.keydb.KeyDB()
  key_db.add_key(KEYS[0])
  key_db.add_key(KEYS[1])

  threshold = 2
  role_meta = tuf.formats.make_role_meta(
      [KEYS[0].get_key_id(), KEYS[1].get_key_id()], threshold)
  key_db.add_role('Root', role_meta)

  self.assertRaises(
      tuf.BadSignature, tuf.sig.check_signatures, metadata, key_db, 'Root')
def test_get_signature_status_no_role(self):
  """Without a role, a known key's signature is reported as good, but
  overall validity cannot be determined."""
  metadata = {'signatures' : [], 'signed' : 'test'}
  tuf.sig.add_signature(metadata, KEYS[0])

  key_db = tuf.keydb.KeyDB()
  key_db.add_key(KEYS[0])

  # No specific role is supplied, so no threshold applies and nothing
  # can be classified as unauthorized.
  sig_status = tuf.sig.get_signature_status(metadata, key_db, None)

  self.assertEqual(None, sig_status.threshold)
  self.assertEqual([KEYS[0].get_key_id()], sig_status.good)
  self.assertEqual([], sig_status.bad)
  self.assertEqual([], sig_status.unrecognized)
  self.assertEqual([], sig_status.unauthorized)
  self.assertEqual([], sig_status.uknown_method)

  # is_valid() requires a role to have been specified, so it must raise.
  self.assertRaises(tuf.Error, sig_status.is_valid)
def test_get_signature_status_single_key(self):
  """One trusted key signing against a threshold of 1 produces a fully
  valid signature status."""
  metadata = {'signatures' : [], 'signed' : 'test'}
  tuf.sig.add_signature(metadata, KEYS[0])

  key_db = tuf.keydb.KeyDB()
  key_db.add_key(KEYS[0])

  threshold = 1
  role_meta = tuf.formats.make_role_meta([KEYS[0].get_key_id()], threshold)
  key_db.add_role('Root', role_meta)

  sig_status = tuf.sig.get_signature_status(metadata, key_db, 'Root')

  self.assertEqual(1, sig_status.threshold)
  self.assertEqual([KEYS[0].get_key_id()], sig_status.good)
  self.assertEqual([], sig_status.bad)
  self.assertEqual([], sig_status.unrecognized)
  self.assertEqual([], sig_status.unauthorized)
  self.assertEqual([], sig_status.uknown_method)
  self.assertTrue(sig_status.is_valid())
def load_project(project_directory, prefix='', new_targets_location=None,
    repository_name='default'):
  """
  <Purpose>
    Return a Project object initialized with the contents of the metadata
    files loaded from 'project_directory'.

  <Arguments>
    project_directory:
      The path to the project's metadata and configuration file.

    prefix:
      The prefix for the metadata, if defined.  It will replace the current
      prefix, by first removing the existing one (saved).

    new_targets_location:
      For flat project configurations, project owner might want to reload the
      project with a new location for the target files.  This overwrites the
      previous path to search for the target files.

    repository_name:
      The name of the repository.  If not supplied, 'rolename' is added to the
      'default' repository.

  <Exceptions>
    securesystemslib.exceptions.FormatError, if 'project_directory' or any of
    the metadata files are improperly formatted.

  <Side Effects>
    All the metadata files found in the project are loaded and their contents
    stored in a libtuf.Repository object.  The global role and key databases
    are cleared and repopulated.

  <Returns>
    A tuf.developer_tool.Project object.
  """

  # Does 'repository_directory' have the correct format?
  # Raise 'securesystemslib.exceptions.FormatError' if there is a mismatch.
  sslib_formats.PATH_SCHEMA.check_match(project_directory)
  sslib_formats.NAME_SCHEMA.check_match(repository_name)

  # Do the same for the prefix.
  sslib_formats.ANY_STRING_SCHEMA.check_match(prefix)

  # Clear the role and key databases since we are loading in a new project.
  roledb.clear_roledb(clear_all=True)
  keydb.clear_keydb(clear_all=True)

  # Locate metadata filepaths and targets filepath.
  project_directory = os.path.abspath(project_directory)

  # Load the cfg file and the project.
  config_filename = os.path.join(project_directory, PROJECT_FILENAME)

  project_configuration = sslib_util.load_json_file(config_filename)
  formats.PROJECT_CFG_SCHEMA.check_match(project_configuration)

  targets_directory = os.path.join(project_directory,
      project_configuration['targets_location'])

  # Flat layout: the project directory given is the metadata directory, so
  # step up one level and take the targets location from the configuration.
  if project_configuration['layout_type'] == 'flat':
    project_directory, junk = os.path.split(project_directory)
    targets_directory = project_configuration['targets_location']

    if new_targets_location is not None:
      targets_directory = new_targets_location

  metadata_directory = os.path.join(project_directory,
      project_configuration['metadata_location'])

  # Remember a caller-supplied prefix; it is applied only at the very end,
  # after the metadata has been loaded with the configured prefix stripped.
  new_prefix = None

  if prefix != '':
    new_prefix = prefix

  # From here on, 'prefix' is the prefix stored in the configuration file.
  prefix = project_configuration['prefix']

  # Load the project's filename.
  project_name = project_configuration['project_name']
  project_filename = project_name + METADATA_EXTENSION

  # Create a blank project on the target directory.
  project = Project(project_name, metadata_directory, targets_directory,
      prefix, repository_name)

  project.threshold = project_configuration['threshold']
  project.prefix = project_configuration['prefix']
  project.layout_type = project_configuration['layout_type']

  # Traverse the public keys and add them to the project.
  keydict = project_configuration['public_keys']

  for keyid in keydict:
    key, junk = format_metadata_to_key(keydict[keyid])
    project.add_verification_key(key)

  # Load the project's metadata.
  # NOTE(review): 'metadata_directory' is already an absolute path here, so
  # this join simply yields metadata_directory/project_filename.
  targets_metadata_path = os.path.join(project_directory, metadata_directory,
      project_filename)
  signable = sslib_util.load_json_file(targets_metadata_path)
  try:
    formats.check_signable_object_format(signable)

  except exceptions.UnsignedMetadataError:
    # Downgrade the error to a warning because a use case exists where
    # metadata may be generated unsigned on one machine and signed on another.
    logger.warning('Unsigned metadata object: ' + repr(signable))

  targets_metadata = signable['signed']

  # Remove the prefix from the metadata.
  targets_metadata = _strip_prefix_from_targets_metadata(targets_metadata,
      prefix)

  for signature in signable['signatures']:
    project.add_signature(signature)

  # Update roledb.py containing the loaded project attributes.
  roleinfo = roledb.get_roleinfo(project_name, repository_name)
  roleinfo['signatures'].extend(signable['signatures'])
  roleinfo['version'] = targets_metadata['version']
  roleinfo['paths'] = targets_metadata['targets']
  roleinfo['delegations'] = targets_metadata['delegations']
  roleinfo['partial_loaded'] = False

  # Check if the loaded metadata was partially written and update the
  # flag in 'roledb.py'.
  if _metadata_is_partially_loaded(project_name, signable,
      repository_name=repository_name):
    roleinfo['partial_loaded'] = True

  roledb.update_roleinfo(project_name, roleinfo, mark_role_as_dirty=False,
      repository_name=repository_name)

  # Register the delegation keys declared by the top-level project role.
  for key_metadata in targets_metadata['delegations']['keys'].values():
    key_object, junk = format_metadata_to_key(key_metadata)
    keydb.add_key(key_object, repository_name=repository_name)

  # Register each delegated role with a blank (not-yet-loaded) roleinfo.
  for role in targets_metadata['delegations']['roles']:
    rolename = role['name']
    roleinfo = {'name': role['name'], 'keyids': role['keyids'],
        'threshold': role['threshold'], 'signing_keyids': [],
        'signatures': [], 'partial_loaded': False,
        'delegations': {'keys': {}, 'roles': []}}
    roledb.add_role(rolename, roleinfo, repository_name=repository_name)

  # Load the delegated metadata and generate their fileinfo.
  targets_objects = {}
  loaded_metadata = [project_name]
  targets_objects[project_name] = project
  # NOTE(review): 'metadata_directory' is already absolute, so this join is a
  # no-op; os.path.join returns the second argument when it is absolute.
  metadata_directory = os.path.join(project_directory, metadata_directory)

  if os.path.exists(metadata_directory) and \
      os.path.isdir(metadata_directory):
    for metadata_role in os.listdir(metadata_directory):
      metadata_path = os.path.join(metadata_directory, metadata_role)
      metadata_name = \
        metadata_path[len(metadata_directory):].lstrip(os.path.sep)

      # Strip the extension.  The roledb does not include an appended '.json'
      # extension for each role.
      if metadata_name.endswith(METADATA_EXTENSION):
        extension_length = len(METADATA_EXTENSION)
        metadata_name = metadata_name[:-extension_length]

      else:
        # Skip files that are not metadata (wrong or missing extension).
        continue

      # The top-level project metadata was loaded above; do not reload it.
      if metadata_name in loaded_metadata:
        continue

      signable = None
      signable = sslib_util.load_json_file(metadata_path)

      # Strip the prefix from the local working copy, it will be added again
      # when the targets metadata is written to disk.
      metadata_object = signable['signed']
      metadata_object = _strip_prefix_from_targets_metadata(metadata_object,
          prefix)

      roleinfo = roledb.get_roleinfo(metadata_name, repository_name)
      roleinfo['signatures'].extend(signable['signatures'])
      roleinfo['version'] = metadata_object['version']
      roleinfo['expires'] = metadata_object['expires']
      roleinfo['paths'] = {}

      # Keep only the per-target 'custom' data in 'paths'.
      for filepath, fileinfo in metadata_object['targets'].items():
        roleinfo['paths'].update({filepath: fileinfo.get('custom', {})})
      roleinfo['delegations'] = metadata_object['delegations']
      roleinfo['partial_loaded'] = False

      # If the metadata was partially loaded, update the roleinfo flag.
      if _metadata_is_partially_loaded(metadata_name, signable,
          repository_name=repository_name):
        roleinfo['partial_loaded'] = True

      roledb.update_roleinfo(metadata_name, roleinfo,
          mark_role_as_dirty=False, repository_name=repository_name)

      # Append to list of elements to avoid reloading repeated metadata.
      loaded_metadata.append(metadata_name)

      # Generate the Targets objects of the delegated roles.
      new_targets_object = Targets(targets_directory, metadata_name,
          roleinfo, repository_name=repository_name)
      targets_object = targets_objects[project_name]

      targets_object._delegated_roles[metadata_name] = new_targets_object

      # Add the keys specified in the delegations field of the Targets role.
      for key_metadata in metadata_object['delegations']['keys'].values():
        key_object, junk = format_metadata_to_key(key_metadata)
        try:
          keydb.add_key(key_object, repository_name=repository_name)

        except exceptions.KeyAlreadyExistsError:
          # The same key may be delegated to by several roles; the first
          # registration wins and duplicates are deliberately ignored.
          pass

      for role in metadata_object['delegations']['roles']:
        rolename = role['name']
        roleinfo = {'name': role['name'], 'keyids': role['keyids'],
            'threshold': role['threshold'], 'signing_keyids': [],
            'signatures': [], 'partial_loaded': False,
            'delegations': {'keys': {}, 'roles': []}}
        roledb.add_role(rolename, roleinfo, repository_name=repository_name)

  # Now that loading is complete, apply the caller-supplied prefix (if any)
  # so it replaces the configured one on subsequent writes.
  if new_prefix:
    project.prefix = new_prefix

  return project
def sign_role(parsed_arguments):
  """
  <Purpose>
    Load the repository at 'parsed_arguments.path', sign the metadata of the
    role given by '--role' with each private key listed in '--sign', write
    the role's metadata, and — unless '--no_release' was given — sign and
    write Snapshot and Timestamp to make a new release.  Finally, move the
    staged metadata to the "live" repository directory.

  <Arguments>
    parsed_arguments:
      An argparse namespace.  Attributes read here: 'path', 'sign' (list of
      private-key filepaths), 'role', 'no_release', 'snapshot_pw', and
      'timestamp_pw'.

  <Side Effects>
    Updates the global keydb/roledb, writes metadata files to disk, and moves
    the staged metadata directory to the live one.

  <Returns>
    None.
  """

  repository = repo_tool.load_repository(
      os.path.join(parsed_arguments.path, REPO_DIR))

  # Preserve the repository's existing consistent-snapshot setting when
  # writing, rather than forcing a default.
  consistent_snapshot = roledb.get_roleinfo('root',
      repository._repository_name)['consistent_snapshot']

  for keypath in parsed_arguments.sign:

    role_privatekey = import_privatekey_from_file(keypath)

    if parsed_arguments.role == 'targets':
      repository.targets.load_signing_key(role_privatekey)

    elif parsed_arguments.role == 'root':
      repository.root.load_signing_key(role_privatekey)

    elif parsed_arguments.role == 'snapshot':
      repository.snapshot.load_signing_key(role_privatekey)

    elif parsed_arguments.role == 'timestamp':
      repository.timestamp.load_signing_key(role_privatekey)

    else:
      # TODO: repository_tool.py will be refactored to clean up the following
      # code, which adds and signs for a non-existent role.
      if not roledb.role_exists(parsed_arguments.role):

        # Load the private key keydb and set the roleinfo in roledb so that
        # metadata can be written with repository.write().
        # NOTE(review): remove_key() is called before add_key() presumably to
        # replace any stale entry for this keyid — confirm it tolerates a
        # keyid that is not yet present in the keydb.
        keydb.remove_key(role_privatekey['keyid'],
            repository_name=repository._repository_name)
        keydb.add_key(role_privatekey,
            repository_name=repository._repository_name)

        # Set the delegated metadata file to expire in 3 months
        # (7889230 seconds).
        expiration = formats.unix_timestamp_to_datetime(
            int(time.time() + 7889230))
        expiration = expiration.isoformat() + 'Z'

        # Fresh roleinfo for the new delegated role; the signing key doubles
        # as the role's sole verification key.
        roleinfo = {'name': parsed_arguments.role,
            'keyids': [role_privatekey['keyid']],
            'signing_keyids': [role_privatekey['keyid']],
            'partial_loaded': False, 'paths': {},
            'signatures': [], 'version': 1, 'expires': expiration,
            'delegations': {'keys': {}, 'roles': []}}

        roledb.add_role(parsed_arguments.role, roleinfo,
            repository_name=repository._repository_name)

        # Generate the Targets object of --role, and add it to the top-level
        # 'targets' object.
        new_targets_object = repo_tool.Targets(repository._targets_directory,
            parsed_arguments.role, roleinfo,
            repository_name=repository._repository_name)
        repository.targets._delegated_roles[parsed_arguments.role] = \
            new_targets_object

      else:
        # Existing delegated role: just attach the signing key.
        repository.targets(parsed_arguments.role).load_signing_key(
            role_privatekey)

  # Write the Targets metadata now that it's been modified.  Once write() is
  # called on a role, it is no longer considered "dirty" and the role will not
  # be written again if another write() or writeall() were subsequently made.
  repository.write(parsed_arguments.role,
      consistent_snapshot=consistent_snapshot, increment_version_number=False)

  # Write the updated top-level roles, if any.  Also write Snapshot and
  # Timestamp to make a new release.  Automatically making a new release can
  # be disabled via --no_release.
  if not parsed_arguments.no_release:
    snapshot_private = import_privatekey_from_file(
        os.path.join(parsed_arguments.path, KEYSTORE_DIR,
        SNAPSHOT_KEY_NAME), parsed_arguments.snapshot_pw)
    timestamp_private = import_privatekey_from_file(
        os.path.join(parsed_arguments.path, KEYSTORE_DIR,
        TIMESTAMP_KEY_NAME), parsed_arguments.timestamp_pw)

    repository.snapshot.load_signing_key(snapshot_private)
    repository.timestamp.load_signing_key(timestamp_private)
    repository.writeall(consistent_snapshot=consistent_snapshot)

  # Move staged metadata directory to "live" metadata directory.
  write_to_live_repo(parsed_arguments)