def test_update_root_new_root_cannot_be_verified_with_threshold(self):
    """Root update must fail when the signature threshold is unreachable."""
    # Strip every keyid from the "root" role: with no authorized keys,
    # no signature can ever count towards the threshold.
    unverifiable_root = Metadata.from_bytes(self.metadata["root"])
    unverifiable_root.signed.roles["root"].keyids = []

    with self.assertRaises(exceptions.UnsignedMetadataError):
        self.trusted_set.update_root(unverifiable_root.to_bytes())
def test_to_from_bytes(self) -> None:
    """from_bytes/to_bytes must round-trip losslessly for every top-level role."""
    for role_name in TOP_LEVEL_ROLE_NAMES:
        path = os.path.join(self.repo_dir, "metadata", role_name + ".json")
        with open(path, "rb") as f:
            raw = f.read()
        md = Metadata.from_bytes(raw)

        # Case 1 (noncompact): pass the serializer explicitly so the
        # output matches the on-disk formatting.
        self.assertEqual(md.to_bytes(JSONSerializer()), raw)

        # Case 2 (compact): the default serializer must also round-trip.
        compact = md.to_bytes()
        self.assertEqual(Metadata.from_bytes(compact).to_bytes(), compact)
def update_delegated_targets(self, data: bytes, role_name: str, delegator_name: str):
    """Verifies and loads 'data' as new metadata for target 'role_name'.

    Args:
        data: unverified new metadata as bytes
        role_name: The role name of the new metadata
        delegator_name: The name of the role delegating to the new metadata

    Raises:
        RuntimeError: Called out of order (snapshot or delegator not loaded).
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self.snapshot is None:
        raise RuntimeError("Cannot load targets before snapshot")

    delegator: Optional[Metadata] = self.get(delegator_name)
    if delegator is None:
        raise RuntimeError("Cannot load targets before delegator")

    logger.debug("Updating %s delegated by %s", role_name, delegator_name)

    # Verify against the hashes in snapshot, if any
    meta = self.snapshot.signed.meta.get(f"{role_name}.json")
    if meta is None:
        raise exceptions.RepositoryError(
            f"Snapshot does not contain information for '{role_name}'")

    try:
        meta.verify_length_and_hashes(data)
    except exceptions.LengthOrHashMismatchError as e:
        raise exceptions.RepositoryError(
            f"{role_name} length or hashes do not match") from e

    try:
        new_delegate = Metadata.from_bytes(data)
    except DeserializationError as e:
        # BUGFIX: previously said "Failed to load snapshot" — a copy-paste
        # from update_snapshot(); report the role actually being loaded.
        raise exceptions.RepositoryError(f"Failed to load {role_name}") from e

    if new_delegate.signed.type != "targets":
        raise exceptions.RepositoryError(
            f"Expected 'targets', got '{new_delegate.signed.type}'")

    delegator.verify_delegate(role_name, new_delegate)

    # Version must match exactly what snapshot announced for this role.
    if new_delegate.signed.version != meta.version:
        raise exceptions.BadVersionNumberError(
            f"Expected {role_name} version "
            f"{meta.version}, got {new_delegate.signed.version}.")

    if new_delegate.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError(
            f"New {role_name} is expired")

    self._trusted_set[role_name] = new_delegate
    logger.debug("Updated %s delegated by %s", role_name, delegator_name)
def test_valid_metadata_serialization(self, test_case_data: bytes) -> None:
    """Serializing parsed metadata must equal compact, key-sorted JSON."""
    md = Metadata.from_bytes(test_case_data)

    # Build the reference bytes by re-encoding the raw test data with the
    # same compact separators and key ordering that JSONSerializer uses.
    separators = (",", ":")
    expected = json.dumps(
        json.loads(test_case_data), separators=separators, sort_keys=True
    ).encode("utf-8")

    self.assertEqual(expected, md.to_bytes())
def test_to_from_bytes(self):
    """from_bytes/to_bytes must round-trip losslessly for every role file."""
    for role_name in ["root", "snapshot", "timestamp", "targets"]:
        path = os.path.join(self.repo_dir, 'metadata', role_name + '.json')
        with open(path, 'rb') as f:
            raw = f.read()
        md = Metadata.from_bytes(raw)

        # Case 1 (noncompact): override the default serializer so the
        # output matches the on-disk formatting.
        self.assertEqual(md.to_bytes(JSONSerializer()), raw)

        # Case 2 (compact): the default serializer must also round-trip.
        compact = md.to_bytes()
        self.assertEqual(Metadata.from_bytes(compact).to_bytes(), compact)
def test_update_with_invalid_json(self):
    """Every updater entry point must reject malformed or unverifiable bytes."""
    repo_dir = os.path.join(
        os.getcwd(), 'repository_data', 'repository', 'metadata')
    data = {}
    for role in ["root", "timestamp", "snapshot", "targets", "role1"]:
        with open(os.path.join(repo_dir, f"{role}.json"), "rb") as f:
            data[role] = f.read()

    # Initial root that is not JSON at all.
    with self.assertRaises(exceptions.RepositoryError):
        TrustedMetadataSet(b"")

    # Initial root whose signatures were invalidated by a version bump.
    root = Metadata.from_bytes(data["root"])
    root.signed.version += 1
    with self.assertRaises(exceptions.RepositoryError):
        TrustedMetadataSet(json.dumps(root.to_dict()).encode())

    trusted_set = TrustedMetadataSet(data["root"])
    trusted_set.root_update_finished()

    cases = [
        (data["timestamp"], trusted_set.update_timestamp),
        (data["snapshot"], trusted_set.update_snapshot),
        (data["targets"], trusted_set.update_targets),
    ]
    for metadata, update_func in cases:
        # not JSON at all
        with self.assertRaises(exceptions.RepositoryError):
            update_func(b"")

        # signatures invalidated by bumping the version
        md = Metadata.from_bytes(metadata)
        md.signed.version += 1
        with self.assertRaises(exceptions.RepositoryError):
            update_func(json.dumps(md.to_dict()).encode())

        # wrong metadata type
        with self.assertRaises(exceptions.RepositoryError):
            update_func(data["root"])

        # finally feed the valid bytes so the next iteration can proceed
        update_func(metadata)
def test_generic_read(self):
    """Metadata.from_file/from_bytes must agree and reject unknown types."""
    test_cases = [
        ('root', Root),
        ('snapshot', Snapshot),
        ('timestamp', Timestamp),
        ('targets', Targets),
    ]
    for role_name, expected_cls in test_cases:
        # Load each supported type both from file and from an
        # out-of-band JSON byte string.
        path = os.path.join(self.repo_dir, 'metadata', role_name + '.json')
        from_file = Metadata.from_file(path)
        with open(path, 'rb') as f:
            from_bytes = Metadata.from_bytes(f.read())

        # Both constructors must pick the matching inner class ...
        self.assertTrue(isinstance(from_file.signed, expected_cls))
        self.assertTrue(isinstance(from_bytes.signed, expected_cls))

        # ... and yield equivalent objects (compared by dict form).
        self.assertDictEqual(from_file.to_dict(), from_bytes.to_dict())

    # An unknown metadata type must be rejected by both constructors.
    bad_metadata_path = 'bad-metadata.json'
    bad_string = json.dumps(
        {'signed': {'_type': 'bad-metadata'}}).encode('utf-8')
    with open(bad_metadata_path, 'wb') as f:
        f.write(bad_string)

    with self.assertRaises(DeserializationError):
        Metadata.from_file(bad_metadata_path)
    with self.assertRaises(DeserializationError):
        Metadata.from_bytes(bad_string)

    os.remove(bad_metadata_path)
def update_timestamp(self, data: bytes):
    """Verifies and loads 'data' as new timestamp metadata.

    Args:
        data: unverified new timestamp metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if not self._root_update_finished:
        raise RuntimeError("Cannot update timestamp before root")
    if self.snapshot is not None:
        raise RuntimeError("Cannot update timestamp after snapshot")

    try:
        candidate = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load timestamp") from e

    if candidate.signed.type != "timestamp":
        raise exceptions.RepositoryError(
            f"Expected 'timestamp', got '{candidate.signed.type}'")

    self.root.verify_delegate("timestamp", candidate)

    trusted = self.timestamp
    if trusted is not None:
        # Rollback check: the timestamp version must not decrease.
        if candidate.signed.version < trusted.signed.version:
            raise exceptions.ReplayedMetadataError(
                "timestamp",
                candidate.signed.version,
                trusted.signed.version,
            )
        # Rollback check: the snapshot version announced by the new
        # timestamp must not decrease either.
        new_meta = candidate.signed.meta["snapshot.json"]
        old_meta = trusted.signed.meta["snapshot.json"]
        if new_meta.version < old_meta.version:
            raise exceptions.ReplayedMetadataError(
                "snapshot",
                new_meta.version,
                old_meta.version,
            )

    if candidate.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError("New timestamp is expired")

    self._trusted_set["timestamp"] = candidate
    logger.debug("Updated timestamp")
def modify_metadata(cls, rolename: str, modification_func: Callable) -> bytes:
    """Load stored metadata for `rolename`, mutate its signed portion via
    `modification_func`, re-sign with the matching keystore signer, and
    return the serialized (validated) bytes.

    Attributes:
        rolename: Denoting the name of the metadata which will be modified.
        modification_func: Function that will be called to modify the signed
            portion of metadata bytes.
    """
    md = Metadata.from_bytes(cls.metadata[rolename])
    modification_func(md.signed)
    md.sign(cls.keystore[rolename])
    return md.to_bytes(JSONSerializer(validate=True))
def test_generic_read(self) -> None:
    """Metadata.from_file/from_bytes must agree and reject unknown types."""
    test_cases = [
        (Root.type, Root),
        (Snapshot.type, Snapshot),
        (Timestamp.type, Timestamp),
        (Targets.type, Targets),
    ]
    for role_name, expected_cls in test_cases:
        # Load each supported type both from file and from an
        # out-of-band JSON byte string.
        path = os.path.join(self.repo_dir, "metadata", role_name + ".json")
        from_file = Metadata.from_file(path)
        with open(path, "rb") as f:
            from_bytes = Metadata.from_bytes(f.read())

        # Both constructors must pick the matching inner class ...
        self.assertTrue(isinstance(from_file.signed, expected_cls))
        self.assertTrue(isinstance(from_bytes.signed, expected_cls))

        # ... and yield equivalent objects (compared by dict form).
        self.assertDictEqual(from_file.to_dict(), from_bytes.to_dict())

    # An unknown metadata type must be rejected by both constructors.
    bad_metadata_path = "bad-metadata.json"
    bad_string = json.dumps(
        {"signed": {"_type": "bad-metadata"}}).encode("utf-8")
    with open(bad_metadata_path, "wb") as f:
        f.write(bad_string)

    with self.assertRaises(DeserializationError):
        Metadata.from_file(bad_metadata_path)
    with self.assertRaises(DeserializationError):
        Metadata.from_bytes(bad_string)

    os.remove(bad_metadata_path)
def modify_metadata(
    self, rolename: str, modification_func: Callable[["Signed"], None]) -> bytes:
    """Load stored metadata for `rolename`, mutate its signed portion via
    `modification_func`, re-sign with the matching keystore signer, and
    return the serialized bytes.

    Attributes:
        rolename: A denoting the name of the metadata which will be modified.
        modification_func: Function that will be called to modify the signed
            portion of metadata bytes.
    """
    md = Metadata.from_bytes(self.metadata[rolename])
    modification_func(md.signed)
    md.sign(self.keystore[rolename])
    return md.to_bytes()
def test_root_with_invalid_json(self) -> None:
    """Both initial-root loading and root updates must reject bad input."""
    for test_func in [TrustedMetadataSet, self.trusted_set.update_root]:
        # not JSON at all
        with self.assertRaises(exceptions.RepositoryError):
            test_func(b"")

        # signatures invalidated by bumping the version
        tampered = Metadata.from_bytes(self.metadata[Root.type])
        tampered.signed.version += 1
        with self.assertRaises(exceptions.UnsignedMetadataError):
            test_func(tampered.to_bytes())

        # wrong metadata type
        with self.assertRaises(exceptions.RepositoryError):
            test_func(self.metadata[Snapshot.type])
def test_top_level_md_with_invalid_json(self):
    """Each top-level updater must reject malformed or unverifiable bytes."""
    cases = [
        (self.metadata["timestamp"], self.trusted_set.update_timestamp),
        (self.metadata["snapshot"], self.trusted_set.update_snapshot),
        (self.metadata["targets"], self.trusted_set.update_targets),
    ]
    for metadata, update_func in cases:
        tampered = Metadata.from_bytes(metadata)

        # not JSON at all
        with self.assertRaises(exceptions.RepositoryError):
            update_func(b"")

        # signatures invalidated by bumping the version
        tampered.signed.version += 1
        with self.assertRaises(exceptions.UnsignedMetadataError):
            update_func(tampered.to_bytes())

        # wrong metadata type
        with self.assertRaises(exceptions.RepositoryError):
            update_func(self.metadata["root"])

        # feed the valid bytes so the next iteration can proceed
        update_func(metadata)
def update_root(self, data: bytes):
    """Verifies and loads 'data' as new root metadata.

    Note that an expired intermediate root is considered valid: expiry is
    only checked for the final root in root_update_finished().

    Args:
        data: unverified new root metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self._root_update_finished:
        raise RuntimeError(
            "Cannot update root after root update is finished")
    logger.debug("Updating root")

    try:
        candidate = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load root") from e

    if candidate.signed.type != "root":
        raise exceptions.RepositoryError(
            f"Expected 'root', got '{candidate.signed.type}'")

    trusted = self.root
    if trusted is not None:
        # Not the initial trust anchor: the currently trusted root must
        # sanction the new one, and the version must advance by exactly 1.
        trusted.verify_delegate("root", candidate)
        if candidate.signed.version != trusted.signed.version + 1:
            raise exceptions.ReplayedMetadataError(
                "root", candidate.signed.version, trusted.signed.version)

    # Every root — including the initial one — must be self-signed.
    candidate.verify_delegate("root", candidate)

    self._trusted_set["root"] = candidate
    logger.debug("Updated root")
def test_top_level_md_with_invalid_json(self) -> None:
    """Each top-level updater must reject malformed or unverifiable bytes."""
    cases: List[Tuple[bytes, Callable[[bytes], Metadata]]] = [
        (self.metadata[Timestamp.type], self.trusted_set.update_timestamp),
        (self.metadata[Snapshot.type], self.trusted_set.update_snapshot),
        (self.metadata[Targets.type], self.trusted_set.update_targets),
    ]
    for metadata, update_func in cases:
        tampered = Metadata.from_bytes(metadata)

        # not JSON at all
        with self.assertRaises(exceptions.RepositoryError):
            update_func(b"")

        # signatures invalidated by bumping the version
        tampered.signed.version += 1
        with self.assertRaises(exceptions.UnsignedMetadataError):
            update_func(tampered.to_bytes())

        # wrong metadata type
        with self.assertRaises(exceptions.RepositoryError):
            update_func(self.metadata[Root.type])

        # feed the valid bytes so the next iteration can proceed
        update_func(metadata)
def test_invalid_metadata_serialization(self, test_data: bytes) -> None:
    """Malformed input must surface as DeserializationError
    (reraised from ValueError or KeyError)."""
    with self.assertRaises(DeserializationError):
        Metadata.from_bytes(test_data)
def test_update_snapshot_fail_threshold_verification(self) -> None:
    """A snapshot without any signatures cannot satisfy the threshold."""
    self.trusted_set.update_timestamp(self.metadata[Timestamp.type])

    unsigned = Metadata.from_bytes(self.metadata[Snapshot.type])
    unsigned.signatures.clear()

    with self.assertRaises(exceptions.UnsignedMetadataError):
        self.trusted_set.update_snapshot(unsigned.to_bytes())
def test_update_snapshot_cannot_verify_snapshot_with_threshold(self):
    """A snapshot without any signatures cannot satisfy the threshold."""
    self.trusted_set.update_timestamp(self.metadata["timestamp"])

    unsigned = Metadata.from_bytes(self.metadata["snapshot"])
    unsigned.signatures.clear()

    with self.assertRaises(exceptions.UnsignedMetadataError):
        self.trusted_set.update_snapshot(unsigned.to_bytes())
def update_snapshot(self, data: bytes):
    """Verifies and loads 'data' as new snapshot metadata.

    Args:
        data: unverified new snapshot metadata as bytes

    Raises:
        RuntimeError: Called out of order (before timestamp / after targets).
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self.timestamp is None:
        raise RuntimeError("Cannot update snapshot before timestamp")
    if self.targets is not None:
        raise RuntimeError("Cannot update snapshot after targets")
    logger.debug("Updating snapshot")

    meta = self.timestamp.signed.meta["snapshot.json"]

    # Verify against the hashes in timestamp, if any
    try:
        meta.verify_length_and_hashes(data)
    except exceptions.LengthOrHashMismatchError as e:
        raise exceptions.RepositoryError(
            "Snapshot length or hashes do not match") from e

    try:
        new_snapshot = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load snapshot") from e

    if new_snapshot.signed.type != "snapshot":
        raise exceptions.RepositoryError(
            f"Expected 'snapshot', got '{new_snapshot.signed.type}'")

    self.root.verify_delegate("snapshot", new_snapshot)

    # Version must match exactly what the trusted timestamp announced.
    if (new_snapshot.signed.version !=
            self.timestamp.signed.meta["snapshot.json"].version):
        raise exceptions.BadVersionNumberError(
            f"Expected snapshot version "
            f"{self.timestamp.signed.meta['snapshot.json'].version}, "
            f"got {new_snapshot.signed.version}")

    # If an existing trusted snapshot is updated,
    # check for a rollback attack
    if self.snapshot is not None:
        for filename, fileinfo in self.snapshot.signed.meta.items():
            new_fileinfo = new_snapshot.signed.meta.get(filename)

            # Prevent removal of any metadata in meta
            if new_fileinfo is None:
                # BUGFIX: message contained the literal '(unknown)' instead
                # of interpolating the missing filename.
                raise exceptions.RepositoryError(
                    f"New snapshot is missing info for '{filename}'")

            # Prevent rollback of any metadata versions
            if new_fileinfo.version < fileinfo.version:
                # BUGFIX: message named '(unknown)' and swapped the two
                # versions — the trusted version is the minimum expected,
                # the new (lower) version is what was received.
                raise exceptions.BadVersionNumberError(
                    f"Expected {filename} version "
                    f"{fileinfo.version}, got {new_fileinfo.version}.")

    if new_snapshot.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError("New snapshot is expired")

    self._trusted_set["snapshot"] = new_snapshot
    logger.debug("Updated snapshot")