def update_delegated_targets(
    self, data: bytes, role_name: str, delegator_name: str
) -> None:
    """Verifies and loads 'data' as new metadata for target 'role_name'.

    Args:
        data: unverified new metadata as bytes
        role_name: The role name of the new metadata
        delegator_name: The name of the role delegating to the new metadata

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self.snapshot is None:
        raise RuntimeError("Cannot load targets before snapshot")

    # Targets cannot be loaded if final snapshot is expired or its version
    # does not match meta version in timestamp
    self._check_final_snapshot()

    delegator: Optional[Metadata] = self.get(delegator_name)
    if delegator is None:
        raise RuntimeError("Cannot load targets before delegator")

    logger.debug("Updating %s delegated by %s", role_name, delegator_name)

    # Verify against the hashes in snapshot, if any
    meta = self.snapshot.signed.meta.get(f"{role_name}.json")
    if meta is None:
        raise exceptions.RepositoryError(
            f"Snapshot does not contain information for '{role_name}'"
        )

    try:
        meta.verify_length_and_hashes(data)
    except exceptions.LengthOrHashMismatchError as e:
        raise exceptions.RepositoryError(
            f"{role_name} length or hashes do not match"
        ) from e

    try:
        new_delegate = Metadata[Targets].from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load targets") from e

    if new_delegate.signed.type != "targets":
        raise exceptions.RepositoryError(
            f"Expected 'targets', got '{new_delegate.signed.type}'"
        )

    delegator.verify_delegate(role_name, new_delegate)

    version = new_delegate.signed.version
    if version != meta.version:
        raise exceptions.BadVersionNumberError(
            f"Expected {role_name} v{meta.version}, got v{version}."
        )

    if new_delegate.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError(f"New {role_name} is expired")

    self._trusted_set[role_name] = new_delegate
    logger.info("Updated %s v%d", role_name, version)
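# A minimal usage sketch for update_delegated_targets(), assuming a
# TrustedMetadataSet instance "trusted_set" that already holds root,
# timestamp, snapshot and the top-level targets, plus a hypothetical
# fetch() helper that returns raw metadata bytes for a role. Role names
# are illustrative only.
role_bytes = fetch("role1.json")  # hypothetical: download or read from cache
try:
    # "targets" is the delegator of "role1" in this example layout
    trusted_set.update_delegated_targets(role_bytes, "role1", "targets")
except exceptions.RepositoryError as e:
    # rejected: snapshot hash/length mismatch, bad signatures, wrong
    # version or expired metadata -- the role is not added to the set
    logger.warning("Could not load role1: %s", e)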
def update_timestamp(self, data: bytes):
    """Verifies and loads 'data' as new timestamp metadata.

    Args:
        data: unverified new timestamp metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if not self._root_update_finished:
        raise RuntimeError("Cannot update timestamp before root")
    if self.snapshot is not None:
        raise RuntimeError("Cannot update timestamp after snapshot")

    try:
        new_timestamp = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load timestamp") from e

    if new_timestamp.signed.type != "timestamp":
        raise exceptions.RepositoryError(
            f"Expected 'timestamp', got '{new_timestamp.signed.type}'"
        )

    self.root.verify_delegate("timestamp", new_timestamp)

    # If an existing trusted timestamp is updated,
    # check for a rollback attack
    if self.timestamp is not None:
        # Prevent rolling back timestamp version
        if new_timestamp.signed.version < self.timestamp.signed.version:
            raise exceptions.ReplayedMetadataError(
                "timestamp",
                new_timestamp.signed.version,
                self.timestamp.signed.version,
            )
        # Prevent rolling back snapshot version
        if (
            new_timestamp.signed.meta["snapshot.json"].version
            < self.timestamp.signed.meta["snapshot.json"].version
        ):
            raise exceptions.ReplayedMetadataError(
                "snapshot",
                new_timestamp.signed.meta["snapshot.json"].version,
                self.timestamp.signed.meta["snapshot.json"].version,
            )

    if new_timestamp.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError("New timestamp is expired")

    self._trusted_set["timestamp"] = new_timestamp
    logger.debug("Updated timestamp")
def _load_trusted_root(self, data: bytes) -> None:
    """Verifies and loads 'data' as trusted root metadata.

    Note that an expired initial root is considered valid: expiry is
    only checked for the final root in update_timestamp().
    """
    try:
        new_root = Metadata[Root].from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load root") from e

    if new_root.signed.type != "root":
        raise exceptions.RepositoryError(
            f"Expected 'root', got '{new_root.signed.type}'"
        )

    new_root.verify_delegate("root", new_root)

    self._trusted_set["root"] = new_root
    logger.info("Loaded trusted root v%d", new_root.signed.version)
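# Sketch of how the trusted root is typically bootstrapped, assuming the
# TrustedMetadataSet constructor passes the locally stored root bytes to
# _load_trusted_root(). The file path is illustrative.
with open("metadata/root.json", "rb") as f:
    root_bytes = f.read()
# Raises RepositoryError if the data cannot be deserialized or is not
# signed by a threshold of its own root keys
trusted_set = TrustedMetadataSet(root_bytes)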
def download_target(
    self,
    targetinfo: TargetFile,
    destination_directory: str,
    target_base_url: Optional[str] = None,
) -> None:
    """Downloads the target file specified by 'targetinfo'.

    Args:
        targetinfo: TargetFile instance received from
            get_one_valid_targetinfo() or updated_targets().
        destination_directory: existing local directory to download into.
            Note that new directories may be created inside
            destination_directory as required.
        target_base_url: Optional; Base URL used to form the final target
            download URL. Default is the value provided in Updater()

    Raises:
        TODO: download-related errors
        TODO: file write errors
    """
    if target_base_url is None:
        if self._target_base_url is None:
            raise ValueError(
                "target_base_url must be set in either "
                "download_target() or constructor"
            )
        target_base_url = self._target_base_url
    else:
        target_base_url = _ensure_trailing_slash(target_base_url)

    target_filepath = targetinfo.path
    consistent_snapshot = self._trusted_set.root.signed.consistent_snapshot
    if consistent_snapshot and self.config.prefix_targets_with_hash:
        hashes = list(targetinfo.hashes.values())
        target_filepath = f"{hashes[0]}.{target_filepath}"
    full_url = parse.urljoin(target_base_url, target_filepath)

    with self._fetcher.download_file(
        full_url, targetinfo.length
    ) as target_file:
        try:
            targetinfo.verify_length_and_hashes(target_file)
        except exceptions.LengthOrHashMismatchError as e:
            raise exceptions.RepositoryError(
                f"{target_filepath} length or hashes do not match"
            ) from e

        # Store the target file name without the HASH prefix.
        local_filepath = os.path.join(
            destination_directory, targetinfo.path
        )
        sslib_util.persist_temp_file(target_file, local_filepath)
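# Usage sketch, assuming an already constructed and refreshed Updater
# instance "updater" (constructor arguments are not shown here); the target
# name, directory and mirror URL are illustrative.
info = updater.get_one_valid_targetinfo("file.txt")
updater.download_target(info, "downloads")
# target_base_url can also be overridden per call:
updater.download_target(
    info, "downloads", target_base_url="https://mirror.example.com/targets/"
)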
def update_root(self, data: bytes):
    """Verifies and loads 'data' as new root metadata.

    Note that an expired intermediate root is considered valid: expiry is
    only checked for the final root in root_update_finished().

    Args:
        data: unverified new root metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self._root_update_finished:
        raise RuntimeError(
            "Cannot update root after root update is finished"
        )
    logger.debug("Updating root")

    try:
        new_root = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load root") from e

    if new_root.signed.type != "root":
        raise exceptions.RepositoryError(
            f"Expected 'root', got '{new_root.signed.type}'"
        )

    if self.root is not None:
        # We are not loading initial trusted root: verify the new one
        self.root.verify_delegate("root", new_root)

        if new_root.signed.version != self.root.signed.version + 1:
            raise exceptions.ReplayedMetadataError(
                "root", new_root.signed.version, self.root.signed.version
            )

    new_root.verify_delegate("root", new_root)

    self._trusted_set["root"] = new_root
    logger.debug("Updated root")
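# Sketch of the client-side root rotation loop that drives update_root(),
# assuming a hypothetical fetch_metadata() helper that returns None when the
# requested version does not exist on the repository.
version = trusted_set.root.signed.version + 1
while True:
    data = fetch_metadata(f"{version}.root.json")  # hypothetical helper
    if data is None:
        break  # no newer root available
    trusted_set.update_root(data)  # verifies signatures and version N+1
    version += 1
trusted_set.root_update_finished()  # expiry of the final root is checked here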
def download_target(
    self,
    targetinfo: Dict,
    destination_directory: str,
    target_base_url: Optional[str] = None,
):
    """Downloads the target file specified by 'targetinfo'.

    Args:
        targetinfo: data received from get_one_valid_targetinfo() or
            updated_targets().
        destination_directory: existing local directory to download into.
            Note that new directories may be created inside
            destination_directory as required.
        target_base_url: Optional; Base URL used to form the final target
            download URL. Default is the value provided in Updater()

    Raises:
        TODO: download-related errors
        TODO: file write errors
    """
    if target_base_url is None and self._target_base_url is None:
        raise ValueError(
            "target_base_url must be set in either download_target() or "
            "constructor"
        )
    if target_base_url is None:
        target_base_url = self._target_base_url
    else:
        target_base_url = _ensure_trailing_slash(target_base_url)

    target_filepath = targetinfo["filepath"]
    target_fileinfo: "TargetFile" = targetinfo["fileinfo"]
    full_url = parse.urljoin(target_base_url, target_filepath)

    with self._fetcher.download_file(
        full_url, target_fileinfo.length
    ) as target_file:
        try:
            target_fileinfo.verify_length_and_hashes(target_file)
        except exceptions.LengthOrHashMismatchError as e:
            raise exceptions.RepositoryError(
                f"{target_filepath} length or hashes do not match"
            ) from e

        filepath = os.path.join(destination_directory, target_filepath)
        sslib_util.persist_temp_file(target_file, filepath)
def update_snapshot(self, data: bytes):
    """Verifies and loads 'data' as new snapshot metadata.

    Args:
        data: unverified new snapshot metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self.timestamp is None:
        raise RuntimeError("Cannot update snapshot before timestamp")
    if self.targets is not None:
        raise RuntimeError("Cannot update snapshot after targets")
    logger.debug("Updating snapshot")

    meta = self.timestamp.signed.meta["snapshot.json"]

    # Verify against the hashes in timestamp, if any
    try:
        meta.verify_length_and_hashes(data)
    except exceptions.LengthOrHashMismatchError as e:
        raise exceptions.RepositoryError(
            "Snapshot length or hashes do not match"
        ) from e

    try:
        new_snapshot = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load snapshot") from e

    if new_snapshot.signed.type != "snapshot":
        raise exceptions.RepositoryError(
            f"Expected 'snapshot', got '{new_snapshot.signed.type}'"
        )

    self.root.verify_delegate("snapshot", new_snapshot)

    if (
        new_snapshot.signed.version
        != self.timestamp.signed.meta["snapshot.json"].version
    ):
        raise exceptions.BadVersionNumberError(
            f"Expected snapshot version "
            f"{self.timestamp.signed.meta['snapshot.json'].version}, "
            f"got {new_snapshot.signed.version}"
        )

    # If an existing trusted snapshot is updated,
    # check for a rollback attack
    if self.snapshot is not None:
        for filename, fileinfo in self.snapshot.signed.meta.items():
            new_fileinfo = new_snapshot.signed.meta.get(filename)

            # Prevent removal of any metadata in meta
            if new_fileinfo is None:
                raise exceptions.RepositoryError(
                    f"New snapshot is missing info for '{filename}'"
                )

            # Prevent rollback of any metadata versions
            if new_fileinfo.version < fileinfo.version:
                raise exceptions.BadVersionNumberError(
                    f"Expected {filename} version "
                    f"{fileinfo.version}, got {new_fileinfo.version}."
                )

    if new_snapshot.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError("New snapshot is expired")

    self._trusted_set["snapshot"] = new_snapshot
    logger.debug("Updated snapshot")
def update_snapshot(self, data: bytes) -> None:
    """Verifies and loads 'data' as new snapshot metadata.

    Note that an intermediate snapshot is allowed to be expired and its
    version is allowed to not match the timestamp meta version:
    TrustedMetadataSet will throw an ExpiredMetadataError or
    BadVersionNumberError in these cases but the intermediate snapshot
    will be loaded. This way a newer snapshot can still be loaded (and
    the intermediate snapshot will be used for rollback protection). An
    expired snapshot, or a snapshot that does not match the timestamp
    meta version, will prevent loading targets.

    Args:
        data: unverified new snapshot metadata as bytes

    Raises:
        RepositoryError: data failed to load or verify as final
            snapshot. The actual error type and content will contain
            more details.
    """
    if self.timestamp is None:
        raise RuntimeError("Cannot update snapshot before timestamp")
    if self.targets is not None:
        raise RuntimeError("Cannot update snapshot after targets")
    logger.debug("Updating snapshot")

    # Snapshot cannot be loaded if final timestamp is expired
    self._check_final_timestamp()

    meta = self.timestamp.signed.meta["snapshot.json"]

    # Verify against the hashes in timestamp, if any
    try:
        meta.verify_length_and_hashes(data)
    except exceptions.LengthOrHashMismatchError as e:
        raise exceptions.RepositoryError(
            "Snapshot length or hashes do not match"
        ) from e

    try:
        new_snapshot = Metadata[Snapshot].from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load snapshot") from e

    if new_snapshot.signed.type != "snapshot":
        raise exceptions.RepositoryError(
            f"Expected 'snapshot', got '{new_snapshot.signed.type}'"
        )

    self.root.verify_delegate("snapshot", new_snapshot)

    # version not checked against meta version to allow old snapshot to be
    # used in rollback protection: it is checked when targets is updated

    # If an existing trusted snapshot is updated, check for rollback attack
    if self.snapshot is not None:
        for filename, fileinfo in self.snapshot.signed.meta.items():
            new_fileinfo = new_snapshot.signed.meta.get(filename)

            # Prevent removal of any metadata in meta
            if new_fileinfo is None:
                raise exceptions.RepositoryError(
                    f"New snapshot is missing info for '{filename}'"
                )

            # Prevent rollback of any metadata versions
            if new_fileinfo.version < fileinfo.version:
                raise exceptions.BadVersionNumberError(
                    f"Expected {filename} version "
                    f"{fileinfo.version}, got {new_fileinfo.version}."
                )

    # expiry not checked to allow old snapshot to be used for rollback
    # protection of new snapshot: it is checked when targets is updated

    self._trusted_set["snapshot"] = new_snapshot
    logger.info("Updated snapshot v%d", new_snapshot.signed.version)

    # snapshot is loaded, but we raise if it's not valid _final_ snapshot
    self._check_final_snapshot()
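# Sketch of the timestamp -> snapshot sequence described above, assuming a
# TrustedMetadataSet "trusted_set" with root already loaded and a
# hypothetical fetch() helper returning raw metadata bytes.
trusted_set.update_timestamp(fetch("timestamp.json"))
try:
    trusted_set.update_snapshot(fetch("snapshot.json"))
except exceptions.RepositoryError as e:
    # The snapshot was stored for rollback protection but is not a valid
    # *final* snapshot (expired, or version mismatch with timestamp meta):
    # targets cannot be loaded until a valid snapshot is seen.
    logger.warning("Snapshot not usable yet: %s", e)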
def update_timestamp(self, data: bytes) -> None:
    """Verifies and loads 'data' as new timestamp metadata.

    Note that an intermediate timestamp is allowed to be expired:
    TrustedMetadataSet will throw an ExpiredMetadataError in this case
    but the intermediate timestamp will be loaded. This way a newer
    timestamp can still be loaded (and the intermediate timestamp will
    be used for rollback protection). An expired timestamp will prevent
    loading snapshot metadata.

    Args:
        data: unverified new timestamp metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify as final
            timestamp. The actual error type and content will contain
            more details.
    """
    if self.snapshot is not None:
        raise RuntimeError("Cannot update timestamp after snapshot")

    # client workflow 5.3.10: Make sure final root is not expired.
    if self.root.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError("Final root.json is expired")
    # No need to check for 5.3.11 (fast forward attack recovery):
    # timestamp/snapshot can not yet be loaded at this point

    try:
        new_timestamp = Metadata[Timestamp].from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load timestamp") from e

    if new_timestamp.signed.type != "timestamp":
        raise exceptions.RepositoryError(
            f"Expected 'timestamp', got '{new_timestamp.signed.type}'"
        )

    self.root.verify_delegate("timestamp", new_timestamp)

    # If an existing trusted timestamp is updated,
    # check for a rollback attack
    if self.timestamp is not None:
        # Prevent rolling back timestamp version
        if new_timestamp.signed.version < self.timestamp.signed.version:
            raise exceptions.ReplayedMetadataError(
                "timestamp",
                new_timestamp.signed.version,
                self.timestamp.signed.version,
            )
        # Prevent rolling back snapshot version
        if (
            new_timestamp.signed.meta["snapshot.json"].version
            < self.timestamp.signed.meta["snapshot.json"].version
        ):
            raise exceptions.ReplayedMetadataError(
                "snapshot",
                new_timestamp.signed.meta["snapshot.json"].version,
                self.timestamp.signed.meta["snapshot.json"].version,
            )

    # expiry not checked to allow old timestamp to be used for rollback
    # protection of new timestamp: expiry is checked in update_snapshot()

    self._trusted_set["timestamp"] = new_timestamp
    logger.info("Updated timestamp v%d", new_timestamp.signed.version)

    # timestamp is loaded: raise if it is not valid _final_ timestamp
    self._check_final_timestamp()
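# Illustrative sketch of the rollback protection above, assuming
# "trusted_set" already holds a trusted timestamp and "old_bytes" is a
# correctly signed but older timestamp (hypothetical data).
try:
    trusted_set.update_timestamp(old_bytes)
except exceptions.ReplayedMetadataError:
    # the new timestamp (or its snapshot meta) has a lower version than
    # the trusted one: it is rejected and the trusted copy is kept
    pass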