def test_generic_read(self):
    for metadata, inner_metadata_cls in [
            ('snapshot', Snapshot),
            ('timestamp', Timestamp),
            ('targets', Targets)]:
        # Load JSON-formatted metadata of each supported type from file
        # and from out-of-band read JSON string
        path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
        metadata_obj = Metadata.from_file(path)
        with open(path, 'rb') as f:
            metadata_str = f.read()
        metadata_obj2 = JSONDeserializer().deserialize(metadata_str)

        # Assert that both methods instantiate the right inner class for
        # each metadata type ...
        self.assertTrue(isinstance(metadata_obj.signed, inner_metadata_cls))
        self.assertTrue(isinstance(metadata_obj2.signed, inner_metadata_cls))

        # ... and return the same object (compared by dict representation)
        self.assertDictEqual(metadata_obj.to_dict(), metadata_obj2.to_dict())

    # Assert that it chokes correctly on an unknown metadata type
    bad_metadata_path = 'bad-metadata.json'
    bad_metadata = {'signed': {'_type': 'bad-metadata'}}
    with open(bad_metadata_path, 'wb') as f:
        f.write(json.dumps(bad_metadata).encode('utf-8'))

    with self.assertRaises(DeserializationError):
        Metadata.from_file(bad_metadata_path)

    os.remove(bad_metadata_path)
def test_md_read_write_file_exceptions(self) -> None:
    # Test reading from a file with a bad filename
    with self.assertRaises(exceptions.StorageError):
        Metadata.from_file("bad-metadata.json")

    # Test serializing to a file with a bad filename
    with self.assertRaises(exceptions.StorageError):
        md = Metadata.from_file(
            os.path.join(self.repo_dir, "metadata", "root.json"))
        md.to_file("")
def test_read_write_read_compare(self) -> None:
    for metadata in TOP_LEVEL_ROLE_NAMES:
        path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
        md_obj = Metadata.from_file(path)

        path_2 = path + ".tmp"
        md_obj.to_file(path_2)
        md_obj_2 = Metadata.from_file(path_2)
        self.assertDictEqual(md_obj.to_dict(), md_obj_2.to_dict())

        os.remove(path_2)
def _initialize(self) -> None:
    """Set up a minimal valid repository."""
    self.md_targets = Metadata(Targets(expires=self.safe_expiry))
    self.md_snapshot = Metadata(Snapshot(expires=self.safe_expiry))
    self.md_timestamp = Metadata(Timestamp(expires=self.safe_expiry))
    self.md_root = Metadata(Root(expires=self.safe_expiry))

    for role in TOP_LEVEL_ROLE_NAMES:
        key, signer = self.create_key()
        self.md_root.signed.add_key(key, role)
        self.add_signer(role, signer)

    self.publish_root()
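# The helper above relies on create_key() returning a (public key, signer)
# pair. A minimal sketch of such a helper, assuming securesystemslib's
# ed25519 key generation -- an illustration, not necessarily this repo's
# actual implementation:
from securesystemslib.keys import generate_ed25519_key
from securesystemslib.signer import SSlibSigner

from tuf.api.metadata import Key


def create_key():
    """Return a public Key for metadata plus the matching signer."""
    sslib_key = generate_ed25519_key()
    return Key.from_securesystemslib_key(sslib_key), SSlibSigner(sslib_key)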
def test_trusted_root_unsigned(self) -> None:
    # Local trusted root is not signed
    root_path = os.path.join(self.metadata_dir, "root.json")
    md_root = Metadata.from_file(root_path)
    md_root.signatures.clear()
    md_root.to_file(root_path)

    with self.assertRaises(UnsignedMetadataError):
        self._run_refresh()

    # The update failed, no changes in metadata
    self._assert_files_exist([Root.type])
    md_root_after = Metadata.from_file(root_path)
    self.assertEqual(md_root.to_bytes(), md_root_after.to_bytes())
def _modify_repository_root(
    self,
    modification_func: Callable[[Metadata], None],
    bump_version: bool = False,
) -> None:
    """Apply 'modification_func' to root and persist it."""
    role_path = os.path.join(
        self.repository_directory, "metadata", "root.json")
    root = Metadata.from_file(role_path)
    modification_func(root)
    if bump_version:
        root.signed.version += 1

    root_key_path = os.path.join(self.keystore_directory, "root_key")
    root_key_dict = import_rsa_privatekey_from_file(
        root_key_path, password="******")
    signer = SSlibSigner(root_key_dict)
    root.sign(signer)

    root.to_file(
        os.path.join(self.repository_directory, "metadata", "root.json"))
    root.to_file(
        os.path.join(
            self.repository_directory,
            "metadata",
            f"{root.signed.version}.root.json",
        ))
def serialize(self, metadata_obj: Metadata) -> bytes:
    """Serialize Metadata object into utf-8 encoded JSON bytes."""
    try:
        indent = None if self.compact else 1
        separators = (",", ":") if self.compact else (",", ": ")
        json_bytes = json.dumps(
            metadata_obj.to_dict(),
            indent=indent,
            separators=separators,
            sort_keys=True,
        ).encode("utf-8")

        if self.validate:
            try:
                new_md_obj = JSONDeserializer().deserialize(json_bytes)
                if metadata_obj != new_md_obj:
                    raise ValueError(
                        "Metadata changes if you serialize and deserialize."
                    )
            except Exception as e:
                raise ValueError("Metadata cannot be validated!") from e

    except Exception as e:
        raise SerializationError("Failed to serialize JSON") from e

    return json_bytes
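# Usage sketch for the serializer above. `md` is assumed to be a Metadata
# object loaded elsewhere (e.g. via Metadata.from_file). It shows the two
# knobs the class exposes: compact output and round-trip validation.
pretty_bytes = JSONSerializer().serialize(md)               # 1-space indent
compact_bytes = JSONSerializer(compact=True).serialize(md)  # no whitespace
checked_bytes = JSONSerializer(validate=True).serialize(md)  # round-trip check
assert len(compact_bytes) <= len(pretty_bytes)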
def test_serialize_with_validate(self) -> None:
    # Assert that changing one required attribute makes validation fail.
    root = Metadata.from_file(
        os.path.join(self.repo_dir, "metadata", "root.json"))
    root.signed.version = 0
    with self.assertRaises(SerializationError):
        root.to_bytes(JSONSerializer(validate=True))
def test_to_from_bytes(self) -> None:
    for metadata in TOP_LEVEL_ROLE_NAMES:
        path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
        with open(path, "rb") as f:
            metadata_bytes = f.read()
        md_obj = Metadata.from_bytes(metadata_bytes)

        # Verify that from_bytes/to_bytes doesn't change the content,
        # for both serializer cases: noncompact and compact.

        # Case 1: test noncompact by overriding the default serializer.
        self.assertEqual(md_obj.to_bytes(JSONSerializer()), metadata_bytes)

        # Case 2: test compact by using the default serializer.
        obj_bytes = md_obj.to_bytes()
        metadata_obj_2 = Metadata.from_bytes(obj_bytes)
        self.assertEqual(metadata_obj_2.to_bytes(), obj_bytes)
def test_metadata_signed_is_expired(self) -> None:
    # Use of Snapshot is arbitrary, we're just testing the base class
    # features with real data
    snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json")
    md = Metadata.from_file(snapshot_path)

    self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))

    # Test is_expired with reference_time provided
    is_expired = md.signed.is_expired(md.signed.expires)
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires + timedelta(days=1))
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires - timedelta(days=1))
    self.assertFalse(is_expired)

    # Test is_expired without reference_time,
    # manipulating md.signed.expires
    expires = md.signed.expires
    md.signed.expires = datetime.utcnow()
    is_expired = md.signed.is_expired()
    self.assertTrue(is_expired)
    md.signed.expires = datetime.utcnow() + timedelta(days=1)
    is_expired = md.signed.is_expired()
    self.assertFalse(is_expired)
    md.signed.expires = expires
def test_update_root_new_root_cannot_be_verified_with_threshold(self):
    # New root data with a signature threshold that cannot be met.
    root = Metadata.from_bytes(self.metadata["root"])
    # Remove root role keyids representing root signatures
    root.signed.roles["root"].keyids = []
    with self.assertRaises(exceptions.UnsignedMetadataError):
        self.trusted_set.update_root(root.to_bytes())
def test_metadata_timestamp(self):
    timestamp_path = os.path.join(
        self.repo_dir, 'metadata', 'timestamp.json')
    timestamp = Metadata.from_file(timestamp_path)

    self.assertEqual(timestamp.signed.version, 1)
    timestamp.signed.bump_version()
    self.assertEqual(timestamp.signed.version, 2)

    self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 1, 0, 0))
    timestamp.signed.bump_expiration()
    self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 2, 0, 0))
    timestamp.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 2, 0, 0))

    # Test that dateutil.relativedelta works; it provides a much
    # easier-to-use interface for callers
    delta = relativedelta(days=1)
    timestamp.signed.bump_expiration(delta)
    self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 3, 0, 0))
    delta = relativedelta(years=5)
    timestamp.signed.bump_expiration(delta)
    self.assertEqual(timestamp.signed.expires, datetime(2036, 1, 3, 0, 0))

    hashes = {'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'}
    fileinfo = copy.deepcopy(timestamp.signed.meta['snapshot.json'])
    fileinfo['hashes'] = hashes
    fileinfo['version'] = 2
    fileinfo['length'] = 520
    self.assertNotEqual(timestamp.signed.meta['snapshot.json'], fileinfo)
    timestamp.signed.update(2, 520, hashes)
    self.assertEqual(timestamp.signed.meta['snapshot.json'], fileinfo)
def add_succinct_roles(
    self, delegator_name: str, bit_length: int, name_prefix: str
) -> None:
    """Add succinct roles info to a delegator with name "delegator_name".

    Note that for each delegated role represented by succinct roles
    an empty Targets instance is created.
    """
    delegator = self._get_delegator(delegator_name)

    if (delegator.delegations is not None
            and delegator.delegations.roles is not None):
        raise ValueError(
            "Can't add a succinct_roles when delegated roles are used")

    key, signer = self.create_key()
    succinct_roles = SuccinctRoles([], 1, bit_length, name_prefix)
    delegator.delegations = Delegations({}, None, succinct_roles)

    # Add targets metadata for all bins.
    for delegated_name in succinct_roles.get_roles():
        self.md_delegates[delegated_name] = Metadata(
            Targets(expires=self.safe_expiry))
        self.add_signer(delegated_name, signer)

    delegator.add_key(key)
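# A short sketch of what SuccinctRoles describes: 2**bit_length hash bins,
# each named name_prefix plus a zero-padded hex suffix, with every target
# path mapping to exactly one bin. The values below are illustrative.
bins = SuccinctRoles(keyids=[], threshold=1, bit_length=4, name_prefix="bin")
print(list(bins.get_roles()))  # ['bin-0', ..., 'bin-f'] -- 16 bins
print(bins.get_role_for_target("foo/bar.txt"))  # deterministic bin name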
def add_delegation(
    self, delegator_name: str, role: DelegatedRole, targets: Targets
) -> None:
    """Add delegated target role to the repository."""
    delegator = self._get_delegator(delegator_name)

    if (delegator.delegations is not None
            and delegator.delegations.succinct_roles is not None):
        raise ValueError("Can't add a role when succinct_roles is used")

    # Create delegation
    if delegator.delegations is None:
        delegator.delegations = Delegations({}, roles={})

    assert delegator.delegations.roles is not None
    # Put delegation last by default
    delegator.delegations.roles[role.name] = role

    # By default add one new key for the role
    key, signer = self.create_key()
    delegator.add_key(key, role.name)
    self.add_signer(role.name, signer)

    # Add metadata for the role
    if role.name not in self.md_delegates:
        self.md_delegates[role.name] = Metadata(targets, {})
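# Illustrative call for the helper above, assuming `repo` is an instance
# of this repository class. DelegatedRole takes (name, keyids, threshold,
# terminating, paths) in python-tuf's metadata API.
role = DelegatedRole("example-role", [], 1, False, ["files/*"])
repo.add_delegation("targets", role, Targets(expires=repo.safe_expiry))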
def test_metadata_root(self):
    root_path = os.path.join(
        self.repo_dir, 'metadata', 'root.json')
    root = Metadata.from_file(root_path)

    # Add a second key to root role
    root_key2 = import_ed25519_publickey_from_file(
        os.path.join(self.keystore_dir, 'root_key2.pub'))
    keyid = root_key2['keyid']
    key_metadata = format_keyval_to_metadata(
        root_key2['keytype'], root_key2['scheme'], root_key2['keyval'])

    # Assert that root does not contain the new key
    self.assertNotIn(keyid, root.signed.roles['root']['keyids'])
    self.assertNotIn(keyid, root.signed.keys)

    # Add new root key
    root.signed.add_key('root', keyid, key_metadata)

    # Assert that key is added
    self.assertIn(keyid, root.signed.roles['root']['keyids'])
    self.assertIn(keyid, root.signed.keys)

    # Remove the key
    root.signed.remove_key('root', keyid)

    # Assert that root does not contain the new key anymore
    self.assertNotIn(keyid, root.signed.roles['root']['keyids'])
    self.assertNotIn(keyid, root.signed.keys)
def update_delegated_targets(
    self, data: bytes, role_name: str, delegator_name: str
):
    """Verifies and loads 'data' as new metadata for target 'role_name'.

    Args:
        data: unverified new metadata as bytes
        role_name: The role name of the new metadata
        delegator_name: The name of the role delegating to the new metadata

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if self.snapshot is None:
        raise RuntimeError("Cannot load targets before snapshot")

    delegator: Optional[Metadata] = self.get(delegator_name)
    if delegator is None:
        raise RuntimeError("Cannot load targets before delegator")

    logger.debug("Updating %s delegated by %s", role_name, delegator_name)

    # Verify against the hashes in snapshot, if any
    meta = self.snapshot.signed.meta.get(f"{role_name}.json")
    if meta is None:
        raise exceptions.RepositoryError(
            f"Snapshot does not contain information for '{role_name}'")

    try:
        meta.verify_length_and_hashes(data)
    except exceptions.LengthOrHashMismatchError as e:
        raise exceptions.RepositoryError(
            f"{role_name} length or hashes do not match") from e

    try:
        new_delegate = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError(
            f"Failed to load {role_name}") from e

    if new_delegate.signed.type != "targets":
        raise exceptions.RepositoryError(
            f"Expected 'targets', got '{new_delegate.signed.type}'")

    delegator.verify_delegate(role_name, new_delegate)

    if new_delegate.signed.version != meta.version:
        raise exceptions.BadVersionNumberError(
            f"Expected {role_name} version "
            f"{meta.version}, got {new_delegate.signed.version}.")

    if new_delegate.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError(f"New {role_name} is expired")

    self._trusted_set[role_name] = new_delegate
    logger.debug("Updated %s delegated by %s", role_name, delegator_name)
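# Hedged sketch of the expected call order on the trusted metadata set
# (method names per this module; the *_bytes variables are assumed to be
# raw metadata bytes fetched elsewhere):
trusted_set = TrustedMetadataSet(root_bytes)
trusted_set.root_update_finished()  # see update_timestamp's guard below
trusted_set.update_timestamp(timestamp_bytes)
trusted_set.update_snapshot(snapshot_bytes)
trusted_set.update_targets(targets_bytes)
trusted_set.update_delegated_targets(role1_bytes, "role1", "targets")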
def deserialize(self, raw_data: bytes) -> Metadata:
    """Deserialize utf-8 encoded JSON bytes into Metadata object."""
    try:
        json_dict = json.loads(raw_data.decode("utf-8"))
        metadata_obj = Metadata.from_dict(json_dict)
    except Exception as e:
        raise DeserializationError("Failed to deserialize JSON") from e

    return metadata_obj
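# Usage sketch for the deserializer above; the file path is illustrative.
with open("metadata/root.json", "rb") as f:
    md = JSONDeserializer().deserialize(f.read())
print(md.signed.type, md.signed.version)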
def test_valid_metadata_serialization(self, test_case_data: bytes) -> None:
    md = Metadata.from_bytes(test_case_data)

    # Convert to JSON and sort the keys the way we do in JSONSerializer.
    separators = (",", ":")
    test_json = json.loads(test_case_data)
    test_bytes = json.dumps(
        test_json, separators=separators, sort_keys=True).encode("utf-8")

    self.assertEqual(test_bytes, md.to_bytes())
def test_sign_failures(self) -> None:
    # Test throwing UnsignedMetadataError because of signing problems
    # related to bad information in the signer.
    md = Metadata.from_file(
        os.path.join(self.repo_dir, "metadata", "snapshot.json"))
    key_dict = copy(self.keystore[Snapshot.type])
    key_dict["keytype"] = "rsa"
    key_dict["scheme"] = "bad_scheme"
    sslib_signer = SSlibSigner(key_dict)
    with self.assertRaises(exceptions.UnsignedMetadataError):
        md.sign(sslib_signer)
def test_metadata_base(self):
    # Use of Snapshot is arbitrary, we're just testing the base class
    # features with real data
    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')
    md = Metadata.from_file(snapshot_path)

    self.assertEqual(md.signed.version, 1)
    md.signed.bump_version()
    self.assertEqual(md.signed.version, 2)

    self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))
    md.signed.bump_expiration()
    self.assertEqual(md.signed.expires, datetime(2030, 1, 2, 0, 0))
    md.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(md.signed.expires, datetime(2031, 1, 2, 0, 0))

    # Test is_expired with reference_time provided
    is_expired = md.signed.is_expired(md.signed.expires)
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires + timedelta(days=1))
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires - timedelta(days=1))
    self.assertFalse(is_expired)

    # Test is_expired without reference_time,
    # manipulating md.signed.expires
    expires = md.signed.expires
    md.signed.expires = datetime.utcnow()
    is_expired = md.signed.is_expired()
    self.assertTrue(is_expired)
    md.signed.expires = datetime.utcnow() + timedelta(days=1)
    is_expired = md.signed.is_expired()
    self.assertFalse(is_expired)
    md.signed.expires = expires

    # Test deserializing metadata with non-unique signatures:
    data = md.to_dict()
    data["signatures"].append(
        {"keyid": data["signatures"][0]["keyid"], "sig": "foo"})
    with self.assertRaises(ValueError):
        Metadata.from_dict(data)
def test_update_with_invalid_json(self):
    repo_dir = os.path.join(
        os.getcwd(), 'repository_data', 'repository', 'metadata')
    data = {}
    for md in ["root", "timestamp", "snapshot", "targets", "role1"]:
        with open(os.path.join(repo_dir, f"{md}.json"), "rb") as f:
            data[md] = f.read()

    # root.json is not a json file at all
    with self.assertRaises(exceptions.RepositoryError):
        TrustedMetadataSet(b"")

    # root.json is invalid (version bumped without re-signing)
    root = Metadata.from_bytes(data["root"])
    root.signed.version += 1
    with self.assertRaises(exceptions.RepositoryError):
        TrustedMetadataSet(json.dumps(root.to_dict()).encode())

    trusted_set = TrustedMetadataSet(data["root"])
    trusted_set.root_update_finished()

    top_level_md = [
        (data["timestamp"], trusted_set.update_timestamp),
        (data["snapshot"], trusted_set.update_snapshot),
        (data["targets"], trusted_set.update_targets),
    ]
    for metadata, update_func in top_level_md:
        # metadata is not json
        with self.assertRaises(exceptions.RepositoryError):
            update_func(b"")

        # metadata is invalid (version bumped without re-signing)
        md = Metadata.from_bytes(metadata)
        md.signed.version += 1
        with self.assertRaises(exceptions.RepositoryError):
            update_func(json.dumps(md.to_dict()).encode())

        # metadata is of wrong type
        with self.assertRaises(exceptions.RepositoryError):
            update_func(data["root"])

        update_func(metadata)
@classmethod
def modify_metadata(
    cls, rolename: str, modification_func: Callable
) -> bytes:
    """Instantiate metadata from rolename type, call modification_func
    and sign it again with the cls.keystore[rolename] signer.

    Args:
        rolename: Name of the metadata to be modified.
        modification_func: Function that will be called to modify the
            signed portion of the metadata.
    """
    metadata = Metadata.from_bytes(cls.metadata[rolename])
    modification_func(metadata.signed)
    metadata.sign(cls.keystore[rolename])
    return metadata.to_bytes(JSONSerializer(validate=True))
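# Illustrative use of the helper above, e.g. from within a test of the
# same class; TestClass is a stand-in name for the enclosing test case.
def bump_version(signed):
    signed.version += 1

modified_snapshot_bytes = TestClass.modify_metadata("snapshot", bump_version)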
def verify_generation(md: Metadata, path: str) -> None:
    """Verify that a newly generated file equals the locally stored one.

    Args:
        md: Newly generated metadata object.
        path: Path to the locally stored metadata file.
    """
    with open(path, "rb") as f:
        static_md_bytes = f.read()
        md_bytes = md.to_bytes(SERIALIZER)
        if static_md_bytes != md_bytes:
            raise ValueError(
                f"Generated data != local data at {path}. Generate new "
                + "metadata with 'python generated_data/generate_md.py'")
def update_timestamp(self, data: bytes):
    """Verifies and loads 'data' as new timestamp metadata.

    Args:
        data: unverified new timestamp metadata as bytes

    Raises:
        RepositoryError: Metadata failed to load or verify. The actual
            error type and content will contain more details.
    """
    if not self._root_update_finished:
        raise RuntimeError("Cannot update timestamp before root")
    if self.snapshot is not None:
        raise RuntimeError("Cannot update timestamp after snapshot")

    try:
        new_timestamp = Metadata.from_bytes(data)
    except DeserializationError as e:
        raise exceptions.RepositoryError("Failed to load timestamp") from e

    if new_timestamp.signed.type != "timestamp":
        raise exceptions.RepositoryError(
            f"Expected 'timestamp', got '{new_timestamp.signed.type}'")

    self.root.verify_delegate("timestamp", new_timestamp)

    # If an existing trusted timestamp is updated,
    # check for a rollback attack
    if self.timestamp is not None:
        # Prevent rolling back timestamp version
        if new_timestamp.signed.version < self.timestamp.signed.version:
            raise exceptions.ReplayedMetadataError(
                "timestamp",
                new_timestamp.signed.version,
                self.timestamp.signed.version,
            )
        # Prevent rolling back snapshot version
        if (new_timestamp.signed.meta["snapshot.json"].version
                < self.timestamp.signed.meta["snapshot.json"].version):
            raise exceptions.ReplayedMetadataError(
                "snapshot",
                new_timestamp.signed.meta["snapshot.json"].version,
                self.timestamp.signed.meta["snapshot.json"].version,
            )

    if new_timestamp.signed.is_expired(self.reference_time):
        raise exceptions.ExpiredMetadataError("New timestamp is expired")

    self._trusted_set["timestamp"] = new_timestamp
    logger.debug("Updated timestamp")
def generate_all_files(dump: bool = False, verify: bool = False) -> None:
    """Generate a new repository and optionally verify it.

    Args:
        dump: Whether to dump the newly generated files.
        verify: Whether to verify the newly generated files against the
            locally stored ones.
    """
    md_root = Metadata(Root(expires=EXPIRY))
    md_timestamp = Metadata(Timestamp(expires=EXPIRY))
    md_snapshot = Metadata(Snapshot(expires=EXPIRY))
    md_targets = Metadata(Targets(expires=EXPIRY))

    md_root.signed.add_key(keys["ed25519_0"], "root")
    md_root.signed.add_key(keys["ed25519_1"], "timestamp")
    md_root.signed.add_key(keys["ed25519_2"], "snapshot")
    md_root.signed.add_key(keys["ed25519_3"], "targets")

    for i, md in enumerate([md_root, md_timestamp, md_snapshot, md_targets]):
        assert isinstance(md, Metadata)
        signer = SSlibSigner({
            "keytype": "ed25519",
            "scheme": "ed25519",
            "keyid": keyids[i],
            "keyval": {
                "public": public_values[i],
                "private": private_values[i],
            },
        })
        md.sign(signer)

        path = os.path.join(OUT_DIR, f"{md.signed.type}_with_ed25519.json")
        if verify:
            verify_generation(md, path)
        if dump:
            md.to_file(path, SERIALIZER)
def test_sign_verify(self) -> None:
    root_path = os.path.join(self.repo_dir, "metadata", "root.json")
    root = Metadata[Root].from_file(root_path).signed

    # Locate the public keys we need from root
    targets_keyid = next(iter(root.roles[Targets.type].keyids))
    targets_key = root.keys[targets_keyid]
    snapshot_keyid = next(iter(root.roles[Snapshot.type].keyids))
    snapshot_key = root.keys[snapshot_keyid]
    timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids))
    timestamp_key = root.keys[timestamp_keyid]

    # Load sample metadata (targets) and assert ...
    path = os.path.join(self.repo_dir, "metadata", "targets.json")
    md_obj = Metadata.from_file(path)

    # ... it has a single existing signature,
    self.assertEqual(len(md_obj.signatures), 1)
    # ... which is valid for the correct key.
    targets_key.verify_signature(md_obj)
    with self.assertRaises(exceptions.UnsignedMetadataError):
        snapshot_key.verify_signature(md_obj)

    # Test verifying with explicitly set serializer
    targets_key.verify_signature(md_obj, CanonicalJSONSerializer())
    with self.assertRaises(exceptions.UnsignedMetadataError):
        targets_key.verify_signature(
            md_obj, JSONSerializer())  # type: ignore[arg-type]

    sslib_signer = SSlibSigner(self.keystore[Snapshot.type])
    # Append a new signature with the unrelated key and assert that ...
    sig = md_obj.sign(sslib_signer, append=True)
    # ... there are now two signatures, and
    self.assertEqual(len(md_obj.signatures), 2)
    # ... both are valid for the corresponding keys.
    targets_key.verify_signature(md_obj)
    snapshot_key.verify_signature(md_obj)
    # ... the returned (appended) signature is for snapshot key
    self.assertEqual(sig.keyid, snapshot_keyid)

    sslib_signer = SSlibSigner(self.keystore[Timestamp.type])
    # Create and assign (don't append) a new signature and assert that ...
    md_obj.sign(sslib_signer, append=False)
    # ... there now is only one signature,
    self.assertEqual(len(md_obj.signatures), 1)
    # ... valid for that key.
    timestamp_key.verify_signature(md_obj)
    with self.assertRaises(exceptions.UnsignedMetadataError):
        targets_key.verify_signature(md_obj)