def test_generic_read(self):
    for metadata, inner_metadata_cls in [
            ('snapshot', Snapshot),
            ('timestamp', Timestamp),
            ('targets', Targets)]:

        # Load JSON-formatted metadata of each supported type from file
        # and from out-of-band read JSON string
        path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
        metadata_obj = Metadata.from_file(path)
        with open(path, 'rb') as f:
            metadata_str = f.read()
        metadata_obj2 = JSONDeserializer().deserialize(metadata_str)

        # Assert that both methods instantiate the right inner class for
        # each metadata type ...
        self.assertTrue(isinstance(metadata_obj.signed, inner_metadata_cls))
        self.assertTrue(
            isinstance(metadata_obj2.signed, inner_metadata_cls))

        # ... and return the same object (compared by dict representation)
        self.assertDictEqual(
            metadata_obj.to_dict(), metadata_obj2.to_dict())

    # Assert that it chokes correctly on an unknown metadata type
    bad_metadata_path = 'bad-metadata.json'
    bad_metadata = {'signed': {'_type': 'bad-metadata'}}
    with open(bad_metadata_path, 'wb') as f:
        f.write(json.dumps(bad_metadata).encode('utf-8'))

    with self.assertRaises(DeserializationError):
        Metadata.from_file(bad_metadata_path)

    os.remove(bad_metadata_path)
def test_md_read_write_file_exceptions(self) -> None:
    # Test reading from a file with a bad filename
    with self.assertRaises(exceptions.StorageError):
        Metadata.from_file("bad-metadata.json")

    # Test serializing to a file with a bad filename
    with self.assertRaises(exceptions.StorageError):
        md = Metadata.from_file(
            os.path.join(self.repo_dir, "metadata", "root.json"))
        md.to_file("")
def test_read_write_read_compare(self) -> None:
    for metadata in TOP_LEVEL_ROLE_NAMES:
        path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
        md_obj = Metadata.from_file(path)

        path_2 = path + ".tmp"
        md_obj.to_file(path_2)
        md_obj_2 = Metadata.from_file(path_2)
        self.assertDictEqual(md_obj.to_dict(), md_obj_2.to_dict())

        os.remove(path_2)
def test_read_write_read_compare(self):
    for metadata in ['snapshot', 'timestamp', 'targets']:
        path = os.path.join(self.repo_dir, 'metadata', metadata + '.json')
        metadata_obj = Metadata.from_file(path)

        path_2 = path + '.tmp'
        metadata_obj.to_file(path_2)
        metadata_obj_2 = Metadata.from_file(path_2)
        self.assertDictEqual(
            metadata_obj.to_dict(), metadata_obj_2.to_dict())

        os.remove(path_2)
def test_trusted_root_unsigned(self) -> None:
    # Local trusted root is not signed
    root_path = os.path.join(self.metadata_dir, "root.json")
    md_root = Metadata.from_file(root_path)
    md_root.signatures.clear()
    md_root.to_file(root_path)

    with self.assertRaises(UnsignedMetadataError):
        self._run_refresh()

    # The update failed, no changes in metadata
    self._assert_files_exist([Root.type])
    md_root_after = Metadata.from_file(root_path)
    self.assertEqual(md_root.to_bytes(), md_root_after.to_bytes())
def test_serialize_with_validate(self) -> None:
    # Assert that changing a required attribute to an invalid value
    # makes validation fail.
    root = Metadata.from_file(
        os.path.join(self.repo_dir, "metadata", "root.json"))
    root.signed.version = 0
    with self.assertRaises(SerializationError):
        root.to_bytes(JSONSerializer(validate=True))
def _modify_repository_root(
    self,
    modification_func: Callable[[Metadata], None],
    bump_version: bool = False,
) -> None:
    """Apply 'modification_func' to root and persist it."""
    role_path = os.path.join(
        self.repository_directory, "metadata", "root.json")
    root = Metadata.from_file(role_path)
    modification_func(root)
    if bump_version:
        root.signed.version += 1
    root_key_path = os.path.join(self.keystore_directory, "root_key")
    root_key_dict = import_rsa_privatekey_from_file(
        root_key_path, password="******")
    signer = SSlibSigner(root_key_dict)
    root.sign(signer)
    root.to_file(
        os.path.join(self.repository_directory, "metadata", "root.json"))
    root.to_file(
        os.path.join(
            self.repository_directory,
            "metadata",
            f"{root.signed.version}.root.json",
        ))
def test_metadata_signed_is_expired(self) -> None:
    # Use of Snapshot is arbitrary, we're just testing the base class
    # features with real data
    snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json")
    md = Metadata.from_file(snapshot_path)

    self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))

    # Test is_expired with reference_time provided
    is_expired = md.signed.is_expired(md.signed.expires)
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires + timedelta(days=1))
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires - timedelta(days=1))
    self.assertFalse(is_expired)

    # Test is_expired without reference_time,
    # manipulating md.signed.expires
    expires = md.signed.expires
    md.signed.expires = datetime.utcnow()
    is_expired = md.signed.is_expired()
    self.assertTrue(is_expired)
    md.signed.expires = datetime.utcnow() + timedelta(days=1)
    is_expired = md.signed.is_expired()
    self.assertFalse(is_expired)
    md.signed.expires = expires
def test_metadata_timestamp(self):
    timestamp_path = os.path.join(
        self.repo_dir, 'metadata', 'timestamp.json')
    timestamp = Metadata.from_file(timestamp_path)

    self.assertEqual(timestamp.signed.version, 1)
    timestamp.signed.bump_version()
    self.assertEqual(timestamp.signed.version, 2)

    self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 1, 0, 0))
    timestamp.signed.bump_expiration()
    self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 2, 0, 0))
    timestamp.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 2, 0, 0))

    # Test that dateutil.relativedelta works; it provides a much
    # easier-to-use interface for callers
    delta = relativedelta(days=1)
    timestamp.signed.bump_expiration(delta)
    self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 3, 0, 0))
    delta = relativedelta(years=5)
    timestamp.signed.bump_expiration(delta)
    self.assertEqual(timestamp.signed.expires, datetime(2036, 1, 3, 0, 0))

    hashes = {
        'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'}
    fileinfo = copy.deepcopy(timestamp.signed.meta['snapshot.json'])
    fileinfo['hashes'] = hashes
    fileinfo['version'] = 2
    fileinfo['length'] = 520

    self.assertNotEqual(timestamp.signed.meta['snapshot.json'], fileinfo)
    timestamp.signed.update(2, 520, hashes)
    self.assertEqual(timestamp.signed.meta['snapshot.json'], fileinfo)
def test_metadata_root(self):
    root_path = os.path.join(
        self.repo_dir, 'metadata', 'root.json')
    root = Metadata.from_file(root_path)

    # Add a second key to root role
    root_key2 = import_ed25519_publickey_from_file(
        os.path.join(self.keystore_dir, 'root_key2.pub'))

    keyid = root_key2['keyid']
    key_metadata = format_keyval_to_metadata(
        root_key2['keytype'], root_key2['scheme'], root_key2['keyval'])

    # Assert that root does not contain the new key
    self.assertNotIn(keyid, root.signed.roles['root']['keyids'])
    self.assertNotIn(keyid, root.signed.keys)

    # Add new root key
    root.signed.add_key('root', keyid, key_metadata)

    # Assert that key is added
    self.assertIn(keyid, root.signed.roles['root']['keyids'])
    self.assertIn(keyid, root.signed.keys)

    # Remove the key
    root.signed.remove_key('root', keyid)

    # Assert that root does not contain the new key anymore
    self.assertNotIn(keyid, root.signed.roles['root']['keyids'])
    self.assertNotIn(keyid, root.signed.keys)
def test_sign_failures(self) -> None:
    # Test that UnsignedMetadataError is raised when signing fails
    # because of bad key information in the signer.
    md = Metadata.from_file(
        os.path.join(self.repo_dir, "metadata", "snapshot.json"))
    key_dict = copy(self.keystore[Snapshot.type])
    key_dict["keytype"] = "rsa"
    key_dict["scheme"] = "bad_scheme"
    sslib_signer = SSlibSigner(key_dict)
    with self.assertRaises(exceptions.UnsignedMetadataError):
        md.sign(sslib_signer)
def test_sign_verify(self) -> None:
    root_path = os.path.join(self.repo_dir, "metadata", "root.json")
    root = Metadata[Root].from_file(root_path).signed

    # Locate the public keys we need from root
    targets_keyid = next(iter(root.roles[Targets.type].keyids))
    targets_key = root.keys[targets_keyid]
    snapshot_keyid = next(iter(root.roles[Snapshot.type].keyids))
    snapshot_key = root.keys[snapshot_keyid]
    timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids))
    timestamp_key = root.keys[timestamp_keyid]

    # Load sample metadata (targets) and assert ...
    path = os.path.join(self.repo_dir, "metadata", "targets.json")
    md_obj = Metadata.from_file(path)

    # ... it has a single existing signature,
    self.assertEqual(len(md_obj.signatures), 1)
    # ... which is valid for the correct key.
    targets_key.verify_signature(md_obj)
    with self.assertRaises(exceptions.UnsignedMetadataError):
        snapshot_key.verify_signature(md_obj)

    # Test verifying with explicitly set serializer
    targets_key.verify_signature(md_obj, CanonicalJSONSerializer())
    with self.assertRaises(exceptions.UnsignedMetadataError):
        targets_key.verify_signature(
            md_obj, JSONSerializer())  # type: ignore[arg-type]

    sslib_signer = SSlibSigner(self.keystore[Snapshot.type])
    # Append a new signature with the unrelated key and assert that ...
    sig = md_obj.sign(sslib_signer, append=True)
    # ... there are now two signatures, and
    self.assertEqual(len(md_obj.signatures), 2)
    # ... both are valid for the corresponding keys.
    targets_key.verify_signature(md_obj)
    snapshot_key.verify_signature(md_obj)
    # ... the returned (appended) signature is for the snapshot key
    self.assertEqual(sig.keyid, snapshot_keyid)

    sslib_signer = SSlibSigner(self.keystore[Timestamp.type])
    # Create and assign (don't append) a new signature and assert that ...
    md_obj.sign(sslib_signer, append=False)
    # ... there now is only one signature,
    self.assertEqual(len(md_obj.signatures), 1)
    # ... valid for that key.
    timestamp_key.verify_signature(md_obj)
    with self.assertRaises(exceptions.UnsignedMetadataError):
        targets_key.verify_signature(md_obj)
def test_generic_read(self) -> None:
    for metadata, inner_metadata_cls in [
        (Root.type, Root),
        (Snapshot.type, Snapshot),
        (Timestamp.type, Timestamp),
        (Targets.type, Targets),
    ]:
        # Load JSON-formatted metadata of each supported type from file
        # and from out-of-band read JSON string
        path = os.path.join(self.repo_dir, "metadata", metadata + ".json")
        md_obj = Metadata.from_file(path)
        with open(path, "rb") as f:
            md_obj2 = Metadata.from_bytes(f.read())

        # Assert that both methods instantiate the right inner class for
        # each metadata type ...
        self.assertTrue(isinstance(md_obj.signed, inner_metadata_cls))
        self.assertTrue(isinstance(md_obj2.signed, inner_metadata_cls))

        # ... and return the same object (compared by dict representation)
        self.assertDictEqual(md_obj.to_dict(), md_obj2.to_dict())

    # Assert that it chokes correctly on an unknown metadata type
    bad_metadata_path = "bad-metadata.json"
    bad_metadata = {"signed": {"_type": "bad-metadata"}}
    bad_string = json.dumps(bad_metadata).encode("utf-8")
    with open(bad_metadata_path, "wb") as f:
        f.write(bad_string)

    with self.assertRaises(DeserializationError):
        Metadata.from_file(bad_metadata_path)
    with self.assertRaises(DeserializationError):
        Metadata.from_bytes(bad_string)

    os.remove(bad_metadata_path)
def test_metadata_base(self):
    # Use of Snapshot is arbitrary, we're just testing the base class
    # features with real data
    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')
    md = Metadata.from_file(snapshot_path)

    self.assertEqual(md.signed.version, 1)
    md.signed.bump_version()
    self.assertEqual(md.signed.version, 2)

    self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))
    md.signed.bump_expiration()
    self.assertEqual(md.signed.expires, datetime(2030, 1, 2, 0, 0))
    md.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(md.signed.expires, datetime(2031, 1, 2, 0, 0))
def test_metadata_snapshot(self):
    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')
    snapshot = Metadata.from_file(snapshot_path)

    # Create a dict representing what we expect the updated data to be
    fileinfo = copy.deepcopy(snapshot.signed.meta)
    hashes = {
        'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'}
    fileinfo['role1.json']['version'] = 2
    fileinfo['role1.json']['hashes'] = hashes
    fileinfo['role1.json']['length'] = 123

    self.assertNotEqual(snapshot.signed.meta, fileinfo)
    snapshot.signed.update('role1', 2, 123, hashes)
    self.assertEqual(snapshot.signed.meta, fileinfo)
def test_metadata_snapshot(self):
    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')
    snapshot = Metadata.from_file(snapshot_path)

    # Create a MetaFile instance representing what we expect
    # the updated data to be.
    hashes = {
        'sha256': 'c2986576f5fdfd43944e2b19e775453b96748ec4fe2638a6d2f32f1310967095'}
    fileinfo = MetaFile(2, 123, hashes)

    self.assertNotEqual(
        snapshot.signed.meta['role1.json'].to_dict(), fileinfo.to_dict()
    )
    snapshot.signed.update('role1', fileinfo)
    self.assertEqual(
        snapshot.signed.meta['role1.json'].to_dict(), fileinfo.to_dict()
    )
def test_sign_verify(self):
    # Load sample metadata (targets) and assert ...
    path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
    metadata_obj = Metadata.from_file(path)

    # ... it has a single existing signature,
    self.assertTrue(len(metadata_obj.signatures) == 1)
    # ... which is valid for the correct key.
    self.assertTrue(metadata_obj.verify(
        self.keystore['targets']['public']))

    sslib_signer = SSlibSigner(self.keystore['snapshot']['private'])
    # Append a new signature with the unrelated key and assert that ...
    metadata_obj.sign(sslib_signer, append=True)
    # ... there are now two signatures, and
    self.assertTrue(len(metadata_obj.signatures) == 2)
    # ... both are valid for the corresponding keys.
    self.assertTrue(metadata_obj.verify(
        self.keystore['targets']['public']))
    self.assertTrue(metadata_obj.verify(
        self.keystore['snapshot']['public']))

    sslib_signer.key_dict = self.keystore['timestamp']['private']
    # Create and assign (don't append) a new signature and assert that ...
    metadata_obj.sign(sslib_signer, append=False)
    # ... there now is only one signature,
    self.assertTrue(len(metadata_obj.signatures) == 1)
    # ... valid for that key.
    self.assertTrue(metadata_obj.verify(
        self.keystore['timestamp']['public']))

    # Assert exception if there is more than one signature for a key
    metadata_obj.sign(sslib_signer, append=True)
    with self.assertRaises(tuf.exceptions.Error) as ctx:
        metadata_obj.verify(self.keystore['timestamp']['public'])
    self.assertTrue(
        '2 signatures for key' in str(ctx.exception),
        str(ctx.exception))

    # Assert exception if there is no signature for a key
    with self.assertRaises(tuf.exceptions.Error) as ctx:
        metadata_obj.verify(self.keystore['targets']['public'])
    self.assertTrue(
        'no signature for' in str(ctx.exception),
        str(ctx.exception))
def test_metadata_root(self):
    root_path = os.path.join(
        self.repo_dir, 'metadata', 'root.json')
    root = Metadata.from_file(root_path)

    # Add a second key to root role
    root_key2 = import_ed25519_publickey_from_file(
        os.path.join(self.keystore_dir, 'root_key2.pub'))

    keyid = root_key2['keyid']
    key_metadata = Key(keyid, root_key2['keytype'], root_key2['scheme'],
                       root_key2['keyval'])

    # Assert that root does not contain the new key
    self.assertNotIn(keyid, root.signed.roles['root'].keyids)
    self.assertNotIn(keyid, root.signed.keys)

    # Add new root key
    root.signed.add_key('root', key_metadata)

    # Assert that key is added
    self.assertIn(keyid, root.signed.roles['root'].keyids)
    self.assertIn(keyid, root.signed.keys)

    # Confirm that the newly added key does not break
    # the object serialization
    root.to_dict()

    # Try adding the same key again and assert that it's ignored.
    pre_add_keyid = root.signed.roles['root'].keyids.copy()
    root.signed.add_key('root', key_metadata)
    self.assertEqual(pre_add_keyid, root.signed.roles['root'].keyids)

    # Remove the key
    root.signed.remove_key('root', keyid)

    # Assert that root does not contain the new key anymore
    self.assertNotIn(keyid, root.signed.roles['root'].keyids)
    self.assertNotIn(keyid, root.signed.keys)

    with self.assertRaises(KeyError):
        root.signed.remove_key('root', 'nosuchkey')
def test_timestamp_eq_versions_check(self) -> None:
    # Test that a modified timestamp with different content, but the same
    # version, doesn't replace the valid locally stored one.

    # Make a successful update of valid metadata which stores it in cache
    self._run_refresh()
    initial_timestamp_meta_ver = self.sim.timestamp.snapshot_meta.version

    # Change timestamp without bumping its version in order to test if a
    # new timestamp with the same version will be persisted.
    self.sim.timestamp.snapshot_meta.version = 100
    self._run_refresh()

    # If the local timestamp md file has the same snapshot_meta.version as
    # the initial one, then the new modified timestamp has not been stored.
    timestamp_path = os.path.join(self.metadata_dir, "timestamp.json")
    timestamp: Metadata[Timestamp] = Metadata.from_file(timestamp_path)
    self.assertEqual(
        initial_timestamp_meta_ver, timestamp.signed.snapshot_meta.version
    )
def test_verify_failures(self) -> None:
    root_path = os.path.join(self.repo_dir, "metadata", "root.json")
    root = Metadata[Root].from_file(root_path).signed

    # Locate the timestamp public key we need from root
    timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids))
    timestamp_key = root.keys[timestamp_keyid]

    # Load sample metadata (timestamp)
    path = os.path.join(self.repo_dir, "metadata", "timestamp.json")
    md_obj = Metadata.from_file(path)

    # Test failure on unknown scheme (securesystemslib
    # UnsupportedAlgorithmError)
    scheme = timestamp_key.scheme
    timestamp_key.scheme = "foo"
    with self.assertRaises(exceptions.UnsignedMetadataError):
        timestamp_key.verify_signature(md_obj)
    timestamp_key.scheme = scheme

    # Test failure on broken public key data (securesystemslib
    # CryptoError)
    public = timestamp_key.keyval["public"]
    timestamp_key.keyval["public"] = "ffff"
    with self.assertRaises(exceptions.UnsignedMetadataError):
        timestamp_key.verify_signature(md_obj)
    timestamp_key.keyval["public"] = public

    # Test failure with invalid signature (securesystemslib
    # FormatError)
    sig = md_obj.signatures[timestamp_keyid]
    correct_sig = sig.signature
    sig.signature = "foo"
    with self.assertRaises(exceptions.UnsignedMetadataError):
        timestamp_key.verify_signature(md_obj)

    # Test failure with valid but incorrect signature
    sig.signature = "ff" * 64
    with self.assertRaises(exceptions.UnsignedMetadataError):
        timestamp_key.verify_signature(md_obj)
    sig.signature = correct_sig
def test_metadata_targets(self):
    targets_path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
    targets = Metadata.from_file(targets_path)

    # Create a fileinfo dict representing what we expect the updated data
    # to be
    filename = 'file2.txt'
    hashes = {
        "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b",
        "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0"
    }

    fileinfo = {'hashes': hashes, 'length': 28}

    # Assert that data is not already equal
    self.assertNotEqual(targets.signed.targets[filename], fileinfo)
    # Update an already existing fileinfo
    targets.signed.update(filename, fileinfo)
    # Verify that data is updated
    self.assertEqual(targets.signed.targets[filename], fileinfo)
def test_metadata_base(self):
    # Use of Snapshot is arbitrary, we're just testing the base class
    # features with real data
    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')
    md = Metadata.from_file(snapshot_path)

    self.assertEqual(md.signed.version, 1)
    md.signed.bump_version()
    self.assertEqual(md.signed.version, 2)

    self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))
    md.signed.bump_expiration()
    self.assertEqual(md.signed.expires, datetime(2030, 1, 2, 0, 0))
    md.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(md.signed.expires, datetime(2031, 1, 2, 0, 0))

    # Test is_expired with reference_time provided
    is_expired = md.signed.is_expired(md.signed.expires)
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires + timedelta(days=1))
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires - timedelta(days=1))
    self.assertFalse(is_expired)

    # Test is_expired without reference_time,
    # manipulating md.signed.expires
    expires = md.signed.expires
    md.signed.expires = datetime.utcnow()
    is_expired = md.signed.is_expired()
    self.assertTrue(is_expired)
    md.signed.expires = datetime.utcnow() + timedelta(days=1)
    is_expired = md.signed.is_expired()
    self.assertFalse(is_expired)
    md.signed.expires = expires

    # Test deserializing metadata with non-unique signatures:
    data = md.to_dict()
    data["signatures"].append(
        {"keyid": data["signatures"][0]["keyid"], "sig": "foo"})
    with self.assertRaises(ValueError):
        Metadata.from_dict(data)
def test_expired_metadata(self, mock_time: Mock) -> None:
    """Verifies that expired local timestamp/snapshot can be used for
    updating from remote.

    The updates and verifications are performed with the following timing:
     - Timestamp v1 expiry set to day 7
     - First updater refresh performed on day 0
     - Repository bumps snapshot and targets to v2 on day 0
     - Timestamp v2 expiry set to day 21
     - Second updater refresh performed on day 18; it is successful and
       timestamp/snapshot final versions are v2"""

    now = datetime.datetime.utcnow()
    self.sim.timestamp.expires = now + datetime.timedelta(days=7)

    # Make a successful update of valid metadata which stores it in cache
    self._run_refresh()

    self.sim.targets.version += 1
    self.sim.update_snapshot()
    self.sim.timestamp.expires = now + datetime.timedelta(days=21)

    # Mock time so that the local timestamp has expired
    # but the new timestamp has not
    mock_time.utcnow.return_value = (
        datetime.datetime.utcnow() + datetime.timedelta(days=18)
    )
    with patch("datetime.datetime", mock_time):
        self._run_refresh()

    # Assert that the final version of timestamp/snapshot is version 2,
    # which means a successful refresh was performed
    # with expired local metadata
    for role in ["timestamp", "snapshot", "targets"]:
        md = Metadata.from_file(
            os.path.join(self.metadata_dir, f"{role}.json")
        )
        self.assertEqual(md.signed.version, 2)
def test_max_root_rotations(self) -> None:
    # The updater must stop looking for new root versions after
    # 'max_root_rotations' intermediate files have been downloaded.
    updater = self._init_updater()
    updater.config.max_root_rotations = 3

    # Create a number of root versions greater than 'max_root_rotations'
    while self.sim.root.version < updater.config.max_root_rotations + 3:
        self.sim.root.version += 1
        self.sim.publish_root()

    md_root = Metadata.from_file(
        os.path.join(self.metadata_dir, "root.json")
    )
    initial_root_version = md_root.signed.version

    updater.refresh()

    # Assert that the root version was increased by no more
    # than 'max_root_rotations'
    self._assert_version_equals(
        Root.type, initial_root_version + updater.config.max_root_rotations
    )
def test_metadata_timestamp(self):
    timestamp_path = os.path.join(
        self.repo_dir, 'metadata', 'timestamp.json')
    timestamp = Metadata.from_file(timestamp_path)

    self.assertEqual(timestamp.signed.version, 1)
    timestamp.signed.bump_version()
    self.assertEqual(timestamp.signed.version, 2)

    self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 1, 0, 0))
    timestamp.signed.bump_expiration()
    self.assertEqual(timestamp.signed.expires, datetime(2030, 1, 2, 0, 0))
    timestamp.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 2, 0, 0))

    # Test that dateutil.relativedelta works; it provides a much
    # easier-to-use interface for callers
    delta = relativedelta(days=1)
    timestamp.signed.bump_expiration(delta)
    self.assertEqual(timestamp.signed.expires, datetime(2031, 1, 3, 0, 0))
    delta = relativedelta(years=5)
    timestamp.signed.bump_expiration(delta)
    self.assertEqual(timestamp.signed.expires, datetime(2036, 1, 3, 0, 0))

    # Create a MetaFile instance representing what we expect
    # the updated data to be.
    hashes = {
        'sha256': '0ae9664468150a9aa1e7f11feecb32341658eb84292851367fea2da88e8a58dc'}
    fileinfo = MetaFile(2, 520, hashes)

    self.assertNotEqual(
        timestamp.signed.meta['snapshot.json'].to_dict(), fileinfo.to_dict()
    )
    timestamp.signed.update(fileinfo)
    self.assertEqual(
        timestamp.signed.meta['snapshot.json'].to_dict(), fileinfo.to_dict()
    )
def test_metadata_base(self):
    # Use of Snapshot is arbitrary, we're just testing the base class
    # features with real data
    snapshot_path = os.path.join(self.repo_dir, 'metadata', 'snapshot.json')
    md = Metadata.from_file(snapshot_path)

    self.assertEqual(md.signed.version, 1)
    md.signed.bump_version()
    self.assertEqual(md.signed.version, 2)

    self.assertEqual(md.signed.expires, datetime(2030, 1, 1, 0, 0))
    md.signed.bump_expiration()
    self.assertEqual(md.signed.expires, datetime(2030, 1, 2, 0, 0))
    md.signed.bump_expiration(timedelta(days=365))
    self.assertEqual(md.signed.expires, datetime(2031, 1, 2, 0, 0))

    # Test is_expired with reference_time provided
    is_expired = md.signed.is_expired(md.signed.expires)
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires + timedelta(days=1))
    self.assertTrue(is_expired)
    is_expired = md.signed.is_expired(md.signed.expires - timedelta(days=1))
    self.assertFalse(is_expired)

    # Test is_expired without reference_time,
    # manipulating md.signed.expires
    expires = md.signed.expires
    md.signed.expires = datetime.utcnow()
    is_expired = md.signed.is_expired()
    self.assertTrue(is_expired)
    md.signed.expires = datetime.utcnow() + timedelta(days=1)
    is_expired = md.signed.is_expired()
    self.assertFalse(is_expired)
    md.signed.expires = expires
def test_compact_json(self) -> None:
    path = os.path.join(self.repo_dir, "metadata", "targets.json")
    md_obj = Metadata.from_file(path)
    self.assertTrue(
        len(JSONSerializer(compact=True).serialize(md_obj))
        < len(JSONSerializer().serialize(md_obj)))
def test_length_and_hash_validation(self):
    # Test metadata files' hash and length verification.
    # Use timestamp to get a MetaFile object and snapshot
    # for untrusted metadata file to verify.
    timestamp_path = os.path.join(
        self.repo_dir, 'metadata', 'timestamp.json')
    timestamp = Metadata.from_file(timestamp_path)
    snapshot_metafile = timestamp.signed.meta["snapshot.json"]

    snapshot_path = os.path.join(
        self.repo_dir, 'metadata', 'snapshot.json')

    with open(snapshot_path, "rb") as file:
        # test with data as a file object
        snapshot_metafile.verify_length_and_hashes(file)
        file.seek(0)
        data = file.read()
        # test with data as bytes
        snapshot_metafile.verify_length_and_hashes(data)

        # test exceptions
        expected_length = snapshot_metafile.length
        snapshot_metafile.length = 2345
        self.assertRaises(exceptions.LengthOrHashMismatchError,
            snapshot_metafile.verify_length_and_hashes, data)

        snapshot_metafile.length = expected_length
        snapshot_metafile.hashes = {'sha256': 'incorrecthash'}
        self.assertRaises(exceptions.LengthOrHashMismatchError,
            snapshot_metafile.verify_length_and_hashes, data)

        snapshot_metafile.hashes = {
            'unsupported-alg': "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab"}
        self.assertRaises(exceptions.LengthOrHashMismatchError,
            snapshot_metafile.verify_length_and_hashes, data)

        # Test wrong algorithm format (sslib.FormatError)
        snapshot_metafile.hashes = {
            256: "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab"}
        self.assertRaises(exceptions.LengthOrHashMismatchError,
            snapshot_metafile.verify_length_and_hashes, data)

        # test optional length and hashes
        snapshot_metafile.length = None
        snapshot_metafile.hashes = None
        snapshot_metafile.verify_length_and_hashes(data)

    # Test target files' hash and length verification
    targets_path = os.path.join(
        self.repo_dir, 'metadata', 'targets.json')
    targets = Metadata.from_file(targets_path)
    file1_targetfile = targets.signed.targets['file1.txt']
    filepath = os.path.join(
        self.repo_dir, 'targets', 'file1.txt')

    with open(filepath, "rb") as file1:
        file1_targetfile.verify_length_and_hashes(file1)

        # test exceptions
        expected_length = file1_targetfile.length
        file1_targetfile.length = 2345
        self.assertRaises(exceptions.LengthOrHashMismatchError,
            file1_targetfile.verify_length_and_hashes, file1)

        file1_targetfile.length = expected_length
        file1_targetfile.hashes = {'sha256': 'incorrecthash'}
        self.assertRaises(exceptions.LengthOrHashMismatchError,
            file1_targetfile.verify_length_and_hashes, file1)
def _assert_version_equals(self, role: str, expected_version: int) -> None:
    """Assert that the local metadata version is the expected one."""
    md = Metadata.from_file(os.path.join(self.metadata_dir, f"{role}.json"))
    self.assertEqual(md.signed.version, expected_version)
def test_compact_json(self):
    path = os.path.join(self.repo_dir, 'metadata', 'targets.json')
    metadata_obj = Metadata.from_file(path)
    self.assertTrue(
        len(JSONSerializer(compact=True).serialize(metadata_obj)) <
        len(JSONSerializer().serialize(metadata_obj)))