Example 1
    def test_succinct_roles_graph_traversal(
            self, test_data: SuccinctRolesTestCase) -> None:
        # Test traversing the delegation tree when succinct roles are used.
        # For a successful traversal, all top-level metadata files plus the
        # expected bin must exist locally, and only one bin must be downloaded.

        try:
            exp_files = [*TOP_LEVEL_ROLE_NAMES, test_data.expected_target_bin]
            exp_calls = [(test_data.expected_target_bin, 1)]

            self.sim = RepositorySimulator()
            self.sim.add_succinct_roles("targets", test_data.bit_length, "bin")
            self.sim.update_snapshot()

            self.setup_subtest()

            updater = self._init_updater()
            # Explicitly call refresh to simplify the expected_calls list.
            updater.refresh()
            self.sim.fetch_tracker.metadata.clear()
            # Check that metadata dir contains only top-level roles
            self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)

            # Looking for a non-existing targetpath forces the updater
            # to visit the corresponding delegated role.
            targetfile = updater.get_targetinfo(test_data.target_path)
            self.assertIsNone(targetfile)

            # Check that the delegated roles were visited in the expected
            # order and the corresponding metadata files were persisted.
            self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
            self._assert_files_exist(exp_files)

        finally:
            self.teardown_subtest()
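For context on the test above, here is a minimal sketch of how a succinct hash-bin delegation maps a target path to a bin name. The helper name is hypothetical and the formatting details are an assumption modeled on python-tuf's SuccinctRoles; treat it as an illustration, not the library's implementation:

import hashlib

def bin_for_path(target_path: str, bit_length: int, prefix: str = "bin") -> str:
    # Keep the first 'bit_length' bits of the path's SHA-256 as the bin index
    # (hypothetical helper, modeled on SuccinctRoles.get_role_for_target)
    digest = hashlib.sha256(target_path.encode("utf-8")).digest()
    bin_number = int.from_bytes(digest[:4], "big") >> (32 - bit_length)
    # Zero-pad the hex suffix so all 2**bit_length bin names have equal width
    suffix_len = (bit_length - 1) // 4 + 1
    return f"{prefix}-{bin_number:0{suffix_len}x}"

assert bin_for_path("bar", 8) == "bin-fc"  # sha256("bar") begins with byte 0xfc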
Example 2
    def setUp(self) -> None:
        # pylint: disable-next=consider-using-with
        self.temp_dir = tempfile.TemporaryDirectory()
        self.metadata_dir = os.path.join(self.temp_dir.name, "metadata")
        self.targets_dir = os.path.join(self.temp_dir.name, "targets")
        os.mkdir(self.metadata_dir)
        os.mkdir(self.targets_dir)

        # Setup the repository, bootstrap client root.json
        self.sim = RepositorySimulator()
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])
Example 3
    def setup_subtest(self) -> None:
        # Setup repository for subtest: make sure no roots have been published
        # pylint: disable=attribute-defined-outside-init
        self.sim = RepositorySimulator()
        self.sim.signed_roots.clear()
        self.sim.root.version = 0

        if self.dump_dir is not None:
            # create subtest dumpdir
            # pylint: disable=no-member
            name = f"{self.id().split('.')[-1]}-{self.case_name}"
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)
Example 4
    def setUp(self):
        self.client_dir = tempfile.TemporaryDirectory()

        # Setup the repository, bootstrap client root.json
        self.sim = RepositorySimulator()
        with open(os.path.join(self.client_dir.name, "root.json"), "bw") as f:
            root = self.sim.download_bytes("https://example.com/metadata/1.root.json", 100000)
            f.write(root)

        if self.dump_dir is not None:
            # create test specific dump directory
            name = self.id().split('.')[-1]
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)
Example 5
    def _init_repo(
        self, consistent_snapshot: bool, prefix_targets: bool = True
    ) -> RepositorySimulator:
        """Create a new RepositorySimulator instance"""
        sim = RepositorySimulator()
        sim.root.consistent_snapshot = consistent_snapshot
        sim.root.version += 1
        sim.publish_root()
        sim.prefix_targets_with_hash = prefix_targets

        # Init trusted root with the latest consistent_snapshot
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(sim.signed_roots[-1])

        return sim
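A note on consistent_snapshot in the helper above: it changes which filenames a client requests. A minimal sketch of the naming rule (this follows the TUF specification's convention; treat it as an illustration, not RepositorySimulator's API):

def metadata_filename(role: str, version: int, consistent: bool) -> str:
    # Under consistent snapshots, non-root metadata is fetched by version;
    # root is always fetched as "<version>.root.json" either way.
    if consistent or role == "root":
        return f"{version}.{role}.json"
    return f"{role}.json"

assert metadata_filename("snapshot", 3, consistent=True) == "3.snapshot.json"
assert metadata_filename("snapshot", 3, consistent=False) == "snapshot.json"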
Example 6
class TestUpdater(unittest.TestCase):
    """Test ngclient Updater input validation."""

    def setUp(self) -> None:
        # pylint: disable-next=consider-using-with
        self.temp_dir = tempfile.TemporaryDirectory()
        self.metadata_dir = os.path.join(self.temp_dir.name, "metadata")
        self.targets_dir = os.path.join(self.temp_dir.name, "targets")
        os.mkdir(self.metadata_dir)
        os.mkdir(self.targets_dir)

        # Setup the repository, bootstrap client root.json
        self.sim = RepositorySimulator()
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def _new_updater(self) -> Updater:
        return Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            fetcher=self.sim,
        )

    def test_local_target_storage_fail(self) -> None:
        self.sim.add_target("targets", b"content", "targetpath")
        self.sim.targets.version += 1
        self.sim.update_snapshot()

        updater = self._new_updater()
        target_info = updater.get_targetinfo("targetpath")
        assert target_info is not None
        with self.assertRaises(FileNotFoundError):
            updater.download_target(target_info, filepath="")

    def test_non_existing_metadata_dir(self) -> None:
        with self.assertRaises(FileNotFoundError):
            # Initialize Updater with non-existing metadata_dir
            Updater(
                "non_existing_metadata_dir",
                "https://example.com/metadata/",
                fetcher=self.sim,
            )
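For contrast with the two failure cases above, a happy-path sketch under the same fixture (hypothetical test body; it assumes, based on usage elsewhere in these examples, that download_target returns the local file path and that get_targetinfo refreshes implicitly):

    def test_download_succeeds(self) -> None:
        # Hypothetical counterpart to test_local_target_storage_fail above
        self.sim.add_target("targets", b"content", "targetpath")
        self.sim.targets.version += 1
        self.sim.update_snapshot()

        updater = self._new_updater()
        target_info = updater.get_targetinfo("targetpath")
        assert target_info is not None
        local_path = updater.download_target(target_info)
        with open(local_path, "rb") as f:
            self.assertEqual(f.read(), b"content")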
Example 7
    def setUp(self) -> None:
        # pylint: disable-next=consider-using-with
        self.temp_dir = tempfile.TemporaryDirectory()
        self.metadata_dir = os.path.join(self.temp_dir.name, "metadata")
        self.targets_dir = os.path.join(self.temp_dir.name, "targets")
        os.mkdir(self.metadata_dir)
        os.mkdir(self.targets_dir)

        # Setup the repository, bootstrap client root.json
        self.sim = RepositorySimulator()
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])

        if self.dump_dir is not None:
            # create test specific dump directory
            name = self.id().split(".")[-1]
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)
Example 8
    @classmethod
    def setUpClass(cls) -> None:
        # pylint: disable-next=consider-using-with
        cls.temp_dir = tempfile.TemporaryDirectory()

        # Pre-create a bunch of keys and signers
        cls.keys = []
        cls.signers = []
        for _ in range(10):
            key, signer = RepositorySimulator.create_key()
            cls.keys.append(key)
            cls.signers.append(signer)
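A sketch of how a test built on this class-level fixture might consume the pre-created key material (hypothetical test body; the role-first add_key argument order mirrors its usage in Example 10 below):

    def test_add_targets_key(self) -> None:
        # Hypothetical usage of the pre-created keys and signers above
        sim = RepositorySimulator()
        sim.root.add_key("targets", self.keys[0])
        sim.signers["targets"].append(self.signers[0])
        sim.root.version += 1
        sim.publish_root()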
Example 9
    def _init_repo(self, test_case: DelegationsTestCase) -> None:
        """Create a new RepositorySimulator instance and
        populate it with delegations and target files"""

        self.sim = RepositorySimulator()
        spec_version = ".".join(SPECIFICATION_VERSION)
        for d in test_case.delegations:
            if d.rolename in self.sim.md_delegates:
                targets = self.sim.md_delegates[d.rolename].signed
            else:
                targets = Targets(1, spec_version, self.sim.safe_expiry, {},
                                  None)
            # unpack 'd' but skip "delegator"
            role = DelegatedRole(*astuple(d)[1:])
            self.sim.add_delegation(d.delegator, role, targets)

        for target in test_case.target_files:
            self.sim.add_target(*astuple(target))

        if test_case.target_files:
            self.sim.targets.version += 1
        self.sim.update_snapshot()
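Because _init_repo unpacks each delegation with astuple(d)[1:], the field order of TestDelegation matters. A sketch of the dataclass this helper assumes (field names and defaults are an assumption; after skipping 'delegator' the order must line up with DelegatedRole's constructor):

from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class TestDelegation:
    delegator: str
    rolename: str
    keyids: List[str] = field(default_factory=list)
    threshold: int = 1
    terminating: bool = False
    paths: Optional[List[str]] = field(default_factory=lambda: ["*"])
    path_hash_prefixes: Optional[List[str]] = None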
Example 10
class TestUpdater(unittest.TestCase):
    # set dump_dir to trigger repository state dumps
    dump_dir: Optional[str] = None

    def setUp(self):
        self.client_dir = tempfile.TemporaryDirectory()

        # Setup the repository, bootstrap client root.json
        self.sim = RepositorySimulator()
        with open(os.path.join(self.client_dir.name, "root.json"), "bw") as f:
            root = self.sim.download_bytes("https://example.com/metadata/1.root.json", 100000)
            f.write(root)

        if self.dump_dir is not None:
            # create test specific dump directory
            name = self.id().split('.')[-1]
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)

    def _run_refresh(self):
        if self.sim.dump_dir is not None:
            self.sim.write()

        updater = Updater(
            self.client_dir.name,
            "https://example.com/metadata/",
            "https://example.com/targets/",
            self.sim
        )
        updater.refresh()

    def test_refresh(self):
        # Update top level metadata
        self._run_refresh()

        # New root (root needs to be explicitly signed)
        self.sim.root.version += 1
        self.sim.publish_root()

        self._run_refresh()

        # New timestamp
        self.sim.update_timestamp()

        self._run_refresh()

        # New targets, snapshot, timestamp version
        self.sim.targets.version += 1
        self.sim.update_snapshot()

        self._run_refresh()

    def test_keys_and_signatures(self):
        """Example of the two trickiest test areas: keys and root updates"""

        # Update top level metadata
        self._run_refresh()

        # New targets: signed with a new key that is not in the role's keys
        old_signer = self.sim.signers["targets"].pop()
        key, signer = self.sim.create_key()
        self.sim.signers["targets"] = [signer]
        self.sim.targets.version += 1
        self.sim.update_snapshot()

        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        # New root: Add the new key as targets role key
        # (root changes require explicit publishing)
        self.sim.root.add_key("targets", key)
        self.sim.root.version += 1
        self.sim.publish_root()

        self._run_refresh()

        # New root: Raise targets threshold to 2
        self.sim.root.roles["targets"].threshold = 2
        self.sim.root.version += 1
        self.sim.publish_root()

        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        # New targets: sign with both new and old key
        self.sim.signers["targets"] = [signer, old_signer]
        self.sim.targets.version += 1
        self.sim.update_snapshot()

        self._run_refresh()

    def tearDown(self):
        self.client_dir.cleanup()
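The threshold behaviour exercised in test_keys_and_signatures follows the TUF rule that metadata is valid only with at least 'threshold' signatures from distinct keys authorized for the role. A minimal sketch of that rule (hypothetical helper, not ngclient's verification code):

def meets_threshold(signing_keyids: set, authorized_keyids: set, threshold: int) -> bool:
    # Only signatures made with keys listed for the role may count, and
    # each distinct key counts at most once toward the threshold.
    return len(signing_keyids & authorized_keyids) >= threshold

assert not meets_threshold({"new"}, {"new", "old"}, threshold=2)
assert meets_threshold({"new", "old"}, {"new", "old"}, threshold=2)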
Example 11
class TestRefresh(unittest.TestCase):
    """Test update of top-level metadata following
    'Detailed client workflow' in the specification."""

    # set dump_dir to trigger repository state dumps
    dump_dir: Optional[str] = None

    past_datetime = datetime.datetime.utcnow().replace(
        microsecond=0
    ) - datetime.timedelta(days=5)

    def setUp(self) -> None:
        # pylint: disable=consider-using-with
        self.temp_dir = tempfile.TemporaryDirectory()
        self.metadata_dir = os.path.join(self.temp_dir.name, "metadata")
        self.targets_dir = os.path.join(self.temp_dir.name, "targets")
        os.mkdir(self.metadata_dir)
        os.mkdir(self.targets_dir)

        self.sim = RepositorySimulator()

        # bootstrap client with initial root metadata
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])

        if self.dump_dir is not None:
            # create test specific dump directory
            name = self.id().split(".")[-1]
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def _run_refresh(self) -> Updater:
        """Create a new Updater instance and refresh"""
        if self.dump_dir is not None:
            self.sim.write()

        updater = Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            self.sim,
        )
        updater.refresh()
        return updater

    def _init_updater(self) -> Updater:
        """Create a new Updater instance"""
        if self.dump_dir is not None:
            self.sim.write()

        return Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            self.sim,
        )

    def _assert_files_exist(self, roles: Iterable[str]) -> None:
        """Assert that local metadata files exist for 'roles'"""
        expected_files = sorted([f"{role}.json" for role in roles])
        local_metadata_files = sorted(os.listdir(self.metadata_dir))
        self.assertListEqual(local_metadata_files, expected_files)

    def _assert_content_equals(
        self, role: str, version: Optional[int] = None
    ) -> None:
        """Assert that local file content is the expected"""
        expected_content = self.sim.fetch_metadata(role, version)
        with open(os.path.join(self.metadata_dir, f"{role}.json"), "rb") as f:
            self.assertEqual(f.read(), expected_content)

    def _assert_version_equals(self, role: str, expected_version: int) -> None:
        """Assert that local metadata version is the expected"""
        md = Metadata.from_file(os.path.join(self.metadata_dir, f"{role}.json"))
        self.assertEqual(md.signed.version, expected_version)

    def test_first_time_refresh(self) -> None:
        # Metadata dir contains only the mandatory initial root.json
        self._assert_files_exist([Root.type])

        # Add one more root version to repository so that
        # refresh() updates from local trusted root (v1) to
        # remote root (v2)
        self.sim.root.version += 1
        self.sim.publish_root()

        self._run_refresh()

        self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)
        for role in TOP_LEVEL_ROLE_NAMES:
            version = 2 if role == Root.type else None
            self._assert_content_equals(role, version)

    def test_trusted_root_missing(self) -> None:
        os.remove(os.path.join(self.metadata_dir, "root.json"))
        with self.assertRaises(OSError):
            self._run_refresh()

        # Metadata dir is empty
        self.assertFalse(os.listdir(self.metadata_dir))

    def test_trusted_root_expired(self) -> None:
        # Create an expired root version
        self.sim.root.expires = self.past_datetime
        self.sim.root.version += 1
        self.sim.publish_root()

        # Update to latest root which is expired but still
        # saved as a local root.
        updater = self._init_updater()
        with self.assertRaises(ExpiredMetadataError):
            updater.refresh()

        self._assert_files_exist([Root.type])
        self._assert_content_equals(Root.type, 2)

        # Local root metadata can be loaded even if expired
        updater = self._init_updater()

        # Create a non-expired root version and refresh
        self.sim.root.expires = self.sim.safe_expiry
        self.sim.root.version += 1
        self.sim.publish_root()
        updater.refresh()

        # Root is successfully updated to latest version
        self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)
        self._assert_content_equals(Root.type, 3)

    def test_trusted_root_unsigned(self) -> None:
        # Local trusted root is not signed
        root_path = os.path.join(self.metadata_dir, "root.json")
        md_root = Metadata.from_file(root_path)
        md_root.signatures.clear()
        md_root.to_file(root_path)

        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        # The update failed, no changes in metadata
        self._assert_files_exist([Root.type])
        md_root_after = Metadata.from_file(root_path)
        self.assertEqual(md_root.to_bytes(), md_root_after.to_bytes())

    def test_max_root_rotations(self) -> None:
        # The client must stop looking for new root versions after a
        # configured number of intermediate root files have been downloaded.
        updater = self._init_updater()
        updater.config.max_root_rotations = 3

        # Create some number of roots greater than 'max_root_rotations'
        while self.sim.root.version < updater.config.max_root_rotations + 3:
            self.sim.root.version += 1
            self.sim.publish_root()

        md_root = Metadata.from_file(
            os.path.join(self.metadata_dir, "root.json")
        )
        initial_root_version = md_root.signed.version

        updater.refresh()

        # Assert that the root version was increased by no more
        # than 'max_root_rotations'
        self._assert_version_equals(
            Root.type, initial_root_version + updater.config.max_root_rotations
        )
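
    # Why the assertion above holds: the client walks versioned root files
    # one at a time and stops after 'max_root_rotations' downloads. The
    # sketch below is an assumption about that workflow, not ngclient's
    # actual code, and the helper is hypothetical.
    def _root_rotation_sketch(self, trusted: int, newest: int, limit: int) -> int:
        for _ in range(limit):  # limit mirrors config.max_root_rotations
            if trusted + 1 > newest:
                break  # repository has no newer root to offer
            # real client: fetch f"{trusted + 1}.root.json", verify signatures,
            # require a consecutive version number, then persist it
            trusted += 1
        return trusted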

    def test_intermediate_root_incorrectly_signed(self) -> None:
        # Check for an arbitrary software attack

        # Intermediate root v2 is unsigned
        self.sim.root.version += 1
        root_signers = self.sim.signers[Root.type].copy()
        self.sim.signers[Root.type].clear()
        self.sim.publish_root()

        # Final root v3 is correctly signed
        self.sim.root.version += 1
        self.sim.signers[Root.type] = root_signers
        self.sim.publish_root()

        # Incorrectly signed intermediate root is detected
        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        # The update failed, latest root version is v1
        self._assert_files_exist([Root.type])
        self._assert_content_equals(Root.type, 1)

    def test_intermediate_root_expired(self) -> None:
        # The expiration of the new (intermediate) root metadata file
        # does not matter yet

        # Intermediate root v2 is expired
        self.sim.root.expires = self.past_datetime
        self.sim.root.version += 1
        self.sim.publish_root()

        # Final root v3 is up to date
        self.sim.root.expires = self.sim.safe_expiry
        self.sim.root.version += 1
        self.sim.publish_root()

        self._run_refresh()

        # Successfully updated to root v3
        self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)
        self._assert_content_equals(Root.type, 3)

    def test_final_root_incorrectly_signed(self) -> None:
        # Check for an arbitrary software attack
        self.sim.root.version += 1  # root v2
        self.sim.signers[Root.type].clear()
        self.sim.publish_root()

        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        # The update failed, latest root version is v1
        self._assert_files_exist([Root.type])
        self._assert_content_equals(Root.type, 1)

    def test_new_root_same_version(self) -> None:
        # Check for a rollback attack
        # Repository serves a root file with the same version as previous
        self.sim.publish_root()
        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        # The update failed, latest root version is v1
        self._assert_files_exist([Root.type])
        self._assert_content_equals(Root.type, 1)

    def test_new_root_nonconsecutive_version(self) -> None:
        # Repository serves non-consecutive root version
        self.sim.root.version += 2
        self.sim.publish_root()
        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        # The update failed, latest root version is v1
        self._assert_files_exist([Root.type])
        self._assert_content_equals(Root.type, 1)

    def test_final_root_expired(self) -> None:
        # Check for a freeze attack
        # Final root is expired
        self.sim.root.expires = self.past_datetime
        self.sim.root.version += 1
        self.sim.publish_root()

        with self.assertRaises(ExpiredMetadataError):
            self._run_refresh()

        # The update failed but final root is persisted on the file system
        self._assert_files_exist([Root.type])
        self._assert_content_equals(Root.type, 2)

    def test_new_timestamp_unsigned(self) -> None:
        # Check for an arbitrary software attack
        self.sim.signers[Timestamp.type].clear()
        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        self._assert_files_exist([Root.type])

    @patch.object(datetime, "datetime", wraps=datetime.datetime)
    def test_expired_timestamp_version_rollback(self, mock_time: Mock) -> None:
        """Verifies that local timestamp is used in rollback checks even if it is expired.

        The timestamp updates and rollback checks are performed
        with the following timing:
         - Timestamp v1 expiry set to day 7
         - First updater refresh performed on day 0
         - Repository publishes timestamp v2 on day 0
         - Timestamp v2 expiry set to day 21
         - Second updater refresh performed on day 18:
           assert that rollback check uses expired timestamp v1"""

        now = datetime.datetime.utcnow()
        self.sim.timestamp.expires = now + datetime.timedelta(days=7)

        self.sim.timestamp.version = 2

        # Make a successful update of valid metadata which stores it in cache
        self._run_refresh()

        self.sim.timestamp.expires = now + datetime.timedelta(days=21)

        self.sim.timestamp.version = 1

        mock_time.utcnow.return_value = (
            datetime.datetime.utcnow() + datetime.timedelta(days=18)
        )
        with patch("datetime.datetime", mock_time):
            # Check that rollback protection is performed even if
            # the local timestamp has expired
            with self.assertRaises(BadVersionNumberError):
                self._run_refresh()

        self._assert_version_equals(Timestamp.type, 2)

    @patch.object(datetime, "datetime", wraps=datetime.datetime)
    def test_expired_timestamp_snapshot_rollback(self, mock_time: Mock) -> None:
        """Verifies that rollback protection is done even if local timestamp has expired.

        The snapshot updates and rollback protection checks are performed
        with the following timing:
         - Timestamp v1 expiry set to day 7
         - Repository bumps snapshot to v3 on day 0
         - First updater refresh performed on day 0
         - Timestamp v2 expiry set to day 21
         - Second updater refresh performed on day 18:
           assert that rollback protection is done with expired timestamp v1"""

        now = datetime.datetime.utcnow()
        self.sim.timestamp.expires = now + datetime.timedelta(days=7)

        # Bump the snapshot version number to 3
        self.sim.update_snapshot()
        self.sim.update_snapshot()

        # Make a successful update of valid metadata which stores it in cache
        self._run_refresh()

        self.sim.snapshot.version = 1
        # Snapshot version number is set to 2, which is still less than 3
        self.sim.update_snapshot()
        self.sim.timestamp.expires = now + datetime.timedelta(days=21)

        mock_time.utcnow.return_value = (
            datetime.datetime.utcnow() + datetime.timedelta(days=18)
        )
        with patch("datetime.datetime", mock_time):
            # Assert that rollback protection is done even if
            # local timestamp has expired
            with self.assertRaises(BadVersionNumberError):
                self._run_refresh()

        self._assert_version_equals(Timestamp.type, 3)

    def test_new_timestamp_version_rollback(self) -> None:
        # Check for a rollback attack
        self.sim.timestamp.version = 2
        self._run_refresh()

        self.sim.timestamp.version = 1
        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        self._assert_version_equals(Timestamp.type, 2)

    def test_new_timestamp_snapshot_rollback(self) -> None:
        # Check for a rollback attack.
        self.sim.snapshot.version = 2
        self.sim.update_timestamp()  # timestamp v2
        self._run_refresh()

        # Snapshot meta version is smaller than previous
        self.sim.timestamp.snapshot_meta.version = 1
        self.sim.timestamp.version += 1  # timestamp v3

        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        self._assert_version_equals(Timestamp.type, 2)

    def test_new_timestamp_expired(self) -> None:
        # Check for a freeze attack
        self.sim.timestamp.expires = self.past_datetime
        self.sim.update_timestamp()

        with self.assertRaises(ExpiredMetadataError):
            self._run_refresh()

        self._assert_files_exist([Root.type])

    def test_new_timestamp_fast_forward_recovery(self) -> None:
        """Test timestamp fast-forward recovery using key rotation.

        The timestamp recovery is done with the following steps
         - Remove the timestamp key
         - Create and add a new key for timestamp
         - Bump and publish root
         - Roll back the timestamp version
        """

        # attacker updates to a higher version
        self.sim.timestamp.version = 99999

        # client refreshes the metadata and sees the new timestamp version
        self._run_refresh()
        self._assert_version_equals(Timestamp.type, 99999)

        # repository rotates timestamp keys, rolls back timestamp version
        self.sim.rotate_keys(Timestamp.type)
        self.sim.root.version += 1
        self.sim.publish_root()
        self.sim.timestamp.version = 1

        # client refreshes the metadata and sees the initial timestamp version
        self._run_refresh()
        self._assert_version_equals(Timestamp.type, 1)

    def test_new_snapshot_hash_mismatch(self) -> None:
        # Check against the timestamp role's snapshot hash

        # Update timestamp with the snapshot's hashes
        self.sim.compute_metafile_hashes_length = True
        self.sim.update_timestamp()  # timestamp v2
        self._run_refresh()

        # Modify snapshot contents without updating
        # timestamp's snapshot hash
        self.sim.snapshot.expires += datetime.timedelta(days=1)
        self.sim.snapshot.version += 1  # snapshot v2
        self.sim.timestamp.snapshot_meta.version = self.sim.snapshot.version
        self.sim.timestamp.version += 1  # timestamp v3

        # Hash mismatch error
        with self.assertRaises(LengthOrHashMismatchError):
            self._run_refresh()

        self._assert_version_equals(Timestamp.type, 3)
        self._assert_version_equals(Snapshot.type, 1)

    def test_new_snapshot_unsigned(self) -> None:
        # Check for an arbitrary software attack
        self.sim.signers[Snapshot.type].clear()
        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        self._assert_files_exist([Root.type, Timestamp.type])

    def test_new_snapshot_version_mismatch(self) -> None:
        # Check against the timestamp role's snapshot version

        # Increase snapshot version without updating timestamp
        self.sim.snapshot.version += 1
        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        self._assert_files_exist([Root.type, Timestamp.type])

    def test_new_snapshot_version_rollback(self) -> None:
        # Check for a rollback attack
        self.sim.snapshot.version = 2
        self.sim.update_timestamp()
        self._run_refresh()

        self.sim.snapshot.version = 1
        self.sim.update_timestamp()

        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        self._assert_version_equals(Snapshot.type, 2)

    def test_new_snapshot_fast_forward_recovery(self) -> None:
        """Test snapshot fast-forward recovery using key rotation.

        The snapshot recovery requires rotating both the snapshot and
        timestamp keys. It is done with the following steps:
        - Remove the snapshot and timestamp keys
        - Create and add a new key for snapshot and timestamp
        - Roll back the snapshot version
        - Bump and publish root
        - Bump the timestamp
        """

        # attacker updates to a higher version (bumping timestamp is required)
        self.sim.snapshot.version = 99999
        self.sim.update_timestamp()

        # client refreshes the metadata and sees the new snapshot version
        self._run_refresh()
        self._assert_version_equals(Snapshot.type, 99999)

        # repository rotates snapshot & timestamp keys, rolls back snapshot
        self.sim.rotate_keys(Snapshot.type)
        self.sim.rotate_keys(Timestamp.type)
        self.sim.root.version += 1
        self.sim.publish_root()

        self.sim.snapshot.version = 1
        self.sim.update_timestamp()

        # client refreshes the metadata and sees the initial snapshot version
        self._run_refresh()
        self._assert_version_equals(Snapshot.type, 1)

    def test_new_snapshot_expired(self) -> None:
        # Check for a freeze attack
        self.sim.snapshot.expires = self.past_datetime
        self.sim.update_snapshot()

        with self.assertRaises(ExpiredMetadataError):
            self._run_refresh()

        self._assert_files_exist([Root.type, Timestamp.type])

    def test_new_targets_hash_mismatch(self) -> None:
        # Check against the snapshot role's targets hashes

        # Update snapshot with the hashes of targets metadata
        self.sim.compute_metafile_hashes_length = True
        self.sim.update_snapshot()
        self._run_refresh()

        # Modify targets contents without updating
        # snapshot's targets hashes
        self.sim.targets.version += 1
        self.sim.snapshot.meta[
            "targets.json"
        ].version = self.sim.targets.version
        self.sim.snapshot.version += 1
        self.sim.update_timestamp()

        with self.assertRaises(LengthOrHashMismatchError):
            self._run_refresh()

        self._assert_version_equals(Snapshot.type, 3)
        self._assert_version_equals(Targets.type, 1)

    def test_new_targets_unsigned(self) -> None:
        # Check for an arbitrary software attack
        self.sim.signers[Targets.type].clear()
        with self.assertRaises(UnsignedMetadataError):
            self._run_refresh()

        self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type])

    def test_new_targets_version_mismatch(self) -> None:
        # Check against the snapshot role's targets version

        # Increase targets version without updating snapshot
        self.sim.targets.version += 1
        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

        self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type])

    def test_new_targets_expired(self) -> None:
        # Check for a freeze attack.
        self.sim.targets.expires = self.past_datetime
        self.sim.update_snapshot()

        with self.assertRaises(ExpiredMetadataError):
            self._run_refresh()

        self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type])

    def test_compute_metafile_hashes_length(self) -> None:
        self.sim.compute_metafile_hashes_length = True
        self.sim.update_snapshot()
        self._run_refresh()
        self._assert_version_equals(Timestamp.type, 2)
        self._assert_version_equals(Snapshot.type, 2)

        self.sim.compute_metafile_hashes_length = False
        self.sim.update_snapshot()
        self._run_refresh()

        self._assert_version_equals(Timestamp.type, 3)
        self._assert_version_equals(Snapshot.type, 3)

    def test_new_targets_fast_forward_recovery(self) -> None:
        """Test targets fast-forward recovery using key rotation.

        The targets recovery is done by rotating the snapshot keys, with the
        following steps:
            - Remove the snapshot key
            - Create and add a new key for snapshot
            - Bump and publish root
            - Roll back the targets version
        """
        # attacker updates to a higher version
        self.sim.targets.version = 99999
        self.sim.update_snapshot()

        # client refreshes the metadata and sees the new targets version
        self._run_refresh()
        self._assert_version_equals(Targets.type, 99999)

        # repository rotates snapshot keys, rolls back targets version
        self.sim.rotate_keys(Snapshot.type)
        self.sim.root.version += 1
        self.sim.publish_root()

        self.sim.targets.version = 1
        self.sim.update_snapshot()

        # client refreshes the metadata and sees the initial targets version
        self._run_refresh()
        self._assert_version_equals(Targets.type, 1)

    @patch.object(builtins, "open", wraps=builtins.open)
    def test_not_loading_targets_twice(self, wrapped_open: MagicMock) -> None:
        # Do not load targets roles more than once when traversing
        # the delegations tree

        # Add new delegated targets, update the snapshot
        spec_version = ".".join(SPECIFICATION_VERSION)
        targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None)
        role = DelegatedRole("role1", [], 1, False, ["*"], None)
        self.sim.add_delegation("targets", role, targets)
        self.sim.update_snapshot()

        # Run refresh, top-level roles are loaded
        updater = self._run_refresh()
        # Clean up calls to open during refresh()
        wrapped_open.reset_mock()

        # First time looking for "somepath", only 'role1' must be loaded
        updater.get_targetinfo("somepath")
        wrapped_open.assert_called_once_with(
            os.path.join(self.metadata_dir, "role1.json"), "rb"
        )
        wrapped_open.reset_mock()
        # Second call to get_targetinfo, all metadata is already loaded
        updater.get_targetinfo("somepath")
        wrapped_open.assert_not_called()

    def test_snapshot_rollback_with_local_snapshot_hash_mismatch(self) -> None:
        # Test that the snapshot rollback check is triggered on a newly
        # downloaded snapshot: the local snapshot is loaded for the check even
        # though its hash mismatches timestamp.snapshot_meta.

        # With this flag raised, on a timestamp update the simulator will:
        # 1) compute the hash of the new modified version of snapshot
        # 2) assign the hash to timestamp.snapshot_meta
        # The purpose is to create a hash mismatch between timestamp.meta and
        # the local snapshot, but a hash match between timestamp.meta and the
        # next snapshot version.
        self.sim.compute_metafile_hashes_length = True

        # Initialize all metadata and assign targets version higher than 1.
        self.sim.targets.version = 2
        self.sim.update_snapshot()
        self._run_refresh()

        # The new targets must have a lower version than the local trusted one.
        self.sim.targets.version = 1
        self.sim.update_snapshot()

        # During the snapshot update, the local snapshot is loaded even though
        # its hash mismatches timestamp.snapshot_meta, because it is considered
        # trusted.
        # The refresh should fail: the newly fetched snapshot lowers the
        # snapshot.meta["targets.json"] version by 1, which raises an error.
        with self.assertRaises(BadVersionNumberError):
            self._run_refresh()

    @patch.object(builtins, "open", wraps=builtins.open)
    def test_load_metadata_from_cache(self, wrapped_open: MagicMock) -> None:

        # Add new delegated targets
        spec_version = ".".join(SPECIFICATION_VERSION)
        targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None)
        role = DelegatedRole("role1", [], 1, False, ["*"], None)
        self.sim.add_delegation("targets", role, targets)
        self.sim.update_snapshot()

        # Make a successful update of valid metadata which stores it in cache
        updater = self._run_refresh()
        updater.get_targetinfo("non_existent_target")

        # Clean up calls to open during refresh()
        wrapped_open.reset_mock()
        # Clean up fetch tracker metadata
        self.sim.fetch_tracker.metadata.clear()

        # Create a new updater and perform a second update while
        # the metadata is already stored in cache (metadata dir)
        updater = Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            self.sim,
        )
        updater.get_targetinfo("non_existent_target")

        # Test that metadata is loaded from cache and not downloaded
        wrapped_open.assert_has_calls(
            [
                call(os.path.join(self.metadata_dir, "root.json"), "rb"),
                call(os.path.join(self.metadata_dir, "timestamp.json"), "rb"),
                call(os.path.join(self.metadata_dir, "snapshot.json"), "rb"),
                call(os.path.join(self.metadata_dir, "targets.json"), "rb"),
                call(os.path.join(self.metadata_dir, "role1.json"), "rb"),
            ]
        )

        expected_calls = [("root", 2), ("timestamp", None)]
        self.assertListEqual(self.sim.fetch_tracker.metadata, expected_calls)

    @patch.object(datetime, "datetime", wraps=datetime.datetime)
    def test_expired_metadata(self, mock_time: Mock) -> None:
        """Verifies that expired local timestamp/snapshot can be used for
        updating from remote.

        The updates and verifications are performed with the following timing:
         - Timestamp v1 expiry set to day 7
         - First updater refresh performed on day 0
         - Repository bumps snapshot and targets to v2 on day 0
         - Timestamp v2 expiry set to day 21
         - Second updater refresh performed on day 18;
           it succeeds and the final timestamp/snapshot versions are v2"""

        now = datetime.datetime.utcnow()
        self.sim.timestamp.expires = now + datetime.timedelta(days=7)

        # Make a successful update of valid metadata which stores it in cache
        self._run_refresh()

        self.sim.targets.version += 1
        self.sim.update_snapshot()
        self.sim.timestamp.expires = now + datetime.timedelta(days=21)

        # Mock time so that the local timestamp has expired
        # but the new timestamp has not
        mock_time.utcnow.return_value = (
            datetime.datetime.utcnow() + datetime.timedelta(days=18)
        )
        with patch("datetime.datetime", mock_time):
            self._run_refresh()

        # Assert that the final version of timestamp/snapshot is version 2
        # which means a successful refresh is performed
        # with expired local metadata
        for role in ["timestamp", "snapshot", "targets"]:
            md = Metadata.from_file(
                os.path.join(self.metadata_dir, f"{role}.json")
            )
            self.assertEqual(md.signed.version, 2)

    def test_max_metadata_lengths(self) -> None:
        """Test that clients configured max metadata lengths are respected"""

        # client has root v1 already: create a new one available for download
        self.sim.root.version += 1
        self.sim.publish_root()

        config_vars = [
            "root_max_length",
            "timestamp_max_length",
            "snapshot_max_length",
            "targets_max_length",
        ]
        # make sure going over any length limit raises DownloadLengthMismatchError
        for var_name in config_vars:
            updater = self._init_updater()
            setattr(updater.config, var_name, 100)
            with self.assertRaises(DownloadLengthMismatchError):
                updater.refresh()

        # All good with normal length limits
        updater = self._init_updater()
        updater.refresh()

    def test_timestamp_eq_versions_check(self) -> None:
        # Test that a modified timestamp with different content but the same
        # version doesn't replace the valid locally stored one.

        # Make a successful update of valid metadata which stores it in cache
        self._run_refresh()
        initial_timestamp_meta_ver = self.sim.timestamp.snapshot_meta.version

        # Change timestamp without bumping its version in order to test if a new
        # timestamp with the same version will be persisted.
        self.sim.timestamp.snapshot_meta.version = 100
        self._run_refresh()

        # If the local timestamp md file has the same snapshot_meta.version as
        # the initial one, then the new modified timestamp has not been stored.
        timestamp_path = os.path.join(self.metadata_dir, "timestamp.json")
        timestamp: Metadata[Timestamp] = Metadata.from_file(timestamp_path)
        self.assertEqual(
            initial_timestamp_meta_ver, timestamp.signed.snapshot_meta.version
        )
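Taken together, the TestRefresh cases above pair each attack class from the specification's 'Detailed client workflow' with a specific ngclient exception. A compact summary compiled from the tests above (illustrative mapping, not an API):

EXPECTED_REFRESH_ERRORS = {
    "unsigned or incorrectly signed metadata": "UnsignedMetadataError",
    "version rollback, same-version or non-consecutive root": "BadVersionNumberError",
    "expired metadata (freeze attack)": "ExpiredMetadataError",
    "hash mismatch with the meta entry": "LengthOrHashMismatchError",
    "download over a configured max length": "DownloadLengthMismatchError",
}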
Example 12
class TestDelegations(unittest.TestCase):
    """Base class for delegation tests"""

    # set dump_dir to trigger repository state dumps
    dump_dir: Optional[str] = None

    def setUp(self) -> None:
        # pylint: disable=consider-using-with
        self.subtest_count = 0
        self.temp_dir = tempfile.TemporaryDirectory()
        self.metadata_dir = os.path.join(self.temp_dir.name, "metadata")
        self.targets_dir = os.path.join(self.temp_dir.name, "targets")
        os.mkdir(self.metadata_dir)
        os.mkdir(self.targets_dir)
        self.sim: RepositorySimulator

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def setup_subtest(self) -> None:
        self.subtest_count += 1
        if self.dump_dir is not None:
            # create subtest dumpdir
            name = f"{self.id().split('.')[-1]}-{self.subtest_count}"
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)
            # dump the repo simulator metadata
            self.sim.write()

    def teardown_subtest(self) -> None:
        utils.cleanup_dir(self.metadata_dir)

    def _init_repo(self, test_case: DelegationsTestCase) -> None:
        """Create a new RepositorySimulator instance and
        populate it with delegations and target files"""

        self.sim = RepositorySimulator()
        spec_version = ".".join(SPECIFICATION_VERSION)
        for d in test_case.delegations:
            if d.rolename in self.sim.md_delegates:
                targets = self.sim.md_delegates[d.rolename].signed
            else:
                targets = Targets(1, spec_version, self.sim.safe_expiry, {},
                                  None)
            # unpack 'd' but skip "delegator"
            role = DelegatedRole(*astuple(d)[1:])
            self.sim.add_delegation(d.delegator, role, targets)

        for target in test_case.target_files:
            self.sim.add_target(*astuple(target))

        if test_case.target_files:
            self.sim.targets.version += 1
        self.sim.update_snapshot()

    def _init_updater(self) -> Updater:
        """Create a new Updater instance"""
        # Init trusted root for Updater
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])

        return Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            self.sim,
        )

    def _assert_files_exist(self, roles: Iterable[str]) -> None:
        """Assert that local metadata files exist for 'roles'"""
        expected_files = sorted([f"{role}.json" for role in roles])
        local_metadata_files = sorted(os.listdir(self.metadata_dir))
        self.assertListEqual(local_metadata_files, expected_files)
Example 13
class TestDelegationsGraphs(TestDelegations):
    """Test creating delegations graphs with different complexity
    and successfully updating the delegated roles metadata"""

    graphs: utils.DataSet = {
        "basic delegation":
        DelegationsTestCase(
            delegations=[TestDelegation("targets", "A")],
            visited_order=["A"],
        ),
        "single level delegations":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
            ],
            visited_order=["A", "B"],
        ),
        "two-level delegations":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("B", "C"),
            ],
            visited_order=["A", "B", "C"],
        ),
        "two-level test DFS order of traversal":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("A", "C"),
                TestDelegation("A", "D"),
            ],
            visited_order=["A", "C", "D", "B"],
        ),
        "three-level delegation test DFS order of traversal":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("A", "C"),
                TestDelegation("C", "D"),
            ],
            visited_order=["A", "C", "D", "B"],
        ),
        "two-level terminating ignores all but role's descendants":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("A", "C", terminating=True),
                TestDelegation("A", "D"),
            ],
            visited_order=["A", "C"],
        ),
        "three-level terminating ignores all but role's descendants":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("A", "C", terminating=True),
                TestDelegation("C", "D"),
            ],
            visited_order=["A", "C", "D"],
        ),
        "two-level ignores all branches not matching 'paths'":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A", paths=["*.py"]),
                TestDelegation("targets", "B"),
                TestDelegation("A", "C"),
            ],
            visited_order=["B"],
        ),
        "three-level ignores all branches not matching 'paths'":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("A", "C", paths=["*.py"]),
                TestDelegation("C", "D"),
            ],
            visited_order=["A", "B"],
        ),
        "cyclic graph":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("B", "C"),
                TestDelegation("C", "D"),
                TestDelegation("D", "B"),
            ],
            visited_order=["A", "B", "C", "D"],
        ),
        "two roles delegating to a third":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("B", "C"),
                TestDelegation("A", "C"),
            ],
            # All else being equal, 'C' is reached through 'A' first
            visited_order=["A", "C", "B"],
        ),
        "two roles delegating to a third different 'paths'":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("B", "C"),
                TestDelegation("A", "C", paths=["*.py"]),
            ],
            # 'C' is reached through 'B' since 'A' does not delegate a matching pattern
            visited_order=["A", "B", "C"],
        ),
        "max number of delegations":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "A"),
                TestDelegation("targets", "B"),
                TestDelegation("targets", "C"),
                TestDelegation("C", "D"),
                TestDelegation("C", "E"),
            ],
            # "E" is skipped, max_delegations is 4
            visited_order=["A", "B", "C", "D"],
        ),
    }

    @utils.run_sub_tests_with_dataset(graphs)
    def test_graph_traversal(self, test_data: DelegationsTestCase) -> None:
        """Test that delegated roles are traversed in the order of appearance
        in the delegator's metadata, using pre-order depth-first search"""

        try:
            exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order]
            exp_calls = [(role, 1) for role in test_data.visited_order]

            self._init_repo(test_data)
            self.setup_subtest()

            updater = self._init_updater()
            # restrict the max number of delegations to simplify the test
            updater.config.max_delegations = 4
            # Explicitly call refresh to simplify the expected_calls list
            updater.refresh()
            self.sim.fetch_tracker.metadata.clear()
            # Check that metadata dir contains only top-level roles
            self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)

            # Looking for a non-existing targetpath forces the updater
            # to visit all possible delegated roles
            targetfile = updater.get_targetinfo("missingpath")
            self.assertIsNone(targetfile)
            # Check that the delegated roles were visited in the expected
            # order and the corresponding metadata files were persisted
            self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
            self._assert_files_exist(exp_files)
        finally:
            self.teardown_subtest()

    invalid_metadata: utils.DataSet = {
        "unsigned delegated role":
        DelegationsTestCase(
            delegations=[
                TestDelegation("targets", "invalid"),
                TestDelegation("targets", "B"),
                TestDelegation("invalid", "C"),
            ],
            # The traversal stops after visiting an invalid role
            visited_order=["invalid"],
        )
    }

    @utils.run_sub_tests_with_dataset(invalid_metadata)
    def test_invalid_metadata(self, test_data: DelegationsTestCase) -> None:
        try:
            self._init_repo(test_data)
            # The invalid role is the last visited
            invalid_role = test_data.visited_order[-1]
            self.sim.signers[invalid_role].clear()

            self.setup_subtest()
            # The invalid role metadata must not be persisted
            exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order[:-1]]
            exp_calls = [(role, 1) for role in test_data.visited_order]

            updater = self._init_updater()
            # Explicitly call refresh to simplify the expected_calls list
            updater.refresh()
            self.sim.fetch_tracker.metadata.clear()

            with self.assertRaises(UnsignedMetadataError):
                updater.get_targetinfo("missingpath")
            # Check that there were no visited roles after the invalid one
            # and only the valid metadata files were persisted
            self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
            self._assert_files_exist(exp_files)
        finally:
            self.teardown_subtest()

    def test_safely_encoded_rolenames(self) -> None:
        """Test that delegated roles names are safely encoded in the filenames
        and URLs.
        """

        roles_to_filenames = {
            "../a": "..%2Fa.json",
            ".": "..json",
            "/": "%2F.json",
            "ö": "%C3%B6.json",
        }

        delegations = []
        for rolename in roles_to_filenames:
            delegations.append(TestDelegation("targets", rolename))

        delegated_rolenames = DelegationsTestCase(delegations)
        self._init_repo(delegated_rolenames)
        updater = self._init_updater()
        updater.refresh()

        # trigger updater to fetch the delegated metadata
        self.sim.fetch_tracker.metadata.clear()
        updater.get_targetinfo("anything")

        # assert that local delegated metadata filenames are as expected
        local_metadata = os.listdir(self.metadata_dir)
        for fname in roles_to_filenames.values():
            self.assertTrue(fname in local_metadata)

        # assert that requested URLs are quoted without extension
        exp_calls = [(quoted[:-5], 1)
                     for quoted in roles_to_filenames.values()]
        self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
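
    # The filename mapping above can be reproduced with urllib.parse.quote;
    # the sketch below is a hypothetical helper checking that assumption
    # (quoting with safe="" plus a ".json" suffix), not part of the suite.
    def _rolename_quoting_sketch(self) -> None:
        from urllib.parse import quote

        roles_to_filenames = {
            "../a": "..%2Fa.json",
            ".": "..json",
            "/": "%2F.json",
            "ö": "%C3%B6.json",
        }
        for rolename, fname in roles_to_filenames.items():
            assert quote(rolename, safe="") + ".json" == fname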

    hash_bins_graph: utils.DataSet = {
        "delegations":
        DelegationsTestCase(
            delegations=[
                TestDelegation(
                    "targets",
                    "role1",
                    paths=None,
                    path_hash_prefixes=["8", "9", "a", "b"],
                ),
                TestDelegation(
                    "targets",
                    "role2",
                    paths=None,
                    path_hash_prefixes=["0", "1", "2", "3"],
                ),
                TestDelegation(
                    "targets",
                    "role3",
                    paths=None,
                    path_hash_prefixes=["c", "d", "e", "f"],
                ),
            ],
            visited_order=["role1", "role2", "role3"],
        ),
    }

    @utils.run_sub_tests_with_dataset(hash_bins_graph)
    def test_hash_bins_graph_traversal(self,
                                       test_data: DelegationsTestCase) -> None:
        """Test that delegated roles are traversed in the order of appearance
        in the delegator's metadata, using pre-order depth-first search and that
        they correctly reffer to the corresponding hash bin prefixes"""

        try:
            exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order]
            exp_calls = [(role, 1) for role in test_data.visited_order]

            self._init_repo(test_data)
            self.setup_subtest()

            updater = self._init_updater()
            # Explicitly call refresh to simplify the expected_calls list
            updater.refresh()
            self.sim.fetch_tracker.metadata.clear()
            # Check that metadata dir contains only top-level roles
            self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)

            # Looking for a non-existing targetpath forces the updater
            # to visit the corresponding delegated role
            targetfile = updater.get_targetinfo("missingpath")
            self.assertIsNone(targetfile)
            targetfile = updater.get_targetinfo("othermissingpath")
            self.assertIsNone(targetfile)
            targetfile = updater.get_targetinfo("thirdmissingpath")
            self.assertIsNone(targetfile)
            # Check that the delegated roles were visited in the expected
            # order and the corresponding metadata files were persisted
            self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
            self._assert_files_exist(exp_files)
        finally:
            self.teardown_subtest()

    @dataclass
    class SuccinctRolesTestCase:
        bit_length: int
        target_path: str
        expected_target_bin: str

    # Setting bit_length makes the total number of bins 2**bit_length.
    # In each test case, target_path is a path to a random target to fetch,
    # and expected_target_bin is the bin we expect to visit.
    succinct_bins_graph: utils.DataSet = {
        "bin amount = 2, taget bin index 0":
        SuccinctRolesTestCase(
            bit_length=1,
            target_path="boo",
            expected_target_bin="bin-0",
        ),
        "bin amount = 2, taget bin index 1":
        SuccinctRolesTestCase(
            bit_length=1,
            target_path="too",
            expected_target_bin="bin-1",
        ),
        "bin amount = 4, taget bin index 0":
        SuccinctRolesTestCase(
            bit_length=2,
            target_path="foo",
            expected_target_bin="bin-0",
        ),
        "bin amount = 4, taget bin index 1":
        SuccinctRolesTestCase(
            bit_length=2,
            target_path="doo",
            expected_target_bin="bin-1",
        ),
        "bin amount = 4, taget bin index 2":
        SuccinctRolesTestCase(
            bit_length=2,
            target_path="too",
            expected_target_bin="bin-2",
        ),
        "bin amount = 4, taget bin index 3":
        SuccinctRolesTestCase(
            bit_length=2,
            target_path="bar",
            expected_target_bin="bin-3",
        ),
        "bin amount = 256, taget bin index fc":
        SuccinctRolesTestCase(
            bit_length=8,
            target_path="bar",
            expected_target_bin="bin-fc",
        ),
    }

    @utils.run_sub_tests_with_dataset(succinct_bins_graph)
    def test_succinct_roles_graph_traversal(
            self, test_data: SuccinctRolesTestCase) -> None:
        # Test traversing the delegation tree when succinct roles are used. For
        # a successful traversal all top-level metadata files plus the expected
        # bin must exist locally, and only that one bin may be downloaded.

        try:
            exp_files = [*TOP_LEVEL_ROLE_NAMES, test_data.expected_target_bin]
            exp_calls = [(test_data.expected_target_bin, 1)]

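            # Set up a repository whose "targets" role delegates to succinct
            # hash bins and publish the new delegation via snapshot/timestamp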
            self.sim = RepositorySimulator()
            self.sim.add_succinct_roles("targets", test_data.bit_length, "bin")
            self.sim.update_snapshot()

            self.setup_subtest()

            updater = self._init_updater()
            # Explicitly call refresh() to simplify the expected_calls list.
            updater.refresh()
            self.sim.fetch_tracker.metadata.clear()
            # Check that metadata dir contains only top-level roles
            self._assert_files_exist(TOP_LEVEL_ROLE_NAMES)

            # Looking up a non-existing target path forces the updater
            # to visit the corresponding delegated role.
            targetfile = updater.get_targetinfo(test_data.target_path)
            self.assertIsNone(targetfile)

            # Check that the delegated roles were visited in the expected
            # order and the corresponding metadata files were persisted.
            self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
            self._assert_files_exist(exp_files)

        finally:
            self.teardown_subtest()
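
Both the hash-prefix and the succinct datasets above hinge on how a target
path is mapped to a delegated role. The standalone sketch below is not part
of the test suite; it mirrors the mapping that python-tuf's Metadata API
implements for path_hash_prefixes and for succinct hash bin delegations
(the value in the final print follows the dataset above).

import hashlib


def matches_hash_prefixes(prefixes, target_path):
    """True if a role with these path_hash_prefixes is responsible for
    target_path: the hex SHA-256 of the path starts with one of the prefixes.
    """
    digest = hashlib.sha256(target_path.encode()).hexdigest()
    return any(digest.startswith(prefix) for prefix in prefixes)


def succinct_bin_name(name_prefix, bit_length, target_path):
    """Name of the succinct hash bin responsible for target_path: the bin
    index is taken from the top bit_length bits of SHA-256(target_path).
    """
    digest = hashlib.sha256(target_path.encode()).digest()
    bin_index = int.from_bytes(digest[:4], "big") >> (32 - bit_length)
    suffix_len = (bit_length - 1) // 4 + 1  # hex digits needed for the index
    return f"{name_prefix}-{bin_index:0{suffix_len}x}"


print(succinct_bin_name("bin", 8, "bar"))  # "bin-fc", per the dataset above
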
Example No. 14
class TestFetchTarget(unittest.TestCase):
    """Test ngclient downloading and caching target files."""

    # set dump_dir to trigger repository state dumps
    dump_dir: Optional[str] = None

    def setUp(self) -> None:
        # pylint: disable-next=consider-using-with
        self.temp_dir = tempfile.TemporaryDirectory()
        self.metadata_dir = os.path.join(self.temp_dir.name, "metadata")
        self.targets_dir = os.path.join(self.temp_dir.name, "targets")
        os.mkdir(self.metadata_dir)
        os.mkdir(self.targets_dir)

        # Setup the repository, bootstrap client root.json
        self.sim = RepositorySimulator()
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])

        if self.dump_dir is not None:
            # create test specific dump directory
            name = self.id().split(".")[-1]
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)

    def tearDown(self) -> None:
        self.temp_dir.cleanup()

    def _init_updater(self) -> Updater:
        """Creates a new updater instance."""
        if self.sim.dump_dir is not None:
            self.sim.write()

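        # the RepositorySimulator doubles as the fetcher, serving metadata
        # and targets from memory instead of over HTTP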
        updater = Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            self.sim,
        )
        return updater

    targets: utils.DataSet = {
        "standard case":
        TestTarget(
            path="targetpath",
            content=b"target content",
            encoded_path="targetpath",
        ),
        "non-asci case":
        TestTarget(
            path="åäö",
            content=b"more content",
            encoded_path="%C3%A5%C3%A4%C3%B6",
        ),
        "subdirectory case":
        TestTarget(
            path="a/b/c/targetpath",
            content=b"dir target content",
            encoded_path="a%2Fb%2Fc%2Ftargetpath",
        ),
    }

    @utils.run_sub_tests_with_dataset(targets)
    def test_fetch_target(self, target: TestTarget) -> None:
        path = os.path.join(self.targets_dir, target.encoded_path)

        updater = self._init_updater()
        # target does not exist yet
        self.assertIsNone(updater.get_targetinfo(target.path))

        # Add targets to repository
        self.sim.targets.version += 1
        self.sim.add_target("targets", target.content, target.path)
        self.sim.update_snapshot()

        updater = self._init_updater()
        # target now exists, is not in cache yet
        info = updater.get_targetinfo(target.path)
        assert info is not None
        # Test without and with explicit local filepath
        self.assertIsNone(updater.find_cached_target(info))
        self.assertIsNone(updater.find_cached_target(info, path))

        # download target, assert it is in cache and content is correct
        self.assertEqual(path, updater.download_target(info))
        self.assertEqual(path, updater.find_cached_target(info))
        self.assertEqual(path, updater.find_cached_target(info, path))

        with open(path, "rb") as f:
            self.assertEqual(f.read(), target.content)

        # download using explicit filepath as well
        os.remove(path)
        self.assertEqual(path, updater.download_target(info, path))
        self.assertEqual(path, updater.find_cached_target(info))
        self.assertEqual(path, updater.find_cached_target(info, path))

    def test_invalid_target_download(self) -> None:
        target = TestTarget("targetpath", b"content", "targetpath")

        # Add target to repository
        self.sim.targets.version += 1
        self.sim.add_target("targets", target.content, target.path)
        self.sim.update_snapshot()

        updater = self._init_updater()
        info = updater.get_targetinfo(target.path)
        assert info is not None

        # Corrupt the file content to not match the hash
        self.sim.target_files[target.path].data = b"conten@"
        with self.assertRaises(RepositoryError):
            updater.download_target(info)

        # Corrupt the file content to not match the length
        self.sim.target_files[target.path].data = b"cont"
        with self.assertRaises(RepositoryError):
            updater.download_target(info)

        # Verify the file is not persisted in cache
        self.assertIsNone(updater.find_cached_target(info))

    def test_invalid_target_cache(self) -> None:
        target = TestTarget("targetpath", b"content", "targetpath")

        # Add target to repository
        self.sim.targets.version += 1
        self.sim.add_target("targets", target.content, target.path)
        self.sim.update_snapshot()

        # Download the target
        updater = self._init_updater()
        info = updater.get_targetinfo(target.path)
        assert info is not None
        path = updater.download_target(info)
        self.assertEqual(path, updater.find_cached_target(info))

        # Add newer content to the same targetpath
        target.content = b"contentv2"
        self.sim.targets.version += 1
        self.sim.add_target("targets", target.content, target.path)
        self.sim.update_snapshot()

        # Newer content is detected, old cached version is not used
        updater = self._init_updater()
        info = updater.get_targetinfo(target.path)
        assert info is not None
        self.assertIsNone(updater.find_cached_target(info))

        # Download the target, assert it is cached and has the newer content
        path = updater.download_target(info)
        self.assertEqual(path, updater.find_cached_target(info))
        with open(path, "rb") as f:
            self.assertEqual(f.read(), target.content)
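
The tests above pin down ngclient's caching contract: find_cached_target()
returns a path only when a verified, up-to-date copy of the target is already
on disk, and download_target() checks length and hashes before persisting,
raising RepositoryError on any mismatch. A minimal usage sketch with
placeholder paths and URLs (metadata_dir must already contain a trusted
root.json, as in setUp() above):

from tuf.ngclient import Updater

updater = Updater(
    metadata_dir="/tmp/metadata",
    metadata_base_url="https://example.com/metadata/",
    target_dir="/tmp/targets",
    target_base_url="https://example.com/targets/",
)
updater.refresh()  # update top-level metadata first

info = updater.get_targetinfo("targetpath")  # traverses the delegation graph
if info is None:
    raise RuntimeError("target not found in repository metadata")

path = updater.find_cached_target(info)  # None if not cached or outdated
if path is None:
    # downloads, verifies length and hashes, then persists to target_dir
    path = updater.download_target(info)
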
Example No. 15
class TestUpdaterKeyRotations(unittest.TestCase):
    """Test ngclient root rotation handling"""

    # set dump_dir to trigger repository state dumps
    dump_dir: Optional[str] = None
    temp_dir: ClassVar[tempfile.TemporaryDirectory]
    keys: ClassVar[List[Key]]
    signers: ClassVar[List[SSlibSigner]]

    @classmethod
    def setUpClass(cls) -> None:
        # pylint: disable-next=consider-using-with
        cls.temp_dir = tempfile.TemporaryDirectory()

        # Pre-create a bunch of keys and signers
        cls.keys = []
        cls.signers = []
        for _ in range(10):
            key, signer = RepositorySimulator.create_key()
            cls.keys.append(key)
            cls.signers.append(signer)

    @classmethod
    def tearDownClass(cls) -> None:
        cls.temp_dir.cleanup()

    def setup_subtest(self) -> None:
        # Setup repository for subtest: make sure no roots have been published
        # pylint: disable=attribute-defined-outside-init
        self.sim = RepositorySimulator()
        self.sim.signed_roots.clear()
        self.sim.root.version = 0
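        # (the first publish_root() call in a test case bumps this to 1)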

        if self.dump_dir is not None:
            # create subtest dumpdir
            # pylint: disable=no-member
            name = f"{self.id().split('.')[-1]}-{self.case_name}"
            self.sim.dump_dir = os.path.join(self.dump_dir, name)
            os.mkdir(self.sim.dump_dir)

    def _run_refresh(self) -> None:
        """Create new updater, run refresh"""
        if self.sim.dump_dir is not None:
            self.sim.write()

        # bootstrap with initial root
        # pylint: disable=attribute-defined-outside-init
        self.metadata_dir = tempfile.mkdtemp(dir=self.temp_dir.name)
        with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f:
            f.write(self.sim.signed_roots[0])

        updater = Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            fetcher=self.sim,
        )
        updater.refresh()

    # fmt: off
    root_rotation_cases = {
        "1-of-1 key rotation": [
            MdVersion(keys=[1], threshold=1, sigs=[1]),
            MdVersion(keys=[2], threshold=1, sigs=[2, 1]),
            MdVersion(keys=[2], threshold=1, sigs=[2]),
        ],
        "1-of-1 key rotation, unused signatures": [
            MdVersion(keys=[1], threshold=1, sigs=[3, 1, 4]),
            MdVersion(keys=[2], threshold=1, sigs=[3, 2, 1, 4]),
            MdVersion(keys=[2], threshold=1, sigs=[3, 2, 4]),
        ],
        "1-of-1 key rotation fail: not signed with old key": [
            MdVersion(keys=[1], threshold=1, sigs=[1]),
            MdVersion(keys=[2],
                      threshold=1,
                      sigs=[2, 3, 4],
                      res=UnsignedMetadataError),
        ],
        "1-of-1 key rotation fail: not signed with new key": [
            MdVersion(keys=[1], threshold=1, sigs=[1]),
            MdVersion(keys=[2],
                      threshold=1,
                      sigs=[1, 3, 4],
                      res=UnsignedMetadataError),
        ],
        "3-of-5, sign with different keycombos": [
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 4, 1]),
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]),
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]),
        ],
        "3-of-5, one key rotated": [
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
            MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 1]),
        ],
        "3-of-5, one key rotate fails: not signed with 3 new keys": [
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
            MdVersion(keys=[0, 1, 3, 4, 5],
                      threshold=3,
                      sigs=[0, 2, 4],
                      res=UnsignedMetadataError),
        ],
        "3-of-5, one key rotate fails: not signed with 3 old keys": [
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
            MdVersion(keys=[0, 1, 3, 4, 5],
                      threshold=3,
                      sigs=[0, 4, 5],
                      res=UnsignedMetadataError),
        ],
        "3-of-5, one key rotated, with intermediate step": [
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
            MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4, 5]),
            MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 5]),
        ],
        "3-of-5, all keys rotated, with intermediate step": [
            MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
            MdVersion(keys=[5, 6, 7, 8, 9],
                      threshold=3,
                      sigs=[0, 2, 4, 5, 6, 7]),
            MdVersion(keys=[5, 6, 7, 8, 9], threshold=3, sigs=[5, 6, 7]),
        ],
        "1-of-3 threshold increase to 2-of-3": [
            MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]),
            MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]),
        ],
        "1-of-3 threshold bump to 2-of-3 fails: new threshold not reached": [
            MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]),
            MdVersion(keys=[1, 2, 3],
                      threshold=2,
                      sigs=[2],
                      res=UnsignedMetadataError),
        ],
        "2-of-3 threshold decrease to 1-of-3": [
            MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]),
            MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1, 2]),
            MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]),
        ],
        "2-of-3 threshold decr. to 1-of-3 fails: old threshold not reached": [
            MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]),
            MdVersion(keys=[1, 2, 3],
                      threshold=1,
                      sigs=[1],
                      res=UnsignedMetadataError),
        ],
        "1-of-2 threshold increase to 2-of-2": [
            MdVersion(keys=[1], threshold=1, sigs=[1]),
            MdVersion(keys=[1, 2], threshold=2, sigs=[1, 2]),
        ],
    }
    # fmt: on

    @run_sub_tests_with_dataset(root_rotation_cases)
    def test_root_rotation(self, root_versions: List[MdVersion]) -> None:
        """Test Updater.refresh() with various sequences of root updates

        Each MdVersion in the list describes root keys and signatures of a
        remote root metadata version. As an example:
            MdVersion([1,2,3], 2, [1,2])
        defines a root that contains keys 1, 2 and 3 with threshold 2. The
        metadata is signed with keys 1 and 2.

        Assert that the refresh() result is as expected and that the local root
        on disk is the expected one after all roots have been loaded from the
        remote using the standard client update workflow.
        """
        self.setup_subtest()

        # Publish all remote root versions defined in root_versions
        for rootver in root_versions:
            # clear root keys, signers
            self.sim.root.roles[Root.type].keyids.clear()
            self.sim.signers[Root.type].clear()

            self.sim.root.roles[Root.type].threshold = rootver.threshold
            for i in rootver.keys:
                self.sim.root.add_key(self.keys[i], Root.type)
            for i in rootver.sigs:
                self.sim.add_signer(Root.type, self.signers[i])
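            # sign the bumped root with the signers configured above and
            # append it to the published root history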
            self.sim.root.version += 1
            self.sim.publish_root()

        # run client workflow, assert success/failure
        expected_error = root_versions[-1].res
        if expected_error is None:
            self._run_refresh()
            expected_local_root = self.sim.signed_roots[-1]
        else:
            # failure expected: local root should be the root before last
            with self.assertRaises(expected_error):
                self._run_refresh()
            expected_local_root = self.sim.signed_roots[-2]

        # assert local root on disk is expected
        with open(os.path.join(self.metadata_dir, "root.json"), "rb") as f:
            self.assertEqual(f.read(), expected_local_root)

    # fmt: off
    non_root_rotation_cases: Dict[str, MdVersion] = {
        "1-of-1 key rotation":
        MdVersion(keys=[2], threshold=1, sigs=[2]),
        "1-of-1 key rotation, unused signatures":
        MdVersion(keys=[1], threshold=1, sigs=[3, 1, 4]),
        "1-of-1 key rotation fail: not signed with new key":
        MdVersion(keys=[2],
                  threshold=1,
                  sigs=[1, 3, 4],
                  res=UnsignedMetadataError),
        "3-of-5, one key signature wrong: not signed with 3 expected keys":
        MdVersion(keys=[0, 1, 3, 4, 5],
                  threshold=3,
                  sigs=[0, 2, 4],
                  res=UnsignedMetadataError),
        "2-of-5, one key signature mising: threshold not reached":
        MdVersion(keys=[0, 1, 3, 4, 5],
                  threshold=3,
                  sigs=[0, 4],
                  res=UnsignedMetadataError),
        "3-of-5, sign first combo":
        MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]),
        "3-of-5, sign second combo":
        MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 4, 1]),
        "3-of-5, sign third combo":
        MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]),
        "3-of-5, sign fourth combo":
        MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[1, 2, 3]),
        "3-of-5, sign fifth combo":
        MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[2, 3, 4]),
    }
    # fmt: on

    @run_sub_tests_with_dataset(non_root_rotation_cases)
    def test_non_root_rotations(self, md_version: MdVersion) -> None:
        """Test Updater.refresh() with various sequences of metadata updates

        Each MdVersion in the list describes metadata keys and signatures
        of a remote metadata version. As an example:
            MdVersion([1,2,3], 2, [1,2])
        defines a metadata that contains keys 1, 2 and 3 with threshold 2. The
        metadata is signed with keys 1 and 2.

        Assert that refresh() result is expected and that local metadata on disk
        is the expected one after all roots have been loaded from remote using
        the standard client update workflow.
        """
        self.setup_subtest()
        roles = ["timestamp", "snapshot", "targets"]
        for role in roles:

            # clear role keys, signers
            self.sim.root.roles[role].keyids.clear()
            self.sim.signers[role].clear()

            self.sim.root.roles[role].threshold = md_version.threshold
            for i in md_version.keys:
                self.sim.root.add_key(self.keys[i], role)

            for i in md_version.sigs:
                self.sim.add_signer(role, self.signers[i])

            self.sim.root.version += 1
            self.sim.publish_root()

            # run client workflow, assert success/failure
            expected_error = md_version.res
            if expected_error is None:
                self._run_refresh()

                # Call fetch_metadata to get metadata signed with the new keys
                expected_local_md: bytes = self.sim.fetch_metadata(role)
                # assert local metadata role is on disk as expected
                md_path = os.path.join(self.metadata_dir, f"{role}.json")
                with open(md_path, "rb") as f:
                    data = f.read()
                    self.assertEqual(data, expected_local_md)
            else:
                # failure expected
                with self.assertRaises(expected_error):
                    self._run_refresh()
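
The rotation cases above encode the client-side root rotation rule from the
TUF specification: a new root version must be signed by a threshold of the
keys it lists itself and by a threshold of the keys listed in the version it
replaces. The sketch below is illustrative only (not the ngclient
verification code) and uses key indices the way the MdVersion cases do:

def rotation_is_valid(old_keys, old_threshold, new_keys, new_threshold, sigs):
    """True if the signing key indices satisfy both root thresholds."""
    old_count = len({s for s in sigs if s in old_keys})
    new_count = len({s for s in sigs if s in new_keys})
    return old_count >= old_threshold and new_count >= new_threshold


# "1-of-1 key rotation", middle step: keys=[2], threshold=1, sigs=[2, 1]
print(rotation_is_valid({1}, 1, {2}, 1, [2, 1]))  # True

# "1-of-1 key rotation fail: not signed with old key": sigs=[2, 3, 4]
print(rotation_is_valid({1}, 1, {2}, 1, [2, 3, 4]))  # False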