Example #1
    def test_is_target_in_pathpattern(self) -> None:
        # pylint: disable=protected-access
        supported_use_cases = [
            ("foo.tgz", "foo.tgz"),
            ("foo.tgz", "*"),
            ("foo.tgz", "*.tgz"),
            ("foo-version-a.tgz", "foo-version-?.tgz"),
            ("targets/foo.tgz", "targets/*.tgz"),
            ("foo/bar/zoo/k.tgz", "foo/bar/zoo/*"),
            ("foo/bar/zoo/k.tgz", "foo/*/zoo/*"),
            ("foo/bar/zoo/k.tgz", "*/*/*/*"),
            ("foo/bar", "f?o/bar"),
            ("foo/bar", "*o/bar"),
        ]
        for targetpath, pathpattern in supported_use_cases:
            self.assertTrue(
                DelegatedRole._is_target_in_pathpattern(
                    targetpath, pathpattern))

        invalid_use_cases = [
            ("targets/foo.tgz", "*.tgz"),
            ("/foo.tgz", "*.tgz"),
            ("targets/foo.tgz", "*"),
            ("foo-version-alpha.tgz", "foo-version-?.tgz"),
            ("foo//bar", "*/bar"),
            ("foo/bar", "f?/bar"),
        ]
        for targetpath, pathpattern in invalid_use_cases:
            self.assertFalse(
                DelegatedRole._is_target_in_pathpattern(
                    targetpath, pathpattern))
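The cases above pin down the matching semantics: patterns apply per path segment, wildcards never cross a "/" boundary, and both strings must have the same segment count. A minimal sketch of such a check using fnmatch (an illustration consistent with the test data above, not the library's private helper itself):

import fnmatch

def is_target_in_pathpattern(targetpath: str, pathpattern: str) -> bool:
    # Split both strings into path segments and require equal depth, which is
    # why "targets/foo.tgz" does not match "*.tgz" and "foo//bar" does not
    # match "*/bar" in the cases above.
    target_parts = targetpath.split("/")
    pattern_parts = pathpattern.split("/")
    if len(target_parts) != len(pattern_parts):
        return False
    # Match each segment with shell-style wildcards ("*" and "?").
    return all(
        fnmatch.fnmatch(part, pattern)
        for part, pattern in zip(target_parts, pattern_parts)
    )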
Example #2
    def test_not_loading_targets_twice(self, wrapped_open: MagicMock) -> None:
        # Do not load targets roles more than once when traversing
        # the delegations tree

        # Add new delegated targets, update the snapshot
        spec_version = ".".join(SPECIFICATION_VERSION)
        targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None)
        role = DelegatedRole("role1", [], 1, False, ["*"], None)
        self.sim.add_delegation("targets", role, targets)
        self.sim.update_snapshot()

        # Run refresh, top-level roles are loaded
        updater = self._run_refresh()
        # Clean up calls to open during refresh()
        wrapped_open.reset_mock()

        # First time looking for "somepath", only 'role1' must be loaded
        updater.get_targetinfo("somepath")
        wrapped_open.assert_called_once_with(
            os.path.join(self.metadata_dir, "role1.json"), "rb"
        )
        wrapped_open.reset_mock()
        # Second call to get_targetinfo, all metadata is already loaded
        updater.get_targetinfo("somepath")
        wrapped_open.assert_not_called()
Example #3
    def test_delegated_roles_update(
        self, test_case_data: Dict[str, Any]
    ) -> None:
        # Test if the client fetches and stores delegated metadata files with
        # the correct version prefix, depending on 'consistent_snapshot' config
        try:
            consistent_snapshot: bool = test_case_data["consistent_snapshot"]
            exp_version: Optional[int] = test_case_data["expected_version"]
            rolenames = ["role1", "..", "."]
            exp_calls = [(role, exp_version) for role in rolenames]

            self.setup_subtest(consistent_snapshot)
            # Add new delegated targets
            spec_version = ".".join(SPECIFICATION_VERSION)
            for role in rolenames:
                delegated_role = DelegatedRole(role, [], 1, False, ["*"], None)
                targets = Targets(
                    1, spec_version, self.sim.safe_expiry, {}, None
                )
                self.sim.add_delegation("targets", delegated_role, targets)
            self.sim.update_snapshot()
            updater = self._init_updater()
            updater.refresh()

            # cleanup fetch tracker metadata
            self.sim.fetch_tracker.metadata.clear()
            # trigger updater to fetch the delegated metadata
            updater.get_targetinfo("anything")
            # metadata files are fetched with the expected version (or None)
            self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls)
            # metadata files are always persisted without a version prefix
            self._assert_metadata_files_exist(rolenames)
        finally:
            self.teardown_subtest()
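This test is driven by external test_case_data; a plausible parametrization (hypothetical names and values, shown only to illustrate the two branches the comment describes) might look like:

from typing import Any, Dict

# Hypothetical dataset for test_delegated_roles_update: with a consistent
# snapshot the freshly added delegated roles are expected at version 1,
# without one no version prefix is expected (None).
delegated_roles_cases: Dict[str, Dict[str, Any]] = {
    "consistent snapshot": {"consistent_snapshot": True, "expected_version": 1},
    "inconsistent snapshot": {"consistent_snapshot": False, "expected_version": None},
}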
Example #4
    def test_load_metadata_from_cache(self, wrapped_open: MagicMock) -> None:

        # Add new delegated targets
        spec_version = ".".join(SPECIFICATION_VERSION)
        targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None)
        role = DelegatedRole("role1", [], 1, False, ["*"], None)
        self.sim.add_delegation("targets", role, targets)
        self.sim.update_snapshot()

        # Make a successful update of valid metadata which stores it in cache
        updater = self._run_refresh()
        updater.get_targetinfo("non_existent_target")

        # Clean up calls to open during refresh()
        wrapped_open.reset_mock()
        # Clean up fetch tracker metadata
        self.sim.fetch_tracker.metadata.clear()

        # Create a new updater and perform a second update while
        # the metadata is already stored in cache (metadata dir)
        updater = Updater(
            self.metadata_dir,
            "https://example.com/metadata/",
            self.targets_dir,
            "https://example.com/targets/",
            self.sim,
        )
        updater.get_targetinfo("non_existent_target")

        # Test that metadata is loaded from cache and not downloaded
        wrapped_open.assert_has_calls(
            [
                call(os.path.join(self.metadata_dir, "root.json"), "rb"),
                call(os.path.join(self.metadata_dir, "timestamp.json"), "rb"),
                call(os.path.join(self.metadata_dir, "snapshot.json"), "rb"),
                call(os.path.join(self.metadata_dir, "targets.json"), "rb"),
                call(os.path.join(self.metadata_dir, "role1.json"), "rb"),
            ]
        )

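        # Note: even though the metadata above is served from the local cache,
        # the client still contacts the repository for the next root version
        # (here 2) and for a fresh timestamp, hence these two expected fetches.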
        expected_calls = [("root", 2), ("timestamp", None)]
        self.assertListEqual(self.sim.fetch_tracker.metadata, expected_calls)
Example #5
    def test_is_delegated_role(self) -> None:
        # test path matches
        # see more extensive tests in test_is_target_in_pathpattern()
        for paths in [
            ["a/path"],
            ["otherpath", "a/path"],
            ["*/?ath"],
        ]:
            role = DelegatedRole("", [], 1, False, paths, None)
            self.assertFalse(role.is_delegated_path("a/non-matching path"))
            self.assertTrue(role.is_delegated_path("a/path"))

        # test path hash prefix matches: sha256 sum of "a/path" is 927b0ecf9...
        for hash_prefixes in [
            ["927b0ecf9"],
            ["other prefix", "927b0ecf9"],
            ["927b0"],
            ["92"],
        ]:
            role = DelegatedRole("", [], 1, False, None, hash_prefixes)
            self.assertFalse(role.is_delegated_path("a/non-matching path"))
            self.assertTrue(role.is_delegated_path("a/path"))
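The hash-prefix branch is easy to reproduce by hand: hash the target path with SHA-256 and check whether the hex digest starts with any configured prefix (a sketch of the idea, not the library's implementation):

import hashlib
from typing import List

def matches_hash_prefix(targetpath: str, prefixes: List[str]) -> bool:
    # Per the comment above, sha256("a/path") starts with "927b0ecf9", so
    # prefixes like "92", "927b0" and "927b0ecf9" all match that path.
    digest = hashlib.sha256(targetpath.encode("utf-8")).hexdigest()
    return any(digest.startswith(prefix) for prefix in prefixes)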
Example #6
    def _init_repo(self, test_case: DelegationsTestCase) -> None:
        """Create a new RepositorySimulator instance and
        populate it with delegations and target files"""

        self.sim = RepositorySimulator()
        spec_version = ".".join(SPECIFICATION_VERSION)
        for d in test_case.delegations:
            if d.rolename in self.sim.md_delegates:
                targets = self.sim.md_delegates[d.rolename].signed
            else:
                targets = Targets(1, spec_version, self.sim.safe_expiry, {},
                                  None)
            # unpack 'd' but skip "delegator"
            role = DelegatedRole(*astuple(d)[1:])
            self.sim.add_delegation(d.delegator, role, targets)

        for target in test_case.target_files:
            self.sim.add_target(*astuple(target))

        if test_case.target_files:
            self.sim.targets.version += 1
        self.sim.update_snapshot()
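The astuple(d)[1:] unpacking implies the shape of the test dataclasses: the first field names the delegator and the remaining fields line up with DelegatedRole's constructor. A hypothetical reconstruction (field names and defaults are assumptions, not taken from the test module):

from dataclasses import dataclass, field
from typing import Any, List, Optional

@dataclass
class TestDelegation:
    delegator: str                      # skipped by astuple(d)[1:]
    rolename: str                       # becomes DelegatedRole.name
    keyids: List[str] = field(default_factory=list)
    threshold: int = 1
    terminating: bool = False
    paths: Optional[List[str]] = field(default_factory=lambda: ["*"])
    path_hash_prefixes: Optional[List[str]] = None

@dataclass
class DelegationsTestCase:
    delegations: List[TestDelegation]
    target_files: List[Any] = field(default_factory=list)  # unpacked into sim.add_target()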
Example #7
# The hash bin generator yields an ordered list of incremental hash bin names
# (ranges), plus the hash prefixes each bin is responsible for, e.g.:
#
# bin_n_name:  00-07  bin_n_hash_prefixes: 00 01 02 03 04 05 06 07
#              08-0f                       08 09 0a 0b 0c 0d 0e 0f
#              10-17                       10 11 12 13 14 15 16 17
#              ...                         ...
#              f8-ff                       f8 f9 fa fb fc fd fe ff
assert roles["bins"].signed.delegations.roles is not None
for bin_n_name, bin_n_hash_prefixes in generate_hash_bins():
    # Update delegating targets role (bins) with delegation details for each
    # delegated targets role (bin_n).
    roles["bins"].signed.delegations.roles[bin_n_name] = DelegatedRole(
        name=bin_n_name,
        keyids=[keys["bin-n"]["keyid"]],
        threshold=1,
        terminating=False,
        path_hash_prefixes=bin_n_hash_prefixes,
    )

    # Create delegated targets roles (bin_n)
    roles[bin_n_name] = Metadata(Targets(expires=_in(7)))

# Add target file
# ---------------
# For the purpose of this example we will protect the integrity of this very
# example script by adding its file info to the corresponding bin metadata.

# NOTE: See "Targets" paragraph in 'basic_repo.py' example for more details
# about adding target file infos to targets metadata.
local_path = Path(__file__).resolve()
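The comment at the top of this example describes the output of generate_hash_bins(). A generator producing exactly those 32 bins of 8 two-character prefixes each (a hypothetical stand-in for the helper the script actually defines) could be:

from typing import Iterator, List, Tuple

def generate_hash_bins() -> Iterator[Tuple[str, List[str]]]:
    # Walk the 256 possible two-character hex prefixes in groups of 8,
    # yielding ("00-07", ["00", ..., "07"]) up to ("f8-ff", ["f8", ..., "ff"]).
    prefixes_per_bin = 8
    for low in range(0, 256, prefixes_per_bin):
        high = low + prefixes_per_bin - 1
        bin_name = f"{low:02x}-{high:02x}"
        yield bin_name, [f"{i:02x}" for i in range(low, high + 1)]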
Example #8
    def test_targets_key_api(self) -> None:
        targets_path = os.path.join(self.repo_dir, "metadata", "targets.json")
        targets: Targets = Metadata[Targets].from_file(targets_path).signed

        # Add a new delegated role "role2" in targets
        delegated_role = DelegatedRole.from_dict({
            "keyids": [],
            "name": "role2",
            "paths": ["fn3", "fn4"],
            "terminating": False,
            "threshold": 1,
        })
        assert isinstance(targets.delegations, Delegations)
        assert isinstance(targets.delegations.roles, Dict)
        targets.delegations.roles["role2"] = delegated_role

        key_dict = {
            "keytype": "ed25519",
            "keyval": {
                "public":
                "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd"
            },
            "scheme": "ed25519",
        }
        key = Key.from_dict("id2", key_dict)

        # Assert that add_key with old argument order will raise an error
        with self.assertRaises(ValueError):
            targets.add_key("role1", key)  # type: ignore

        # Assert that delegated role "role1" does not contain the new key
        self.assertNotIn(key.keyid, targets.delegations.roles["role1"].keyids)
        targets.add_key(key, "role1")

        # Assert that the new key is added to the delegated role "role1"
        self.assertIn(key.keyid, targets.delegations.roles["role1"].keyids)

        # Confirm that the newly added key does not break object serialization
        targets.to_dict()

        # Try adding the same key again and assert it's ignored.
        past_keyid = targets.delegations.roles["role1"].keyids.copy()
        targets.add_key(key, "role1")
        self.assertEqual(past_keyid, targets.delegations.roles["role1"].keyids)

        # Try adding a key to a delegated role that doesn't exist
        with self.assertRaises(ValueError):
            targets.add_key(key, "nosuchrole")

        # Add the same key to "role2" as well
        targets.add_key(key, "role2")

        # Remove the key from "role1" role ("role2" still uses it)
        targets.revoke_key(key.keyid, "role1")

        # Assert that delegated role "role1" doesn't contain the key.
        self.assertNotIn(key.keyid, targets.delegations.roles["role1"].keyids)
        self.assertIn(key.keyid, targets.delegations.roles["role2"].keyids)

        # Remove the key from "role2" as well
        targets.revoke_key(key.keyid, "role2")
        self.assertNotIn(key.keyid, targets.delegations.roles["role2"].keyids)

        # Try removing a key not used by "role1"
        with self.assertRaises(ValueError):
            targets.revoke_key(key.keyid, "role1")

        # Try removing a key from a delegated role that doesn't exist
        with self.assertRaises(ValueError):
            targets.revoke_key(key.keyid, "nosuchrole")

        # Remove delegations as a whole
        targets.delegations = None
        # Test that calling add_key and revoke_key throws an error
        # and that delegations is still None after each of the api calls
        with self.assertRaises(ValueError):
            targets.add_key(key, "role1")
        self.assertTrue(targets.delegations is None)
        with self.assertRaises(ValueError):
            targets.revoke_key(key.keyid, "role1")
        self.assertTrue(targets.delegations is None)
Example #9
    def test_invalid_delegated_role_serialization(self, test_case_data: str):
        case_dict = json.loads(test_case_data)
        with self.assertRaises(ValueError):
            DelegatedRole.from_dict(copy.copy(case_dict))
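One kind of input that makes from_dict() raise here is a role that sets both "paths" and "path_hash_prefixes", since a delegation may only use one of the two attributes (hypothetical data, not the test's actual dataset):

import json

# Hypothetical invalid case: "paths" and "path_hash_prefixes" are mutually
# exclusive, so setting both should be rejected with a ValueError.
invalid_case = json.dumps({
    "keyids": [],
    "name": "bad-role",
    "terminating": False,
    "threshold": 1,
    "paths": ["*"],
    "path_hash_prefixes": ["92"],
})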
Example #10
    def test_delegated_role_serialization(self, test_case_data: str):
        case_dict = json.loads(test_case_data)
        deserialized_role = DelegatedRole.from_dict(copy.copy(case_dict))
        self.assertDictEqual(case_dict, deserialized_role.to_dict())
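Conversely, any well-formed role dictionary should survive the from_dict()/to_dict() round trip unchanged; for instance (hypothetical input values):

import copy

from tuf.api.metadata import DelegatedRole

# Hypothetical well-formed input for the round-trip test above.
case_dict = {
    "keyids": ["keyid1"],
    "name": "role1",
    "paths": ["targets/*.tgz"],
    "terminating": False,
    "threshold": 1,
}
role = DelegatedRole.from_dict(copy.copy(case_dict))
assert role.to_dict() == case_dict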
Example #11
# The delegation info defined by the delegator further requires the provision
# of a unique delegatee name and constraints about the target files the
# delegatee is responsible for, e.g. a list of path patterns. For details about
# all configuration parameters see
# https://theupdateframework.github.io/specification/latest/#delegations
roles["targets"].signed.delegations = Delegations(
    keys={
        keys[delegatee_name]["keyid"]:
        Key.from_securesystemslib_key(keys[delegatee_name])
    },
    roles={
        delegatee_name:
        DelegatedRole(
            name=delegatee_name,
            keyids=[keys[delegatee_name]["keyid"]],
            threshold=1,
            terminating=True,
            paths=["*.py"],
        ),
    },
)

# Remove target file info from top-level targets (delegatee is now responsible)
del roles["targets"].signed.targets[target_path]

# Increase expiry (delegators should be less volatile)
roles["targets"].signed.expires = _in(365)

# Snapshot + Timestamp + Sign + Persist
# -------------------------------------
# In order to publish a new consistent set of metadata, we need to update