def test_index(self):
    """Generate an index over built packages and check the package counts."""
    index_file = self.workspace_folder / "index.json"
    package_names = (
        "install_1.0",
        "condition_1.0",
        "condition-A_1.0",
        "condition-A_2.0",
        "condition-B_1.0",
        "condition-C_1.0",
        "condition-D_1.0",
        "condition-E_1.0",
        "condition-F_1.0",
        "condition-G_1.0",
        "condition-H_1.0",
    )
    # Build one leaf artifact per source package
    for package_name in package_names:
        self.rm.create_package(
            TEST_REMOTE_PACKAGE_SOURCE / package_name,
            self.workspace_folder / (package_name + ".leaf"),
        )
    # Index only the condition* artifacts: 10 packages expected
    self.rm.generate_index(index_file, self.workspace_folder.glob("condition*.leaf"), prettyprint=True)
    self.assertEqual(10, len(jloadfile(index_file)[JsonConstants.REMOTE_PACKAGES]))
    # Index every artifact: the install package adds one more
    self.rm.generate_index(index_file, self.workspace_folder.glob("*.leaf"), prettyprint=False)
    self.assertEqual(11, len(jloadfile(index_file)[JsonConstants.REMOTE_PACKAGES]))
def __init__(self, *layers: Path, default_factory=OrderedDict):
    """Build the model by merging every existing layer file in order.

    The first readable layer seeds the model; later layers are merged on
    top of it with jlayer_update. When no layer file exists, an empty
    model is created with *default_factory*.
    """
    merged = None
    # Only consider layers that actually point to an existing file
    for layer_file in filter(lambda p: p is not None and p.is_file(), layers):
        layer_data = jloadfile(layer_file)
        if merged is None:
            merged = layer_data
        else:
            jlayer_update(merged, layer_data)
    JsonObject.__init__(self, merged if merged is not None else default_factory())
    self._check_model()
def __init__(self, mffile: Path, read_only=False):
    """Initialize the package from its manifest file.

    :param mffile: path to the package manifest json file
    :param read_only: when True, add the 'system' custom tag
    """
    Manifest.__init__(self, jloadfile(mffile))
    IEnvProvider.__init__(self, "package {pi}".format(pi=self.identifier))
    # The package root folder is the one containing the manifest
    self.__folder = mffile.parent
    self.__read_only = read_only
    if read_only:
        # NOTE(review): read-only looks like it marks system-provided packages -- confirm
        self.custom_tags.append("system")
def list_remotes(self, only_enabled: bool = False):
    """Return the configured remotes as an alias -> Remote ordered map.

    Raises NoRemoteException when no remote is configured at all, and
    NoEnabledRemoteException when *only_enabled* filters everything out.
    For enabled remotes, the cached index content is loaded when present;
    a corrupted cache is cleaned instead of failing the listing.
    """
    configured = self.read_user_configuration().remotes
    if not configured:
        raise NoRemoteException()
    result = OrderedDict()
    for alias, remote_json in configured.items():
        remote = Remote(alias, remote_json)
        if remote.enabled or not only_enabled:
            result[alias] = remote
        if not remote.enabled:
            continue
        # For enabled remotes, try to load the cached index content;
        # when GPG is configured a signature file must also be present.
        cached_index, cached_sig = self.__get_remote_files(alias)
        if cached_index.exists() and (remote.gpg_key is None or cached_sig.exists()):
            try:
                remote.content = jloadfile(cached_index)
            except Exception:
                # Corrupted cache: warn and drop it so it gets re-fetched
                self.logger.print_default("Invalid json file cache for remote {alias}".format(alias=alias))
                self.__clean_remote_files(alias)
    if only_enabled and not result:
        raise NoEnabledRemoteException()
    return result
def test_json(self):
    """Check JsonObject.jsonpath default handling, persistence round-trip,
    and mandatory-access errors.

    Fix: the original used the deprecated, race-prone tempfile.mktemp()
    and never deleted the temp file; use mkstemp() and clean up.
    """
    import os
    import tempfile

    jo = JsonObject({})
    # Without a default, a missing path yields None...
    self.assertIsNone(jo.jsonpath(["a"]))
    # ...but providing a default materializes the node in the model
    self.assertIsNotNone(jo.jsonpath(["a"], {}))
    self.assertIsNotNone(jo.jsonpath(["a"]))
    self.assertIsNone(jo.jsonpath(["a", "b"]))
    self.assertIsNotNone(jo.jsonpath(["a", "b"], {}))
    self.assertIsNotNone(jo.jsonpath(["a", "b"]))
    self.assertIsNone(jo.jsonpath(["a", "b", "c"]))
    self.assertEqual("hello", jo.jsonpath(["a", "b", "c"], "hello"))
    # Once set, the stored value wins over any later default
    self.assertEqual("hello", jo.jsonpath(["a", "b", "c"], "world"))
    self.assertEqual("hello", jo.jsonpath(["a", "b", "c"]))
    # Round-trip through a temp file; mkstemp avoids the mktemp() race
    fd, tmpname = tempfile.mkstemp(suffix=".json", prefix="leaf-ut")
    os.close(fd)
    tmpfile = Path(tmpname)
    try:
        jwritefile(tmpfile, jo.json, pp=True)
        jo = JsonObject(jloadfile(tmpfile))
    finally:
        tmpfile.unlink()
    self.assertEqual("hello", jo.jsonpath(["a", "b", "c"], "hello"))
    self.assertEqual("hello", jo.jsonpath(["a", "b", "c"], "world"))
    self.assertEqual("hello", jo.jsonpath(["a", "b", "c"]))
    # Mandatory access on a missing key must raise
    with self.assertRaises(ValueError):
        jo.jsonget("z", mandatory=True)
    # Traversing through a non-dict leaf must raise
    with self.assertRaises(ValueError):
        jo.jsonpath(["a", "b", "c", "d"])
    with self.assertRaises(ValueError):
        jo.jsonpath(["a", "d", "e"])
def test_ap_candidates(self):
    """Check remote priority computation and best-candidate selection
    among duplicate available packages."""
    remote_file = Remote("remote_file", {"url": "file:///tmp/file/index.json"})
    remote_fs = Remote("remote_fs", {"url": "/tmp/fs/index.json"})
    remote_custom = Remote("remote_custom", {"url": "https://foo.tld/custom/index.json", "priority": 150})
    remote_https = Remote("remote_https", {"url": "https://foo.tld/https/index.json"})
    remote_http = Remote("remote_http", {"url": "http://foo.tld/http/index.json"})
    remote_other = Remote("remote_other", {"url": "nfs://foo.tld/other/index.json"})

    # Implicit priority depends on the url scheme; an explicit one wins
    for expected_priority, remote in (
        (100, remote_file),
        (100, remote_fs),
        (150, remote_custom),
        (200, remote_https),
        (201, remote_http),
        (500, remote_other),
    ):
        self.assertEqual(expected_priority, remote.priority)

    ap_json = {"file": "pack.leaf"}
    ap_json["info"] = jloadfile(TEST_REMOTE_PACKAGE_SOURCE / "version_1.0" / LeafFiles.MANIFEST)["info"]
    ap = AvailablePackage(ap_json, remote=remote_other)
    self.assertEqual("nfs://foo.tld/other/pack.leaf", ap.best_candidate.url)

    # Each added duplicate may or may not displace the best candidate
    for duplicate_remote, expected_url in (
        (remote_https, "https://foo.tld/https/pack.leaf"),
        (remote_http, "https://foo.tld/https/pack.leaf"),
        (remote_custom, "https://foo.tld/custom/pack.leaf"),
        (remote_file, "file:///tmp/file/pack.leaf"),
        (remote_fs, "file:///tmp/file/pack.leaf"),
    ):
        ap.add_duplicate(AvailablePackage(ap_json, remote=duplicate_remote))
        self.assertEqual(expected_url, ap.best_candidate.url)

    # Changing a remote priority re-orders the candidates on the fly
    remote_custom.json["priority"] = 1
    self.assertEqual("https://foo.tld/custom/pack.leaf", ap.best_candidate.url)
    remote_custom.json["priority"] = 999
    self.assertEqual("file:///tmp/file/pack.leaf", ap.best_candidate.url)
    remote_custom.json["priority"] = 100
    self.assertEqual("https://foo.tld/custom/pack.leaf", ap.best_candidate.url)
def test_index_generation_with_input_file(self):
    """The 'build index --input' list file supports comments and blank lines."""
    index = self.workspace_folder / "index.json"
    input_file = self.workspace_folder / "input.list"

    def regenerate_index():
        # Rebuild the index from the current content of the input file
        self.leaf_exec(
            ("build", "index"),
            "--resolve",
            "--output", index,
            "--name", "Name",
            "--description", "Description here",
            "--prettyprint",
            "--input", input_file,
        )

    def package_count():
        return len(jloadfile(index)[JsonConstants.REMOTE_PACKAGES])

    # Build two artifacts to reference from the input file
    self.leaf_exec(("build", "pack"), "--output", self.workspace_folder / "a.leaf", "--input", TEST_REMOTE_PACKAGE_SOURCE / "install_1.0")
    self.leaf_exec(("build", "pack"), "--output", self.workspace_folder / "b.leaf", "--input", TEST_REMOTE_PACKAGE_SOURCE / "condition_1.0")

    # A comment-only input file yields an empty index
    with input_file.open("w") as fp:
        fp.write("# This is a comment \n")
    regenerate_index()
    self.assertTrue(index.exists())
    self.assertEqual(0, package_count())

    # A real artifact line is indexed
    with input_file.open("a") as fp:
        fp.write("{0}\n".format(self.workspace_folder / "a.leaf"))
    regenerate_index()
    self.assertEqual(1, package_count())

    # A commented-out artifact line is ignored
    with input_file.open("a") as fp:
        fp.write(" ## {0} \n".format(self.workspace_folder / "b.leaf"))
    regenerate_index()
    self.assertEqual(1, package_count())

    # Surrounding whitespace around an artifact line is tolerated
    with input_file.open("a") as fp:
        fp.write(" {0} \n".format(self.workspace_folder / "b.leaf"))
    regenerate_index()
    self.assertEqual(2, package_count())
def test_index_generation(self):
    """Generate indexes from explicit artifact arguments."""
    index = self.workspace_folder / "index.json"
    artifact_a = self.workspace_folder / "a.leaf"
    artifact_b = self.workspace_folder / "b.leaf"
    # Build two artifacts
    self.leaf_exec(("build", "pack"), "--output", artifact_a, "--input", TEST_REMOTE_PACKAGE_SOURCE / "install_1.0")
    self.leaf_exec(("build", "pack"), "--output", artifact_b, "--input", TEST_REMOTE_PACKAGE_SOURCE / "condition_1.0")
    # Index a single artifact, pretty-printed
    self.leaf_exec(
        ("build", "index"),
        "--resolve",
        "--output", index,
        "--name", "Name",
        "--description", "Description here",
        "--prettyprint",
        artifact_a,
    )
    self.assertTrue(index.exists())
    self.assertEqual(1, len(jloadfile(index)[JsonConstants.REMOTE_PACKAGES]))
    # Index both artifacts without the info node
    self.leaf_exec(
        ("build", "index"),
        "--resolve",
        "--output", index,
        "--name", "Name",
        "--description", "Description here",
        "--no-info",
        artifact_a,
        artifact_b,
    )
    self.assertTrue(index.exists())
    self.assertEqual(2, len(jloadfile(index)[JsonConstants.REMOTE_PACKAGES]))
def __load_manifest(self):
    """Build both package flavors for every manifest found under the
    test package source, sorted by type name then string form."""
    packages = []
    for entry in TEST_REMOTE_PACKAGE_SOURCE.iterdir():
        if not entry.is_dir():
            continue
        manifest_file = entry / LeafFiles.MANIFEST
        if manifest_file.is_file():
            # One "available" and one "installed" view per manifest
            packages.append(AvailablePackage2(jloadfile(manifest_file), "http://fakeUrl"))
            packages.append(InstalledPackage2(manifest_file))
    packages.sort(key=lambda mf: str(type(mf)) + str(mf))
    return packages
def test_index_generation_with_extra_tags(self):
    """Extra tags from an '<artifact>.tags' file are stripped and
    deduplicated, unless --no-extra-tags is given."""
    index1 = self.workspace_folder / "index1.json"
    index2 = self.workspace_folder / "index2.json"
    folder = TEST_REMOTE_PACKAGE_SOURCE / "container-A_1.0"
    leaf_file = self.workspace_folder / "a.leaf"
    tags_file = self.workspace_folder / "a.leaf.tags"
    # Duplicates, surrounding spaces and blank lines must all be handled
    tag_lines = (
        "foo\n",
        "bar\n",
        " foo \n",
        " bar \n",
        "\n",
        " \n",
        "foo\n",
        " hello\n",
        "bar\n",
        "world \n",
    )
    with tags_file.open("w") as fp:
        fp.writelines(tag_lines)
    # Build the package, then generate one index with and one without extra tags
    self.leaf_exec(("build", "pack"), "--output", leaf_file, "--input", folder)
    self.leaf_exec(("build", "index"), "--resolve", "--output", index1, "--prettyprint", leaf_file)
    self.assertTrue(index1.exists())
    self.leaf_exec(("build", "index"), "--resolve", "--output", index2, "--no-extra-tags", "--prettyprint", leaf_file)
    self.assertTrue(index2.exists())
    packages1 = jloadfile(index1)[JsonConstants.REMOTE_PACKAGES]
    packages2 = jloadfile(index2)[JsonConstants.REMOTE_PACKAGES]
    self.assertEqual(1, len(packages1))
    self.assertEqual(1, len(packages2))
    self.assertEqual(["foo", "bar", "hello", "world"], packages1[0]["info"]["tags"])
    self.assertEqual(["foo"], packages2[0]["info"]["tags"])
def write_layer(self, output: Path, previous_layer: Path = None, pp: bool = False):
    """Persist this configuration to *output*.

    When *previous_layer* exists, only the difference against it is
    written; otherwise the full model is dumped. The minimum leaf
    version field is refreshed first.
    """
    # Stamp the model with the current leaf version before writing
    self.leaf_min_version = str(CURRENT_LEAF_VERSION)
    if previous_layer is not None and previous_layer.exists():
        # Write only what differs from the previous layer
        payload = jlayer_diff(jloadfile(previous_layer), self.json)
    else:
        payload = self.json
    jwritefile(output, payload, pp=pp)
def test_updaters(self):
    """Check that version-gated updaters run when the stored
    leafMinVersion is older than the current (forced) version."""

    def append_foo(model):
        model.json["test"].append("foo")

    def append_bar(model):
        model.json["test"].append("bar")

    class MyConfig(ConfigFileWithLayer):
        def __init__(self, *layers):
            ConfigFileWithLayer.__init__(self, *layers)

        def _get_updaters(self):
            # foo has no version gate; bar only applies below 2.0
            return super()._get_updaters() + ((None, append_foo), (Version("2.0"), append_bar))

        @property
        def test_list(self):
            return self.json["test"]

    tmpfile = self.test_folder / "a.json"
    jwritefile(tmpfile, {"test": ["hello"]})

    self.force_version("1.0")
    config = MyConfig(tmpfile)
    self.assertEqual(["hello", "foo", "bar"], config.test_list)
    config.write_layer(tmpfile)
    raw_model = jloadfile(tmpfile)
    self.assertEqual(["hello", "foo", "bar"], raw_model["test"])
    self.assertEqual("1.0", raw_model["leafMinVersion"])

    # Reloading at the same version does not run the updaters again
    config = MyConfig(tmpfile)
    self.assertEqual(["hello", "foo", "bar"], config.test_list)
    config.write_layer(tmpfile)

    self.force_version("1.1")
    config = MyConfig(tmpfile)
    self.assertEqual(["hello", "foo", "bar", "foo", "bar"], config.test_list)
    config.write_layer(tmpfile)

    self.force_version("2.0")
    config = MyConfig(tmpfile)
    self.assertEqual(["hello", "foo", "bar", "foo", "bar", "foo", "bar"], config.test_list)
    # Loading twice without writing yields the same result
    config = MyConfig(tmpfile)
    self.assertEqual(["hello", "foo", "bar", "foo", "bar", "foo", "bar"], config.test_list)
    config.write_layer(tmpfile)

    self.force_version("2.1")
    config = MyConfig(tmpfile)
    # Past 2.0 only the unconditional updater runs
    self.assertEqual(["hello", "foo", "bar", "foo", "bar", "foo", "bar", "foo"], config.test_list)
def list_remotes(self, only_enabled: bool = False):
    """Return configured remotes as an alias -> Remote ordered map.

    The cached index content is loaded when the cache file exists (and,
    when GPG is configured, its signature too).

    Fix: a corrupted json cache previously raised out of jloadfile and
    crashed the whole listing; it is now discarded (and cleaned) so the
    next fetch can rebuild it, matching the sibling implementation.

    Raises NoRemoteException when nothing is configured and
    NoEnabledRemoteException when *only_enabled* filters everything out.
    """
    out = OrderedDict()
    remotes = self.read_user_configuration().remotes
    if len(remotes) == 0:
        raise NoRemoteException()
    for alias, json in remotes.items():
        remote = Remote(alias, json)
        if remote.enabled or not only_enabled:
            out[alias] = remote
        rindex, rsig = self.__get_remote_files(alias)
        # Load content if cache exists and check signature is present if needed
        if rindex.exists() and (remote.gpg_key is None or rsig.exists()):
            try:
                remote.content = jloadfile(rindex)
            except Exception:
                # Invalid cached json: warn and clean so the next fetch rebuilds it
                self.logger.print_default("Invalid json file cache for remote {alias}".format(alias=alias))
                self.__clean_remote_files(alias)
    if len(out) == 0 and only_enabled:
        raise NoEnabledRemoteException()
    return out
def test_external_info_file(self):
    """Check creation and lookup of the external '.info' file next to an artifact."""
    source_folder = TEST_REMOTE_PACKAGE_SOURCE / "install_1.0"
    artifact = self.workspace_folder / "myPackage.leaf"
    info_file = self.workspace_folder / "myPackage.leaf.info"
    # Without external info, only the artifact is produced
    # (NB: the 'store_extenal_info' spelling is the API's keyword)
    self.rm.create_package(source_folder, artifact, store_extenal_info=False)
    self.assertTrue(artifact.exists())
    self.assertFalse(info_file.exists())
    # With external info, the side file appears and is discoverable
    self.rm.create_package(source_folder, artifact, store_extenal_info=True)
    self.assertTrue(artifact.exists())
    self.assertTrue(info_file.exists())
    self.assertEqual(info_file, self.rm.find_external_info_file(artifact))
    self.assertEqual(hash_compute(artifact), AvailablePackage(jloadfile(info_file), None).hashsum)
    # Re-packing without info while a stale info file exists must fail
    with self.assertRaises(LeafException):
        self.rm.create_package(source_folder, artifact, store_extenal_info=False)
def __fetch_remote(self, remote: Remote):
    """Download (and optionally GPG-verify) the index of a remote.

    On any failure the cached files are removed and the error is
    reported as a RemoteFetchException instead of being raised.
    """
    # Start from a clean cache for this alias
    self.__clean_remote_files(remote.alias)
    index_file, sig_file = self.__get_remote_files(remote.alias)
    try:
        self.logger.print_default("Fetching remote {remote.alias}".format(remote=remote))
        download_file(remote.url, index_file)
        gpg_key = remote.gpg_key
        if gpg_key is not None:
            # Fetch the detached signature and verify the index against it
            download_file(remote.url + LeafConstants.GPG_SIG_EXTENSION, sig_file)
            self.logger.print_default("Verifying signature for remote {0.alias}".format(remote))
            self.gpg_import_keys(gpg_key)
            self.gpg_verify_file(index_file, sig_file, expected_key=gpg_key)
        remote.content = jloadfile(index_file)
        self.__check_remote_content(remote)
    except Exception as e:
        # Best effort: drop partial files and report, do not propagate
        self.__clean_remote_files(remote.alias)
        self.print_exception(RemoteFetchException(remote, e))
def generate_repository(source_folder, output_folder):
    """Build every package found in *source_folder* into *output_folder*
    and generate two GPG-signed index files (index.json / index2.json).

    :param source_folder: folder containing one sub-folder per package
    :param output_folder: destination for .leaf artifacts and indexes
    :raises ValueError: on a folder/manifest naming mismatch or when
        either index would end up empty
    """
    mkdirs(output_folder)
    artifacts_list1 = []  # artifacts referenced by index.json
    artifacts_list2 = []  # artifacts referenced by index2.json
    rm = RelengManager()
    for package_folder in source_folder.iterdir():
        if package_folder.is_dir() and PackageIdentifier.is_valid_identifier(package_folder.name):
            manifest_file = package_folder / LeafFiles.MANIFEST
            if manifest_file.is_file():
                manifest = Manifest.parse(manifest_file)
                # The folder name must match the manifest identifier
                if str(manifest.identifier) != package_folder.name:
                    raise ValueError("Naming error: {mf.identifier} != {folder.name}".format(mf=manifest, folder=package_folder))
                filename = str(manifest.identifier) + ".leaf"
                output_file = output_folder / filename
                tar_extraargs = TAR_EXTRA_ARGS.get(manifest.identifier)
                rm.create_package(package_folder, output_file, tar_extra_args=tar_extraargs)
                # Check that the generated archive is OK
                check_archive_format(output_file, tar_extraargs[0] if tar_extraargs is not None else None)
                # Create multi index.json: ALT_INDEX_CONTENT routes artifacts
                # into index2, and optionally into index1 as well
                if str(manifest.identifier) in ALT_INDEX_CONTENT:
                    artifacts_list2.append(output_file)
                    if ALT_INDEX_CONTENT[str(manifest.identifier)]:
                        artifacts_list1.append(output_file)
                else:
                    artifacts_list1.append(output_file)
                # Create a problem with failure-badhash package:
                # overwrite the stored hash so installation must fail
                if manifest.name == "failure-badhash":
                    info_node = jloadfile(rm.find_external_info_file(output_file))
                    # chosen by fair dice roll.
                    # garanteed to be random.
                    info_node[JsonConstants.REMOTE_PACKAGE_HASH] = "sha384:d1083143b5c4cf7f1ddaadc391b2d0102fc9fffeb0951ec51020b512ef9548d40cd1af079a1221133faa949fdc304c41"
                    jwritefile(rm.find_external_info_file(output_file), info_node, pp=True)
    if len(artifacts_list1) == 0 or len(artifacts_list2) == 0:
        raise ValueError("Empty index!")
    # Volatile tags: different content before each index generation
    with (output_folder / "multitags_1.0.leaf.tags").open("w") as fp:
        fp.write("volatileTag1\n")
        fp.write("volatileTag2")
    rm.generate_index(output_folder / "index.json", artifacts_list1, name="First repository", description="First repository description", prettyprint=True)
    with (output_folder / "multitags_1.0.leaf.tags").open("w") as fp:
        fp.write("volatileTag3\n")
        fp.write("volatileTag4")
    rm.generate_index(output_folder / "index2.json", artifacts_list2, name="Second repository", description="Second repository description", prettyprint=True)
    # Alter some values for test purpose
    index1json = jloadfile(output_folder / "index.json")
    for pkgjson in index1json[JsonConstants.REMOTE_PACKAGES]:
        if pkgjson["info"]["name"] == "failure-large-ap":
            pkgjson["size"] = 999999999999
    jwritefile(output_folder / "index.json", index1json, pp=True)
    # Sign with GPG
    subprocess.check_call(["gpg", "--homedir", str(TEST_GPG_HOMEDIR), "--detach-sign", "--armor", str(output_folder / "index.json")])
    subprocess.check_call(["gpg", "--homedir", str(TEST_GPG_HOMEDIR), "--detach-sign", "--armor", str(output_folder / "index2.json")])
def generate_manifest(self, output_file: Path, fragment_files: list = None, info_map: dict = None, resolve_envvars: bool = False):
    """Create a manifest.json file at *output_file*.

    :param output_file: destination path of the generated manifest
    :param fragment_files: optional json fragments merged (lists appended)
        into the model before applying *info_map*
    :param info_map: optional values for the 'info' node; list-valued keys
        (requires/depends/tags) are appended after validation, scalar keys
        are set directly
    :param resolve_envvars: when True, replace every '#{VAR}' placeholder
        in the serialized json with the value of environment variable VAR
    :raises LeafException: when a referenced env variable is missing or
        when the package version is the reserved 'latest' keyword
    """
    model = OrderedDict()
    # Load fragments
    if fragment_files is not None:
        for fragment_file in fragment_files:
            self.logger.print_default("Use json fragment: {fragment}".format(fragment=fragment_file))
            jlayer_update(model, jloadfile(fragment_file), list_append=True)
    # Load model
    manifest = Manifest(model)
    info = manifest.jsonget(JsonConstants.INFO, default=OrderedDict())
    # Set the common info
    if info_map is not None:
        for key in (
            JsonConstants.INFO_NAME,
            JsonConstants.INFO_VERSION,
            JsonConstants.INFO_DESCRIPTION,
            JsonConstants.INFO_MASTER,
            JsonConstants.INFO_DATE,
            JsonConstants.INFO_REQUIRES,
            JsonConstants.INFO_DEPENDS,
            JsonConstants.INFO_TAGS,
            JsonConstants.INFO_LEAF_MINVER,
            JsonConstants.INFO_AUTOUPGRADE,
        ):
            if key in info_map:
                value = info_map[key]
                if value is not None:
                    if key in (JsonConstants.INFO_REQUIRES, JsonConstants.INFO_DEPENDS, JsonConstants.INFO_TAGS):
                        # Handle lists: append new items, skipping duplicates
                        model_list = manifest.jsonpath([JsonConstants.INFO, key], default=[])
                        for motif in value:
                            if motif not in model_list:
                                if key == JsonConstants.INFO_DEPENDS:
                                    # Try to parse as a conditional package
                                    # identifier (raises on invalid input)
                                    ConditionalPackageIdentifier.parse(motif)
                                elif key == JsonConstants.INFO_REQUIRES:
                                    # Try to parse as a package identifier
                                    PackageIdentifier.parse(motif)
                                self.logger.print_verbose("Add '{motif}' to '{key}' list".format(motif=motif, key=key))
                                model_list.append(motif)
                    else:
                        self.logger.print_verbose("Set '{key}' = '{value}'".format(key=key, value=value))
                        info[key] = value
    # String replacement: resolve '#{VAR}' placeholders from the environment
    jsonstr = jtostring(manifest.json, pp=True)
    if resolve_envvars:
        for var in set(re.compile(r"#\{([a-zA-Z0-9_]+)\}").findall(jsonstr)):
            value = os.environ.get(var)
            if value is None:
                raise LeafException("Cannot find '{var}' in env".format(var=var), hints="Set the variable with 'export {var}=xxx'".format(var=var))
            self.logger.print_default("Replace {key} --> {value}".format(key=var, value=value))
            jsonstr = jsonstr.replace("#{{{var}}}".format(var=var), value)
    # 'latest' is reserved and cannot be used as a version
    if is_latest_package(manifest.identifier):
        raise LeafException("Invalid version ({word} is a reserved keyword)".format(word=LeafConstants.LATEST))
    self.logger.print_default("Save '{mf.identifier}' manifest to {file}".format(mf=manifest, file=output_file))
    with output_file.open("w") as fp:
        fp.write(jsonstr)
def generate_index(
    self,
    index_file: Path,
    artifacts: list,
    name: str = None,
    description: str = None,
    use_external_info: bool = True,
    use_extra_tags: bool = True,
    prettyprint: bool = False,
    resolve: bool = True,
):
    """Create an index.json referencing all given artifacts.

    :param index_file: destination index file (created if missing)
    :param artifacts: iterable of artifact paths; they must live under
        the index file's parent folder so a relative path can be stored
    :param name: optional remote name stored in the 'info' node
    :param description: optional remote description
    :param use_external_info: prefer a pre-computed '<artifact>.info'
        file over re-computing the package node
    :param use_extra_tags: merge tags from an '<artifact>.tags' file
    :param prettyprint: pretty-print the generated json
    :param resolve: resolve index and artifact paths to absolute paths
    :raises LeafException: on a reserved 'latest' version, conflicting
        duplicate artifacts, or a non-relative artifact path
    """
    if not index_file.exists():
        index_file.touch()
    if resolve:
        index_file = index_file.resolve()
    try:
        # Create the "info" node
        info_node = OrderedDict()
        if name is not None:
            info_node[JsonConstants.REMOTE_NAME] = name
        if description is not None:
            info_node[JsonConstants.REMOTE_DESCRIPTION] = description
        info_node[JsonConstants.REMOTE_DATE] = self.__get_date_now()
        packages_map = OrderedDict()
        # Resolve artifacts if needed
        if resolve:
            artifacts = [a.resolve() for a in artifacts]
        for artifact in artifacts:
            artifact_node = None
            if use_external_info:
                # Reuse the pre-computed info file when available
                infofile = self.find_external_info_file(artifact)
                if infofile.exists():
                    self.logger.print_default("Reading info from {file}".format(file=infofile))
                    artifact_node = jloadfile(infofile)
            if artifact_node is None:
                self.logger.print_default("Compute info for {artifact}".format(artifact=artifact))
                artifact_node = self.__build_pkg_node(artifact)
            ap = AvailablePackage(artifact_node)
            pi = ap.identifier
            # 'latest' is reserved and cannot be used as a version
            if is_latest_package(pi):
                raise LeafException("Invalid version for package {artifact} ({word} is a reserved keyword)".format(artifact=artifact, word=LeafConstants.LATEST))
            if pi in packages_map:
                # Duplicate identifier: only acceptable if it is the very
                # same artifact (identical hashsum); keep the first entry
                self.logger.print_default("Artifact already present: {pi}".format(pi=pi))
                if ap.hashsum != AvailablePackage(packages_map[pi]).hashsum:
                    raise LeafException("Artifact {pi} has multiple different artifacts for same version".format(pi=pi))
            else:
                # Read extra tags from the optional side file
                extratags_file = artifact.parent / (artifact.name + ".tags")
                if use_extra_tags and extratags_file.exists():
                    with extratags_file.open() as fp:
                        # One tag per line, stripped, blanks ignored
                        for tag in filter(None, map(str.strip, fp.read().splitlines())):
                            if tag not in ap.tags:
                                self.logger.print_default("Add extra tag {tag}".format(tag=tag))
                                ap.tags.append(tag)
                self.logger.print_default("Add package {pi}".format(pi=pi))
                try:
                    # The stored file path is relative to the index location
                    relative_path = artifact.relative_to(index_file.parent)
                    artifact_node[JsonConstants.REMOTE_PACKAGE_FILE] = str(relative_path)
                except ValueError:
                    raise LeafException("Artifact {a} must be relative to {i.parent}".format(a=artifact, i=index_file))
                packages_map[pi] = artifact_node
        # Create the json structure
        root_node = OrderedDict()
        root_node[JsonConstants.INFO] = info_node
        root_node[JsonConstants.REMOTE_PACKAGES] = list(packages_map.values())
        jwritefile(index_file, root_node, pp=prettyprint)
        self.logger.print_default("Index created: {index}".format(index=index_file))
    except BaseException as e:
        # Clean the invalid index file on any failure, then re-raise
        if index_file.exists():
            index_file.unlink()
        raise e
def parse(mffile: Path):
    """Load *mffile* as json and wrap it in a Manifest."""
    json_model = jloadfile(mffile)
    return Manifest(json_model)