def test_roundtrip_dump(datafiles, fromdisk):
    """Round-trip a YAML file through load and dump, asserting byte fidelity.

    Loads either directly from disk or from an in-memory string (depending
    on the *fromdisk* parametrization), checks that every scalar in the
    loaded structure is a string (i.e. ints etc. were not implicitly
    converted), then dumps it back and compares with the original text.
    """
    filename = os.path.join(datafiles.dirname, datafiles.basename, "roundtrip-test.yaml")
    with open(filename, "r") as fh:
        rt_raw = fh.read()

    if fromdisk:
        rt_loaded = _yaml.roundtrip_load(filename)
    else:
        rt_loaded = _yaml.roundtrip_load_data(rt_raw, filename=filename)

    # Recursively verify that every leaf value in the loaded structure
    # is a string (checking for stray ints etc.).
    def check_strings(value):
        if isinstance(value, list):
            for item in value:
                check_strings(item)
        elif isinstance(value, dict):
            for item in value.values():
                check_strings(item)
        else:
            assert isinstance(value, str)

    check_strings(rt_loaded)

    # Dump back into a string buffer and compare with the raw input
    buffer = StringIO()
    _yaml.roundtrip_dump(rt_loaded, file=buffer)
    assert rt_raw == buffer.getvalue()
def update_project(project_path, updated_configuration):
    """Merge *updated_configuration* into the project.conf under *project_path*.

    The file is round-trip loaded and dumped so that unrelated formatting
    in the configuration is preserved.
    """
    conf_path = os.path.join(project_path, "project.conf")
    conf = _yaml.roundtrip_load(conf_path)
    conf.update(updated_configuration)
    _yaml.roundtrip_dump(conf, conf_path)
def _set_project_includes_and_aliases(project_path, includes, aliases):
    """Set the include list and merge source aliases in a project.conf.

    Args:
        project_path: Directory containing the project.conf to edit.
        includes: Value assigned verbatim to the "(@)" include directive.
        aliases: Mapping merged into the existing "aliases" section.
    """
    conf_path = os.path.join(project_path, "project.conf")
    conf = _yaml.roundtrip_load(conf_path)
    conf["aliases"].update(aliases)
    conf["(@)"] = includes
    _yaml.roundtrip_dump(conf, conf_path)
def _set_project_mirrors_and_aliases(project_path, mirrors, aliases):
    """Set the mirror list and merge source aliases in a project.conf.

    Args:
        project_path: Directory containing the project.conf to edit.
        mirrors: Value assigned verbatim to the "mirrors" section.
        aliases: Mapping merged into the existing "aliases" section.
    """
    conf_path = os.path.join(project_path, "project.conf")
    conf = _yaml.roundtrip_load(conf_path)
    conf["mirrors"] = mirrors
    conf["aliases"].update(aliases)
    _yaml.roundtrip_dump(conf, conf_path)
def add_plugins_conf(project, plugin_kind):
    """Register the source plugin for *plugin_kind* in a project's project.conf.

    Looks up the pip package providing the plugin in ALL_REPO_KINDS; if the
    plugin ships in a separate package (i.e. it is not a core plugin), a
    "plugins" origin entry is added to the project configuration.
    """
    _scaffolder, plugin_package = ALL_REPO_KINDS[plugin_kind]

    conf_file = os.path.join(project, "project.conf")
    conf = _yaml.roundtrip_load(conf_file)

    if plugin_package is not None:
        conf["plugins"] = [
            {
                "origin": "pip",
                "package-name": plugin_package,
                "sources": [plugin_kind],
            },
        ]

    _yaml.roundtrip_dump(conf, conf_file)
def test_caching_elements_ignoring_remotes(cli, tmpdir, datafiles):
    """Junction elements are pushed to the parent's remote, never the junction's.

    With "cache-junction-elements" and "ignore-junction-remotes" both
    enabled on the junction, a push of the whole pipeline must land every
    artifact — including the junctioned ones — in the parent project's
    share, while the junction project's own share stays empty.
    """
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    # Load the junction element and configure it to push everything to the
    # parent project's remote and nothing to the junction's own remote
    junction_element = os.path.join(project, "base.bst")
    junction_data = _yaml.roundtrip_load(junction_element)
    junction_data["config"] = {
        "cache-junction-elements": True,
        "ignore-junction-remotes": True,
    }
    _yaml.roundtrip_dump(junction_data, junction_element)

    parent_share_path = os.path.join(str(tmpdir), "artifactshare-parent")
    base_share_path = os.path.join(str(tmpdir), "artifactshare-base")
    with create_artifact_share(parent_share_path) as share, create_artifact_share(base_share_path) as base_share:

        # Build first, before any artifact cache is configured
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # Both projects' elements should now be cached locally
        assert cli.get_element_state(project, "target.bst") == "cached"
        assert cli.get_element_state(base_project, "base-element.bst") == "cached"

        # Now declare the artifact caches and push everything
        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)
        result = cli.run(project=project, args=["artifact", "push", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # The parent project's cache should *also* contain elements from the junction
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_shared(cli, share, base_project, "base-element.bst", project_name="base")

        # The junction project's cache should be empty
        assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
        assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
        assert_not_shared(cli, base_share, base_project, "base-element.bst", project_name="base")
def test_ignore_junction_remotes(cli, tmpdir, datafiles):
    """Pulling honors "ignore-junction-remotes" on the junction element.

    First builds and pushes so that the parent share holds only parent
    elements and the junction share holds only junction elements; then,
    after enabling "ignore-junction-remotes" and wiping the local caches,
    a pull must restore the parent elements but must NOT be able to fetch
    the junctioned element from the junction's remote.
    """
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    # Load the junction element
    junction_element = os.path.join(project, "base.bst")
    junction_data = _yaml.roundtrip_load(junction_element)

    with create_artifact_share(
        os.path.join(str(tmpdir), "artifactshare-parent")
    ) as share, create_artifact_share(
        os.path.join(str(tmpdir), "artifactshare-base")
    ) as base_share:
        # Immediately declare the artifact caches in the appropriate project configs
        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)

        # Build and populate the project remotes with their respective elements
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # And finally assert that the artifacts are in the right shares
        #
        # The parent project's cache should only contain project elements
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_not_shared(cli, share, base_project, "base-element.bst", project_name="base")

        # The junction project's cache should only contain elements in the junction project
        assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
        assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
        assert_shared(cli, base_share, base_project, "base-element.bst", project_name="base")

        # Ensure that, from now on, we ignore junction element remotes
        junction_data["config"] = {"ignore-junction-remotes": True}
        _yaml.roundtrip_dump(junction_data, junction_element)

        # Now delete everything from the local cache and try to
        # redownload from the shares.
        #
        # FIX: the `cas` assignment was accidentally commented out, which
        # made the following rmtree raise a NameError.
        cas = os.path.join(cli.directory, "cas")
        shutil.rmtree(cas)
        artifact_dir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifact_dir)

        # Assert that nothing is cached locally anymore
        state = cli.get_element_state(project, "target.bst")
        assert state != "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"

        # Now try bst artifact pull
        result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And assert that they are again in the local cache, without having built
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"

        # We shouldn't be able to download base-element!
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"