def test_filter_track_multi(datafiles, cli, tmpdir):
    """Tracking two filter elements must update the refs of both wrapped inputs."""
    workdir = str(tmpdir)
    repo = create_repo("tar", workdir)
    expected_ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(workdir, "elements")
    project = workdir
    input_name = "input.bst"
    input2_name = "input2.bst"

    # Write out the project configuration
    _yaml.roundtrip_dump(
        {
            "name": "filter-track-test",
            "min-version": "2.0",
            "element-path": "elements",
        },
        os.path.join(workdir, "project.conf"),
    )

    # Two identical import elements, each wrapped by its own filter element
    import_config = {"kind": "import", "sources": [repo.source_config()]}
    input_file = os.path.join(elements_dir, input_name)
    input2_file = os.path.join(elements_dir, input2_name)
    _yaml.roundtrip_dump(import_config, input_file)
    _yaml.roundtrip_dump(dict(import_config), input2_file)

    for number, target in ((1, input_name), (2, input2_name)):
        filter_config = {"kind": "filter", "depends": [{"filename": target, "type": "build"}]}
        _yaml.roundtrip_dump(filter_config, os.path.join(elements_dir, "filter{}.bst".format(number)))

    # Neither input has a ref yet, so a fetch would be needed
    assert cli.get_element_states(project, [input_name, input2_name]) == {
        input_name: "no reference",
        input2_name: "no reference",
    }

    # Tracking the filters must transitively track their build dependencies
    result = cli.run(project=project, args=["source", "track", "filter1.bst", "filter2.bst"])
    result.assert_success()

    # Both input element files should now carry the tracked ref
    for element_file in (input_file, input2_file):
        element = _yaml.load(element_file, shortname=None)
        source = element.get_sequence("sources").mapping_at(0)
        assert source.get_str("ref") == expected_ref
def test_list_deletion(datafiles):
    """An empty list-overwrite composited on top deletes every child."""
    datadir = os.path.join(datafiles.dirname, datafiles.basename)
    target = _yaml.load(os.path.join(datadir, "basics.yaml"), shortname="basics.yaml")
    overlay = _yaml.load(os.path.join(datadir, "listoverwriteempty.yaml"), shortname="listoverwriteempty.yaml")

    overlay._composite(target)

    assert not target.get_sequence("children")
def test_list_composition(datafiles, filename, tmpdir, index, length, mood, prov_file, prov_line, prov_col):
    """Composite an overlay onto basics.yaml and verify the resulting child list."""
    datadir = os.path.join(datafiles.dirname, datafiles.basename)
    target = _yaml.load(os.path.join(datadir, "basics.yaml"), shortname="basics.yaml")
    overlay = _yaml.load(os.path.join(datadir, filename), shortname=filename)

    overlay._composite(target)

    composed_children = target.get_sequence("children")
    assert len(composed_children) == length

    # The parametrized child must carry the expected mood and provenance
    selected = composed_children.mapping_at(index)
    assert selected.get_str("mood") == mood
    assert_provenance(prov_file, prov_line, prov_col, selected.get_node("mood"))
def test_mapping_validate_keys(datafiles):
    """validate_keys() passes on a valid file and raises INVALID_DATA otherwise."""
    datadir = os.path.join(datafiles.dirname, datafiles.basename)
    allowed = ["kind", "description", "moods", "children", "extra"]

    # The valid file contains only allowed keys
    _yaml.load(os.path.join(datadir, "basics.yaml"), shortname=None).validate_keys(allowed)

    # The invalid file has a key outside the allowed set
    invalid = _yaml.load(os.path.join(datadir, "invalid.yaml"), shortname=None)
    with pytest.raises(LoadError) as exc:
        invalid.validate_keys(allowed)
    assert exc.value.reason == LoadErrorReason.INVALID_DATA
def test_overwrite_directive_on_later_composite(tmpdir):
    """The (=) overwrite directive must survive composition over later includes."""
    with make_includes(str(tmpdir)) as includes:
        # main.yml includes a.yml then b.yml, and forces 'test' with (=)
        _yaml.roundtrip_dump(
            {"(@)": ["a.yml", "b.yml"], "test": {"(=)": ["Overwritten"]}},
            str(tmpdir.join("main.yml")),
        )
        main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)

        # a.yml provides a 'test' that must be overwritten; its 'foo' is shadowed
        _yaml.roundtrip_dump(
            {"test": ["some useless", "list", "to be overwritten"], "foo": "should not be present"},
            str(tmpdir.join("a.yml")),
        )

        # b.yml has no 'test' node to overwrite, but its 'foo' should win
        _yaml.roundtrip_dump({"foo": "should be present"}, str(tmpdir.join("b.yml")))

        includes.process(main)

        assert main.get_str_list("test") == ["Overwritten"]
        assert main.get_str("foo") == "should be present"
def test_load_yaml(datafiles):
    """A basic load of basics.yaml succeeds and exposes scalar values."""
    yaml_path = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_path, shortname=None)
    assert document.get_str("kind") == "pony"
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage):
    """Building through an unreferenced junction fails with SUBPROJECT_INCONSISTENT."""
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})

    # Junction to the subproject, deliberately left without a stored ref
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # A stack element depending across the junction
    _yaml.roundtrip_dump(
        {"kind": "stack", "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}]},
        element_path,
    )

    # The build must bail out, telling the user to track the junction first
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)

    # The error output must carry the provenance of the cross-junction dependency
    depends_node = _yaml.load(element_path, shortname="junction-dep.bst").get_sequence("depends").mapping_at(0)
    assert str(depends_node.get_provenance()) in result.stderr
def test_element_provenance(datafiles):
    """Provenance of a list member within the loaded document is tracked."""
    yaml_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_file, shortname=None)
    assert document.get_str("kind") == "pony"
    # The second entry of the 'moods' list is expected at line 5, column 2
    assert_provenance(yaml_file, 5, 2, document.get_sequence("moods").scalar_at(1))
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage, workspaced):
    """Showing through an unreferenced junction fails unless a junction workspace is open."""
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})

    # Junction to the subproject, deliberately left without a stored ref
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # A stack element depending across the junction
    _yaml.roundtrip_dump(
        {"kind": "stack", "depends": [{"junction": "junction.bst", "filename": "import-etc.bst"}]},
        element_path,
    )

    # Optionally open a workspace on the junction itself
    if workspaced:
        result = cli.run(
            project=project,
            silent=True,
            args=["workspace", "open", "--no-checkout", "--directory", subproject_path, "junction.bst"],
        )
        result.assert_success()

    # Try to show both the depending element and a cross-junction element
    dep_result = cli.run(project=project, silent=True, args=["show", "junction-dep.bst"])
    etc_result = cli.run(project=project, silent=True, args=["show", "junction.bst:import-etc.bst"])

    if workspaced:
        # With an open workspace, no ref is needed
        dep_result.assert_success()
        etc_result.assert_success()
    else:
        # Expect SUBPROJECT_INCONSISTENT, with the provenance of the
        # cross-junction dependency encoded into the error output
        depends_node = _yaml.load(element_path, shortname="junction-dep.bst").get_sequence("depends").mapping_at(0)
        assert str(depends_node.get_provenance()) in dep_result.stderr
        dep_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
        etc_result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.SUBPROJECT_INCONSISTENT)
def test_node_find_target(datafiles, case):
    """_find() on the provenance toplevel returns the path of an equivalent node."""
    filename = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")

    # copy_tree ensures the nodes in `loaded` are distinct objects from
    # those reachable through the provenance toplevel.
    loaded = _yaml.load(filename, shortname=None, copy_tree=True)
    toplevel = loaded.get_provenance()._toplevel
    assert toplevel is not loaded

    # Descend the node tree step by step, with insider knowledge of how
    # nodes are laid out. Client code should never do this.
    target = loaded
    for step in case:
        if isinstance(step, int):
            target = target.node_at(step)
        else:
            target = target.get_node(step)

    assert toplevel._find(target) == case
def test_element_path_interactive(cli, tmp_path, monkeypatch, element_path):
    """`bst init` run interactively should honor the element-path answer.

    A dummy App subclass is monkeypatched in so that the interactive
    questionnaire deterministically answers with ``element_path``.
    """
    project = tmp_path
    project_conf_path = project.joinpath("project.conf")

    class DummyInteractiveApp(App):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # Force interactive mode so _init_project_interactive() is consulted
            self.interactive = True

        @classmethod
        def create(cls, *args, **kwargs):
            return DummyInteractiveApp(*args, **kwargs)

        def _init_project_interactive(self, *args, **kwargs):  # pylint: disable=signature-differs
            # Canned answers: (project name, min version, element path)
            return ("project_name", "2.0", element_path)

    monkeypatch.setattr(App, "create", DummyInteractiveApp.create)

    result = cli.run(args=["init", str(project)])
    result.assert_success()

    # The chosen element directory must have been created
    full_element_path = project.joinpath(element_path)
    assert full_element_path.exists()

    # And project.conf must reflect the interactive answers
    project_conf = _yaml.load(str(project_conf_path), shortname=None)
    assert project_conf.get_str("name") == "project_name"
    assert project_conf.get_str("min-version") == "2.0"
    assert project_conf.get_str("element-path") == element_path
def test_member_provenance(datafiles):
    """Provenance of a scalar member of the toplevel mapping is tracked."""
    yaml_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_file, shortname=None)
    assert document.get_str("kind") == "pony"
    # The 'description' value is expected at line 2, column 13
    assert_provenance(yaml_file, 2, 13, document.get_scalar("description"))
def test_node_find_target_fails(datafiles):
    """_find() returns None for a node that is not part of the loaded tree."""
    traversal_file = os.path.join(datafiles.dirname, datafiles.basename, "traversal.yaml")
    document = _yaml.load(traversal_file, shortname=None, copy_tree=True)
    unrelated = Node.from_dict({})
    assert document._find(unrelated) is None
def test_basic_provenance(datafiles):
    """The toplevel node carries provenance pointing at the start of the file."""
    yaml_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_file, shortname=None)
    assert document.get_str("kind") == "pony"
    assert_provenance(yaml_file, 1, 0, document)
def test_get_str_list_default_none(datafiles):
    """get_str_list() hands back the provided None default for a missing key."""
    config_path = os.path.join(datafiles.dirname, datafiles.basename, "list-of-dict.yaml")
    document = _yaml.load(config_path, shortname=None)
    # No "pony" key exists here, so the default must be returned untouched
    assert document.get_str_list("pony", None) is None
def test_composite_preserve_originals(datafiles):
    """Compositing onto a clone must leave the original node untouched."""
    datadir = os.path.join(datafiles.dirname, datafiles.basename)
    original = _yaml.load(os.path.join(datadir, "basics.yaml"), shortname=None)
    overlay = _yaml.load(os.path.join(datadir, "composite.yaml"), shortname=None)

    clone = original.clone()
    overlay._composite(clone)

    # The clone carries the overridden value ...
    assert clone.get_mapping("extra").get_str("old") == "override"
    # ... while the original node is unaffected by the override
    assert original.get_mapping("extra").get_str("old") == "new"
def test_value_doesnt_match_expected(datafiles):
    """get_int() on a non-integer value raises LoadError with INVALID_DATA."""
    config_path = os.path.join(datafiles.dirname, datafiles.basename, "convert_value_to_str.yaml")
    document = _yaml.load(config_path, shortname=None)
    with pytest.raises(LoadError) as exc:
        document.get_int("Test4")
    assert exc.value.reason == LoadErrorReason.INVALID_DATA
def test_track_include(cli, tmpdir, datafiles, ref_storage, kind):
    """Tracking an element whose sources come from an include writes the ref into the included file."""
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_name = "track-test-{}.bst".format(kind)

    update_project_configuration(project, {"ref-storage": ref_storage})

    # Create a repo of the requested source kind holding the dev
    # files, and remember the initial ref.
    repo = create_repo(kind, str(tmpdir))
    ref = repo.create(dev_files_path)

    # The element pulls its sources in via an include directive
    _yaml.roundtrip_dump({"kind": "import", "(@)": ["elements/sources.yml"]}, os.path.join(element_path, element_name))
    _yaml.roundtrip_dump({"sources": [repo.source_config()]}, os.path.join(element_path, "sources.yml"))

    # Without a ref, the element needs fetching
    assert cli.get_element_state(project, element_name) == "no reference"

    # Track first to resolve the ref ...
    result = cli.run(project=project, args=["source", "track", element_name])
    result.assert_success()

    # ... then fetch: tracking records the latest ref but is not
    # required to have cached the associated content, that is the
    # job of fetch.
    result = cli.run(project=project, args=["source", "fetch", element_name])
    result.assert_success()

    # With the source now cached, the element becomes buildable
    assert cli.get_element_state(project, element_name) == "buildable"

    # project.refs should exist only with that ref-storage configuration
    refs_file = os.path.join(project, "project.refs")
    if ref_storage == "project.refs":
        assert os.path.exists(refs_file)
    else:
        assert not os.path.exists(refs_file)

    # The tracked ref must have landed in the included sources.yml
    new_sources = _yaml.load(os.path.join(element_path, "sources.yml"), shortname="sources.yml")
    assert "sources" in new_sources
    sources_list = new_sources.get_sequence("sources")
    assert len(sources_list) == 1
    tracked_source = sources_list.mapping_at(0)
    assert "ref" in tracked_source
    assert tracked_source.get_str("ref") == ref
def test_get_str_list_invalid(datafiles, filename, provenance):
    """get_str_list() on malformed data raises INVALID_DATA and reports provenance."""
    config_path = os.path.join(datafiles.dirname, datafiles.basename, filename)
    document = _yaml.load(config_path, shortname=None)
    with pytest.raises(LoadError) as exc:
        document.get_str_list("list")
    assert exc.value.reason == LoadErrorReason.INVALID_DATA
    assert provenance in str(exc.value)
def test_node_set(datafiles):
    """Assigning a brand new key on a loaded node makes it retrievable."""
    yaml_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_file, shortname=None)

    assert "mother" not in document
    document["mother"] = "snow white"
    assert document.get_str("mother") == "snow white"
def test_mirror_track_upstream_absent(cli, tmpdir, datafiles, kind):
    """Tracking falls back to a configured mirror when the upstream alias is unreachable.

    The upstream repo is advanced past the mirror's copy, and the project
    alias is pointed at an unreachable URL, so tracking is expected to
    resolve the ref from the mirror instead.
    """
    project_dir = str(datafiles)
    bin_files_path = os.path.join(project_dir, "files", "bin-files", "usr")
    dev_files_path = os.path.join(project_dir, "files", "dev-files", "usr")
    upstream_repodir = os.path.join(str(tmpdir), "upstream")
    mirror_repodir = os.path.join(str(tmpdir), "mirror")
    element_dir = os.path.join(project_dir, "elements")

    # Create repo objects of the upstream and mirror
    upstream_repo = create_repo(kind, upstream_repodir)
    upstream_ref = upstream_repo.create(bin_files_path)
    mirror_repo = upstream_repo.copy(mirror_repodir)
    mirror_ref = upstream_ref
    # Advance upstream beyond the mirror so the two refs differ
    upstream_ref = upstream_repo.create(dev_files_path)

    # Configure to allow tracking mirrors as well as upstream
    cli.configure({"track": {"source": "all"}})
    element = {"kind": "import", "sources": [upstream_repo.source_config(ref=upstream_ref)]}
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)
    # Rewrite the source URL to use a project alias instead of the real location
    full_repo = element["sources"][0]["url"]
    _, repo_name = os.path.split(full_repo)
    alias = "foo-" + kind
    aliased_repo = alias + ":" + repo_name
    element["sources"][0]["url"] = aliased_repo
    full_mirror = mirror_repo.source_config()["url"]
    mirror_map, _ = os.path.split(full_mirror)
    _yaml.roundtrip_dump(element, element_path)

    # The mirror resolves the alias to the local mirror repo, while the
    # alias itself points at an example.com URL, simulating an absent upstream.
    _set_project_mirrors_and_aliases(
        project_dir,
        [
            {
                "name": "middle-earth",
                "aliases": {
                    alias: [mirror_map + "/"],
                },
            },
        ],
        {alias: "http://www.example.com"},
    )

    result = cli.run(project=project_dir, args=["source", "track", element_name])
    result.assert_success()

    # Check that tracking fell back to the mirror
    # NOTE(review): the assertion is conditional on "ref" being present, so the
    # test silently passes if tracking wrote no ref at all — confirm whether an
    # unconditional `assert "ref" in source` was intended.
    new_element = _yaml.load(element_path, shortname=element_name)
    source = new_element.get_sequence("sources").mapping_at(0)
    if "ref" in source:
        assert source.get_str("ref") == mirror_ref
def test_node_set_list_element(datafiles):
    """Assigning into a sequence element updates the list in place."""
    yaml_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_file, shortname=None)

    assert document.get_str_list("moods") == ["happy", "sad"]
    document.get_sequence("moods")[0] = "confused"
    assert document.get_str_list("moods") == ["confused", "sad"]
def test_defaults(cli, tmpdir):
    """`bst init --project-name` writes a project.conf with default settings."""
    project = str(tmpdir)
    conf_path = os.path.join(project, "project.conf")

    cli.run(args=["init", "--project-name", "foo", project]).assert_success()

    conf = _yaml.load(conf_path, shortname=None)
    assert conf.get_str("name") == "foo"
    assert conf.get_str("min-version") == get_default_min_version()
    assert conf.get_str("element-path") == "elements"
def run_project_config(self, *, project_config=None, **kwargs):
    """Run the CLI after substituting {project_dir} in project.conf.

    Optionally composites *project_config* on top of the substituted base
    configuration before running.  Remaining *kwargs* are forwarded to the
    parent run() implementation; kwargs["project"] must be the project
    directory.
    """
    # First load the project.conf and substitute {project_dir}
    #
    # Save the original project.conf, because we will run more than
    # once in the same temp directory
    #
    project_directory = kwargs["project"]
    project_filename = os.path.join(project_directory, "project.conf")
    project_backup = os.path.join(project_directory, "project.conf.backup")
    project_load_filename = project_filename

    if not os.path.exists(project_backup):
        # First run: keep a pristine copy of project.conf
        shutil.copy(project_filename, project_backup)
    else:
        # Subsequent runs: substitute from the pristine backup, since
        # project.conf itself was rewritten by the previous run
        project_load_filename = project_backup

    with open(project_load_filename) as f:
        config = f.read()
    config = config.format(project_dir=project_directory)

    if project_config is not None:
        # If a custom project configuration dictionary was
        # specified, composite it on top of the already
        # substituted base project configuration
        #
        base_config = _yaml.load_data(config)

        # In order to leverage _yaml.composite_dict(), both
        # dictionaries need to be loaded via _yaml.load_data() first
        #
        with tempfile.TemporaryDirectory(dir=project_directory) as scratchdir:
            temp_project = os.path.join(scratchdir, "project.conf")
            with open(temp_project, "w") as f:
                yaml.safe_dump(project_config, f)
            project_config = _yaml.load(temp_project, shortname="project.conf")

        project_config._composite(base_config)
        _yaml.roundtrip_dump(base_config, project_filename)
    else:
        # Otherwise, just dump it as is
        with open(project_filename, "w") as f:
            f.write(config)

    return super().run(**kwargs)
def test_relative_path_directory_as_argument(cli, tmpdir):
    """`bst init` accepts a relative path to the target directory."""
    project = os.path.join(str(tmpdir), "child-directory")
    os.makedirs(project, exist_ok=True)
    conf_path = os.path.join(project, "project.conf")

    relative = os.path.relpath(project)
    cli.run(args=["init", "--project-name", "foo", relative]).assert_success()

    conf = _yaml.load(conf_path, shortname=None)
    assert conf.get_str("name") == "foo"
    assert conf.get_str("min-version") == get_default_min_version()
    assert conf.get_str("element-path") == "elements"
def test_force_overwrite_project(cli, tmpdir):
    """`bst init --force` overwrites a pre-existing project.conf."""
    project = str(tmpdir)
    conf_path = os.path.join(project, "project.conf")

    # Seed a conflicting project.conf that must be clobbered
    with open(conf_path, "w") as f:
        f.write("name: pony\n")

    cli.run(args=["init", "--project-name", "foo", "--force", project]).assert_success()

    conf = _yaml.load(conf_path, shortname=None)
    assert conf.get_str("name") == "foo"
    assert conf.get_str("min-version") == get_default_min_version()
def test_lastest_sibling_has_priority(tmpdir):
    """Of two sibling includes, the later one wins for conflicting keys.

    NOTE(review): "lastest" in the test name looks like a typo for
    "latest"; renaming would change the collected test id, so it is kept.
    """
    with make_includes(str(tmpdir)) as includes:
        _yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
        main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)

        _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))
        _yaml.roundtrip_dump({"test": ["b"]}, str(tmpdir.join("b.yml")))

        includes.process(main)

        assert main.get_str_list("test") == ["b"]
def test_node_set_overwrite(datafiles):
    """Existing keys can be overwritten, including replacing a list with a string."""
    yaml_file = os.path.join(datafiles.dirname, datafiles.basename, "basics.yaml")
    document = _yaml.load(yaml_file, shortname=None)

    # Replace a string value
    assert document.get_str("kind") == "pony"
    document["kind"] = "cow"
    assert document.get_str("kind") == "cow"

    # Replace a list value with a plain string
    assert document.get_str_list("moods") == ["happy", "sad"]
    document["moods"] = "unemotional"
    assert document.get_str("moods") == "unemotional"
def test_sibling_cannot_append_backward(tmpdir):
    """A (>) append in an earlier include cannot modify a later sibling's list."""
    with make_includes(str(tmpdir)) as includes:
        _yaml.roundtrip_dump({"(@)": ["a.yml", "b.yml"]}, str(tmpdir.join("main.yml")))
        main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)

        # a.yml tries to append, but b.yml comes later and plainly sets the list
        _yaml.roundtrip_dump({"test": {"(>)": ["a"]}}, str(tmpdir.join("a.yml")))
        _yaml.roundtrip_dump({"test": ["b"]}, str(tmpdir.join("b.yml")))

        includes.process(main)

        assert main.get_str_list("test") == ["b"]
def test_main_keeps_keys(tmpdir):
    """Processing includes must not discard unrelated keys of the including file."""
    with make_includes(str(tmpdir)) as includes:
        _yaml.roundtrip_dump({"(@)": ["a.yml"], "something": "else"}, str(tmpdir.join("main.yml")))
        main = _yaml.load(str(tmpdir.join("main.yml")), shortname=None)

        _yaml.roundtrip_dump({"test": ["a"]}, str(tmpdir.join("a.yml")))

        includes.process(main)

        assert main.get_str_list("test") == ["a"]
        assert main.get_str("something") == "else"