Example #1
File: filter.py  Project: wjt/buildstream
def test_filter_forbid_no_bdep(datafiles, cli):
    project = str(datafiles)
    result = cli.run(project=project, args=["build", "forbidden-no-bdep.bst"])
    result.assert_main_error(ErrorDomain.ELEMENT, "filter-bdepend-wrong-count")
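All of these snippets come from BuildStream's pytest suite and share a preamble that this listing omits. A plausible sketch of that preamble follows; the DATA_DIR layout and the exact import list are assumptions and vary from file to file:

# Common test-module preamble (sketch, not copied from any one listed file)
import os
import re

import pytest

from buildstream import _yaml
from buildstream.exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing import cli  # pylint: disable=unused-import
from buildstream.testing import create_repo

# Project data shipped next to the test module (assumed layout)
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")


@pytest.mark.datafiles(DATA_DIR)
def test_something(cli, datafiles):
    # The datafiles fixture copies DATA_DIR into a temporary directory
    project = str(datafiles)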
Example #2
File: push.py  Project: wjt/buildstream
def test_source_push_split(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with _configure_caches(tmpdir, "indexshare",
                           "storageshare") as (index, storage):
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [
                    {
                        "url": index.repo,
                        "push": True,
                        "type": "index"
                    },
                    {
                        "url": storage.repo,
                        "push": True,
                        "type": "storage"
                    },
                ]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements(["push.bst"])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check that the remote source index does not have the source yet
            assert not index.get_source_proto(source._get_source_name())

            # build the element; this should fetch the source and then push it
            # to the remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that the source is now in the local source cache
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert storage.has_object(digest)
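test_source_push_split relies on a _configure_caches() helper that the listing does not show. A plausible shape for it, assuming the create_artifact_share() utility from the test suite (both the import path and the exact implementation are guesses):

import os
from contextlib import ExitStack, contextmanager

from tests.testutils import create_artifact_share  # hypothetical import path


@contextmanager
def _configure_caches(tmpdir, *directories):
    # Spin up one share per requested directory and yield them as a tuple,
    # so callers can unpack e.g. (index, storage).
    with ExitStack() as stack:
        shares = [
            stack.enter_context(create_artifact_share(os.path.join(str(tmpdir), directory)))
            for directory in directories
        ]
        yield tuple(shares)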
Example #3
def test_no_packages(cli, datafiles):
    project = str(datafiles)
    generate_project(project)
    result = cli.run(project=project, args=["show", "target.bst"])
    result.assert_main_error(ErrorDomain.SOURCE, None)
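generate_project() is a module-local helper that is not shown here. A minimal sketch of what it presumably writes (the real fixture may also add source aliases or plugin origins):

def generate_project(project_dir):
    # Write a bare-bones project.conf so the directory is a valid BuildStream project
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump({"name": "foo", "min-version": "2.0"}, project_file)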
Example #4
def test_link_invalid_config(cli, tmpdir, datafiles, target, expected_error, expected_reason):
    project = os.path.join(str(datafiles), "invalid")
    result = cli.run(project=project, args=["show", target])
    result.assert_main_error(expected_error, expected_reason)
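The signature takes target, expected_error and expected_reason, so the real test is parametrized. A hypothetical decorator stack is shown below; the concrete element name and error reason are illustrative only:

@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
    "target,expected_error,expected_reason",
    [
        # Illustrative case only; the real table lives in the test module
        ("invalid-link.bst", ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA),
    ],
)
def test_link_invalid_config(cli, tmpdir, datafiles, target, expected_error, expected_reason):
    ...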
Example #5
File: mirror.py  Project: wjt/buildstream
def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
    # Test that it behaves as expected with submodules, both defined in config
    # and discovered when fetching.
    foo_file = os.path.join(str(datafiles), "files", "foo")
    bar_file = os.path.join(str(datafiles), "files", "bar")
    bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
    dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
    mirror_dir = os.path.join(str(datafiles), "mirror")

    defined_subrepo = create_repo("git", str(tmpdir), "defined_subrepo")
    defined_subrepo.create(bin_files_path)
    defined_subrepo.copy(mirror_dir)
    defined_subrepo.add_file(foo_file)

    found_subrepo = create_repo("git", str(tmpdir), "found_subrepo")
    found_subrepo.create(dev_files_path)

    main_repo = create_repo("git", str(tmpdir))
    main_mirror_ref = main_repo.create(bin_files_path)
    main_repo.add_submodule("defined", "file://" + defined_subrepo.repo)
    main_repo.add_submodule("found", "file://" + found_subrepo.repo)
    main_mirror = main_repo.copy(mirror_dir)
    main_repo.add_file(bar_file)

    project_dir = os.path.join(str(tmpdir), "project")
    os.makedirs(project_dir)
    element_dir = os.path.join(project_dir, "elements")
    os.makedirs(element_dir)
    element = {"kind": "import", "sources": [main_repo.source_config(ref=main_mirror_ref)]}
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)

    # Alias the main repo
    full_repo = element["sources"][0]["url"]
    _, repo_name = os.path.split(full_repo)
    alias = "foo"
    aliased_repo = alias + ":" + repo_name
    element["sources"][0]["url"] = aliased_repo

    # Hide the found subrepo
    del element["sources"][0]["submodules"]["found"]

    # Alias the defined subrepo
    subrepo = element["sources"][0]["submodules"]["defined"]["url"]
    _, repo_name = os.path.split(subrepo)
    aliased_repo = alias + ":" + repo_name
    element["sources"][0]["submodules"]["defined"]["url"] = aliased_repo

    _yaml.roundtrip_dump(element, element_path)

    full_mirror = main_mirror.source_config()["url"]
    mirror_map, _ = os.path.split(full_mirror)
    project = {
        "name": "test",
        "min-version": "2.0",
        "element-path": "elements",
        "aliases": {alias: "http://www.example.com/"},
        "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
    }
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(project, project_file)

    result = cli.run(project=project_dir, args=["source", "fetch", element_name])
    result.assert_success()
Example #6
def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")
    workspace = os.path.join(cli.directory, "workspace")
    checkout = os.path.join(cli.directory, "checkout")

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path)

    # Create a stack element to depend on a cross junction element
    #
    element = {
        "kind": "stack",
        "depends": [{
            "junction": "junction.bst",
            "filename": "import-etc.bst"
        }]
    }
    _yaml.roundtrip_dump(element, element_path)

    # Now open a workspace on the junction
    #
    result = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, "junction.bst"])
    result.assert_success()
    filename = os.path.join(workspace, "files", "etc-files", "etc",
                            "animal.conf")

    # Assert the content of /etc/animal.conf in the workspace
    assert os.path.exists(filename)
    with open(filename, "r") as f:
        contents = f.read()
    assert contents == "animal=Pony\n"

    # Modify the content of the animal.conf in the workspace
    with open(filename, "w") as f:
        f.write("animal=Horsy\n")

    # Now try to build it; this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, "junction-dep.bst") == "cached"

    # Now check it out
    result = cli.run(project=project,
                     args=[
                         "artifact", "checkout", "junction-dep.bst",
                         "--directory", checkout
                     ])
    result.assert_success()

    # Assert the workspace modified content of /etc/animal.conf
    filename = os.path.join(checkout, "etc", "animal.conf")
    assert os.path.exists(filename)
    with open(filename, "r") as f:
        contents = f.read()
    assert contents == "animal=Horsy\n"
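generate_junction() comes from the suite's shared test utilities. Roughly, it creates a git repo holding the subproject and writes a junction element that points at it; this is a sketch under that assumption, not the project's exact helper:

def generate_junction(tmpdir, subproject_path, junction_path, store_ref=True):
    # Create a repo to hold the subproject and generate a junction element for it
    repo = create_repo("git", str(tmpdir), "subproject")
    source_ref = ref = repo.create(subproject_path)
    if not store_ref:
        source_ref = None

    element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
    _yaml.roundtrip_dump(element, junction_path)
    return ref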
Example #7
def test_duplicates(cli, tmpdir, datafiles, project_dir):
    project = os.path.join(str(datafiles), project_dir)

    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
Example #8
def test_missing_project_conf(cli, datafiles):
    project = str(datafiles)
    result = cli.run(project=project, args=["workspace", "list"])
    result.assert_main_error(ErrorDomain.LOAD,
                             LoadErrorReason.MISSING_PROJECT_CONF)
Example #9
def test_plugin_preflight_error(cli, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename,
                           "plugin-preflight-error")
    result = cli.run(project=project, args=["source", "fetch", "error.bst"])
    result.assert_main_error(ErrorDomain.SOURCE, "the-preflight-error")
Example #10
def test_require_build_and_run(cli, datafiles, target):
    project = str(datafiles)
    result = cli.run(project=project, args=["show", target])
    result.assert_main_error(ErrorDomain.ELEMENT,
                             "stack-requires-build-and-run")
Example #11
def test_missing_local_plugin_directory(cli, datafiles):
    project = str(datafiles)
    result = cli.run(project=project, args=["workspace", "list"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
Example #12
File: filter.py  Project: wjt/buildstream
def test_filter_stack_depend_failure(datafiles, cli):
    project = str(datafiles)

    result = cli.run(project=project,
                     args=["build", "forbidden-stack-dep.bst"])
    result.assert_main_error(ErrorDomain.ELEMENT, "filter-bdepend-no-artifact")
Example #13
File: filter.py  Project: wjt/buildstream
def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    input_name = "input.bst"
    input2_name = "input2.bst"

    project_config = {
        "name": "filter-track-test",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }

    input_file = os.path.join(elements_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    input2_config = dict(input_config)
    input2_file = os.path.join(elements_dir, input2_name)
    _yaml.roundtrip_dump(input2_config, input2_file)

    filter1_config = {
        "kind": "filter",
        "depends": [{
            "filename": input_name,
            "type": "build"
        }]
    }
    filter1_file = os.path.join(elements_dir, "filter1.bst")
    _yaml.roundtrip_dump(filter1_config, filter1_file)

    filter2_config = {
        "kind": "filter",
        "depends": [{
            "filename": input2_name,
            "type": "build"
        }]
    }
    filter2_file = os.path.join(elements_dir, "filter2.bst")
    _yaml.roundtrip_dump(filter2_config, filter2_file)

    # Assert that both input elements still need to be tracked (no reference yet)
    states = cli.get_element_states(project, [input_name, input2_name])
    assert states == {
        input_name: "no reference",
        input2_name: "no reference",
    }

    # Now try to track it
    result = cli.run(project=project,
                     args=[
                         "source", "track", "filter1.bst", "filter2.bst",
                         "--except", input_name
                     ])
    result.assert_success()

    # The element excluded from tracking must remain without a ref,
    # while the other element gains the newly tracked ref
    new_input = _yaml.load(input_file, shortname=None)
    source_node = new_input.get_sequence("sources").mapping_at(0)
    assert "ref" not in source_node

    new_input2 = _yaml.load(input2_file, shortname=None)
    source_node2 = new_input2.get_sequence("sources").mapping_at(0)
    new_ref2 = source_node2.get_str("ref")
    assert new_ref2 == ref
Example #14
File: filter.py  Project: wjt/buildstream
def test_filter_forbid_also_rdep(datafiles, cli):
    project = str(datafiles)
    result = cli.run(project=project,
                     args=["build", "forbidden-also-rdep.bst"])
    result.assert_main_error(ErrorDomain.ELEMENT,
                             "filter-bdepend-also-rdepend")
Example #15
def test_fail_no_args(datafiles, cli):
    project = str(datafiles)

    result = cli.run(project=project, args=["artifact", "checkout"])
    result.assert_main_error(ErrorDomain.APP, None)
    assert "Missing argument" in result.stderr
Example #16
def test_empty_depends(cli, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename,
                           "empty-depends")
    result = cli.run(project=project, args=["show", "manual.bst"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
Example #17
def test_build_invalid_suffix(datafiles, cli, strict, hardlinks):
    project = str(datafiles)

    result = cli.run(project=project,
                     args=strict_args(["build", "target.foo"], strict))
    result.assert_main_error(ErrorDomain.LOAD, "bad-element-suffix")
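strict_args() is another module-local helper. A plausible sketch is that it simply injects the global --no-strict option ahead of the subcommand when the non-strict variant is requested (an assumption, not the verified implementation):

def strict_args(args, strict):
    # bst accepts --strict/--no-strict as a global option; strict is the default
    if strict != "strict":
        return ["--no-strict", *args]
    return args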
Example #18
def test_empty_project_name(cli, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename, "emptyname")
    result = cli.run(project=project, args=["workspace", "list"])
    result.assert_main_error(ErrorDomain.LOAD,
                             LoadErrorReason.INVALID_SYMBOL_NAME)
Example #19
def test_invalid(cli, datafiles, target, domain, reason, provenance):
    project = os.path.join(str(datafiles), "invalid")
    result = cli.run(project=project, args=["build", target])
    result.assert_main_error(domain, reason)
    assert provenance in result.stderr
Example #20
def test_invalid_yaml(cli, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename,
                           "invalid-yaml")
    result = cli.run(project=project, args=["workspace", "list"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_YAML)
Example #21
def test_link_not_found(cli, tmpdir, datafiles, target, provenance):
    project = os.path.join(str(datafiles), "notfound")
    result = cli.run(project=project, args=["build", target])

    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.MISSING_FILE)
    assert provenance in result.stderr
Example #22
File: track.py  Project: wjt/buildstream
def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction,
                              ref_storage):
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files")
    target_path = os.path.join(project, "target.bst")
    subtarget_path = os.path.join(project, "subproject", "subtarget.bst")

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    repo.create(dev_files_path)

    # Generate two elements using the git source, one in
    # the main project and one in the subproject.
    generate_element(repo, target_path, dep_name="subproject.bst")
    generate_element(repo, subtarget_path)

    # Generate project.conf
    #
    project_conf = {
        "name": "test",
        "min-version": "2.0",
        "ref-storage": ref_storage
    }
    _yaml.roundtrip_dump(project_conf, os.path.join(project, "project.conf"))

    #
    # FIXME: This can be simplified when we have support
    #        for addressing of junctioned elements.
    #
    def get_subproject_element_state():
        result = cli.run(project=project,
                         args=[
                             "show", "--deps", "all", "--format",
                             "%{name}|%{state}", "target.bst"
                         ])
        result.assert_success()

        # Create a two-dimensional list from the result;
        # the first line should be the junctioned element
        lines = [line.split("|") for line in result.output.splitlines()]
        assert lines[0][0] == "subproject-junction.bst:subtarget.bst"
        return lines[0][1]

    #
    # Assert that we have no reference yet for the cross junction element
    #
    assert get_subproject_element_state() == "no reference"

    # Track recursively across the junction
    args = ["source", "track", "--deps", "all"]
    if cross_junction == "cross":
        args += ["--cross-junctions"]
    args += ["target.bst"]

    result = cli.run(project=project, args=args)

    if ref_storage == "inline":

        if cross_junction == "cross":
            #
            # Cross junction tracking is not allowed when the toplevel project
            # is using inline ref storage.
            #
            result.assert_main_error(ErrorDomain.STREAM, "untrackable-sources")
        else:
            #
            # No cross junction tracking was requested
            #
            result.assert_success()
            assert get_subproject_element_state() == "no reference"
    else:
        #
        # Tracking is allowed with project.refs ref storage
        #
        result.assert_success()

        #
        # If cross junction tracking was enabled, we should now be buildable
        #
        if cross_junction == "cross":
            assert get_subproject_element_state() == "buildable"
        else:
            assert get_subproject_element_state() == "no reference"
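generate_element() here presumably writes an import element backed by the given repo, optionally depending on another element. A sketch under that assumption (the project's real helper may store the ref differently):

def generate_element(repo, element_path, dep_name=None):
    element = {"kind": "import", "sources": [repo.source_config()]}
    if dep_name:
        element["depends"] = [dep_name]
    _yaml.roundtrip_dump(element, element_path)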
Example #23
def test_invalid_relative_path(cli, datafiles):
    project = str(datafiles)

    result = cli.run(project=project, args=["show", "irregular.bst"])
    result.assert_main_error(ErrorDomain.LOAD,
                             LoadErrorReason.PROJ_PATH_INVALID)
Example #24
File: track.py  Project: wjt/buildstream
def test_track_consistency_error(cli, datafiles):
    project = str(datafiles)

    # Track the element causing a consistency error
    result = cli.run(project=project, args=["source", "track", "error.bst"])
    result.assert_main_error(ErrorDomain.SOURCE, "the-consistency-error")
Example #25
File: mirror.py  Project: wjt/buildstream
def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
    # The main repo has a mirror, but the source configuration does not list
    # the submodules.
    #
    # We expect the submodules to be fetched anyway.

    bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
    dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")

    bin_repodir = os.path.join(str(tmpdir), "bin-repo")
    bin_repo = create_repo("git", bin_repodir)
    bin_repo.create(bin_files_path)

    dev_repodir = os.path.join(str(tmpdir), "dev-repo")
    dev_repo = create_repo("git", dev_repodir)
    dev_repo.create(dev_files_path)

    main_files = os.path.join(str(tmpdir), "main-files")
    os.makedirs(main_files)
    with open(os.path.join(main_files, "README"), "w") as f:
        f.write("TEST\n")
    upstream_main_repodir = os.path.join(str(tmpdir), "main-upstream")
    upstream_main_repo = create_repo("git", upstream_main_repodir)
    upstream_main_repo.create(main_files)

    upstream_main_repo.add_submodule("bin", url="file://{}".format(bin_repo.repo))
    upstream_main_repo.add_submodule("dev", url="file://{}".format(dev_repo.repo))
    # Unlist submodules.
    del upstream_main_repo.submodules["bin"]
    del upstream_main_repo.submodules["dev"]

    upstream_main_ref = upstream_main_repo.latest_commit()

    mirror_main_repodir = os.path.join(str(tmpdir), "main-mirror")
    mirror_main_repo = upstream_main_repo.copy(mirror_main_repodir)

    upstream_url = mirror_main_repo.source_config()["url"]

    upstream_map, repo_name = os.path.split(upstream_url)
    alias = "foo"
    aliased_repo = "{}:{}".format(alias, repo_name)

    full_mirror = mirror_main_repo.source_config()["url"]
    mirror_map, _ = os.path.split(full_mirror)

    project_dir = os.path.join(str(tmpdir), "project")
    os.makedirs(project_dir)
    element_dir = os.path.join(project_dir, "elements")

    element = {
        "kind": "import",
        "sources": [upstream_main_repo.source_config_extra(ref=upstream_main_ref, checkout_submodules=True)],
    }
    element["sources"][0]["url"] = aliased_repo
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)
    os.makedirs(element_dir)
    _yaml.roundtrip_dump(element, element_path)

    project = {
        "name": "test",
        "min-version": "2.0",
        "element-path": "elements",
        "aliases": {alias: upstream_map + "/"},
        "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}],
    }
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(project, project_file)

    # Now make the upstream unavailable.
    os.rename(upstream_main_repo.repo, "{}.bak".format(upstream_main_repo.repo))
    result = cli.run(project=project_dir, args=["source", "fetch", element_name])
    result.assert_success()

    result = cli.run(project=project_dir, args=["build", element_name])
    result.assert_success()

    checkout = os.path.join(str(tmpdir), "checkout")
    result = cli.run(project=project_dir, args=["artifact", "checkout", element_name, "--directory", checkout])
    result.assert_success()

    assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
    assert os.path.exists(os.path.join(checkout, "dev", "include", "pony.h"))
Example #26
File: track.py  Project: wjt/buildstream
def test_track_skip(cli, tmpdir, datafiles):
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_dep_name = "track-test-dep.bst"
    element_workspace_name = "track-test-workspace.bst"
    element_target_name = "track-test-target.bst"
    workspace_dir = os.path.join(str(tmpdir), "workspace")

    # Generate an import element with some local source plugins, these
    # do not implement track() and thus can be skipped.
    #
    element = {
        "kind":
        "import",
        "sources": [
            {
                "kind": "local",
                "path": "files/dev-files",
                "directory": "/foo"
            },
            {
                "kind": "local",
                "path": "files/dev-files",
                "directory": "/bar"
            },
        ],
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_dep_name))

    # Generate a regular import element which will have a workspace open
    #
    repo = create_repo("tar", str(tmpdir))
    repo.create(dev_files_path)
    generate_element(repo, os.path.join(element_path, element_workspace_name))

    # Generate a stack element which depends on the import of local files
    #
    # Stack elements do not have any sources, as such they are also skipped.
    #
    element = {
        "kind": "stack",
        "depends": [element_dep_name, element_workspace_name],
    }
    _yaml.roundtrip_dump(element,
                         os.path.join(element_path, element_target_name))

    # First track and fetch the workspace element
    result = cli.run(
        project=project,
        args=["source", "track", "--deps", "none", element_workspace_name])
    result.assert_success()
    result = cli.run(
        project=project,
        args=["source", "fetch", "--deps", "none", element_workspace_name])
    result.assert_success()

    # Open the workspace so it really is a workspace
    result = cli.run(project=project,
                     args=[
                         "workspace", "open", "--directory", workspace_dir,
                         element_workspace_name
                     ])
    result.assert_success()

    # Now run track on the stack and all the deps
    result = cli.run(
        project=project,
        args=["source", "track", "--deps", "all", element_target_name])
    result.assert_success()

    # Assert we got the expected skip messages
    pattern = r"\[.*track:track-test-dep\.bst.*\] SKIPPED"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 1
    pattern = r"\[.*track:track-test-workspace\.bst.*\] SKIPPED"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 1

    # For now, we expect to not see the job for stack elements
    #
    # This may be revisited, need to consider if we should emit
    # START/SKIPPED message pairs for jobs which were assessed to
    # be unneeded before ever processing.
    #
    pattern = r"\[.*track:track-test-target\.bst.*\]"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 0
Example #27
def test_first_source(cli, datafiles):
    project = str(datafiles)
    generate_project(project)
    result = cli.run(project=project, args=["show", "target.bst"])
    result.assert_main_error(ErrorDomain.ELEMENT, None)
Example #28
File: track.py  Project: wjt/buildstream
def test_track_optional(cli, tmpdir, datafiles, ref_storage):
    project = os.path.join(datafiles.dirname, datafiles.basename,
                           "track-optional-" + ref_storage)
    dev_files_path = os.path.join(project, "files")
    element_path = os.path.join(project, "target.bst")

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    repo.create(dev_files_path)

    # Now create an optional test branch and add a commit to that,
    # so two branches with different heads now exist.
    #
    repo.branch("test")
    repo.add_commit()

    # Substitute the {repo} for the git repo we created
    with open(element_path) as f:
        target_bst = f.read()
    target_bst = target_bst.format(repo=repo.repo)
    with open(element_path, "w") as f:
        f.write(target_bst)

    # First track for both options
    #
    # We want to track and persist the ref separately in this test
    #
    result = cli.run(
        project=project,
        args=["--option", "test", "False", "source", "track", "target.bst"])
    result.assert_success()
    result = cli.run(
        project=project,
        args=["--option", "test", "True", "source", "track", "target.bst"])
    result.assert_success()

    # Now fetch the key for both options
    #
    result = cli.run(
        project=project,
        args=[
            "--option", "test", "False", "show", "--deps", "none", "--format",
            "%{key}", "target.bst"
        ],
    )
    result.assert_success()
    master_key = result.output

    result = cli.run(
        project=project,
        args=[
            "--option", "test", "True", "show", "--deps", "none", "--format",
            "%{key}", "target.bst"
        ],
    )
    result.assert_success()
    test_key = result.output

    # Assert that the cache keys differ, since the two option values
    # tracked different branches
    assert test_key != master_key
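For the --option flag to work, the fixture's project.conf has to declare the boolean "test" option. A minimal sketch of such a declaration, expressed as the dict that _yaml.roundtrip_dump() would receive (an assumption about the fixture, not its actual contents):

project_conf = {
    "name": "test",
    "min-version": "2.0",
    "options": {
        "test": {
            "type": "bool",
            "description": "Switches which branch target.bst tracks",
            "default": False,
        },
    },
}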
Example #29
def test_push(cli, tmpdir, datafiles):
    project = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project, "target.bst") == "cached"

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare1")) as share1:

        with create_artifact_share(os.path.join(str(tmpdir),
                                                "artifactshare2")) as share2:

            # Try pushing with no remotes configured. This should fail.
            result = cli.run(project=project,
                             args=["artifact", "push", "target.bst"])
            result.assert_main_error(ErrorDomain.STREAM, None)

            # Configure bst to pull but not push from a cache and run `bst artifact push`.
            # This should also fail.
            cli.configure({
                "artifacts": {
                    "servers": [{
                        "url": share1.repo,
                        "push": False
                    }]
                }
            })
            result = cli.run(project=project,
                             args=["artifact", "push", "target.bst"])
            result.assert_main_error(ErrorDomain.STREAM, None)

            # Configure bst to push to one of the caches and run `bst artifact push`. This works.
            cli.configure({
                "artifacts": {
                    "servers": [
                        {
                            "url": share1.repo,
                            "push": False
                        },
                        {
                            "url": share2.repo,
                            "push": True
                        },
                    ]
                }
            })
            cli.run(project=project, args=["artifact", "push", "target.bst"])

            assert_not_shared(cli, share1, project, "target.bst")
            assert_shared(cli, share2, project, "target.bst")

        # Now try pushing to both

        with create_artifact_share(os.path.join(str(tmpdir),
                                                "artifactshare2")) as share2:
            cli.configure({
                "artifacts": {
                    "servers": [
                        {
                            "url": share1.repo,
                            "push": True
                        },
                        {
                            "url": share2.repo,
                            "push": True
                        },
                    ]
                }
            })
            cli.run(project=project, args=["artifact", "push", "target.bst"])

            assert_shared(cli, share1, project, "target.bst")
            assert_shared(cli, share2, project, "target.bst")
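assert_shared() and assert_not_shared() are small helpers around the artifact-share fixture. Roughly, they look up the artifact by name and raise when the expectation does not hold; this sketch assumes the share exposes get_artifact() and the cli fixture exposes get_artifact_name(), which may differ from the real utilities:

def assert_shared(cli, share, project, element_name, *, project_name="test"):
    # Raise if the share does not hold the element's artifact
    if not share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
        raise AssertionError(
            "Artifact share at {} does not contain the expected element {}".format(share.repo, element_name)
        )


def assert_not_shared(cli, share, project, element_name, *, project_name="test"):
    # Raise if the share unexpectedly holds the element's artifact
    if share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
        raise AssertionError(
            "Artifact share at {} unexpectedly contains the element {}".format(share.repo, element_name)
        )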
Example #30
File: filter.py  Project: wjt/buildstream
def test_filter_forbid_sources(datafiles, cli):
    project = str(datafiles)
    result = cli.run(project=project, args=["build", "forbidden-source.bst"])
    result.assert_main_error(ErrorDomain.ELEMENT, "element-forbidden-sources")