Code Example #1
def test_ensure_misformed_project_overrides_give_sensible_errors(
        cli, datafiles):
    userconfig = {"projects": {"test": []}}
    cli.configure(userconfig)

    result = cli.run(project=datafiles, args=["show"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
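These snippets come from BuildStream's pytest suite, so the imports, fixtures and decorators they depend on are not shown on this page. Below is a minimal sketch of the boilerplate they assume; the exact module paths of the testing helpers differ between BuildStream releases, so treat them as assumptions:

import os
import shutil

import pytest

# Error enums used by the assertions. BuildStream 2.x exposes these publicly;
# older releases keep them in buildstream._exceptions instead.
from buildstream.exceptions import ErrorDomain, LoadErrorReason
from buildstream import _yaml

# Importing `cli` registers the CLI runner fixture used by every example below.
# The import path is an assumption and varies between releases.
from buildstream._testing import cli  # noqa: F401

# Each test module points @pytest.mark.datafiles at its own data directory.
# (@pytest.mark.datafiles comes from the pytest-datafiles plugin.)
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")


@pytest.mark.datafiles(DATA_DIR)
def test_example(cli, datafiles):
    ...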
Code Example #2
def test_source_fetch(tmpdir, cli, datafiles):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename,
                               "project")
    cachedir = os.path.join(str(tmpdir), "cache")

    cli.configure({"cachedir": cachedir})

    res = cli.run(project=project_dir,
                  args=["source", "fetch", "import-dev.bst"])
    res.assert_success()

    with dummy_context() as context:
        context.cachedir = cachedir
        # load project and sourcecache
        project = Project(project_dir, context)
        project.ensure_fully_loaded()
        cas = context.get_cascache()
        sourcecache = context.sourcecache

        element = project.load_elements(["import-dev.bst"])[0]
        element._query_source_cache()
        source = list(element.sources())[0]
        assert element._cached_sources()

        # check that the directory structures are identical
        digest = sourcecache.export(source)._get_digest()
        extractdir = os.path.join(str(tmpdir), "extract")
        cas.checkout(extractdir, digest)
        dir1 = extractdir
        dir2 = os.path.join(project_dir, "files", "dev-files")

        assert list(relative_walk(dir1)) == list(relative_walk(dir2))
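Examples #2 and #3 compare directory trees with a relative_walk helper that the page does not include. A plausible implementation, assuming it simply yields every file path relative to the given root:

def relative_walk(rootdir):
    # Yield the path of every file below rootdir, relative to rootdir,
    # so two directory trees can be compared regardless of their location.
    for dirname, _, filenames in os.walk(rootdir):
        for name in filenames:
            yield os.path.relpath(os.path.join(dirname, name), rootdir)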
Code Example #3
def test_source_staged(tmpdir, cli, datafiles):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename,
                               "project")
    cachedir = os.path.join(str(tmpdir), "cache")

    cli.configure({"cachedir": cachedir})

    res = cli.run(project=project_dir, args=["build", "import-bin.bst"])
    res.assert_success()

    with dummy_context() as context:
        context.cachedir = cachedir
        # load project and sourcecache
        project = Project(project_dir, context)
        project.ensure_fully_loaded()
        sourcecache = context.sourcecache
        cas = context.get_cascache()

        # Now check that the source is in the refs file. This is pretty messy,
        # but it seems to be the only way to get at the sources.
        element = project.load_elements(["import-bin.bst"])[0]
        element._query_source_cache()
        source = list(element.sources())[0]
        assert element._cached_sources()
        assert sourcecache.contains(source)

        # Extract the file and check it's the same as the one we imported
        digest = sourcecache.export(source)._get_digest()
        extractdir = os.path.join(str(tmpdir), "extract")
        cas.checkout(extractdir, digest)
        dir1 = extractdir
        dir2 = os.path.join(project_dir, "files", "bin-files")

        assert list(relative_walk(dir1)) == list(relative_walk(dir2))
Code Example #4
def test_staged_source_build(tmpdir, datafiles, cli):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename,
                               "project")
    cachedir = os.path.join(str(tmpdir), "cache")
    element_path = "elements"
    source_protos = os.path.join(str(tmpdir), "cache", "source_protos")
    elementsources = os.path.join(str(tmpdir), "cache", "elementsources")
    source_dir = os.path.join(str(tmpdir), "cache", "sources")

    cli.configure({"cachedir": cachedir})

    create_element_size("target.bst", project_dir, element_path, [], 10000)

    with dummy_context() as context:
        context.cachedir = cachedir
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        element = project.load_elements(["import-dev.bst"])[0]

        # check consistency of the source
        element._query_source_cache()
        assert not element._cached_sources()

    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()

    # delete the artifact and check that the state is buildable
    cli.remove_artifact_from_cache(project_dir, "target.bst")
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "buildable"

    # delete source dir and check that state is still buildable
    shutil.rmtree(source_dir)
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "buildable"

    # build and check that no fetching was done.
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching" not in res.stderr

    # assert the source directory is still empty (though there may be
    # directories from staging etc.)
    files = []
    for _, _, filenames in os.walk(source_dir):
        files.extend(filenames)
    assert not files, files

    # Now remove the source refs and check the state
    shutil.rmtree(source_protos)
    shutil.rmtree(elementsources)
    cli.remove_artifact_from_cache(project_dir, "target.bst")
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "fetch needed"

    # Check that it now fetches when building the target
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching" in res.stderr
Code Example #5
def test_notparallel_twice(cli, datafiles):
    project = str(datafiles)

    #
    # Explicitly configure default max-jobs using user configuration
    #
    cli.configure({"build": {"max-jobs": 2}})

    # Fetch the variables and environment of both elements, where parallel.bst depends on notparallel.bst
    result = cli.run(
        project=project,
        args=["show", "--format", "%{vars}%{env}", "parallel.bst"])
    result.assert_success()

    # Split on the empty line, which separates elements in bst show output
    groups = result.output.split("\n\n")
    assert len(groups) >= 2
    notparallel_vars = _yaml.load_data(groups[0])
    parallel_vars = _yaml.load_data(groups[1])

    # Test the first group for the expected notparallel state
    assert notparallel_vars.get_str("element-name") == "notparallel.bst"
    assert notparallel_vars.get_str("max-jobs") == "1"
    assert notparallel_vars.get_str("MAKEFLAGS") == "-j1"

    # Test the second group for the expected parallel (non-notparallel) state
    assert parallel_vars.get_str("element-name") == "parallel.bst"
    assert parallel_vars.get_str("max-jobs") == "2"
    assert parallel_vars.get_str("MAKEFLAGS") == "-j2"
Code Example #6
def test_conditional_config(cli, datafiles, target, value, expected):
    project = os.path.join(datafiles.dirname, datafiles.basename, "option-element-mask")
    cli.configure({"projects": {"test": {"options": {"debug_elements": value}}}})
    result = cli.run(project=project, silent=True, args=["show", "--deps", "none", "--format", "%{vars}", target])

    result.assert_success()
    loaded = _yaml.load_data(result.output)
    assert loaded.get_str("debug") == expected
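Example #6 takes target, value and expected on top of the standard fixtures, which means the scraper dropped its @pytest.mark.parametrize (and @pytest.mark.datafiles) decorators; the same goes for config_option in example #7. The shape is roughly as below; the concrete parameter rows are illustrative assumptions, not the project's actual values:

@pytest.mark.datafiles(DATA_DIR)
@pytest.mark.parametrize(
    "target,value,expected",
    [
        # (element to show, value of the debug_elements option,
        #  expected value of %{debug}) -- hypothetical rows
        ("element.bst", "element.bst", "True"),
        ("element.bst", "other.bst", "False"),
    ],
)
def test_conditional_config(cli, datafiles, target, value, expected):
    ...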
Code Example #7
def test_invalid_value_config(cli, datafiles, config_option):
    project = os.path.join(datafiles.dirname, datafiles.basename,
                           "option-flags")
    cli.configure({"projects": {"test": {"options": {"farm": config_option}}}})
    result = cli.run(
        project=project,
        silent=True,
        args=["show", "--deps", "none", "--format", "%{vars}", "element.bst"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA)
Code Example #8
File: config.py  Project: nanonyme/buildstream
def test_only_one(cli, datafiles, override_caches, project_caches, user_caches):
    project = os.path.join(datafiles.dirname, datafiles.basename, "only-one")

    # Produce a fake user and project config with the cache configuration.
    user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
    project_config["name"] = "test"
    project_config["min-version"] = "2.0"

    cli.configure(user_config)

    project_config_file = os.path.join(project, "project.conf")
    _yaml.roundtrip_dump(project_config, file=project_config_file)

    # Use `pull` here to ensure we try to initialize the remotes, triggering the error
    #
    # This does not happen for a simple `bst show`.
    result = cli.run(project=project, args=["artifact", "pull", "element.bst"])
    result.assert_main_error(ErrorDomain.STREAM, None)
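Example #8 depends on a configure_remote_caches helper and on parametrized override_caches / project_caches / user_caches arguments, none of which appear on this page. As an assumption about its shape, the helper turns those three lists of remote specs into user- and project-level configuration fragments, roughly like this:

def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
    # Sketch only; the key names are assumed to follow the cache configuration
    # format used in example #10 ("servers" lists of {"url": ..., "push": ...}).
    user_config = {}
    if user_caches:
        user_config["artifacts"] = {"servers": user_caches}
    if override_caches:
        user_config["projects"] = {"test": {"artifacts": {"servers": override_caches}}}

    project_config = {}
    if project_caches:
        project_config["artifacts"] = {"servers": project_caches}

    return user_config, project_config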
Code Example #9
def test_source_checkout(tmpdir, datafiles, cli):
    project_dir = os.path.join(str(tmpdir), "project")
    element_path = "elements"
    cache_dir = os.path.join(str(tmpdir), "cache")
    source_dir = os.path.join(cache_dir, "sources")

    cli.configure({
        "cachedir": cache_dir,
    })
    target_dir = os.path.join(str(tmpdir), "target")

    repo = create_element_size("target.bst", project_dir, element_path, [],
                               100000)

    # check implicit fetching
    res = cli.run(
        project=project_dir,
        args=["source", "checkout", "--directory", target_dir, "target.bst"])
    res.assert_success()
    assert "Fetching" in res.stderr

    # remove the upstream repo, the previous checkout and the local sources, and
    # check that source checkout still works with the sources only in the CAS
    shutil.rmtree(repo.repo)
    shutil.rmtree(target_dir)
    shutil.rmtree(source_dir)

    res = cli.run(
        project=project_dir,
        args=["source", "checkout", "--directory", target_dir, "target.bst"])
    res.assert_success()
    assert "Fetching" not in res.stderr

    # remove the CAS and check that source checkout now fails
    shutil.rmtree(target_dir)
    shutil.rmtree(os.path.join(cache_dir, "cas"))

    res = cli.run(
        project=project_dir,
        args=["source", "checkout", "--directory", target_dir, "target.bst"])
    res.assert_task_error(ErrorDomain.SOURCE, None)
Code Example #10
File: workspace.py  Project: nanonyme/buildstream
def test_workspace_open_no_source_push(tmpdir, datafiles, cli):
    project_dir = os.path.join(str(tmpdir), "project")
    element_path = "elements"
    cache_dir = os.path.join(str(tmpdir), "cache")
    share_dir = os.path.join(str(tmpdir), "share")
    workspace = os.path.join(cli.directory, "workspace")

    with create_artifact_share(share_dir) as share:
        cli.configure(
            {
                "cachedir": cache_dir,
                "scheduler": {"pushers": 1},
                "source-caches": {
                    "servers": [
                        {
                            "url": share.repo,
                            "push": True,
                        }
                    ]
                },
            }
        )

        # Fetch as in the previous test and check that it pushes the source
        create_element_size("target.bst", project_dir, element_path, [], 10000)
        res = cli.run(project=project_dir, args=["build", "target.bst"])
        res.assert_success()
        assert "Fetching" in res.stderr
        assert "Pushed source" in res.stderr

        # clear the CAS and open a workspace
        shutil.rmtree(os.path.join(cache_dir, "cas"))
        res = cli.run(project=project_dir, args=["workspace", "open", "target.bst", "--directory", workspace])
        res.assert_success()

        # Check that this time it does not push the sources
        res = cli.run(project=project_dir, args=["build", "target.bst"])
        res.assert_success()
        assert "Pushed source" not in res.stderr
Code Example #11
File: workspace.py  Project: nanonyme/buildstream
def test_workspace_source_fetch(tmpdir, datafiles, cli):
    project_dir = os.path.join(str(tmpdir), "project")
    element_path = "elements"
    source_dir = os.path.join(str(tmpdir), "cache", "sources")
    workspace = os.path.join(cli.directory, "workspace")

    cli.configure({"cachedir": os.path.join(str(tmpdir), "cache")})

    create_element_size("target.bst", project_dir, element_path, [], 10000)
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching" in res.stderr

    # remove the original sources
    shutil.rmtree(source_dir)

    # Open a workspace and check that it fetches the original sources
    res = cli.run(project=project_dir, args=["workspace", "open", "target.bst", "--directory", workspace])
    res.assert_success()
    assert "Fetching" in res.stderr

    assert os.listdir(workspace) != []
Code Example #12
def test_missing_certs(cli, datafiles, config_key, config_value):
    project = os.path.join(datafiles.dirname, datafiles.basename,
                           "missing-certs")

    cli.configure({
        "remote-execution": {
            "execution-service": {
                "url": "http://localhost:8088"
            },
            "storage-service": {
                "url": "http://charactron:11001",
                config_key: config_value,
            },
        },
    })

    # The incomplete storage-service certificate configuration is rejected when
    # the configuration is loaded, so a plain `bst show` is enough to trigger
    # the error.
    result = cli.run(project=project, args=["show", "element.bst"])
    result.assert_main_error(ErrorDomain.LOAD, LoadErrorReason.INVALID_DATA,
                             "Your config is missing")