Code example #1
File: pull.py  Project: tom--pollard/buildstream
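All of the excerpts below omit their module-level imports. A plausible common preamble is sketched here; the helper import paths (create_artifact_share, create_element_size, generate_junction, dummy_context, assert_shared and friends) are assumptions, since their location varies across BuildStream versions and test directories.

import os
import re
import shutil

import pytest

from buildstream import _yaml
from buildstream.exceptions import ErrorDomain  # buildstream._exceptions in older releases
from buildstream.testing import create_repo     # path varies by BuildStream version
from buildstream._project import Project        # internal API used by a few examples
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

# Assumed helper locations; upstream these live under tests/testutils/ and
# alongside the test modules themselves.
from tests.testutils import create_artifact_share, generate_junction, dummy_context
from tests.testutils.element_generators import create_element_size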
def test_dynamic_build_plan(cli, tmpdir, datafiles):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache directory
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try to rebuild target
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that target and runtime dependency were pulled
        # but build dependency was not pulled as it wasn't needed
        # (dynamic build plan).
        assert target in result.get_pulled_elements()
        assert runtime_dep in result.get_pulled_elements()
        assert build_dep not in result.get_pulled_elements()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        assert states[target] == "cached"
        assert states[runtime_dep] == "cached"
        assert states[build_dep] != "cached"
Code example #2
def test_push_already_cached(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["build", "target.bst"])

        result.assert_success()
        assert "SKIPPED Push" not in result.stderr

        result = cli.run(project=project, args=["artifact", "push", "target.bst"])

        result.assert_success()
        assert not result.get_pushed_elements(), "No elements should have been pushed since the cache was populated"
        assert "INFO    Remote ({}) already has ".format(share.repo) in result.stderr
        assert "SKIPPED Push" in result.stderr
Code example #3
def test_push_cross_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")

    generate_junction(tmpdir, subproject_path, junction_path, store_ref=True)

    result = cli.run(project=project, args=["build", "junction.bst:import-etc.bst"])
    result.assert_success()

    assert cli.get_element_state(project, "junction.bst:import-etc.bst") == "cached"

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True}]}})
        result = cli.run(project=project, args=["artifact", "push", "junction.bst:import-etc.bst"])
        result.assert_success()

        cache_key = cli.get_element_key(project, "junction.bst:import-etc.bst")
        assert share.get_artifact(cli.get_artifact_name(project, "subtest", "import-etc.bst", cache_key=cache_key))
Code example #4
def test_source_artifact_caches(cli, tmpdir, datafiles):
    cachedir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_path = os.path.join(project_dir, "elements")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cachedir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        create_element_size("repo.bst", project_dir, element_path, [], 10000)

        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Pushed source " in res.stderr
        assert "Pushed artifact " in res.stderr

        # delete local sources and artifacts and check it pulls them
        shutil.rmtree(os.path.join(cachedir, "cas"))
        shutil.rmtree(os.path.join(cachedir, "sources"))

        # this should just fetch the artifacts
        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Pulled artifact " in res.stderr
        assert "Pulled source " not in res.stderr
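The create_element_size helper used above generates an element whose imported files total a requested number of bytes, so that pushes and pulls are observable in the logs. A minimal sketch of such a helper, assuming a local import element (the real test utility may generate its data differently):

def create_element_size(name, project_dir, element_path, dependencies, size):
    # Write `size` bytes of random data for the element to import
    files_dir = os.path.join(project_dir, "files", name)
    os.makedirs(files_dir, exist_ok=True)
    with open(os.path.join(files_dir, "data"), "wb") as f:
        f.write(os.urandom(size))

    # Dump an import element staging that directory, with optional dependencies
    element = {
        "kind": "import",
        "sources": [{"kind": "local", "path": os.path.join("files", name)}],
        "depends": dependencies,
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, name))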
Code example #5
File: push.py  Project: cphang99/buildstream
def test_push_update_after_rebuild(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    generate_project(
        project,
        config={
            "element-path": "elements",
            "min-version": "2.0",
            "plugins": [{
                "origin": "local",
                "path": "plugins",
                "elements": ["randomelement"]
            }],
        },
    )

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        # Build the element and push the artifact
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
        assert cli.get_element_state(project, "random.bst") == "cached"

        # Now delete the artifact and ensure it is not in the cache
        result = cli.run(project=project,
                         args=["artifact", "delete", "random.bst"])
        assert cli.get_element_state(project, "random.bst") != "cached"

        # Now rebuild the element. Reset config to disable pulling.
        cli.config = None
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "random.bst") == "cached"

        # Push the new build
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project,
                         args=["artifact", "push", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
Code example #6
File: push.py  Project: cphang99/buildstream
def test_push_deps(cli, tmpdir, datafiles, deps, expected_states):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Configure artifact share
        cli.configure({
            #
            # FIXME: This test hangs "sometimes" if we allow
            #        concurrent push.
            #
            #        It's not too bad to ignore since we're
            #        using the local artifact cache functionality
            #        only, but it should probably be fixed.
            #
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
        })

        # Now push the target with the selected --deps option
        result = cli.run(project=project,
                         args=["artifact", "push", target, "--deps", deps])
        result.assert_success()

        # And finally assert that the selected artifacts are in the share
        states = []
        for element in (target, build_dep, runtime_dep):
            is_cached = share.get_artifact(
                cli.get_artifact_name(project, "test", element)) is not None
            states.append(is_cached)
        assert states == expected_states
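The deps and expected_states arguments are supplied by pytest parametrization, which this excerpt omits. A plausible decorator, with illustrative values ordered as (target, build_dep, runtime_dep):

@pytest.mark.parametrize(
    "deps, expected_states",
    [
        ("build", (False, True, False)),  # only build dependencies pushed
        ("none", (True, False, False)),   # only the target itself pushed
        ("run", (True, False, True)),     # target plus runtime dependencies
        ("all", (True, True, True)),      # everything pushed
    ],
)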
Code example #7
File: push.py  Project: tom--pollard/buildstream
def test_push_message(tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    artifactshare = os.path.join(str(tmpdir), "artifactshare")
    with create_artifact_share(artifactshare) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "artifacts": {"url": share.repo, "push": True,},
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)

        with dummy_context(config=user_config_file) as context:
            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Manually setup the artifact remote
            artifactcache.setup_remotes(use_config=True)
            artifactcache.initialize_remotes()
            assert artifactcache.has_push_remotes()

            command = remote_execution_pb2.Command(
                arguments=["/usr/bin/gcc", "--help"],
                working_directory="/buildstream-build",
                output_directories=["/buildstream-install"],
            )

            # Push the message object
            command_digest = artifactcache.push_message(project, command)
            message_hash, message_size = command_digest.hash, command_digest.size_bytes

        assert message_hash and message_size
        message_digest = remote_execution_pb2.Digest(hash=message_hash, size_bytes=message_size)
        assert share.has_object(message_digest)
Code example #8
def test_push_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure(
            {
                #
                # FIXME: This test hangs "sometimes" if we allow
                #        concurrent push.
                #
                #        It's not too bad to ignore since we're
                #        using the local artifact cache functionality
                #        only, but it should probably be fixed.
                #
                "scheduler": {"pushers": 1},
                "artifacts": {"url": share.repo, "push": True,},
            }
        )

        # Now try to push the artifact by its ref
        result = cli.run(project=project, args=["artifact", "push", artifact_ref])
        result.assert_success()

        # And finally assert that all the artifacts are in the share
        #
        # Note that assert shared tests that an element is shared by obtaining
        # the artifact ref and asserting that the path exists in the share
        assert_shared(cli, share, project, element)
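As the closing comment notes, assert_shared resolves the element to its artifact name and asserts that the share holds it. A minimal sketch of that helper and its assert_not_shared counterpart, assuming the share's get_artifact() API used elsewhere in these examples:

def assert_shared(cli, share, project, element_name, *, project_name="test"):
    # Resolve the element to an artifact name and look it up in the share
    if not share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
        raise AssertionError(
            "Artifact share at {} does not contain element {}".format(share.repo, element_name)
        )


def assert_not_shared(cli, share, project, element_name, *, project_name="test"):
    if share.get_artifact(cli.get_artifact_name(project, project_name, element_name)):
        raise AssertionError(
            "Artifact share at {} unexpectedly contains element {}".format(share.repo, element_name)
        )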
Code example #9
File: pull.py  Project: abderrahim/buildstream
def test_push_pull_cross_junction(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        subproject_path = os.path.join(project, "files", "sub-project")
        junction_path = os.path.join(project, "elements", "junction.bst")

        generate_junction(tmpdir,
                          subproject_path,
                          junction_path,
                          store_ref=True)

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project,
                         args=["build", "junction.bst:import-etc.bst"])
        result.assert_success()
        assert cli.get_element_state(project,
                                     "junction.bst:import-etc.bst") == "cached"

        cache_dir = os.path.join(project, "cache", "cas")
        shutil.rmtree(cache_dir)
        artifact_dir = os.path.join(project, "cache", "artifacts")
        shutil.rmtree(artifact_dir)

        assert cli.get_element_state(
            project, "junction.bst:import-etc.bst") == "buildable"

        # Now try bst artifact pull
        result = cli.run(
            project=project,
            args=["artifact", "pull", "junction.bst:import-etc.bst"])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project,
                                     "junction.bst:import-etc.bst") == "cached"
Code example #10
def test_push_after_rebuild(cli, tmpdir, datafiles):
    project = os.path.join(datafiles.dirname, datafiles.basename)

    generate_project(
        project,
        config={
            "element-path": "elements",
            "min-version": "2.0",
            "plugins": [{
                "origin": "local",
                "path": "plugins",
                "elements": ["randomelement"]
            }],
        },
    )

    # First build the element
    result = cli.run(project=project, args=["build", "random.bst"])
    result.assert_success()
    assert cli.get_element_state(project, "random.bst") == "cached"

    # Delete the artifact blobs but keep the artifact proto,
    # i.e., now we have an incomplete artifact
    casdir = os.path.join(cli.directory, "cas")
    shutil.rmtree(casdir)
    assert cli.get_element_state(project, "random.bst") != "cached"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        # Now rebuild the element and push it
        result = cli.run(project=project, args=["build", "random.bst"])
        result.assert_success()
        assert result.get_pushed_elements() == ["random.bst"]
        assert cli.get_element_state(project, "random.bst") == "cached"
Code example #11
def test_partial_checkout_fail(tmpdir, datafiles, cli):
    project = str(datafiles)
    build_elt = "import-bin.bst"
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        res = cli.run(project=project,
                      args=[
                          "artifact", "checkout", "--pull", build_elt,
                          "--directory", checkout_dir
                      ])
        res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
        assert re.findall(
            r"Remote \((\S+)\) does not have artifact (\S+) cached",
            res.stderr)
Code example #12
File: default_target.py  Project: wjt/buildstream
def test_default_target_push_pull(cli, tmpdir, datafiles):
    project = str(datafiles)
    project_path = os.path.join(project, "project.conf")
    target = "dummy_1.bst"

    # Set a default target
    project_conf = {
        "name": "test-default-target",
        "min-version": "2.0",
        "element-path": "elements",
        "defaults": {
            "targets": [target]
        },
    }
    _yaml.roundtrip_dump(project_conf, project_path)

    # Build the target
    result = cli.run(project=project, args=["build"])
    result.assert_success()
    assert cli.get_element_state(project, target) == "cached"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Push the artifacts
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["artifact", "push"])
        result.assert_success()

        # Delete local artifacts
        # Note that `artifact delete` does not support default targets
        result = cli.run(project=project, args=["artifact", "delete", target])
        result.assert_success()

        # Target should be buildable now, and we should be able to pull it
        assert cli.get_element_state(project, target) == "buildable"
        result = cli.run(project=project, args=["artifact", "pull"])
        result.assert_success()
        assert cli.get_element_state(project, target) == "cached"
Code example #13
def test_large_directory(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Number of files chosen to ensure the complete list of digests exceeds
    # our 1 MB gRPC message limit. I.e., test message splitting.
    MAX_MESSAGE_LENGTH = 1024 * 1024
    NUM_FILES = MAX_MESSAGE_LENGTH // 64 + 1

    large_directory_dir = os.path.join(project, "files", "large-directory")
    os.mkdir(large_directory_dir)
    for i in range(NUM_FILES):
        with open(os.path.join(large_directory_dir, str(i)),
                  "w",
                  encoding="utf-8") as f:
            # The files need to have different content as we want different digests.
            f.write(str(i))

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure bst to push to the artifact share
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # Enforce 1 MB gRPC message limit
        with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
            # Build and push
            result = cli.run(project=project,
                             args=["build", "import-large-directory.bst"])
            result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project,
                                     "import-large-directory.bst") == "cached"

        # Assert that the push was successful
        assert_shared(cli, share, project, "import-large-directory.bst")
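limit_grpc_message_length caps the gRPC message size so that the digest list must be split across messages. One way such a context manager could be implemented, assuming clients open channels through grpc.insecure_channel (the upstream helper may hook a different layer):

from contextlib import contextmanager
from unittest import mock

import grpc


@contextmanager
def limit_grpc_message_length(limit):
    orig_insecure_channel = grpc.insecure_channel

    def limited(target, options=None, **kwargs):
        # Append a send-size cap to whatever channel options the caller provided
        options = list(options or []) + [("grpc.max_send_message_length", limit)]
        return orig_insecure_channel(target, options=options, **kwargs)

    with mock.patch("grpc.insecure_channel", new=limited):
        yield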
Code example #14
def test_source_cache_empty_artifact_cache(cli, tmpdir, datafiles):
    cachedir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_path = os.path.join(project_dir, "elements")

    with create_artifact_share(os.path.join(str(tmpdir), "share")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "url": share.repo,
                "push": True,
            },
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": cachedir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        create_element_size("repo.bst", project_dir, element_path, [], 10000)

        res = cli.run(project=project_dir, args=["source", "push", "repo.bst"])
        res.assert_success()
        assert "Pushed source " in res.stderr

        # delete local sources and check it pulls sources, builds
        # and then pushes the artifacts
        shutil.rmtree(os.path.join(cachedir, "cas"))
        shutil.rmtree(os.path.join(cachedir, "sources"))

        res = cli.run(project=project_dir, args=["build", "repo.bst"])
        res.assert_success()
        assert "Remote ({}) does not have artifact ".format(
            share.repo) in res.stderr
        assert "Pulled source" in res.stderr
        assert "Caching artifact" in res.stderr
        assert "Pushed artifact" in res.stderr
Code example #15
File: pull.py  Project: tom--pollard/buildstream
def test_pull_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure({"artifacts": {"url": share.repo, "push": True}})

        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test",
                                    os.path.splitext(element)[0], cache_key)
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Assert that the target is shared (note that assert shared will use the artifact name)
        assert_shared(cli, share, project, element)

        # Now we've pushed, remove the local cache
        shutil.rmtree(os.path.join(local_cache, "artifacts"))

        # Assert that nothing is cached locally anymore
        assert not os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", artifact_ref])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))
Code example #16
File: buildcheckout.py  Project: nanonyme/buildstream
def test_non_strict_checkout_uncached(datafiles, cli, tmpdir):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    element_name = "target.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})

        # Attempt to check out an uncached artifact with a remote artifact
        # server configured, but without requesting a pull (no --pull flag).
        result = cli.run(project=project,
                         args=[
                             "--no-strict", "artifact", "checkout",
                             element_name, "--directory", checkout
                         ])
        result.assert_main_error(ErrorDomain.STREAM,
                                 "uncached-checkout-attempt")
Code example #17
File: push.py  Project: wjt/buildstream
def test_push_pull(cli, datafiles, tmpdir):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        # create repo to pull from
        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()

        # remove the local cache dir and the repo files, then check it all works
        shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)
        shutil.rmtree(repo.repo)

        # check that it pulls from the share
        res = cli.run(project=project_dir, args=["build", "push.bst"])
        res.assert_success()
Code example #18
def test_push_after_pull(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
        os.path.join(str(tmpdir), "artifactshare2")
    ) as share2:

        # Set the scene: share1 has the artifact, share2 does not.
        #
        cli.configure(
            {"artifacts": {"url": share1.repo, "push": True},}
        )

        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        cli.remove_artifact_from_cache(project, "target.bst")

        assert_shared(cli, share1, project, "target.bst")
        assert_not_shared(cli, share2, project, "target.bst")
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now run the build again. Correct `bst build` behaviour is to download the
        # artifact from share1 but not push it back again.
        #
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert result.get_pulled_elements() == ["target.bst"]
        assert result.get_pushed_elements() == []

        # Delete the artifact locally again.
        cli.remove_artifact_from_cache(project, "target.bst")

        # Now we add share2 into the mix as a second push remote. This time,
        # `bst build` should push to share2 after pulling from share1.
        cli.configure({"artifacts": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]})
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert result.get_pulled_elements() == ["target.bst"]
        assert result.get_pushed_elements() == ["target.bst"]
Code example #19
File: pull.py  Project: tom--pollard/buildstream
def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure({"artifacts": {"url": share.repo, "push": True}})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", "--deps", deps, target])
        result.assert_success()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        states_flattened = (states[target], states[build_dep],
                            states[runtime_dep])
        assert states_flattened == expected_states
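As with test_push_deps in example #6, this test is parametrized; an illustrative decorator with states ordered as (target, build_dep, runtime_dep):

@pytest.mark.parametrize(
    "deps, expected_states",
    [
        ("build", ("buildable", "cached", "buildable")),  # only build deps pulled
        ("none", ("cached", "buildable", "buildable")),   # only the target pulled
        ("run", ("cached", "buildable", "cached")),       # target plus runtime deps
        ("all", ("cached", "cached", "cached")),          # everything pulled
    ],
)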
Code example #20
def test_push_fails(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up the share
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure bst to be able to push to the share
        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})

        # First ensure that the target is *NOT* cached
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now try and push the target
        result = cli.run(project=project, args=["artifact", "push", "target.bst"])
        result.assert_main_error(ErrorDomain.STREAM, None)

        assert "Push failed: target.bst is not cached" in result.stderr

        # Now ensure that deps are also not cached
        assert cli.get_element_state(project, "import-bin.bst") != "cached"
        assert cli.get_element_state(project, "import-dev.bst") != "cached"
        assert cli.get_element_state(project, "compose-all.bst") != "cached"
Code example #21
def test_artifact_too_large(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Mock a file system with 5 MB total space
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(5e6)) as share:

        # Configure bst to push to the remote cache
        cli.configure(
            {"artifacts": {"url": share.repo, "push": True},}
        )

        # Create and push a 3MB element
        create_element_size("small_element.bst", project, element_path, [], int(3e6))
        result = cli.run(project=project, args=["build", "small_element.bst"])
        result.assert_success()

        # Create and try to push a 6MB element.
        create_element_size("large_element.bst", project, element_path, [], int(6e6))
        result = cli.run(project=project, args=["build", "large_element.bst"])
        # This should fail; the server will refuse to store the CAS
        # blobs for the artifact, and then fail to find the files for
        # the uploaded artifact proto.
        #
        # FIXME: This should be extremely uncommon in practice, since
        # the artifact needs to be at least half the cache size for
        # this to happen. Nonetheless, a nicer error message would be
        # nice (perhaps we should just disallow uploading artifacts
        # that large).
        result.assert_main_error(ErrorDomain.STREAM, None)

        # Ensure that the small artifact is still in the share
        states = cli.get_element_states(project, ["small_element.bst", "large_element.bst"])
        assert states["small_element.bst"] == "cached"
        assert_shared(cli, share, project, "small_element.bst")

        # Ensure that the artifact is cached locally but NOT remotely
        assert states["large_element.bst"] == "cached"
        assert_not_shared(cli, share, project, "large_element.bst")
Code example #22
def test_push_artifacts_all_deps_fails(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "checkout-deps.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test", os.path.splitext(element)[0], cache_key)
        assert os.path.exists(os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure(
            {
                #
                # FIXME: This test hangs "sometimes" if we allow
                #        concurrent push.
                #
                #        It's not too bad to ignore since we're
                #        using the local artifact cache functionality
                #        only, but it should probably be fixed.
                #
                "scheduler": {"pushers": 1},
                "artifacts": {"url": share.repo, "push": True,},
            }
        )

        # Now try bst artifact push with --deps all on the artifact ref
        result = cli.run(project=project, args=["artifact", "push", "--deps", "all", artifact_ref])
        result.assert_main_error(ErrorDomain.STREAM, None)

        assert "Error: '--deps all' is not supported for artifact refs" in result.stderr
Code example #23
File: artifact_delete.py  Project: wjt/buildstream
def test_artifact_delete_pulled_artifact_without_buildtree(
        cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete and then pull the artifact (without its buildtree)
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"
        result = cli.run(project=project, args=["artifact", "pull", element])
        result.assert_success()
        assert cli.get_element_state(project, element) == "cached"

        # Now delete it again (it should have been pulled without the buildtree,
        # but a digest of the buildtree is still referenced in the artifact's metadata)
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"
Code example #24
File: push.py  Project: wjt/buildstream
def test_push_fail(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # set up config with remote that we'll take down
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        remote = share.repo
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

    # create repo to pull from
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(project_dir, "files"))
    element_path = os.path.join(project_dir, "elements")
    element_name = "push.bst"
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # build and check that it fails to set up the remote
    res = cli.run(project=project_dir, args=["build", "push.bst"])
    res.assert_success()

    assert "Failed to initialize remote {}".format(remote) in res.stderr
    assert "Pushing" not in res.stderr
    assert "Pushed" not in res.stderr
Code example #25
def test_push_artifact_glob(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test",
                                    os.path.splitext(element)[0], cache_key)
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        # Run bst artifact push with a wildcard; there is only one artifact
        # matching "test/target/*", even though it can be accessed both by its
        # strong and weak key.
        #
        result = cli.run(project=project,
                         args=["artifact", "push", "test/target/*"])
        result.assert_success()
        assert len(result.get_pushed_elements()) == 1
Code example #26
def test_push_failed_missing_shell(cli, tmpdir, datafiles, on_error):
    """Test that we can upload a built artifact that didn't have a valid shell inside.

    When we don't have a valid shell, the artifact will be empty, not even the root directory.
    This ensures we handle the case of an entirely empty artifact correctly.
    """
    if on_error == "quit":
        pytest.xfail("https://gitlab.com/BuildStream/buildstream/issues/534")

    project = str(datafiles)
    element_path = os.path.join(project, "elements", "element.bst")

    # Write out our test target
    element = {
        "kind": "script",
        "config": {
            "commands": [
                "false",
                # Ensure unique cache key for different test variants
                'TEST="{}"'.format(os.environ.get("PYTEST_CURRENT_TEST")),
            ],
        },
    }
    _yaml.roundtrip_dump(element, element_path)

    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as share:
        cli.configure(
            {"artifacts": {"url": share.repo, "push": True},}
        )

        # Build the element, continuing to finish active jobs on error.
        result = cli.run(project=project, args=["--on-error={}".format(on_error), "build", "element.bst"])
        result.assert_main_error(ErrorDomain.STREAM, None)

        # This element should have failed
        assert cli.get_element_state(project, "element.bst") == "failed"
        # This element should have been pushed to the remote
        assert share.get_artifact(cli.get_artifact_name(project, "test", "element.bst"))
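The on_error argument also comes from parametrization; given the xfail for "quit" at the top of the test, a plausible decorator is:

@pytest.mark.parametrize("on_error", ("continue", "quit"))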
Code example #27
def test_fetch_fallback(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, repo, ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # use artifact cache for sources for now, they should work the same
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share,
                                       tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            assert not share.get_source_proto(source._get_source_name())
            assert not os.path.exists(os.path.join(cache_dir, "sources"))

            # Now check if it falls back to the source fetch method.
            res = cli.run(project=project_dir,
                          args=["source", "fetch", element_name])
            res.assert_success()
            brief_key = source._get_brief_display_key()
            assert ("Remote source service ({}) does not have source {} cached"
                    .format(share.repo, brief_key)) in res.stderr
            assert ("SUCCESS Fetching {}".format(
                repo.source_config(ref=ref)["url"])) in res.stderr

            # Check that the source is now both in the source dir and the local CAS
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._query_source_cache()
            assert element._cached_sources()
Code example #28
def test_push(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project_dir, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project_dir, "target.bst") == "cached"

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        element_key = _push(cli, rootcache_dir, project_dir, user_config_file,
                            "target.bst")
        assert share.get_artifact(
            cli.get_artifact_name(project_dir,
                                  "test",
                                  "target.bst",
                                  cache_key=element_key))
Code example #29
def test_non_strict_pull_build_strict_checkout(datafiles, cli, tmpdir):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    # Target with at least one (indirect) build-only dependency
    element_name = "target.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure({"artifacts": {"servers": [{"url": share.repo}]}})

        # First build it in non-strict mode with an artifact server configured.
        # With this configuration BuildStream will attempt to pull the build-only
        # dependencies after attempting to pull the target element. This means
        # that the cache key calculation of the target element has to be deferred
        # until the pull attempt of the build-only dependencies, exercising a
        # different code path.
        # As this is a clean build from scratch, the result and also the cache keys
        # should be identical to a build in strict mode.
        result = cli.run(project=project,
                         args=["--no-strict", "build", element_name])
        result.assert_success()

        # Now check it out in strict mode.
        # This verifies that the clean build in non-strict mode produced an artifact
        # matching the strict cache key.
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", element_name,
                             "--directory", checkout
                         ])
        result.assert_success()

        # Check that the executable hello file is found in the checkout
        filename = os.path.join(checkout, "usr", "bin", "hello")
        assert os.path.exists(filename)
Code example #30
def test_build_checkout(cli, tmpdir, datafiles):
    cachedir = os.path.join(str(tmpdir), "cache")
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "remote-cache")) as remote_cache:
        # Enable remote cache
        cli.configure(
            {"cache": {
                "storage-service": {
                    "url": remote_cache.repo
                }
            }})

        # First build it
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        # Discard the local CAS cache
        shutil.rmtree(str(os.path.join(cachedir, "cas")))

        # Now check it out, this should automatically fetch the necessary blobs
        # from the remote cache
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", "target.bst",
                             "--directory", checkout
                         ])
        result.assert_success()

        # Check that the executable hello file is found in the checkout
        filename = os.path.join(checkout, "usr", "bin", "hello")
        assert os.path.exists(filename)

        filename = os.path.join(checkout, "usr", "include", "pony.h")
        assert os.path.exists(filename)