Example No. 1
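The snippets below are reproduced without their module-level imports. A minimal sketch of what they appear to assume is given here; the BuildStream import paths follow the 2.x tree and the test helpers (dummy_context, create_artifact_share, create_element_size, relative_walk and friends) live in the suite's own test utilities, so treat the exact locations as assumptions:

import os
import shutil

import pytest

from buildstream import _yaml
from buildstream._project import Project
from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

# The helpers used throughout these examples come from the test suite's own
# utility modules; the import path below is an assumption, not a stable API.
from tests.testutils import create_artifact_share, create_element_size, dummy_context
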
def test_staged_source_build(tmpdir, datafiles, cli):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename,
                               "project")
    cachedir = os.path.join(str(tmpdir), "cache")
    element_path = "elements"
    source_protos = os.path.join(str(tmpdir), "cache", "source_protos")
    elementsources = os.path.join(str(tmpdir), "cache", "elementsources")
    source_dir = os.path.join(str(tmpdir), "cache", "sources")

    cli.configure({"cachedir": cachedir})

    create_element_size("target.bst", project_dir, element_path, [], 10000)

    with dummy_context() as context:
        context.cachedir = cachedir
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        element = project.load_elements(["import-dev.bst"])[0]

        # check consistency of the source
        element._query_source_cache()
        assert not element._cached_sources()

    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()

    # delete the artifact and check that the state is buildable
    cli.remove_artifact_from_cache(project_dir, "target.bst")
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "buildable"

    # delete source dir and check that state is still buildable
    shutil.rmtree(source_dir)
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "buildable"

    # build and check that no fetching was done.
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching from" not in res.stderr

    # assert the source directory is still empty (though there may be
    # directories from staging etc.)
    files = []
    for _, _, filenames in os.walk(source_dir):
        files.extend(filenames)
    assert files == []

    # Now remove the source refs and check the state
    shutil.rmtree(source_protos)
    shutil.rmtree(elementsources)
    cli.remove_artifact_from_cache(project_dir, "target.bst")
    states = cli.get_element_states(project_dir, ["target.bst"])
    assert states["target.bst"] == "fetch needed"

    # Check that it now fetches the source when building the target
    res = cli.run(project=project_dir, args=["build", "target.bst"])
    res.assert_success()
    assert "Fetching from" in res.stderr
Example No. 2
def test_source_staged(tmpdir, cli, datafiles):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename,
                               "project")
    cachedir = os.path.join(str(tmpdir), "cache")

    cli.configure({"cachedir": cachedir})

    res = cli.run(project=project_dir, args=["build", "import-bin.bst"])
    res.assert_success()

    with dummy_context() as context:
        context.cachedir = cachedir
        # load project and sourcecache
        project = Project(project_dir, context)
        project.ensure_fully_loaded()
        sourcecache = context.sourcecache
        cas = context.get_cascache()

        # now check that the source is in the refs file; this is pretty messy
        # but seems to be the only way to get at the sources
        element = project.load_elements(["import-bin.bst"])[0]
        element._query_source_cache()
        source = list(element.sources())[0]
        assert element._cached_sources()
        assert sourcecache.contains(source)

        # Extract the files and check they match the ones we imported
        digest = sourcecache.export(source)._get_digest()
        extractdir = os.path.join(str(tmpdir), "extract")
        cas.checkout(extractdir, digest)
        dir1 = extractdir
        dir2 = os.path.join(project_dir, "files", "bin-files")

        assert list(relative_walk(dir1)) == list(relative_walk(dir2))
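relative_walk is another helper that is not shown; a plausible stand-in, assuming it simply yields each file path relative to the walked root so that two trees can be compared structurally:

import os


def relative_walk(rootdir):
    # Yield every file path below rootdir, relative to rootdir, in a stable
    # order so two directory trees can be compared entry by entry.
    for dirpath, _, filenames in sorted(os.walk(rootdir)):
        for filename in sorted(filenames):
            yield os.path.relpath(os.path.join(dirpath, filename), rootdir)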
Example No. 3
def test_artifact_cache_with_missing_capabilities_is_skipped(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up a dummy share to act as the remote source cache.
    with create_dummy_artifact_share() as share:
        # Configure the source cache server
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {"pushers": 1},
            "source-caches": {"servers": [{"url": share.repo,}]},
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Initialize remotes
            context.initialize_remotes(True, True, None, None)

            # Get a handle on the local source cache
            sourcecache = context.sourcecache

            assert (
                not sourcecache.has_fetch_remotes()
            ), "System didn't realize the source cache didn't support BuildStream"
Example No. 4
def _push(cli, cache_dir, project_dir, config_file, target):
    with dummy_context(config=config_file) as context:
        # Load the project manually
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        # Assert that the element's artifact is cached
        element = project.load_elements(["target.bst"])[0]
        element_key = cli.get_element_key(project_dir, "target.bst")
        assert cli.artifact.is_cached(cache_dir, element, element_key)

        # Create a local artifact cache handle
        artifactcache = context.artifactcache

        # Ensure the element's artifact member is initialised
        # This is duplicated from Pipeline.resolve_elements()
        # as this test does not use the cli frontend.
        for e in element._dependencies(_Scope.ALL):
            e._initialize_state()

        # Initialize remotes
        context.initialize_remotes(True, True, None, None)

        assert artifactcache.has_push_remotes(plugin=element), "No remote configured for element target.bst"
        assert element._push(), "Push operation failed"

    return element_key
Example No. 5
def test_artifact_cache_precedence(tmpdir, override_caches, project_caches, user_caches):
    # Produce a fake user and project config with the cache configuration.
    user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
    project_config["name"] = "test"
    project_config["min-version"] = "2.0"

    project_dir = tmpdir.mkdir("project")
    project_config_file = str(project_dir.join("project.conf"))
    _yaml.roundtrip_dump(project_config, file=project_config_file)

    with runcli.configured(str(tmpdir), user_config) as user_config_file, dummy_context(
        config=user_config_file
    ) as context:
        project = Project(str(project_dir), context)
        project.ensure_fully_loaded()

        # Check the specs which the artifact cache thinks are configured
        context.initialize_remotes(True, True, None, None)
        artifactcache = context.artifactcache
        parsed_cache_specs = artifactcache._project_specs[project.name]

        # Verify that it was correctly read.
        expected_cache_specs = list(_deduplicate(override_caches or user_caches))
        expected_cache_specs = list(_deduplicate(expected_cache_specs + project_caches))
        assert parsed_cache_specs == expected_cache_specs
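configure_remote_caches, which builds the fake configuration, is likewise not part of this listing; a hypothetical reconstruction, assuming the arguments are RemoteSpec-like objects exposing a url attribute and that override caches land in a per-project override inside the user configuration:

def configure_remote_caches(override_caches, project_caches=None, user_caches=None):
    # Hypothetical sketch: turn the three cache lists into the user and
    # project configuration fragments that the precedence tests feed in.
    project_caches = project_caches or []
    user_caches = user_caches or []

    user_config = {}
    if user_caches:
        user_config["artifacts"] = {"servers": [{"url": cache.url} for cache in user_caches]}
    if override_caches:
        user_config["projects"] = {
            "test": {"artifacts": {"servers": [{"url": cache.url} for cache in override_caches]}},
        }

    project_config = {}
    if project_caches:
        project_config["artifacts"] = {"servers": [{"url": cache.url} for cache in project_caches]}

    return user_config, project_config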
Example No. 6
def test_artifact_cache_precedence(tmpdir, override_caches, project_caches,
                                   user_caches):
    # Produce a fake user and project config with the cache configuration.
    user_config, project_config = configure_remote_caches(
        override_caches, project_caches, user_caches)
    project_config["name"] = "test"
    project_config["min-version"] = "2.0"

    user_config_file = str(tmpdir.join("buildstream.conf"))
    _yaml.roundtrip_dump(user_config, file=user_config_file)

    project_dir = tmpdir.mkdir("project")
    project_config_file = str(project_dir.join("project.conf"))
    _yaml.roundtrip_dump(project_config, file=project_config_file)

    with dummy_context(config=user_config_file) as context:
        project = Project(str(project_dir), context)
        project.ensure_fully_loaded()

        # Use the helper from the artifactcache module to parse our configuration.
        parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(
            context, project)

        # Verify that it was correctly read.
        expected_cache_specs = list(
            _deduplicate(
                itertools.chain(override_caches, project_caches, user_caches)))
        assert parsed_cache_specs == expected_cache_specs
Example No. 7
def _push(cli, cache_dir, project_dir, config_file, target):
    with dummy_context(config=config_file) as context:
        # Load the project manually
        project = Project(project_dir, context)
        project.ensure_fully_loaded()

        # Assert that the element's artifact is cached
        element = project.load_elements(["target.bst"])[0]
        element_key = cli.get_element_key(project_dir, "target.bst")
        assert cli.artifact.is_cached(cache_dir, element, element_key)

        # Create a local artifact cache handle
        artifactcache = context.artifactcache

        # Initialize remotes
        context.initialize_remotes(True, True, None, None)

        # Query local cache
        element._load_artifact(pull=False)

        assert artifactcache.has_push_remotes(
            plugin=element), "No remote configured for element target.bst"
        assert element._push(), "Push operation failed"

    return element_key
Example No. 8
def test_source_fetch(tmpdir, cli, datafiles):
    project_dir = os.path.join(datafiles.dirname, datafiles.basename,
                               "project")
    cachedir = os.path.join(str(tmpdir), "cache")

    cli.configure({"cachedir": cachedir})

    res = cli.run(project=project_dir,
                  args=["source", "fetch", "import-dev.bst"])
    res.assert_success()

    with dummy_context() as context:
        context.cachedir = cachedir
        # load project and sourcecache
        project = Project(project_dir, context)
        project.ensure_fully_loaded()
        cas = context.get_cascache()
        sourcecache = context.sourcecache

        element = project.load_elements(["import-dev.bst"])[0]
        element._query_source_cache()
        source = list(element.sources())[0]
        assert element._cached_sources()

        # check that the directory structures are identical
        digest = sourcecache.export(source)._get_digest()
        extractdir = os.path.join(str(tmpdir), "extract")
        cas.checkout(extractdir, digest)
        dir1 = extractdir
        dir2 = os.path.join(project_dir, "files", "dev-files")

        assert list(relative_walk(dir1)) == list(relative_walk(dir2))
Example No. 9
def create_pipeline(tmpdir, basedir, target):
    with dummy_context() as context:
        context.deploydir = os.path.join(str(tmpdir), "deploy")
        context.casdir = os.path.join(str(tmpdir), "cas")
        project = Project(basedir, context)

        pipeline = Pipeline(context, project, None)
        (targets,) = pipeline.load([(target,)])
        yield targets
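Since create_pipeline yields rather than returns, the dummy context only stays alive while the generator is being consumed; a usage sketch with a hypothetical element name:

# Hypothetical usage: iterate the generator so the dummy context remains
# open while the loaded elements are inspected.
for targets in create_pipeline(tmpdir, str(datafiles), "simple.bst"):
    # `targets` is the tuple of elements loaded for the single target group.
    assert targets[0].name == "simple.bst"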
Example No. 10
def make_includes(basedir):
    _yaml.roundtrip_dump({
        "name": "test",
        "min-version": "2.0"
    }, os.path.join(basedir, "project.conf"))
    with dummy_context() as context:
        project = Project(basedir, context)
        loader = project.loader
        yield Includes(loader)
Example No. 11
def test_source_push(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements(["push.bst"])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check that the share does not already have the source
            assert not share.get_source_proto(source._get_source_name())

            # build the element, this should fetch and then push the source to the
            # remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that the source is now in the local source cache
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert share.has_object(digest)
Example No. 12
def context_with_source_cache(cli, cache, share, tmpdir):
    user_config_file = str(tmpdir.join("buildstream.conf"))
    user_config = {
        "scheduler": {"pushers": 1},
        "source-caches": {"url": share.repo,},
        "cachedir": cache,
    }
    _yaml.roundtrip_dump(user_config, file=user_config_file)
    cli.configure(user_config)

    with dummy_context(config=user_config_file) as context:
        yield context
Example No. 13
def test_push_message(tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    artifactshare = os.path.join(str(tmpdir), "artifactshare")
    with create_artifact_share(artifactshare) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)

        with dummy_context(config=user_config_file) as context:
            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Initialize remotes
            context.initialize_remotes(True, True, None, None)
            assert artifactcache.has_push_remotes()

            command = remote_execution_pb2.Command(
                arguments=["/usr/bin/gcc", "--help"],
                working_directory="/buildstream-build",
                output_directories=["/buildstream-install"],
            )

            # Push the message object
            command_digest = artifactcache.push_message(project, command)
            message_hash, message_size = command_digest.hash, command_digest.size_bytes

        assert message_hash and message_size
        message_digest = remote_execution_pb2.Digest(hash=message_hash,
                                                     size_bytes=message_size)
        assert share.has_object(message_digest)
Example No. 14
def test_paths_for_artifact_config_are_expanded(tmpdir, monkeypatch,
                                                artifacts_config,
                                                in_user_config):
    # Produce a fake user and project config with the cache configuration.
    # user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
    # project_config['name'] = 'test'

    monkeypatch.setenv("HOME", str(tmpdir.join("homedir")))

    project_config = {"name": "test", "min-version": "2.0"}
    user_config = {}
    if in_user_config:
        user_config["artifacts"] = {"servers": artifacts_config}
    else:
        project_config["artifacts"] = artifacts_config

    user_config_file = str(tmpdir.join("buildstream.conf"))
    _yaml.roundtrip_dump(user_config, file=user_config_file)

    project_dir = tmpdir.mkdir("project")
    project_config_file = str(project_dir.join("project.conf"))
    _yaml.roundtrip_dump(project_config, file=project_config_file)

    with dummy_context(config=user_config_file) as context:
        project = Project(str(project_dir), context)
        project.ensure_fully_loaded()

        # Check the specs which the artifact cache thinks are configured
        context.initialize_remotes(True, True, None, None)
        artifactcache = context.artifactcache
        parsed_cache_specs = artifactcache._project_specs[project.name]

    if isinstance(artifacts_config, dict):
        artifacts_config = [artifacts_config]

    # Build expected artifact config
    artifacts_config = [
        RemoteSpec(
            RemoteType.ALL,
            config["url"],
            push=False,
            server_cert=os.path.expanduser(config["auth"]["server-cert"]),
            client_cert=os.path.expanduser(config["auth"]["client-cert"]),
            client_key=os.path.expanduser(config["auth"]["client-key"]),
        ) for config in artifacts_config
    ]

    assert parsed_cache_specs == artifacts_config
Example No. 15
def test_paths_for_artifact_config_are_expanded(tmpdir, monkeypatch,
                                                artifacts_config,
                                                in_user_config):
    # Produce a fake user and project config with the cache configuration.
    # user_config, project_config = configure_remote_caches(override_caches, project_caches, user_caches)
    # project_config['name'] = 'test'

    monkeypatch.setenv("HOME", str(tmpdir.join("homedir")))

    project_config = {"name": "test", "min-version": "2.0"}
    user_config = {}
    if in_user_config:
        user_config["artifacts"] = artifacts_config
    else:
        project_config["artifacts"] = artifacts_config

    user_config_file = str(tmpdir.join("buildstream.conf"))
    _yaml.roundtrip_dump(user_config, file=user_config_file)

    project_dir = tmpdir.mkdir("project")
    project_config_file = str(project_dir.join("project.conf"))
    _yaml.roundtrip_dump(project_config, file=project_config_file)

    with dummy_context(config=user_config_file) as context:
        project = Project(str(project_dir), context)
        project.ensure_fully_loaded()

        # Use the helper from the artifactcache module to parse our configuration.
        parsed_cache_specs = ArtifactCache._configured_remote_cache_specs(
            context, project)

    if isinstance(artifacts_config, dict):
        artifacts_config = [artifacts_config]

    # Build expected artifact config
    artifacts_config = [
        RemoteSpec(
            url=config["url"],
            push=False,
            server_cert=os.path.expanduser(config["server-cert"]),
            client_cert=os.path.expanduser(config["client-cert"]),
            client_key=os.path.expanduser(config["client-key"]),
        ) for config in artifacts_config
    ]

    assert parsed_cache_specs == artifacts_config
Example No. 16
def test_pull(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": cache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Delete the artifact locally
        cli.remove_artifact_from_cache(project_dir, "target.bst")

        # Assert that we are not cached locally anymore
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Assert that the element's artifact is **not** cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)

            assert artifactcache.has_push_remotes(
                plugin=element), "No remote configured for element target.bst"
            assert artifactcache.pull(element,
                                      element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)
Example No. 17
def test_pull_tree(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "url": share.repo,
                "push": True,
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element,
                                                      element_key)

            artifactcache = context.artifactcache
            # Manually setup the CAS remote
            artifactcache.setup_remotes(use_config=True)
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()

            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
            artifactcache.close_grpc_channels()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
                                                      size_bytes=tree_size)

            # Pull the artifact using the Tree object
            directory_digest = artifactcache.pull_tree(project,
                                                       artifact_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

        # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
        assert directory_hash and not directory_size

        directory_digest = remote_execution_pb2.Digest(
            hash=directory_hash, size_bytes=directory_size)

        # Ensure the entire Tree structure has been pulled
        assert os.path.exists(cas.objpath(directory_digest))
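tree_maker, which flattens the artifact's root Directory into a REAPI Tree message, is also a test helper that is not shown; a rough sketch of the recursion, assuming it reads each child Directory out of the local CAS by digest:

def tree_maker(cas, tree, directory):
    # Rough sketch: record the root Directory once, then append every child
    # Directory read from the CAS to tree.children and recurse into it.
    if tree.root.ByteSize() == 0:
        tree.root.CopyFrom(directory)

    for directory_node in directory.directories:
        child_directory = tree.children.add()
        with open(cas.objpath(directory_node.digest), "rb") as f:
            child_directory.ParseFromString(f.read())
        tree_maker(cas, tree, child_directory)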
Example No. 18
def make_loader(basedir):
    with dummy_context() as context:
        project = Project(basedir, context)
        yield project.loader