Code example #1
File: pull.py Project: nanonyme/buildstream
def test_pull(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        cache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        # Delete the artifact locally
        cli.remove_artifact_from_cache(project_dir, "target.bst")

        # Assert that we are not cached locally anymore
        assert cli.get_element_state(project_dir, "target.bst") != "cached"

        with dummy_context(config=user_config_file) as context:
            # Load the project
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Assert that the element's artifact is **not** cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert not cli.artifact.is_cached(cache_dir, element, element_key)

            context.cachedir = cache_dir
            context.casdir = os.path.join(cache_dir, "cas")
            context.tmpdir = os.path.join(cache_dir, "tmp")

            # Load the project manually
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            # Create a local artifact cache handle
            artifactcache = context.artifactcache

            # Initialize remotes
            context.initialize_remotes(True, True, None, None)

            assert artifactcache.has_push_remotes(
                plugin=element), "No remote configured for element target.bst"
            assert artifactcache.pull(element,
                                      element_key), "Pull operation failed"

            assert cli.artifact.is_cached(cache_dir, element, element_key)
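For reference, the remote-cache wiring above reduces to a small pattern. A minimal sketch, assuming the same `cli` pytest fixture and the test-suite import `from buildstream import _yaml`; the URL and paths are placeholders:

from buildstream import _yaml

# Push-enabled artifact server plus a private cache directory (placeholder values)
user_config = {
    "scheduler": {"pushers": 1},
    "artifacts": {
        "servers": [{"url": "https://cache.example.com", "push": True}],
    },
    "cachedir": "/path/to/cache",
}

# Persist it for anything that reads the config file (e.g. dummy_context()),
# and configure the CLI fixture so cli.run() invocations use it too
_yaml.roundtrip_dump(user_config, file="buildstream.conf")
cli.configure(user_config)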
Code example #2
def test_source_cache_key(cli, datafiles):
    project_dir = str(datafiles)

    file_path = os.path.join(project_dir, "files")
    file_url = "file://" + file_path
    element_path = os.path.join(project_dir, "elements")
    element_name = "key_check.bst"
    element = {
        "kind": "import",
        "sources": [
            {
                "kind": "remote",
                "url": os.path.join(file_url, "bin-files", "usr", "bin", "hello"),
                "directory": "usr/bin",
            },
            {
                "kind": "remote",
                "url": os.path.join(file_url, "dev-files", "usr", "include", "pony.h"),
                "directory": "usr/include",
            },
            {
                "kind": "patch",
                "path": "files/hello-patch.diff",
            },
        ],
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    res = cli.run(project=project_dir, args=["source", "track", element_name])
    res.assert_success()

    res = cli.run(project=project_dir, args=["build", element_name])
    res.assert_success()

    # Should have source refs for the two remote sources
    remote_protos = os.path.join(project_dir, "cache", "source_protos",
                                 "remote")
    assert len(os.listdir(remote_protos)) == 2
    # Should not have any source refs for the patch source
    # as that is a transformation of the previous sources,
    # not cacheable on its own
    patch_protos = os.path.join(project_dir, "cache", "source_protos", "patch")
    assert not os.path.exists(patch_protos)
    # Should have one element sources ref
    elementsources_protos = os.path.join(project_dir, "cache",
                                         "elementsources")
    assert len(os.listdir(elementsources_protos)) == 1

    # Modify the pony.h source file and check that tracking updates the refs
    with open(os.path.join(file_path, "dev-files", "usr", "include", "pony.h"),
              "a",
              encoding="utf-8") as f:
        f.write("\nappending nonsense")

    res = cli.run(project=project_dir, args=["source", "track", element_name])
    res.assert_success()
    assert "Found new revision: " in res.stderr

    res = cli.run(project=project_dir, args=["source", "fetch", element_name])
    res.assert_success()

    # We should have a new element sources ref
    assert len(os.listdir(elementsources_protos)) == 2
Code example #3
def generate_element(repo, element_path, dep_name=None):
    element = {"kind": "import", "sources": [repo.source_config()]}
    if dep_name:
        element["depends"] = [dep_name]

    _yaml.roundtrip_dump(element, element_path)
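A hypothetical usage sketch, mirroring how example #27 below calls this helper (`repo` comes from `create_repo()` and the paths are placeholders):

# One element in the main project depending on one in a subproject
generate_element(repo, os.path.join(project, "target.bst"), dep_name="subproject.bst")
generate_element(repo, os.path.join(project, "subproject", "subtarget.bst"))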
Code example #4
def create_element(repo, name, path, dependencies, ref=None):
    element = {"kind": "import", "sources": [repo.source_config(ref=ref)], "depends": dependencies}
    _yaml.roundtrip_dump(element, os.path.join(path, name))
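Example #10 below exercises this helper both ways; condensed here, the dependency list may hold plain filenames or cross-junction mappings:

# Usage as in example #10: a resolved ref, then a cross-junction dependency
create_element(repo, "sub-target.bst", subproject_element_path, ["autotools/amhello.bst"], ref=ref)
create_element(repo, "target.bst", element_path, [{"junction": "junction.bst", "filename": "sub-target.bst"}])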
Code example #5
File: buildcheckout.py Project: nanonyme/buildstream
def test_build_checkout_workspaced_junction(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")
    workspace = os.path.join(cli.directory, "workspace")
    checkout = os.path.join(cli.directory, "checkout")

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path)

    # Create a stack element to depend on a cross junction element
    #
    element = {
        "kind": "stack",
        "depends": [{
            "junction": "junction.bst",
            "filename": "import-etc.bst"
        }]
    }
    _yaml.roundtrip_dump(element, element_path)

    # Now open a workspace on the junction
    #
    result = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, "junction.bst"])
    result.assert_success()
    filename = os.path.join(workspace, "files", "etc-files", "etc",
                            "animal.conf")

    # Assert the content of /etc/animal.conf in the workspace
    assert os.path.exists(filename)
    with open(filename, "r", encoding="utf-8") as f:
        contents = f.read()
    assert contents == "animal=Pony\n"

    # Modify the content of the animal.conf in the workspace
    with open(filename, "w", encoding="utf-8") as f:
        f.write("animal=Horsy\n")

    # Now try to build it, this should automatically result in fetching
    # the junction itself at load time.
    result = cli.run(project=project, args=["build", "junction-dep.bst"])
    result.assert_success()

    # Assert that it's cached now
    assert cli.get_element_state(project, "junction-dep.bst") == "cached"

    # Now check it out
    result = cli.run(project=project,
                     args=[
                         "artifact", "checkout", "junction-dep.bst",
                         "--directory", checkout
                     ])
    result.assert_success()

    # Assert the workspace modified content of /etc/animal.conf
    filename = os.path.join(checkout, "etc", "animal.conf")
    assert os.path.exists(filename)
    with open(filename, "r", encoding="utf-8") as f:
        contents = f.read()
    assert contents == "animal=Horsy\n"
Code example #6
File: mirror.py Project: wjt/buildstream
def test_mirror_git_submodule_fetch(cli, tmpdir, datafiles):
    # Test that it behaves as expected with submodules, both defined in config
    # and discovered when fetching.
    foo_file = os.path.join(str(datafiles), "files", "foo")
    bar_file = os.path.join(str(datafiles), "files", "bar")
    bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
    dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")
    mirror_dir = os.path.join(str(datafiles), "mirror")

    defined_subrepo = create_repo("git", str(tmpdir), "defined_subrepo")
    defined_subrepo.create(bin_files_path)
    defined_subrepo.copy(mirror_dir)
    defined_subrepo.add_file(foo_file)

    found_subrepo = create_repo("git", str(tmpdir), "found_subrepo")
    found_subrepo.create(dev_files_path)

    main_repo = create_repo("git", str(tmpdir))
    main_mirror_ref = main_repo.create(bin_files_path)
    main_repo.add_submodule("defined", "file://" + defined_subrepo.repo)
    main_repo.add_submodule("found", "file://" + found_subrepo.repo)
    main_mirror = main_repo.copy(mirror_dir)
    main_repo.add_file(bar_file)

    project_dir = os.path.join(str(tmpdir), "project")
    os.makedirs(project_dir)
    element_dir = os.path.join(project_dir, "elements")
    os.makedirs(element_dir)
    element = {"kind": "import", "sources": [main_repo.source_config(ref=main_mirror_ref)]}
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)

    # Alias the main repo
    full_repo = element["sources"][0]["url"]
    _, repo_name = os.path.split(full_repo)
    alias = "foo"
    aliased_repo = alias + ":" + repo_name
    element["sources"][0]["url"] = aliased_repo

    # Hide the found subrepo
    del element["sources"][0]["submodules"]["found"]

    # Alias the defined subrepo
    subrepo = element["sources"][0]["submodules"]["defined"]["url"]
    _, repo_name = os.path.split(subrepo)
    aliased_repo = alias + ":" + repo_name
    element["sources"][0]["submodules"]["defined"]["url"] = aliased_repo

    _yaml.roundtrip_dump(element, element_path)

    full_mirror = main_mirror.source_config()["url"]
    mirror_map, _ = os.path.split(full_mirror)
    project = {
        "name": "test",
        "min-version": "2.0",
        "element-path": "elements",
        "aliases": {alias: "http://www.example.com/"},
        "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],},},],
    }
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(project, project_file)

    result = cli.run(project=project_dir, args=["source", "fetch", element_name])
    result.assert_success()
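Stripped of the test plumbing, the declaration that makes this fetch succeed is just the project.conf fragment below; the alias URL is deliberately unreachable, so the sources can only come from the mirror:

# project.conf fragment from the example above, shown in isolation
project = {
    "name": "test",
    "min-version": "2.0",
    "element-path": "elements",
    "aliases": {"foo": "http://www.example.com/"},  # unreachable on purpose
    "mirrors": [{"name": "middle-earth", "aliases": {"foo": [mirror_map + "/"]}}],
}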
Code example #7
File: push.py Project: wjt/buildstream
def test_source_push_split(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)

    with _configure_caches(tmpdir, "indexshare",
                           "storageshare") as (index, storage):
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [
                    {
                        "url": index.repo,
                        "push": True,
                        "type": "index"
                    },
                    {
                        "url": storage.repo,
                        "push": True,
                        "type": "storage"
                    },
                ]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        repo = create_repo("git", str(tmpdir))
        ref = repo.create(os.path.join(project_dir, "files"))
        element_path = os.path.join(project_dir, "elements")
        element_name = "push.bst"
        element = {"kind": "import", "sources": [repo.source_config(ref=ref)]}
        _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

        # get the source object
        with dummy_context(config=user_config_file) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements(["push.bst"])[0]
            element._initialize_state()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            # check we don't have it in the current cache
            assert not index.get_source_proto(source._get_source_name())

            # build the element, this should fetch and then push the source to the
            # remote
            res = cli.run(project=project_dir, args=["build", "push.bst"])
            res.assert_success()
            assert "Pushed source" in res.stderr

            # check that we've got the remote locally now
            sourcecache = context.sourcecache
            assert sourcecache.contains(source)

            # check that the remote CAS now has it
            digest = sourcecache.export(source)._get_digest()
            assert storage.has_object(digest)
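The split cache layout above, reduced to its configuration fragment; the URLs are placeholders standing in for `index.repo` and `storage.repo`:

# Source cache split across an index server and a storage server (placeholder URLs)
user_config = {
    "source-caches": {
        "servers": [
            {"url": "https://index.example.com", "push": True, "type": "index"},
            {"url": "https://storage.example.com", "push": True, "type": "storage"},
        ]
    },
}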
Code example #8
def test_pip_source_import_packages(cli, datafiles, setup_pypi_repo):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_path = os.path.join(project, "elements")
    element_name = "pip/hello.bst"

    # check that exotically named packages are imported correctly
    myreqs_packages = "hellolib"
    dependencies = [
        "app2",
        "app.3",
        "app-4",
        "app_5",
        "app.no.6",
        "app-no-7",
        "app_no_8",
    ]
    mock_packages = {
        myreqs_packages: {package: {} for package in dependencies}
    }

    # create mock pypi repository
    pypi_repo = os.path.join(project, "files", "pypi-repo")
    os.makedirs(pypi_repo, exist_ok=True)
    setup_pypi_repo(mock_packages, pypi_repo)

    element = {
        "kind": "import",
        "sources": [
            {
                "kind": "local",
                "path": "files/pip-source",
            },
            {
                "kind": "pip",
                "url": "file://{}".format(os.path.realpath(pypi_repo)),
                "packages": [myreqs_packages],
            },
        ],
    }
    os.makedirs(
        os.path.dirname(os.path.join(element_path, element_name)),
        exist_ok=True,
    )
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    result = cli.run(project=project, args=["source", "track", element_name])
    assert result.exit_code == 0

    result = cli.run(project=project, args=["build", element_name])
    assert result.exit_code == 0

    result = cli.run(
        project=project,
        args=["artifact", "checkout", element_name, "--directory", checkout],
    )
    assert result.exit_code == 0

    assert_contains(
        checkout,
        [
            "/.bst_pip_downloads",
            "/.bst_pip_downloads/hellolib-0.1.tar.gz",
            "/.bst_pip_downloads/app2-0.1.tar.gz",
            "/.bst_pip_downloads/app.3-0.1.tar.gz",
            "/.bst_pip_downloads/app-4-0.1.tar.gz",
            "/.bst_pip_downloads/app_5-0.1.tar.gz",
            "/.bst_pip_downloads/app.no.6-0.1.tar.gz",
            "/.bst_pip_downloads/app-no-7-0.1.tar.gz",
            "/.bst_pip_downloads/app_no_8-0.1.tar.gz",
        ],
    )
Code example #9
File: source-determinism.py Project: wjt/buildstream
def test_deterministic_source_local(cli, tmpdir, datafiles):
    """Only user rights should be considered for local source.
    """
    project = str(datafiles)
    element_name = "test.bst"
    element_path = os.path.join(project, "elements", element_name)
    sourcedir = os.path.join(project, "source")

    element = {
        "kind": "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "sources": [{
            "kind": "local",
            "path": "source"
        }],
        "config": {
            "install-commands": ['ls -l >"%{install-root}/ls-l"']
        },
    }
    _yaml.roundtrip_dump(element, element_path)

    def get_value_for_mask(mask):
        checkoutdir = os.path.join(str(tmpdir), "checkout-{}".format(mask))

        create_test_file(sourcedir, "a.txt", mode=0o644 & mask)
        create_test_file(sourcedir, "b.txt", mode=0o755 & mask)
        create_test_file(sourcedir, "c.txt", mode=0o4755 & mask)
        create_test_file(sourcedir, "d.txt", mode=0o2755 & mask)
        create_test_file(sourcedir, "e.txt", mode=0o1755 & mask)
        create_test_directory(sourcedir, "dir-a", mode=0o0755 & mask)
        create_test_directory(sourcedir, "dir-b", mode=0o4755 & mask)
        create_test_directory(sourcedir, "dir-c", mode=0o2755 & mask)
        create_test_directory(sourcedir, "dir-d", mode=0o1755 & mask)
        try:
            test_values = []
            result = cli.run(project=project, args=["build", element_name])
            result.assert_success()

            result = cli.run(project=project,
                             args=[
                                 "artifact", "checkout", element_name,
                                 "--directory", checkoutdir
                             ])
            result.assert_success()

            with open(os.path.join(checkoutdir, "ls-l"), "r", encoding="utf-8") as f:
                for line in f.readlines():
                    test_values.append(line.split()[0] + " " +
                                       line.split()[-1])
                return test_values
        finally:
            cli.remove_artifact_from_cache(project, element_name)

    if CASD_SEPARATE_USER:
        # buildbox-casd running as separate user of the same group can't
        # read files with too restrictive permissions.
        assert get_value_for_mask(0o7777) == get_value_for_mask(0o0770)
    else:
        assert get_value_for_mask(0o7777) == get_value_for_mask(0o0700)
Code example #10
File: junction.py Project: nanonyme/buildstream
def test_junction_build_remote(cli, tmpdir, datafiles):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    subproject_element_path = os.path.join(subproject_path, "elements")
    amhello_files_path = os.path.join(subproject_path, "files")
    element_path = os.path.join(project, "elements")
    junction_path = os.path.join(element_path, "junction.bst")

    # We need a repo for real trackable elements
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(amhello_files_path)

    # ensure that the correct project directory is also listed in the junction
    subproject_conf = os.path.join(subproject_path, "project.conf")
    with open(subproject_conf, encoding="utf-8") as f:
        config = f.read()
    config = config.format(project_dir=subproject_path)
    with open(subproject_conf, "w", encoding="utf-8") as f:
        f.write(config)

    # Create a trackable element to depend on the cross junction element,
    # this one has its ref resolved already
    create_element(repo,
                   "sub-target.bst",
                   subproject_element_path, ["autotools/amhello.bst"],
                   ref=ref)

    # Create a trackable element to depend on the cross junction element
    create_element(repo, "target.bst", element_path,
                   [{
                       "junction": "junction.bst",
                       "filename": "sub-target.bst"
                   }])

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # Now create a compose element at the top level
    element = {
        "kind": "compose",
        "depends": [{
            "filename": "target.bst",
            "type": "build"
        }]
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, "composed.bst"))

    # We're doing remote execution so ensure services are available
    services = cli.ensure_services()
    assert set(services) == set(["action-cache", "execution", "storage"])

    # track the junction first to ensure we have refs
    result = cli.run(project=project, args=["source", "track", "junction.bst"])
    result.assert_success()

    # track target to ensure we have refs
    result = cli.run(project=project,
                     args=["source", "track", "--deps", "all", "composed.bst"])
    result.assert_success()

    # build
    result = cli.run(project=project,
                     silent=True,
                     args=["build", "composed.bst"])
    result.assert_success()

    # Assert that the main target is cached as a result
    assert cli.get_element_state(project, "composed.bst") == "cached"
Code example #11
def test_pip_source_build(cli, datafiles, setup_pypi_repo):
    project = str(datafiles)
    element_path = os.path.join(project, "elements")
    element_name = "pip/hello.bst"

    # check that exotically named packages are imported correctly
    myreqs_packages = "hellolib"
    dependencies = [
        "app2",
        "app.3",
        "app-4",
        "app_5",
        "app.no.6",
        "app-no-7",
        "app_no_8",
    ]
    mock_packages = {
        myreqs_packages: {package: {} for package in dependencies}
    }

    # create mock pypi repository
    pypi_repo = os.path.join(project, "files", "pypi-repo")
    os.makedirs(pypi_repo, exist_ok=True)
    setup_pypi_repo(mock_packages, pypi_repo)

    element = {
        "kind": "manual",
        "depends": ["base.bst"],
        "sources": [
            {
                "kind": "local",
                "path": "files/pip-source",
            },
            {
                "kind": "pip",
                "url": "file://{}".format(os.path.realpath(pypi_repo)),
                "requirements-files": ["myreqs.txt"],
                "packages": dependencies,
            },
        ],
        "config": {
            "install-commands": [
                "pip3 install --no-index --prefix %{install-root}/usr .bst_pip_downloads/*.tar.gz",
                "install app1.py %{install-root}/usr/bin/",
            ]
        },
    }
    os.makedirs(
        os.path.dirname(os.path.join(element_path, element_name)),
        exist_ok=True,
    )
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    result = cli.run(project=project, args=["source", "track", element_name])
    assert result.exit_code == 0

    result = cli.run(project=project, args=["build", element_name])
    assert result.exit_code == 0

    result = cli.run(project=project,
                     args=["shell", element_name, "/usr/bin/app1.py"])
    assert result.exit_code == 0
    assert result.output == "Hello App1! This is hellolib\n"
Code example #12
File: junctions.py Project: tom--pollard/buildstream
def test_caching_elements_ignoring_remotes(cli, tmpdir, datafiles):
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    # Load the junction element
    junction_element = os.path.join(project, "base.bst")
    junction_data = _yaml.roundtrip_load(junction_element)

    # Configure to push everything to the project's remote and nothing to the junction's
    junction_data["config"] = {
        "cache-junction-elements": True,
        "ignore-junction-remotes": True
    }
    _yaml.roundtrip_dump(junction_data, junction_element)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, \
         create_artifact_share(os.path.join(str(tmpdir), "artifactshare-base")) as base_share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # Assert that we are now cached locally
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state == "cached"

        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)

        # Push to the remote(s)
        result = cli.run(
            project=project,
            args=["artifact", "push", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And finally assert that the artifacts are in the right shares
        #
        # The parent project's cache should *also* contain elements from the junction
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_shared(cli,
                      share,
                      base_project,
                      "base-element.bst",
                      project_name="base")

        # The junction project's cache should be empty
        assert_not_shared(cli,
                          base_share,
                          project,
                          "target.bst",
                          project_name="parent")
        assert_not_shared(cli,
                          base_share,
                          project,
                          "app.bst",
                          project_name="parent")
        assert_not_shared(cli,
                          base_share,
                          base_project,
                          "base-element.bst",
                          project_name="base")
Code example #13
File: junctions.py Project: tom--pollard/buildstream
def test_ignore_junction_remotes(cli, tmpdir, datafiles):
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    # Load the junction element
    junction_element = os.path.join(project, "base.bst")
    junction_data = _yaml.roundtrip_load(junction_element)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, \
         create_artifact_share(os.path.join(str(tmpdir), "artifactshare-base")) as base_share:

        # Immediately declare the artifact caches in the appropriate project configs
        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)

        # Build and populate the project remotes with their respective elements
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # And finally assert that the artifacts are in the right shares
        #
        # The parent project's cache should only contain project elements
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_not_shared(cli,
                          share,
                          base_project,
                          "base-element.bst",
                          project_name="base")

        # The junction project's cache should only contain elements in the junction project
        assert_not_shared(cli,
                          base_share,
                          project,
                          "target.bst",
                          project_name="parent")
        assert_not_shared(cli,
                          base_share,
                          project,
                          "app.bst",
                          project_name="parent")
        assert_shared(cli,
                      base_share,
                      base_project,
                      "base-element.bst",
                      project_name="base")

        # Ensure that, from now on, we ignore junction element remotes
        junction_data["config"] = {"ignore-junction-remotes": True}
        _yaml.roundtrip_dump(junction_data, junction_element)

        # Now delete everything from the local cache and try to
        # redownload from the shares.
        #
        cas = os.path.join(cli.directory, "cas")
        shutil.rmtree(cas)
        artifact_dir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifact_dir)

        # Assert that nothing is cached locally anymore
        state = cli.get_element_state(project, "target.bst")
        assert state != "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"

        # Now try bst artifact pull
        result = cli.run(
            project=project,
            args=["artifact", "pull", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And assert that they are again in the local cache, without having built
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"
        # We shouldn't be able to download base-element!
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"
Code example #14
File: _yaml.py Project: wjt/buildstream
def generate_element(element_dir, element_name, config=None):
    if config is None:
        config = {}
    element_path = os.path.join(element_dir, element_name)
    _yaml.roundtrip_dump(config, element_path)
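A hypothetical call, with placeholder element name and config:

# Write out a minimal stack element (hypothetical names)
generate_element(element_dir, "app.bst", config={"kind": "stack"})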
Code example #15
def test_incremental(cli, datafiles):
    project = str(datafiles)
    workspace = os.path.join(cli.directory, "workspace")
    element_path = os.path.join(project, "elements")
    element_name = "workspace/incremental.bst"

    element = {
        "kind": "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "sources": [{
            "kind": "local",
            "path": "files/workspace-incremental"
        }],
        "config": {
            "build-commands": ["make"]
        },
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # We open a workspace on the above element
    res = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, element_name])
    res.assert_success()

    # Initial (non-incremental) build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Save the random hash
    random_hash = get_buildtree_file_contents(cli, project, element_name,
                                              "random")

    # Verify the expected output file of the initial build
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy") == "1"

    wait_for_cache_granularity()

    # Replace source file contents with '2'
    with open(os.path.join(workspace, "source"), "w", encoding="utf-8") as f:
        f.write("2")

    # Perform incremental build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Verify that this was an incremental build by comparing the random hash
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "random") == random_hash

    # Verify that the output file matches the new source file
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy") == "2"

    wait_for_cache_granularity()

    # Replace source file contents with '3', however, set an old mtime such
    # that `make` will not pick up the change
    with open(os.path.join(workspace, "source"), "w", encoding="utf-8") as f:
        f.write("3")
    os.utime(os.path.join(workspace, "source"),
             (BST_ARBITRARY_TIMESTAMP, BST_ARBITRARY_TIMESTAMP))

    # Perform incremental build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Verify that this was an incremental build by comparing the random hash
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "random") == random_hash

    # Verify that the output file still matches the previous content '2'
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy") == "2"
Code example #16
def test_pip_element_should_install_pip_deps(cli, datafiles, setup_pypi_repo):
    project = str(datafiles)
    elements_path = os.path.join(project, "elements")
    element_name = "pip/hello.bst"

    # check that exotically named packages are imported correctly
    myreqs_packages = "alohalib"
    dependencies = [
        "app2",
        "app.3",
        "app-4",
        "app_5",
        "app.no.6",
        "app-no-7",
        "app_no_8",
    ]
    mock_packages = {
        myreqs_packages: {package: {} for package in dependencies}
    }

    # set up directories
    pypi_repo = os.path.join(project, "files", "pypi-repo")
    os.makedirs(pypi_repo, exist_ok=True)
    os.makedirs(
        os.path.dirname(os.path.join(elements_path, element_name)),
        exist_ok=True,
    )
    setup_pypi_repo(mock_packages, pypi_repo)

    # create pip element
    element = {
        "kind": "pip",
        "variables": {
            "pip": "pip3"
        },
        "depends": [{
            "filename": "base.bst"
        }],
        "sources": [
            {
                "kind": "tar",
                "url": "file://{}/files/piphello.tar.xz".format(project),
                # FIXME: remove hardcoded ref once issue #1010 is closed
                "ref": "ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d",
            },
            {
                "kind": "pip",
                "url": "file://{}".format(os.path.realpath(pypi_repo)),
                "packages": [myreqs_packages],
            },
        ],
    }
    _yaml.roundtrip_dump(element, os.path.join(elements_path, element_name))

    result = cli.run(project=project, args=["source", "track", element_name])
    assert result.exit_code == 0

    result = cli.run(project=project, args=["build", element_name])
    assert result.exit_code == 0

    # get installed packages in sandbox
    installed_packages = set(
        cli.run(project=project,
                args=["shell", element_name, "pip3",
                      "freeze"]).output.split("\n"))
    # compare with packages that are expected to be installed
    pip_source_packages = {
        package.replace("_", "-") + "==0.1"
        for package in dependencies + [myreqs_packages]
    }
    assert pip_source_packages.issubset(installed_packages)
Code example #17
def test_incremental_partial(cli, datafiles):
    project = str(datafiles)
    workspace = os.path.join(cli.directory, "workspace")
    element_path = os.path.join(project, "elements")
    element_name = "workspace/incremental.bst"

    element = {
        "kind": "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "sources": [{
            "kind": "local",
            "path": "files/workspace-partial"
        }],
        "config": {
            "build-commands": ["make random", "make copy1", "make copy2"]
        },
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # We open a workspace on the above element
    res = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, element_name])
    res.assert_success()

    # Initial (non-incremental) build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Save the random hash
    random_hash = get_buildtree_file_contents(cli, project, element_name,
                                              "random")

    # Verify the expected output files of the initial build
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy1") == "1"
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy2") == "1"

    wait_for_cache_granularity()

    # Delete source1 and replace source2 file contents with '2'
    os.unlink(os.path.join(workspace, "source1"))
    with open(os.path.join(workspace, "source2"), "w", encoding="utf-8") as f:
        f.write("2")

    # Perform incremental build of the workspace
    # This should fail because of the missing source1 file.
    res = cli.run(project=project, args=["build", element_name])
    res.assert_main_error(ErrorDomain.STREAM, None)

    wait_for_cache_granularity()

    # Recreate source1 file
    with open(os.path.join(workspace, "source1"), "w", encoding="utf-8") as f:
        f.write("2")

    # Perform incremental build of the workspace
    res = cli.run(project=project, args=["build", element_name])
    res.assert_success()

    # Verify that this was an incremental build by comparing the random hash
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "random") == random_hash

    # Verify that both files got rebuilt
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy1") == "2"
    assert get_buildtree_file_contents(cli, project, element_name,
                                       "copy2") == "2"
Code example #18
def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
    try:
        project = str(datafiles)
        generate_project(project, tmpdir)

        bst_path = os.path.join(project, "target.bst")
        tar_file = "{}.tar.gz".format(tar_name)

        _yaml.roundtrip_dump(
            {
                "kind": "import",
                "sources": [
                    {
                        "kind": "tar",
                        "url": "tmpdir:/{}".format(tar_file),
                        "ref": "foo",
                        "base-dir": base_dir,
                    }
                ],
            },
            bst_path,
        )

        # Get the tarball in tests/sources/tar/read-only/content
        #
        # NOTE that we need to do this because tarfile.open and tar.add()
        # are packing the tar up with writeable files and dirs
        tarball = os.path.join(str(datafiles), "content", tar_file)
        if not os.path.exists(tarball):
            raise FileNotFoundError("{} does not exist".format(tarball))
        copyfile(tarball, os.path.join(str(tmpdir), tar_file))

        # Because this test can potentially leave directories behind
        # which are difficult to remove, ask buildstream to use
        # our temp directory, so we can clean up.
        tmpdir_str = str(tmpdir)
        if not tmpdir_str.endswith(os.path.sep):
            tmpdir_str += os.path.sep
        env = {"TMP": tmpdir_str}

        # Track, fetch, build, checkout
        result = cli.run(
            project=project, args=["source", "track", "target.bst"], env=env
        )
        result.assert_success()
        result = cli.run(
            project=project, args=["source", "fetch", "target.bst"], env=env
        )
        result.assert_success()
        result = cli.run(
            project=project, args=["build", "target.bst"], env=env
        )
        result.assert_success()

    finally:

        # Make tmpdir deletable no matter what happens
        def make_dir_writable(_fn, path, _excinfo):
            os.chmod(os.path.dirname(path), 0o777)
            if os.path.isdir(path):
                os.rmdir(path)
            else:
                os.remove(path)

        rmtree(str(tmpdir), onerror=make_dir_writable)
Code example #19
File: mirror.py Project: wjt/buildstream
def test_mirror_fallback_git_with_submodules(cli, tmpdir, datafiles):
    # The main repo has a mirror, but does not list its submodules.
    #
    # We expect:
    #  - submodules will be fetched anyway

    bin_files_path = os.path.join(str(datafiles), "files", "bin-files", "usr")
    dev_files_path = os.path.join(str(datafiles), "files", "dev-files", "usr")

    bin_repodir = os.path.join(str(tmpdir), "bin-repo")
    bin_repo = create_repo("git", bin_repodir)
    bin_repo.create(bin_files_path)

    dev_repodir = os.path.join(str(tmpdir), "dev-repo")
    dev_repo = create_repo("git", dev_repodir)
    dev_repo.create(dev_files_path)

    main_files = os.path.join(str(tmpdir), "main-files")
    os.makedirs(main_files)
    with open(os.path.join(main_files, "README"), "w", encoding="utf-8") as f:
        f.write("TEST\n")
    upstream_main_repodir = os.path.join(str(tmpdir), "main-upstream")
    upstream_main_repo = create_repo("git", upstream_main_repodir)
    upstream_main_repo.create(main_files)

    upstream_main_repo.add_submodule("bin", url="file://{}".format(bin_repo.repo))
    upstream_main_repo.add_submodule("dev", url="file://{}".format(dev_repo.repo))
    # Unlist submodules.
    del upstream_main_repo.submodules["bin"]
    del upstream_main_repo.submodules["dev"]

    upstream_main_ref = upstream_main_repo.latest_commit()

    mirror_main_repodir = os.path.join(str(tmpdir), "main-mirror")
    mirror_main_repo = upstream_main_repo.copy(mirror_main_repodir)

    upstream_url = mirror_main_repo.source_config()["url"]

    upstream_map, repo_name = os.path.split(upstream_url)
    alias = "foo"
    aliased_repo = "{}:{}".format(alias, repo_name)

    full_mirror = mirror_main_repo.source_config()["url"]
    mirror_map, _ = os.path.split(full_mirror)

    project_dir = os.path.join(str(tmpdir), "project")
    os.makedirs(project_dir)
    element_dir = os.path.join(project_dir, "elements")

    element = {
        "kind": "import",
        "sources": [upstream_main_repo.source_config_extra(ref=upstream_main_ref, checkout_submodules=True)],
    }
    element["sources"][0]["url"] = aliased_repo
    element_name = "test.bst"
    element_path = os.path.join(element_dir, element_name)
    os.makedirs(element_dir)
    _yaml.roundtrip_dump(element, element_path)

    project = {
        "name": "test",
        "min-version": "2.0",
        "element-path": "elements",
        "aliases": {alias: upstream_map + "/"},
        "mirrors": [{"name": "middle-earth", "aliases": {alias: [mirror_map + "/"],}}],
    }
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(project, project_file)

    # Now make the upstream unavailable.
    os.rename(upstream_main_repo.repo, "{}.bak".format(upstream_main_repo.repo))
    result = cli.run(project=project_dir, args=["source", "fetch", element_name])
    result.assert_success()

    result = cli.run(project=project_dir, args=["build", element_name])
    result.assert_success()

    checkout = os.path.join(str(tmpdir), "checkout")
    result = cli.run(project=project_dir, args=["artifact", "checkout", element_name, "--directory", checkout])
    result.assert_success()

    assert os.path.exists(os.path.join(checkout, "bin", "bin", "hello"))
    assert os.path.exists(os.path.join(checkout, "dev", "include", "pony.h"))
Code example #20
def generate_project_file_server(base_url, project_dir):
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(
        {"name": "foo", "min-version": "2.0", "aliases": {"tmpdir": base_url}},
        project_file,
    )
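A hypothetical call; example #18 above depends on the resulting "tmpdir" alias for its "tmpdir:/..." source URLs:

# Serve sources through the "tmpdir" alias (placeholder base URL)
generate_project_file_server("http://127.0.0.1:8000", project_dir)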
Code example #21
def configure_project(path, config):
    config["name"] = "test"
    config["element-path"] = "elements"
    _yaml.roundtrip_dump(config, os.path.join(path, "project.conf"))
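A hypothetical call; the ref-storage key is only an example of a fragment to merge in:

# Stamp name and element-path onto a partial project config (hypothetical fragment)
configure_project(project_dir, {"ref-storage": "inline"})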
Code example #22
File: junctions.py Project: wjt/buildstream
def project_set_artifacts(project, url):
    project_conf_file = os.path.join(project, "project.conf")
    project_config = _yaml.load(project_conf_file, shortname=None)
    project_config["artifacts"] = [{"url": url, "push": True}]
    _yaml.roundtrip_dump(project_config.strip_node_info(), file=project_conf_file)
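Examples #12 and #13 above call this helper to point each project at its own share:

# Usage as in examples #12 and #13
project_set_artifacts(project, share.repo)
project_set_artifacts(base_project, base_share.repo)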
Code example #23
File: buildcheckout.py Project: nanonyme/buildstream
def test_partial_artifact_checkout_fetch(cli, datafiles, tmpdir):
    project = str(datafiles)
    checkout_dir = os.path.join(str(tmpdir), "checkout")

    repo = create_repo("tar", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "partial-artifact-checkout-fetch",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()

        # Pushing to an artifact cache can require pulling first, so delete
        # some blobs from the local CAS such that a fetch will be needed
        digest = utils.sha256sum(
            os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2],
                               digest[2:])
        os.unlink(objpath)

        # Verify that the build-only dependency is not (complete) in the local cache
        cli.configure({"artifacts": {}})
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", input_name, "--directory",
                             checkout_dir
                         ])
        result.assert_main_error(ErrorDomain.STREAM,
                                 "uncached-checkout-attempt")

        # Verify that the pull method fetches relevant artifacts in order to stage
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", input_name, "--directory",
                             checkout_dir
                         ])
        result.assert_success()

        # Should have pulled whatever was deleted previously
        assert input_name in result.get_pulled_elements()
Code example #24
File: filter.py Project: wjt/buildstream
def test_filter_track_multi_exclude(datafiles, cli, tmpdir):
    repo = create_repo("git", str(tmpdir))
    ref = repo.create(os.path.join(str(datafiles), "files"))
    elements_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    input_name = "input.bst"
    input2_name = "input2.bst"

    project_config = {
        "name": "filter-track-test",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)

    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }

    input_file = os.path.join(elements_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    input2_config = dict(input_config)
    input2_file = os.path.join(elements_dir, input2_name)
    _yaml.roundtrip_dump(input2_config, input2_file)

    filter1_config = {
        "kind": "filter",
        "depends": [{
            "filename": input_name,
            "type": "build"
        }]
    }
    filter1_file = os.path.join(elements_dir, "filter1.bst")
    _yaml.roundtrip_dump(filter1_config, filter1_file)

    filter2_config = {
        "kind": "filter",
        "depends": [{
            "filename": input2_name,
            "type": "build"
        }]
    }
    filter2_file = os.path.join(elements_dir, "filter2.bst")
    _yaml.roundtrip_dump(filter2_config, filter2_file)

    # Assert that a fetch is needed
    states = cli.get_element_states(project, [input_name, input2_name])
    assert states == {
        input_name: "no reference",
        input2_name: "no reference",
    }

    # Now try to track it
    result = cli.run(project=project,
                     args=[
                         "source", "track", "filter1.bst", "filter2.bst",
                         "--except", input_name
                     ])
    result.assert_success()

    # Check the ref fields: the excepted element must not have one, the other must
    new_input = _yaml.load(input_file, shortname=None)
    source_node = new_input.get_sequence("sources").mapping_at(0)
    assert "ref" not in source_node

    new_input2 = _yaml.load(input2_file, shortname=None)
    source_node2 = new_input2.get_sequence("sources").mapping_at(0)
    new_ref2 = source_node2.get_str("ref")
    assert new_ref2 == ref
Code example #25
def test_deterministic_source_umask(cli, tmpdir, datafiles, kind):
    if CASD_SEPARATE_USER and kind == "ostree":
        pytest.xfail("The ostree plugin ignores the umask")

    project = str(datafiles)
    element_name = "list.bst"
    element_path = os.path.join(project, "elements", element_name)
    repodir = os.path.join(str(tmpdir), "repo")
    sourcedir = os.path.join(project, "source")

    create_test_file(sourcedir, "a.txt", mode=0o700)
    create_test_file(sourcedir, "b.txt", mode=0o755)
    create_test_file(sourcedir, "c.txt", mode=0o600)
    create_test_file(sourcedir, "d.txt", mode=0o400)
    create_test_file(sourcedir, "e.txt", mode=0o644)
    create_test_file(sourcedir, "f.txt", mode=0o4755)
    create_test_file(sourcedir, "g.txt", mode=0o2755)
    create_test_file(sourcedir, "h.txt", mode=0o1755)
    create_test_directory(sourcedir, "dir-a", mode=0o0700)
    create_test_directory(sourcedir, "dir-c", mode=0o0755)
    create_test_directory(sourcedir, "dir-d", mode=0o4755)
    create_test_directory(sourcedir, "dir-e", mode=0o2755)
    create_test_directory(sourcedir, "dir-f", mode=0o1755)

    repo = create_repo(kind, repodir)
    ref = repo.create(sourcedir)
    source = repo.source_config(ref=ref)
    element = {
        "kind": "manual",
        "depends": [{"filename": "base.bst", "type": "build"}],
        "sources": [source],
        "config": {"install-commands": ['ls -l >"%{install-root}/ls-l"']},
    }
    _yaml.roundtrip_dump(element, element_path)

    def get_value_for_umask(umask):
        checkoutdir = os.path.join(str(tmpdir), "checkout-{}".format(umask))

        old_umask = os.umask(umask)

        try:
            test_values = []
            result = cli.run(project=project, args=["build", element_name])
            result.assert_success()

            result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkoutdir])
            result.assert_success()

            with open(os.path.join(checkoutdir, "ls-l"), "r", encoding="utf-8") as f:
                for line in f.readlines():
                    test_values.append(line.split()[0] + " " + line.split()[-1])
                return test_values
        finally:
            os.umask(old_umask)
            cli.remove_artifact_from_cache(project, element_name)

    if CASD_SEPARATE_USER:
        # buildbox-casd running as separate user of the same group can't
        # function in a test environment with a too restrictive umask.
        assert get_value_for_umask(0o002) == get_value_for_umask(0o007)
    else:
        assert get_value_for_umask(0o022) == get_value_for_umask(0o077)
Code example #26
def test_workspace_update_dependency_failed(cli, datafiles):
    project = str(datafiles)
    workspace = os.path.join(cli.directory, "workspace")
    element_path = os.path.join(project, "elements")
    element_name = "workspace/workspace-updated-dependency-failed.bst"
    dep_name = "workspace/dependency.bst"

    dependency = {
        "kind": "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "config": {
            "build-commands": [
                "mkdir -p %{install-root}/etc/test/",
                'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
                'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt',
            ]
        },
    }
    os.makedirs(os.path.dirname(os.path.join(element_path, dep_name)),
                exist_ok=True)
    _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))

    # First open the workspace
    res = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, element_name])
    assert res.exit_code == 0

    # We build the workspaced element, so that we have an artifact
    # with specific built dependencies
    res = cli.run(project=project, args=["build", element_name])
    assert res.exit_code == 0

    # Now we update a dependency of our element.
    dependency["config"]["build-commands"] = [
        "mkdir -p %{install-root}/etc/test/",
        'echo "Hello china!" > %{install-root}/etc/test/hello.txt',
        'echo "Hello brazil!" > %{install-root}/etc/test/brazil.txt',
    ]
    _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))

    # And our build fails!
    with open(os.path.join(workspace, "Makefile"), "a", encoding="utf-8") as f:
        f.write("\texit 1")

    res = cli.run(project=project, args=["build", element_name])
    assert res.exit_code != 0

    # We update our dependency again...
    dependency["config"]["build-commands"] = [
        "mkdir -p %{install-root}/etc/test/",
        'echo "Hello world!" > %{install-root}/etc/test/hello.txt',
        'echo "Hello spain!" > %{install-root}/etc/test/brazil.txt',
    ]
    _yaml.roundtrip_dump(dependency, os.path.join(element_path, dep_name))

    # And fix the source
    with open(os.path.join(workspace, "Makefile"), "r", encoding="utf-8") as f:
        makefile = f.readlines()
    with open(os.path.join(workspace, "Makefile"), "w", encoding="utf-8") as f:
        f.write("\n".join(makefile[:-1]))

    # Since buildstream thinks hello.txt did not change, we could end
    # up not rebuilding a file! We need to make sure that a case like
    # this can't blind-side us.
    res = cli.run(project=project, args=["build", element_name])
    assert res.exit_code == 0

    res = cli.run(project=project,
                  args=["shell", element_name, "/usr/bin/test.sh"])
    assert res.exit_code == 0
    assert res.output == "Hello world!\nHello spain!\n\n"
Code example #27
def test_track_cross_junction(cli, tmpdir, datafiles, cross_junction,
                              ref_storage):
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files")
    target_path = os.path.join(project, "target.bst")
    subtarget_path = os.path.join(project, "subproject", "subtarget.bst")

    # Create our repo object of the given source type with
    # the dev files, and then collect the initial ref.
    #
    repo = create_repo("git", str(tmpdir))
    repo.create(dev_files_path)

    # Generate two elements using the git source, one in
    # the main project and one in the subproject.
    generate_element(repo, target_path, dep_name="subproject.bst")
    generate_element(repo, subtarget_path)

    # Generate project.conf
    #
    project_conf = {
        "name": "test",
        "min-version": "2.0",
        "ref-storage": ref_storage
    }
    _yaml.roundtrip_dump(project_conf, os.path.join(project, "project.conf"))

    #
    # FIXME: This can be simplified when we have support
    #        for addressing of junctioned elements.
    #
    def get_subproject_element_state():
        result = cli.run(project=project,
                         args=[
                             "show", "--deps", "all", "--format",
                             "%{name}|%{state}", "target.bst"
                         ])
        result.assert_success()

        # Create a two-dimensional list of the result;
        # the first line should be the junctioned element
        lines = [line.split("|") for line in result.output.splitlines()]
        assert lines[0][0] == "subproject-junction.bst:subtarget.bst"
        return lines[0][1]

    #
    # Assert that we have no reference yet for the cross junction element
    #
    assert get_subproject_element_state() == "no reference"

    # Track recursively across the junction
    args = ["source", "track", "--deps", "all"]
    if cross_junction == "cross":
        args += ["--cross-junctions"]
    args += ["target.bst"]

    result = cli.run(project=project, args=args)

    if ref_storage == "inline":

        if cross_junction == "cross":
            #
            # Cross junction tracking is not allowed when the toplevel project
            # is using inline ref storage.
            #
            result.assert_main_error(ErrorDomain.STREAM, "untrackable-sources")
        else:
            #
            # No cross junction tracking was requested
            #
            result.assert_success()
            assert get_subproject_element_state() == "no reference"
    else:
        #
        # Tracking is allowed with project.refs ref storage
        #
        result.assert_success()

        #
        # If cross junction tracking was enabled, we should now be buildable
        #
        if cross_junction == "cross":
            assert get_subproject_element_state() == "buildable"
        else:
            assert get_subproject_element_state() == "no reference"
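The generate_element helper used in this example is defined elsewhere in the test module; a plausible sketch, assuming it simply wraps the repo's source configuration in an import element with an optional dependency:

def generate_element(repo, element_path, dep_name=None):
    # Sketch only: wrap the repo's source config in an import element,
    # optionally declaring a single dependency
    element = {"kind": "import", "sources": [repo.source_config()]}
    if dep_name:
        element["depends"] = [dep_name]
    _yaml.roundtrip_dump(element, element_path)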
Code example #28
def test_incremental_configure_commands_run_only_once(cli, datafiles):
    project = str(datafiles)
    workspace = os.path.join(cli.directory, "workspace")
    element_path = os.path.join(project, "elements")
    element_name = "workspace/incremental.bst"

    element = {
        "kind":
        "manual",
        "depends": [{
            "filename": "base.bst",
            "type": "build"
        }],
        "sources": [{
            "kind": "local",
            "path": "files/workspace-configure-only-once"
        }],
        "config": {
            "configure-commands": ["$SHELL configure"]
        },
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    # We open a workspace on the above element
    res = cli.run(
        project=project,
        args=["workspace", "open", "--directory", workspace, element_name])
    res.assert_success()

    # Then we build, and check whether the configure step succeeded
    res = cli.run(project=project,
                  args=["--cache-buildtrees", "always", "build", element_name])
    res.assert_success()
    # Check that the workspace was not configured outside the sandbox
    assert not os.path.exists(os.path.join(workspace, "prepared"))

    # The configure should have been run in the sandbox, so check the buildtree
    res = cli.run(
        project=project,
        args=[
            "shell",
            "--build",
            element_name,
            "--use-buildtree",
            "--",
            "find",
            ".",
            "-mindepth",
            "1",
        ],
    )
    res.assert_success()

    files = res.output.splitlines()
    assert "./prepared" in files
    assert not "./prepared-again" in files

    # Add file to workspace to trigger an (incremental) build
    with open(os.path.join(workspace, "newfile"), "w", encoding="utf-8"):
        pass

    # When we build again, the configure commands should not be
    # called, and we should therefore exit cleanly (the configure
    # commands are set to always fail after the first run)
    res = cli.run(project=project,
                  args=["--cache-buildtrees", "always", "build", element_name])
    res.assert_success()

    assert not os.path.exists(os.path.join(workspace, "prepared-again"))
    res = cli.run(
        project=project,
        args=[
            "shell",
            "--build",
            element_name,
            "--use-buildtree",
            "--",
            "find",
            ".",
            "-mindepth",
            "1",
        ],
    )
    res.assert_success()

    files = res.output.splitlines()
    assert "./prepared" in files
    assert not "./prepared-again" in files
Code example #29
def test_track_skip(cli, tmpdir, datafiles):
    project = str(datafiles)
    dev_files_path = os.path.join(project, "files", "dev-files")
    element_path = os.path.join(project, "elements")
    element_dep_name = "track-test-dep.bst"
    element_workspace_name = "track-test-workspace.bst"
    element_target_name = "track-test-target.bst"
    workspace_dir = os.path.join(str(tmpdir), "workspace")

    # Generate an import element with some local source plugins; these
    # do not implement track() and can thus be skipped.
    #
    element = {
        "kind":
        "import",
        "sources": [
            {
                "kind": "local",
                "path": "files/dev-files",
                "directory": "/foo"
            },
            {
                "kind": "local",
                "path": "files/dev-files",
                "directory": "/bar"
            },
        ],
    }
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_dep_name))

    # Generate a regular import element which will have a workspace open
    #
    repo = create_repo("tar", str(tmpdir))
    repo.create(dev_files_path)
    generate_element(repo, os.path.join(element_path, element_workspace_name))

    # Generate a stack element which depends on the import of local files
    #
    # Stack elements do not have any sources; as such, they are also skipped.
    #
    element = {
        "kind": "stack",
        "depends": [element_dep_name, element_workspace_name],
    }
    _yaml.roundtrip_dump(element,
                         os.path.join(element_path, element_target_name))

    # First track and fetch the workspace element
    result = cli.run(
        project=project,
        args=["source", "track", "--deps", "none", element_workspace_name])
    result.assert_success()
    result = cli.run(
        project=project,
        args=["source", "fetch", "--deps", "none", element_workspace_name])
    result.assert_success()

    # Open the workspace so it really is a workspace
    result = cli.run(project=project,
                     args=[
                         "workspace", "open", "--directory", workspace_dir,
                         element_workspace_name
                     ])
    result.assert_success()

    # Now run track on the stack and all the deps
    result = cli.run(
        project=project,
        args=["source", "track", "--deps", "all", element_target_name])
    result.assert_success()

    # Assert we got the expected skip messages
    pattern = r"\[.*track:track-test-dep\.bst.*\] SKIPPED"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 1
    pattern = r"\[.*track:track-test-workspace\.bst.*\] SKIPPED"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 1

    # For now, we expect to not see the job for stack elements
    #
    # This may be revisited, need to consider if we should emit
    # START/SKIPPED message pairs for jobs which were assessed to
    # be unneeded before ever processing.
    #
    pattern = r"\[.*track:track-test-target\.bst.*\]"
    assert len(re.findall(pattern, result.stderr, re.MULTILINE)) == 0
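The SKIPPED assertions above count matching job lines in the log with re.findall; a self-contained sketch of the same matching logic against a made-up log excerpt:

import re

# Made-up log excerpt in the shape the assertions above expect
log = "[track:track-test-dep.bst ] SKIPPED\n[track:other.bst ] START\n"
pattern = r"\[.*track:track-test-dep\.bst.*\] SKIPPED"
assert len(re.findall(pattern, log, re.MULTILINE)) == 1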
Code example #30
File: pull.py Project: nanonyme/buildstream
def test_pull_tree(cli, tmpdir, datafiles):
    project_dir = str(datafiles)

    # Set up an artifact cache.
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # Configure artifact share
        rootcache_dir = os.path.join(str(tmpdir), "cache")
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": rootcache_dir,
        }

        # Write down the user configuration file
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        # Ensure CLI calls will use it
        cli.configure(user_config)

        # First build the project with the artifact cache configured
        result = cli.run(project=project_dir, args=["build", "target.bst"])
        result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project_dir, "target.bst") == "cached"
        # Assert that we shared/pushed the cached artifact
        assert share.get_artifact(
            cli.get_artifact_name(project_dir, "test", "target.bst"))

        with dummy_context(config=user_config_file) as context:
            # Load the project and CAS cache
            project = Project(project_dir, context)
            project.ensure_fully_loaded()
            cas = context.get_cascache()

            # Assert that the element's artifact is cached
            element = project.load_elements(["target.bst"])[0]
            element_key = cli.get_element_key(project_dir, "target.bst")
            assert cli.artifact.is_cached(rootcache_dir, element, element_key)

            # Retrieve the Directory object from the cached artifact
            artifact_digest = cli.artifact.get_digest(rootcache_dir, element,
                                                      element_key)

            # Initialize remotes
            context.initialize_remotes(True, True, None, None)

            artifactcache = context.artifactcache
            assert artifactcache.has_push_remotes()

            directory = remote_execution_pb2.Directory()

            with open(cas.objpath(artifact_digest), "rb") as f:
                directory.ParseFromString(f.read())

            # Build the Tree object while we are still cached
            tree = remote_execution_pb2.Tree()
            tree_maker(cas, tree, directory)

            # Push the Tree as a regular message
            tree_digest = artifactcache.push_message(project, tree)
            tree_hash, tree_size = tree_digest.hash, tree_digest.size_bytes
            assert tree_hash and tree_size

            # Now delete the artifact locally
            cli.remove_artifact_from_cache(project_dir, "target.bst")

            # Assert that we are not cached locally anymore
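            # First release our handles on the cache and shut down this
            # context's casd gRPC channel, so the state can be queried afresh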
            artifactcache.release_resources()
            cas._casd_channel.request_shutdown()
            cas.close_grpc_channels()
            assert cli.get_element_state(project_dir, "target.bst") != "cached"

            tree_digest = remote_execution_pb2.Digest(hash=tree_hash,
                                                      size_bytes=tree_size)

            # Pull the artifact using the Tree object
            directory_digest = artifactcache.pull_tree(project, tree_digest)
            directory_hash, directory_size = directory_digest.hash, directory_digest.size_bytes

        # Directory size now zero with AaaP and stack element commit #1cbc5e63dc
        assert directory_hash and not directory_size

        directory_digest = remote_execution_pb2.Digest(
            hash=directory_hash, size_bytes=directory_size)

        # Ensure the entire Tree structure has been pulled
        assert os.path.exists(cas.objpath(directory_digest))
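For reference, the tree_maker helper used above is defined near the top of the test module; a rough sketch, assuming it fills in the REAPI Tree message by loading each child Directory from the local CAS by digest (the import path shown is the one BuildStream's bundled protos use):

from buildstream._protos.build.bazel.remote.execution.v2 import remote_execution_pb2

def tree_maker_sketch(cas, tree, directory):
    # The first call installs the root Directory into Tree.root
    if tree.root.ByteSize() == 0:
        tree.root.CopyFrom(directory)

    # Load each child Directory from the local CAS by its digest,
    # append it to Tree.children, and recurse into it
    for directory_node in directory.directories:
        child_directory = remote_execution_pb2.Directory()
        with open(cas.objpath(directory_node.digest), "rb") as f:
            child_directory.ParseFromString(f.read())
        tree.children.extend([child_directory])
        tree_maker_sketch(cas, tree, child_directory)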