Example #1
def test_filter_pass_integration(datafiles, cli):
    project = str(datafiles)

    # Explicitly not passing integration commands should be fine
    result = cli.run(project=project, args=["build", "no-pass-integration.bst"])
    result.assert_success()

    # Passing integration commands should build nicely
    result = cli.run(project=project, args=["build", "pass-integration.bst"])
    result.assert_success()

    # Checking out elements which don't pass integration commands should still work
    checkout_dir = os.path.join(project, "no-pass")
    result = cli.run(
        project=project,
        args=["artifact", "checkout", "--integrate", "--directory", checkout_dir, "no-pass-integration.bst"],
    )
    result.assert_success()

    # Checking out the artifact should fail if we run integration commands, as
    # the staged artifacts don't have a shell
    checkout_dir = os.path.join(project, "pass")
    result = cli.run(
        project=project,
        args=["artifact", "checkout", "--integrate", "--directory", checkout_dir, "pass-integration.bst"],
    )
    result.assert_main_error(ErrorDomain.STREAM, "missing-command")
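These snippets are lifted from BuildStream's pytest suite, so each one omits the module-level boilerplate it relies on. A minimal sketch of that boilerplate is given below; the exact import paths are an assumption and vary between BuildStream versions (the cli fixture has lived in tests.testutils and in buildstream.testing, and the error enums in buildstream._exceptions or buildstream.exceptions):

import os
import shutil
import pytest

from buildstream import _yaml
from buildstream.exceptions import ErrorDomain, LoadErrorReason
from buildstream.testing import cli  # noqa: F401 -- pytest fixture used below

# Each test is pointed at a sample project via pytest-datafiles
DATA_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "project")


@pytest.mark.datafiles(DATA_DIR)
def test_example(cli, datafiles):
    project = str(datafiles)
    result = cli.run(project=project, args=["show", "target.bst"])
    result.assert_success()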
Example #2
def test_push_missing_source_after_build(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_name = "import-bin.bst"

    res = cli.run(project=project_dir, args=["build", element_name])
    res.assert_success()

    # Delete source but keep artifact in cache
    shutil.rmtree(os.path.join(cache_dir, "elementsources"))
    shutil.rmtree(os.path.join(cache_dir, "source_protos"))

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        res = cli.run(project=project_dir,
                      args=["source", "push", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) in res.stderr
        assert "Pushed source" in res.stderr
Example #3
def test_custom_transform_source(cli, datafiles):
    project = str(datafiles)

    # Set the project_dir alias in project.conf to the path to the tested project
    project_config_path = os.path.join(project, "project.conf")
    project_config = load_yaml(project_config_path)
    aliases = project_config.get_mapping("aliases")
    aliases["project_dir"] = "file://{}".format(project)
    generate_project(project, project_config)

    # Ensure we can track
    result = cli.run(project=project, args=["source", "track", "target.bst"])
    result.assert_success()

    # Ensure we can fetch
    result = cli.run(project=project, args=["source", "fetch", "target.bst"])
    result.assert_success()

    # Ensure we get correct output from foo_transform
    cli.run(project=project, args=["build", "target.bst"])
    destpath = os.path.join(cli.directory, "checkout")
    result = cli.run(
        project=project,
        args=["artifact", "checkout", "target.bst", "--directory", destpath])
    result.assert_success()
    # Assert that files from both sources exist, and that they have
    # the same content
    assert os.path.exists(os.path.join(destpath, "file"))
    assert os.path.exists(os.path.join(destpath, "filetransform"))
    with open(os.path.join(destpath, "file"), encoding="utf-8") as file1:
        with open(os.path.join(destpath, "filetransform"),
                  encoding="utf-8") as file2:
            assert file1.read() == file2.read()
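Several examples on this page call a module-local generate_project() helper whose definition is not shown. A plausible sketch, assuming it simply serializes the given configuration as the project's project.conf:

def generate_project(project_dir, config):
    # Write the configuration mapping out as the project's project.conf
    project_file = os.path.join(project_dir, "project.conf")
    _yaml.roundtrip_dump(config, project_file)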
Example #4
def test_inconsistent_junction(cli, tmpdir, datafiles, ref_storage,
                               workspaced):
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    junction_path = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    configure_project(project, {"ref-storage": ref_storage})

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # Create a stack element to depend on a cross junction element
    #
    element = {
        "kind": "stack",
        "depends": [{
            "junction": "junction.bst",
            "filename": "import-etc.bst"
        }]
    }
    _yaml.roundtrip_dump(element, element_path)

    # Open a workspace if we're testing workspaced behavior
    if workspaced:
        result = cli.run(
            project=project,
            silent=True,
            args=[
                "workspace", "open", "--no-checkout", "--directory",
                subproject_path, "junction.bst"
            ],
        )
        result.assert_success()

    # Try to show the element which depends across the junction
    dep_result = cli.run(project=project,
                         silent=True,
                         args=["show", "junction-dep.bst"])

    # Try to show the cross-junction element directly
    etc_result = cli.run(project=project,
                         silent=True,
                         args=["show", "junction.bst:import-etc.bst"])

    # If a workspace is open, no ref is needed
    if workspaced:
        dep_result.assert_success()
        etc_result.assert_success()
    else:
        # Assert that we have the expected provenance encoded into the error
        element_node = _yaml.load(element_path, shortname="junction-dep.bst")
        ref_node = element_node.get_sequence("depends").mapping_at(0)
        provenance = ref_node.get_provenance()
        assert str(provenance) in dep_result.stderr

        dep_result.assert_main_error(ErrorDomain.LOAD,
                                     LoadErrorReason.SUBPROJECT_INCONSISTENT)
        etc_result.assert_main_error(ErrorDomain.LOAD,
                                     LoadErrorReason.SUBPROJECT_INCONSISTENT)
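generate_junction() is a BuildStream test utility: it commits the subproject to a scratch repo and writes a junction element pointing at it, optionally withholding the source ref, which is exactly what makes the junction "inconsistent" above. A hedged sketch, assuming the create_repo() testing API:

def generate_junction(tmpdir, subproject_path, junction_path, store_ref=True):
    # Commit the subproject to a scratch repo
    repo = create_repo("git", str(tmpdir))
    source_ref = ref = repo.create(subproject_path)
    if not store_ref:
        source_ref = None  # no ref recorded, so the junction is inconsistent

    # Write a junction element whose source points at that repo
    element = {"kind": "junction", "sources": [repo.source_config(ref=source_ref)]}
    _yaml.roundtrip_dump(element, junction_path)
    return ref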
Example #5
def test_build_push_source_twice(cli, tmpdir, datafiles):
    cache_dir = os.path.join(str(tmpdir), "cache")
    project_dir = str(datafiles)
    element_name = "import-bin.bst"

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        user_config_file = str(tmpdir.join("buildstream.conf"))
        user_config = {
            "scheduler": {
                "pushers": 1
            },
            "source-caches": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
            "cachedir": cache_dir,
        }
        _yaml.roundtrip_dump(user_config, file=user_config_file)
        cli.configure(user_config)

        res = cli.run(project=project_dir, args=["build", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) in res.stderr
        assert "Pushed source" in res.stderr

        # The second build pipeline is a no-op as everything is already cached.
        # However, this verifies that the pipeline behaves as expected.
        res = cli.run(project=project_dir, args=["build", element_name])
        res.assert_success()
        assert "fetch:{}".format(element_name) not in res.stderr
        assert "Pushed source" not in res.stderr
Example #6
def test_artifact_delete_artifact(cli, tmpdir, datafiles, with_project):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()

    # Obtain the artifact ref ("<project-name>/<element-name>/<cache-key>";
    # the sample project is named "test")
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the project.conf if we're going to try this without a project
    if not with_project:
        os.remove(os.path.join(project, "project.conf"))

    # Delete the artifact
    result = cli.run(project=project, args=["artifact", "delete", artifact])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))
Example #7
def test_filter_pass_integration_uncached(datafiles, cli):
    project = str(datafiles)

    # Passing integration commands should build nicely
    result = cli.run(project=project, args=["build", "filter/filter.bst"])
    result.assert_success()

    # Delete the build dependency of the filter element.
    # The built filter element should be usable even if the build dependency
    # is not available in the local cache.
    result = cli.run(project=project,
                     args=["artifact", "delete", "filter/parent.bst"])
    result.assert_success()

    # Checking out the element should work
    checkout_dir = os.path.join(project, "filter")
    result = cli.run(
        project=project,
        args=[
            "artifact", "checkout", "--integrate", "--directory", checkout_dir,
            "filter/filter.bst"
        ],
    )
    result.assert_success()

    # Check that the integration command was run
    assert_contains(checkout_dir, ["/foo"])
    shutil.rmtree(checkout_dir)
Example #8
File: tar.py Project: nanonyme/buildstream
def test_stage_default_basedir_lzip(cli, tmpdir, datafiles, srcdir):
    project = str(datafiles)
    generate_project(project,
                     config={"aliases": {
                         "tmpdir": "file:///" + str(tmpdir)
                     }})
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Create a local tar
    src_tar = os.path.join(str(tmpdir), "a.tar.lz")
    _assemble_tar_lz(os.path.join(str(datafiles), "content"), srcdir, src_tar)

    # Track, fetch, build, checkout
    result = cli.run(project=project,
                     args=["source", "track", "target-lz.bst"])
    result.assert_success()
    result = cli.run(project=project,
                     args=["source", "fetch", "target-lz.bst"])
    result.assert_success()
    result = cli.run(project=project, args=["build", "target-lz.bst"])
    result.assert_success()
    result = cli.run(project=project,
                     args=[
                         "artifact", "checkout", "target-lz.bst",
                         "--directory", checkoutdir
                     ])
    result.assert_success()

    # Check that the content of the first directory is checked out (base-dir: '*')
    original_dir = os.path.join(str(datafiles), "content", "a")
    original_contents = list_dir_contents(original_dir)
    checkout_contents = list_dir_contents(checkoutdir)
    assert checkout_contents == original_contents
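list_dir_contents() is another helper that is not shown on this page. A hypothetical reconstruction that satisfies the comparison above, collecting the names of all files and directories beneath a root:

def list_dir_contents(srcdir):
    # Gather every file and directory name under srcdir
    contents = set()
    for _, dirs, files in os.walk(srcdir):
        contents.update(dirs)
        contents.update(files)
    return contents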
Example #9
def test_rebuild(datafiles, cli, strict):
    project = str(datafiles)

    # First build intermediate target.bst
    result = cli.run(project=project,
                     args=strict_args(["build", "target.bst"], strict))
    result.assert_success()

    # Modify base import
    with open(os.path.join(project, "files", "dev-files", "usr", "include",
                           "new.h"),
              "w",
              encoding="utf-8") as f:
        f.write("#define NEW")

    # Rebuild base import and build top-level rebuild-target.bst
    # In non-strict mode, this does not rebuild intermediate target.bst,
    # which means that a weakly cached target.bst will be staged as dependency.
    result = cli.run(project=project,
                     args=strict_args(["build", "rebuild-target.bst"], strict))
    result.assert_success()

    built_elements = result.get_built_elements()

    assert "rebuild-target.bst" in built_elements
    if strict == "strict":
        assert "target.bst" in built_elements
    else:
        assert "target.bst" not in built_elements
Example #10
def test_stage_symlink(cli, tmpdir, datafiles):

    project = str(datafiles)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    # Workaround datafiles bug:
    #
    #   https://github.com/omarkohl/pytest-datafiles/issues/1
    #
    # Create the symlink by hand.
    symlink = os.path.join(project, "files", "symlink-to-file.txt")
    os.symlink("file.txt", symlink)

    # Build, checkout
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()
    result = cli.run(project=project,
                     args=[
                         "artifact", "checkout", "target.bst", "--directory",
                         checkoutdir
                     ])
    result.assert_success()

    # Check that the checkout contains the regular file and the symlink to it
    assert os.path.exists(os.path.join(checkoutdir, "file.txt"))
    assert os.path.exists(os.path.join(checkoutdir, "symlink-to-file.txt"))
    assert os.path.islink(os.path.join(checkoutdir, "symlink-to-file.txt"))
Example #11
def test_build_checkout_nonempty(datafiles, cli, hardlinks):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    filename = os.path.join(checkout, "file.txt")

    # First build it
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    # Assert that after a successful build, the builddir is empty
    builddir = os.path.join(cli.directory, "build")
    assert os.path.isdir(builddir)
    assert not os.listdir(builddir)

    # Create the checkout dir and add a file to it, should cause checkout to fail
    os.makedirs(checkout, exist_ok=True)
    with open(filename, "w", encoding="utf-8") as f:
        f.write("Hello")

    # Prepare checkout args
    checkout_args = ["artifact", "checkout"]
    if hardlinks == "hardlinks":
        checkout_args += ["--hardlinks"]
    checkout_args += ["target.bst", "--directory", checkout]

    # Now check it out
    result = cli.run(project=project, args=checkout_args)
    result.assert_main_error(ErrorDomain.STREAM, None)
Example #12
def test_artifact_show_element_available_remotely(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Set up remote and local shares
    local_cache = os.path.join(str(tmpdir), "artifacts")
    with create_artifact_share(os.path.join(str(tmpdir), "remote")) as remote:
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": remote.repo,
                    "push": True
                }]
            },
            "cachedir": local_cache,
        })

        # Build the element
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Make sure it's in the share
        assert remote.get_artifact(
            cli.get_artifact_name(project, "test", element))

        # Delete the artifact from the local cache
        result = cli.run(project=project, args=["artifact", "delete", element])
        result.assert_success()
        assert cli.get_element_state(project, element) != "cached"

        result = cli.run(project=project, args=["artifact", "show", element])
        result.assert_success()
        assert "available {}".format(element) in result.output
Example #13
def test_build_checkout_tarball_is_deterministic(datafiles, cli):
    project = str(datafiles)
    tarball1 = os.path.join(cli.directory, "tarball1.tar")
    tarball2 = os.path.join(cli.directory, "tarball2.tar")

    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    builddir = os.path.join(cli.directory, "build")
    assert os.path.isdir(builddir)
    assert not os.listdir(builddir)

    checkout_args = ["artifact", "checkout", "--force", "target.bst"]

    checkout_args1 = checkout_args + ["--tar", tarball1]
    result = cli.run(project=project, args=checkout_args1)
    result.assert_success()

    checkout_args2 = checkout_args + ["--tar", tarball2]
    result = cli.run(project=project, args=checkout_args2)
    result.assert_success()

    with open(tarball1, "rb") as f:
        contents = f.read()
    hash1 = hashlib.sha1(contents).hexdigest()

    with open(tarball2, "rb") as f:
        contents = f.read()
    hash2 = hashlib.sha1(contents).hexdigest()

    assert hash1 == hash2
Example #14
def test_build_checkout_links(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    # Create the symlink by hand before running the test.
    # On Windows, git checks out symlinks as regular files whose content is
    # the symlink target, so a symlink committed to the repo would not give
    # this test the content it expects.
    os.symlink(
        os.path.join("..", "basicfile"),
        os.path.join(project, "files", "files-and-links", "basicfolder",
                     "basicsymlink"),
    )

    result = cli.run(project=project, args=["build", "import-links.bst"])
    result.assert_success()

    builddir = os.path.join(cli.directory, "build")
    assert os.path.isdir(builddir)
    assert not os.listdir(builddir)

    checkout_args = [
        "artifact", "checkout", "--directory", checkout, "import-links.bst"
    ]

    result = cli.run(project=project, args=checkout_args)
    result.assert_success()

    with open(os.path.join(checkout, "basicfolder", "basicsymlink"),
              encoding="utf-8") as fp:
        data = fp.read()
    assert data == "file contents\n"
Example #15
def test_build_checkout_tarball(datafiles, cli):
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout.tar")

    # Work-around datafiles not preserving mode
    os.chmod(os.path.join(project, "files/bin-files/usr/bin/hello"), 0o0755)

    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    builddir = os.path.join(cli.directory, "build")
    assert os.path.isdir(builddir)
    assert not os.listdir(builddir)

    checkout_args = ["artifact", "checkout", "--tar", checkout, "target.bst"]

    result = cli.run(project=project, args=checkout_args)
    result.assert_success()

    with tarfile.TarFile(checkout) as tar:
        tarinfo = tar.getmember(os.path.join(".", "usr", "bin", "hello"))
        assert tarinfo.mode == 0o755
        assert tarinfo.uid == 0 and tarinfo.gid == 0
        assert tarinfo.uname == "" and tarinfo.gname == ""

        tarinfo = tar.getmember(os.path.join(".", "usr", "include", "pony.h"))
        assert tarinfo.mode == 0o644
        assert tarinfo.uid == 0 and tarinfo.gid == 0
        assert tarinfo.uname == "" and tarinfo.gname == ""
Example #16
def test_build_checkout(datafiles, cli, strict, hardlinks):
    if CASD_SEPARATE_USER and hardlinks == "hardlinks":
        pytest.xfail(
            "Cannot hardlink with buildbox-casd running as a separate user")

    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    # First build it
    result = cli.run(project=project,
                     args=strict_args(["build", "target.bst"], strict))
    result.assert_success()

    # Assert that after a successful build, the builddir is empty
    builddir = os.path.join(cli.directory, "build")
    assert os.path.isdir(builddir)
    assert not os.listdir(builddir)

    # Prepare checkout args
    checkout_args = strict_args(["artifact", "checkout"], strict)
    if hardlinks == "hardlinks":
        checkout_args += ["--hardlinks"]
    checkout_args += ["target.bst", "--directory", checkout]

    # Now check it out
    result = cli.run(project=project, args=checkout_args)
    result.assert_success()

    # Check that the executable hello file is found in the checkout
    filename = os.path.join(checkout, "usr", "bin", "hello")
    assert os.path.exists(filename)

    filename = os.path.join(checkout, "usr", "include", "pony.h")
    assert os.path.exists(filename)
Example #17
def test_unique_key(cli, tmpdir, datafiles):
    """This test confirms that the 'filename' parameter is honoured when it comes
    to generating a cache key for the source.
    """
    project = str(datafiles)
    generate_project(project,
                     {"aliases": {
                         "tmpdir": "file:///" + str(tmpdir)
                     }})

    states = cli.get_element_states(
        project,
        ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
    assert states["target.bst"] == "fetch needed"
    assert states["target-custom.bst"] == "fetch needed"
    assert states["target-custom-executable.bst"] == "fetch needed"

    # Try to fetch it
    cli.run(project=project, args=["source", "fetch", "target.bst"])

    # We should download the file only once
    states = cli.get_element_states(
        project,
        ["target.bst", "target-custom.bst", "target-custom-executable.bst"])
    assert states["target.bst"] == "buildable"
    assert states["target-custom.bst"] == "buildable"
    assert states["target-custom-executable.bst"] == "buildable"

    # But the cache key is different because the 'filename' is different.
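    # (Python chains comparisons, so a != b != c only asserts that the
    # adjacent pairs differ; it never compares the first and third key.)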
    assert (cli.get_element_key(project, "target.bst") != cli.get_element_key(
        project, "target-custom.bst") != cli.get_element_key(
            project, "target-custom-executable.bst"))
Example #18
def test_use_netrc(cli, datafiles, server_type, tmpdir):
    fake_home = os.path.join(str(tmpdir), "fake_home")
    os.makedirs(fake_home, exist_ok=True)
    project = str(datafiles)
    checkoutdir = os.path.join(str(tmpdir), "checkout")

    os.environ["HOME"] = fake_home
    with open(os.path.join(fake_home, ".netrc"), "wb") as f:
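        # Restrict permissions on .netrc; netrc consumers often refuse
        # world-readable credential files.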
        os.fchmod(f.fileno(), 0o700)
        f.write(b"machine 127.0.0.1\n")
        f.write(b"login testuser\n")
        f.write(b"password 12345\n")

    with create_file_server(server_type) as server:
        server.add_user("testuser", "12345", project)
        generate_project(project, {"aliases": {"tmpdir": server.base_url()}})

        server.start()

        result = cli.run(project=project,
                         args=["source", "fetch", "target.bst"])
        result.assert_success()
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        result = cli.run(project=project,
                         args=[
                             "artifact", "checkout", "target.bst",
                             "--directory", checkoutdir
                         ])
        result.assert_success()

        checkout_file = os.path.join(checkoutdir, "file")
        assert os.path.exists(checkout_file)
Example #19
def test_pull_missing_local_blob(cli, tmpdir, datafiles):
    repo = create_repo("tar", str(tmpdir))
    repo.create(os.path.join(str(datafiles), "files"))
    element_dir = os.path.join(str(tmpdir), "elements")
    project = str(tmpdir)
    project_config = {
        "name": "pull-missing-local-blob",
        "min-version": "2.0",
        "element-path": "elements",
    }
    project_file = os.path.join(str(tmpdir), "project.conf")
    _yaml.roundtrip_dump(project_config, project_file)
    input_config = {
        "kind": "import",
        "sources": [repo.source_config()],
    }
    input_name = "input.bst"
    input_file = os.path.join(element_dir, input_name)
    _yaml.roundtrip_dump(input_config, input_file)

    depends_name = "depends.bst"
    depends_config = {"kind": "stack", "depends": [input_name]}
    depends_file = os.path.join(element_dir, depends_name)
    _yaml.roundtrip_dump(depends_config, depends_file)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the input element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["source", "track", input_name])
        result.assert_success()
        result = cli.run(project=project, args=["build", input_name])
        result.assert_success()
        assert cli.get_element_state(project, input_name) == "cached"

        # Delete a file blob from the local cache.
        # This is a placeholder to test partial CAS handling until we support
        # partial artifact pulling (or blob-based CAS expiry).
        #
        digest = utils.sha256sum(
            os.path.join(project, "files", "bin-files", "usr", "bin", "hello"))
        objpath = os.path.join(cli.directory, "cas", "objects", digest[:2],
                               digest[2:])
        os.unlink(objpath)

        # Now try bst build
        result = cli.run(project=project, args=["build", depends_name])
        result.assert_success()

        # Assert that the import-bin artifact was pulled (completing the partial artifact)
        assert result.get_pulled_elements() == [input_name]
Example #20
def test_source_checkout(datafiles, cli, tmpdir_factory, with_workspace,
                         guess_element):
    tmpdir = tmpdir_factory.mktemp(os.path.basename(__file__))
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "source-checkout")
    target = "checkout-deps.bst"
    workspace = os.path.join(str(tmpdir), "workspace")
    elm_cmd = [target] if not guess_element else []

    if with_workspace:
        ws_cmd = ["-C", workspace]
        result = cli.run(
            project=project,
            args=["workspace", "open", "--directory", workspace, target])
        result.assert_success()
    else:
        ws_cmd = []

    args = ws_cmd + [
        "source", "checkout", "--deps", "none", "--directory", checkout,
        *elm_cmd
    ]
    result = cli.run(project=project, args=args)
    result.assert_success()

    assert os.path.exists(
        os.path.join(checkout, "checkout-deps", "etc", "buildstream",
                     "config"))
Example #21
def test_artifact_delete_element_and_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"
    dep = "compose-all.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    # First build an element so that we can find its artifact
    result = cli.run(project=project, args=["build", element])
    result.assert_success()
    assert cli.get_element_states(project, [element, dep], deps="none") == {
        element: "cached",
        dep: "cached",
    }

    # Obtain the artifact ref
    cache_key = cli.get_element_key(project, element)
    artifact = os.path.join("test", os.path.splitext(element)[0], cache_key)

    # Explicitly check that the ARTIFACT exists in the cache
    assert os.path.exists(
        os.path.join(local_cache, "artifacts", "refs", artifact))

    # Delete the artifact
    result = cli.run(project=project,
                     args=["artifact", "delete", artifact, dep])
    result.assert_success()

    # Check that the ARTIFACT is no longer in the cache
    assert not os.path.exists(os.path.join(local_cache, "artifacts", artifact))

    # Check that the dependency ELEMENT is no longer cached
    assert cli.get_element_state(project, dep) != "cached"
Example #22
def test_nested_junction_tally(cli, tmpdir, datafiles):
    # Check that the progress reporting messages count elements in
    # junctions of junctions
    project = str(datafiles)
    sub1_path = os.path.join(project, "files", "sub-project")
    sub2_path = os.path.join(project, "files", "sub2-project")
    # A junction element which pulls sub1 into sub2
    sub1_element = os.path.join(project, "files", "sub2-project", "elements", "sub-junction.bst")
    # A junction element which pulls sub2 into the main project
    sub2_element = os.path.join(project, "elements", "junction.bst")
    element_path = os.path.join(project, "elements", "junction-dep.bst")

    generate_junction(tmpdir / "sub-project", sub1_path, sub1_element, store_ref=True)
    generate_junction(tmpdir / "sub2-project", sub2_path, sub2_element, store_ref=True)

    # Create a stack element to depend on a cross junction element
    #
    element = {"kind": "stack", "depends": [{"junction": "junction.bst", "filename": "import-sub.bst"}]}
    _yaml.roundtrip_dump(element, element_path)

    result = cli.run(project=project, silent=True, args=["source", "fetch", "junction.bst"])
    result.assert_success()

    # Assert the correct progress tallies are in the logging
    result = cli.run(project=project, args=["show", "junction-dep.bst"])
    assert "  3 subtasks processed" in result.stderr
    assert "3 of 3 subtasks processed" in result.stderr
Example #23
def test_artifact_list_exact_contents_long(cli, datafiles, target):
    project = str(datafiles)

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "import-bin.bst"])
    assert result.exit_code == 0

    if target == "element-name":
        arg = "import-bin.bst"
    elif target == "artifact-name":
        key = cli.get_element_key(project, "import-bin.bst")
        arg = "test/import-bin/" + key

    # List the contents in long format via the chosen target
    result = cli.run(project=project, args=["artifact", "list-contents", "--long", arg])
    assert result.exit_code == 0
    expected_output_template = (
        "{target}:\n"
        "\tdrwxr-xr-x  dir    0           usr\n"
        "\tdrwxr-xr-x  dir    0           usr/bin\n"
        "\t-rw-r--r--  reg    28          usr/bin/hello\n\n"
    )
    expected_output = expected_output_template.format(target=arg)

    assert expected_output in result.output
Example #24
def test_artifact_list_exact_contents(cli, datafiles, target, with_project):
    project = str(datafiles)

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "import-bin.bst"])
    result.assert_success()

    if target == "element-name":
        arg = "import-bin.bst"
    elif target == "artifact-name":
        key = cli.get_element_key(project, "import-bin.bst")
        arg = "test/import-bin/" + key

    # Delete the project.conf if we're going to try this without a project
    if not with_project:
        os.remove(os.path.join(project, "project.conf"))

    # List the contents via the key
    result = cli.run(project=project, args=["artifact", "list-contents", arg])

    # Expect to fail if we try to list by element name and there is no project
    if target == "element-name" and not with_project:
        result.assert_main_error(ErrorDomain.STREAM, "project-not-loaded")
    else:
        result.assert_success()

        expected_output_template = "{target}:\n\tusr\n\tusr/bin\n\tusr/bin/hello\n\n"
        expected_output = expected_output_template.format(target=arg)
        assert expected_output in result.output
Example #25
def test_artifact_list_exact_contents_glob(cli, datafiles):
    project = str(datafiles)

    # Ensure we have an artifact to read
    result = cli.run(project=project, args=["build", "target.bst"])
    assert result.exit_code == 0

    # List the contents via glob
    result = cli.run(project=project, args=["artifact", "list-contents", "test/**"])
    assert result.exit_code == 0

    # Get the cache keys for each element in the glob
    import_bin_key = cli.get_element_key(project, "import-bin.bst")
    import_dev_key = cli.get_element_key(project, "import-dev.bst")
    compose_all_key = cli.get_element_key(project, "compose-all.bst")
    target_key = cli.get_element_key(project, "target.bst")

    expected_artifacts = [
        "test/import-bin/" + import_bin_key,
        "test/import-dev/" + import_dev_key,
        "test/compose-all/" + compose_all_key,
        "test/target/" + target_key,
    ]

    for artifact in expected_artifacts:
        assert artifact in result.output
Example #26
def test_source_pull_partial_fallback_fetch(cli, tmpdir, datafiles):
    project_dir = str(datafiles)
    element_name, repo, ref = create_test_element(tmpdir, project_dir)
    cache_dir = os.path.join(str(tmpdir), "cache")

    # Use an artifact share to serve sources for now; it should work the same
    with create_artifact_share(os.path.join(str(tmpdir),
                                            "sourceshare")) as share:
        with context_with_source_cache(cli, cache_dir, share,
                                       tmpdir) as context:
            project = Project(project_dir, context)
            project.ensure_fully_loaded()

            element = project.load_elements([element_name])[0]
            element._query_source_cache()
            assert not element._cached_sources()
            source = list(element.sources())[0]

            assert not share.get_artifact_proto(source._get_source_name())

            # Just check that we sensibly fetch and build the element
            res = cli.run(project=project_dir, args=["build", element_name])
            res.assert_success()

            assert os.listdir(
                os.path.join(str(tmpdir), "cache", "sources", "tar")) != []

            # get root digest of source
            sourcecache = context.sourcecache
            digest = sourcecache.export(source)._get_digest()

            # Push the source to the remote
            res = cli.run(project=project_dir,
                          args=[
                              "source", "push", "--source-remote", share.repo,
                              element_name
                          ])
            res.assert_success()

            # Remove the cas content, only keep the proto and such around
            shutil.rmtree(
                os.path.join(str(tmpdir), "sourceshare", "repo", "cas",
                             "objects"))
            # check the share doesn't have the object
            assert not share.has_object(digest)

            # Delete the source locally
            shutil.rmtree(os.path.join(str(cache_dir), "sources"))
            shutil.rmtree(os.path.join(str(cache_dir), "cas"))
            state = cli.get_element_state(project_dir, element_name)
            assert state == "fetch needed"

            # Now fetch the source and check
            res = cli.run(project=project_dir,
                          args=["source", "fetch", element_name])
            res.assert_success()

            assert ("SUCCESS Fetching {}".format(
                repo.source_config(ref=ref)["url"])) in res.stderr
Example #27
File: tar.py Project: nanonyme/buildstream
def test_read_only_dir(cli, tmpdir, datafiles, tar_name, base_dir):
    try:
        project = str(datafiles)
        generate_project(
            project, config={"aliases": {
                "tmpdir": "file:///" + str(tmpdir)
            }})

        tar_file = "{}.tar.gz".format(tar_name)

        generate_element(
            project,
            "target.bst",
            {
                "kind":
                "import",
                "sources": [{
                    "kind": "tar",
                    "url": "tmpdir:/{}".format(tar_file),
                    "ref": "foo",
                    "base-dir": base_dir
                }],
            },
        )

        # Use the pre-made tarball in tests/sources/tar/read-only/content
        #
        # NOTE that we need a pre-made tarball because tarfile.open and
        # tar.add() would pack the tar up with writable files and dirs
        tarball = os.path.join(str(datafiles), "content", tar_file)
        if not os.path.exists(tarball):
            raise FileNotFoundError("{} does not exist".format(tarball))
        copyfile(tarball, os.path.join(str(tmpdir), tar_file))

        # Because this test can potentially leave directories behind
        # which are difficult to remove, ask buildstream to use
        # our temp directory, so we can clean up.
        tmpdir_str = str(tmpdir)
        if not tmpdir_str.endswith(os.path.sep):
            tmpdir_str += os.path.sep
        env = {"TMP": tmpdir_str}

        # Track, fetch, build, checkout
        result = cli.run(project=project,
                         args=["source", "track", "target.bst"],
                         env=env)
        result.assert_success()
        result = cli.run(project=project,
                         args=["source", "fetch", "target.bst"],
                         env=env)
        result.assert_success()
        result = cli.run(project=project,
                         args=["build", "target.bst"],
                         env=env)
        result.assert_success()

    finally:
        utils._force_rmtree(str(tmpdir))
Example #28
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "goodartifactshare")) as good_share, create_artifact_share(
                os.path.join(str(tmpdir), "badartifactshare")) as bad_share:

        # Build the target so we have it cached locally only.
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"

        # Configure the default push location to be bad_share; we will assert that
        # nothing actually gets pushed there.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": bad_share.repo,
                        "push": True
                    },
                ]
            }
        })

        # Now try `bst artifact push` to the good_share.
        result = cli.run(project=project,
                         args=[
                             "artifact", "push", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # Assert that all the artifacts are in the share we pushed
        # to, and not the other.
        assert_shared(cli, good_share, project, "target.bst")
        assert_not_shared(cli, bad_share, project, "target.bst")

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the good_share.
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        result = cli.run(project=project,
                         args=[
                             "artifact", "pull", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
Example #29
def test_filter_orphans(datafiles, cli, tmpdir):
    project = str(datafiles)
    result = cli.run(project=project, args=["build", "output-orphans.bst"])
    result.assert_success()

    checkout = os.path.join(tmpdir.dirname, tmpdir.basename, "checkout")
    result = cli.run(project=project, args=["artifact", "checkout", "output-orphans.bst", "--directory", checkout])
    result.assert_success()
    assert os.path.exists(os.path.join(checkout, "baz"))
Example #30
def test_filter_deps_ok(datafiles, cli):
    project = str(datafiles)
    result = cli.run(project=project, args=["build", "deps-permitted.bst"])
    result.assert_success()

    result = cli.run(project=project, args=["show", "--deps=run", "--format='%{name}'", "deps-permitted.bst"])
    result.assert_success()

    assert "output-exclude.bst" in result.output
    assert "output-orphans.bst" in result.output