Example 1
def test_large_directory(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Number of files chosen to ensure the complete list of digests exceeds
    # our 1 MB gRPC message limit. I.e., test message splitting.
    MAX_MESSAGE_LENGTH = 1024 * 1024
    NUM_FILES = MAX_MESSAGE_LENGTH // 64 + 1

    large_directory_dir = os.path.join(project, "files", "large-directory")
    os.mkdir(large_directory_dir)
    for i in range(NUM_FILES):
        with open(os.path.join(large_directory_dir, str(i)), "w") as f:
            # The files need to have different content as we want different digests.
            f.write(str(i))

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:
        # Configure bst to push to the artifact share
        cli.configure({"artifacts": {"servers": [{"url": share.repo, "push": True},]}})

        # Enforce 1 MB gRPC message limit
        with limit_grpc_message_length(MAX_MESSAGE_LENGTH):
            # Build and push
            result = cli.run(project=project, args=["build", "import-large-directory.bst"])
            result.assert_success()

        # Assert that we are now cached locally
        assert cli.get_element_state(project, "import-large-directory.bst") == "cached"

        # Assert that the push was successful
        assert_shared(cli, share, project, "import-large-directory.bst")
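
The `limit_grpc_message_length` helper used here is assumed to come from the suite's test utilities. A minimal sketch of how such a context manager could work, assuming the CAS client reads its per-message payload cap from a patchable module attribute (the `casremote` import path and `_MAX_PAYLOAD_BYTES` name are assumptions, not the actual API):

import contextlib

@contextlib.contextmanager
def limit_grpc_message_length(limit):
    # Hypothetical sketch: temporarily lower the maximum number of bytes the
    # CAS client will pack into a single gRPC message, restoring the original
    # value on exit so other tests are unaffected.
    from buildstream._cas import casremote  # assumed import path
    saved = casremote._MAX_PAYLOAD_BYTES
    casremote._MAX_PAYLOAD_BYTES = limit
    try:
        yield
    finally:
        casremote._MAX_PAYLOAD_BYTES = saved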
Example 2
def test_push_after_pull(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(
            str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as share2:

        # Set the scene: share1 has the artifact, share2 does not.
        #
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": True
                    },
                ]
            }})

        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        cli.remove_artifact_from_cache(project, "target.bst")

        assert_shared(cli, share1, project, "target.bst")
        assert_not_shared(cli, share2, project, "target.bst")
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now run the build again. Correct `bst build` behaviour is to download the
        # artifact from share1 but not push it back again.
        #
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" not in result.get_pushed_elements()

        # Delete the artifact locally again.
        cli.remove_artifact_from_cache(project, "target.bst")

        # Now we add share2 into the mix as a second push remote. This time,
        # `bst build` should push to share2 after pulling from share1.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": True
                    },
                    {
                        "url": share2.repo,
                        "push": True
                    },
                ]
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert "target.bst" in result.get_pulled_elements()
        assert "target.bst" in result.get_pushed_elements()
Example 3
def test_build_remote_option(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as shareuser, create_artifact_share(
        os.path.join(str(tmpdir), "artifactshare2")
    ) as shareproject, create_artifact_share(os.path.join(str(tmpdir), "artifactshare3")) as sharecli:

        # Add shareproject repo url to project.conf
        with open(os.path.join(project, "project.conf"), "a") as projconf:
            projconf.write("artifacts:\n  url: {}\n  push: True".format(shareproject.repo))

        # Configure shareuser remote in user conf
        cli.configure({"artifacts": {"url": shareuser.repo, "push": True}})

        result = cli.run(project=project, args=["build", "--remote", sharecli.repo, "target.bst"])

        # Artifacts should have only been pushed to sharecli, as that was provided via the cli
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()
            assert_shared(cli, sharecli, project, element_name)
            assert_not_shared(cli, shareuser, project, element_name)
            assert_not_shared(cli, shareproject, project, element_name)
Example 4
def test_push_pull_non_strict(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:
        # First build the target element and push to the remote.
        cli.configure({
            "artifacts": {
                "url": share.repo,
                "push": True
            },
            "projects": {
                "test": {
                    "strict": False
                }
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "target.bst") == "cached"

        # Assert that everything is now cached in the remote.
        all_elements = [
            "target.bst", "import-bin.bst", "import-dev.bst", "compose-all.bst"
        ]
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        for element_name in all_elements:
            assert cli.get_element_state(project, element_name) != "cached"

        # Add a file to force change in strict cache key of import-bin.bst
        with open(
                os.path.join(str(project), "files", "bin-files", "usr", "bin",
                             "world"), "w") as f:
            f.write("world")

        # Assert that the modified element requires a rebuild
        assert cli.get_element_state(project, "import-bin.bst") == "buildable"
        # Assert that the target is still waiting due to non-strict mode
        assert cli.get_element_state(project, "target.bst") == "waiting"

        # Now try bst artifact pull
        result = cli.run(
            project=project,
            args=["artifact", "pull", "--deps", "all", "target.bst"])
        result.assert_success()

        # And assert that the target is again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
Example 5
def test_push_pull_specific_remote(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "goodartifactshare")) as good_share, create_artifact_share(
                os.path.join(str(tmpdir), "badartifactshare")) as bad_share:

        # Build the target so we have it cached locally only.
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"

        # Configure the default push location to be bad_share; we will assert that
        # nothing actually gets pushed there.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": bad_share.repo,
                        "push": True
                    },
                ]
            }
        })

        # Now try `bst artifact push` to the good_share.
        result = cli.run(project=project,
                         args=[
                             "artifact", "push", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # Assert that all the artifacts are in the share we pushed
        # to, and not the other.
        assert_shared(cli, good_share, project, "target.bst")
        assert_not_shared(cli, bad_share, project, "target.bst")

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the good_share.
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        result = cli.run(project=project,
                         args=[
                             "artifact", "pull", "target.bst",
                             "--artifact-remote", good_share.repo
                         ])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert cli.get_element_state(project, "target.bst") == "cached"
Example 6
def test_build_remote_option(caplog, cli, tmpdir, datafiles):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "artifactshare1")) as shareuser, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as sharecli:

        # Configure shareuser remote in user conf
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": shareuser.repo,
                    "push": True
                }]
            }
        })

        # Push the artifacts to the shareuser remote.
        # Assert that shareuser has the artifacts cached, but sharecli doesn't,
        # then delete locally cached elements
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()
            assert_not_shared(cli, sharecli, project, element_name)
            assert_shared(cli, shareuser, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a build with sharecli specified on the command line
        # results in nothing being pulled: sharecli has nothing cached, and
        # shareuser should be ignored. The artifacts will, however, be built
        # and pushed to sharecli.
        result = cli.run(
            project=project,
            args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert element_name not in result.get_pulled_elements()
            assert_shared(cli, sharecli, project, element_name)
            cli.remove_artifact_from_cache(project, element_name)

        # Now check that a clean build with sharecli specified on the command
        # line pulls artifacts only from it, as it was provided via the cli
        # and is now populated
        result = cli.run(
            project=project,
            args=["build", "--artifact-remote", sharecli.repo, "target.bst"])
        result.assert_success()
        for element_name in all_elements:
            assert cli.get_element_state(project, element_name) == "cached"
            assert element_name in result.get_pulled_elements()
        assert shareuser.repo not in result.stderr
        assert sharecli.repo in result.stderr
Example 7
def test_push_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test",
                                    os.path.splitext(element)[0], cache_key)
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Configure artifact share
        cli.configure({
            #
            # FIXME: This test hangs "sometimes" if we allow
            #        concurrent push.
            #
            #        It's not too bad to ignore since we're
            #        using the local artifact cache functionality
            #        only, but it should probably be fixed.
            #
            "scheduler": {
                "pushers": 1
            },
            "artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True,
                }]
            },
        })

        # Now try bst artifact push using the artifact ref
        result = cli.run(project=project,
                         args=["artifact", "push", artifact_ref])
        result.assert_success()

        # And finally assert that the artifact is in the share
        #
        # Note that assert_shared verifies that an element is shared by obtaining
        # the artifact ref and asserting that the path exists in the share
        assert_shared(cli, share, project, element)
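
The comment above describes how `assert_shared` works; a rough sketch under those assumptions (the `share.get_artifact` method and the ref layout are assumptions based on the refs path used in this test, not the actual test-utility API):

import os

def assert_shared(cli, share, project, element_name, project_name="test"):
    # Hypothetical sketch: compute the artifact ref for the element and
    # assert that the share holds an artifact under that ref.
    cache_key = cli.get_element_key(project, element_name)
    artifact_ref = os.path.join(project_name,
                                os.path.splitext(element_name)[0], cache_key)
    assert share.get_artifact(artifact_ref), \
        "Artifact {} not found in share".format(artifact_ref)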
Example 8
def test_dynamic_build_plan(cli, tmpdir, datafiles):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache directory
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try to rebuild target
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that target and runtime dependency were pulled
        # but build dependency was not pulled as it wasn't needed
        # (dynamic build plan).
        assert target in result.get_pulled_elements()
        assert runtime_dep in result.get_pulled_elements()
        assert build_dep not in result.get_pulled_elements()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        assert states[target] == "cached"
        assert states[runtime_dep] == "cached"
        assert states[build_dep] != "cached"
Example 9
def test_artifact_too_large(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Mock a file system with 5 MB total space
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(5e6)) as share:

        # Configure bst to push to the remote cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and push a 3MB element
        create_element_size("small_element.bst", project, element_path, [],
                            int(3e6))
        result = cli.run(project=project, args=["build", "small_element.bst"])
        result.assert_success()

        # Create and try to push a 6MB element.
        create_element_size("large_element.bst", project, element_path, [],
                            int(6e6))
        result = cli.run(project=project, args=["build", "large_element.bst"])
        # This should fail; the server will refuse to store the CAS
        # blobs for the artifact, and then fail to find the files for
        # the uploaded artifact proto.
        #
        # FIXME: This should be extremely uncommon in practice, since
        # the artifact needs to be at least half the cache size for
        # this to happen. Nonetheless, a nicer error message would
        # help (perhaps we should just disallow uploading artifacts
        # that large).
        result.assert_main_error(ErrorDomain.STREAM, None)

        # Ensure that the small artifact is still in the share
        states = cli.get_element_states(
            project, ["small_element.bst", "large_element.bst"])
        assert states["small_element.bst"] == "cached"
        assert_shared(cli, share, project, "small_element.bst")

        # Ensure that the large artifact is cached locally but NOT remotely
        assert states["large_element.bst"] == "cached"
        assert_not_shared(cli, share, project, "large_element.bst")
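
`create_element_size` is assumed to generate an element whose built artifact is roughly the requested number of bytes. A plausible sketch (the import-element layout and the `from buildstream import _yaml` helper are assumptions, though `_yaml.roundtrip_dump` is used elsewhere in these examples):

import os
from buildstream import _yaml  # assumed import

def create_element_size(name, project_dir, element_path, dependencies, size):
    # Hypothetical sketch: write `size` random bytes to a file and declare an
    # import element that stages it, so the built artifact is ~`size` bytes.
    # Random content ensures every element gets a distinct cache key.
    data_dir = os.path.join(project_dir, "files", name)
    os.makedirs(data_dir, exist_ok=True)
    with open(os.path.join(data_dir, "data"), "wb") as f:
        f.write(os.urandom(size))

    element = {
        "kind": "import",
        "sources": [{"kind": "local",
                     "path": os.path.relpath(data_dir, project_dir)}],
        "depends": dependencies,
    }
    os.makedirs(os.path.join(project_dir, element_path), exist_ok=True)
    _yaml.roundtrip_dump(element, os.path.join(project_dir, element_path, name))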
Example 10
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"), quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {"url": share.repo, "push": True},}
        )

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [], int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project, ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        os.mkdir(cli2_path)
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project, args=["artifact", "pull", "element1.bst", "--remote", share.repo])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [], int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
Example 11
def test_pull_artifact(cli, tmpdir, datafiles):
    project = str(datafiles)
    element = "target.bst"

    # Configure a local cache
    local_cache = os.path.join(str(tmpdir), "cache")
    cli.configure({"cachedir": local_cache})

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})

        result = cli.run(project=project, args=["build", element])
        result.assert_success()

        # Assert that the *artifact* is cached locally
        cache_key = cli.get_element_key(project, element)
        artifact_ref = os.path.join("test",
                                    os.path.splitext(element)[0], cache_key)
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Assert that the target is shared (note that assert_shared will use the artifact name)
        assert_shared(cli, share, project, element)

        # Now we've pushed, remove the local cache
        shutil.rmtree(os.path.join(local_cache, "artifacts"))

        # Assert that nothing is cached locally anymore
        assert not os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", artifact_ref])
        result.assert_success()

        # And assert that it's again in the local cache, without having built
        assert os.path.exists(
            os.path.join(local_cache, "artifacts", "refs", artifact_ref))
Example 12
def test_artifact_expires(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # Create an artifact share (remote artifact cache) in the tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [
                    {
                        "url": share.repo,
                        "push": True
                    },
                ]
            }})

        # Create and build an element of 15 MB
        create_element_size("element1.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        # Create and build an element of 5 MB
        create_element_size("element2.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Check that elements 1 and 2 are cached both locally and remotely
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])

        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Create and build another element of 5 MB (this will exceed the 22 MB quota)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Ensure it is cached both locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure element1 has been removed from the share
        assert_not_shared(cli, share, project, "element1.bst")
        # Ensure that element2 remains
        assert_shared(cli, share, project, "element2.bst")
Example 13
def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
    project = str(datafiles)
    target = "checkout-deps.bst"
    build_dep = "import-dev.bst"
    runtime_dep = "import-bin.bst"
    all_elements = [target, build_dep, runtime_dep]

    with create_artifact_share(os.path.join(str(tmpdir),
                                            "artifactshare")) as share:

        # First build the target element and push to the remote.
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }]
            }})
        result = cli.run(project=project, args=["build", target])
        result.assert_success()

        # Assert that everything is now cached in the remote.
        for element_name in all_elements:
            assert_shared(cli, share, project, element_name)

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that nothing is cached locally anymore
        states = cli.get_element_states(project, all_elements)
        assert not any(states[e] == "cached" for e in all_elements)

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", "--deps", deps, target])
        result.assert_success()

        # And assert that the pulled elements are again in the local cache
        states = cli.get_element_states(project, all_elements)
        states_flattened = (states[target], states[build_dep],
                            states[runtime_dep])
        assert states_flattened == expected_states
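
`test_push_pull_deps` receives `deps` and `expected_states` as arguments, so a `pytest.mark.parametrize` decorator presumably sits above it but was not captured here. A plausible reconstruction, where the expected state tuples are assumptions keyed to the (target, build_dep, runtime_dep) ordering used in the assertion:

import pytest

# Hypothetical parametrization: pulling with --deps none caches only the
# target, while --deps all caches the target and both dependencies.
@pytest.mark.parametrize(
    "deps, expected_states",
    [
        ("none", ("cached", "buildable", "buildable")),
        ("all", ("cached", "cached", "cached")),
    ],
)
def test_push_pull_deps(cli, tmpdir, datafiles, deps, expected_states):
    ...  # body as shown above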
Example 14
def test_pull_secondary_cache(cli, tmpdir, datafiles):
    project = str(datafiles)

    with create_artifact_share(os.path.join(
            str(tmpdir), "artifactshare1")) as share1, create_artifact_share(
                os.path.join(str(tmpdir), "artifactshare2")) as share2:

        # Build the target and push it to share2 only.
        cli.configure({
            "artifacts": {
                "servers": [
                    {
                        "url": share1.repo,
                        "push": False
                    },
                    {
                        "url": share2.repo,
                        "push": True
                    },
                ]
            }
        })
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()

        assert_not_shared(cli, share1, project, "target.bst")
        assert_shared(cli, share2, project, "target.bst")

        # Delete the user's local artifact cache.
        casdir = os.path.join(cli.directory, "cas")
        shutil.rmtree(casdir)
        artifactdir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifactdir)

        # Assert that the element is not cached anymore.
        assert cli.get_element_state(project, "target.bst") != "cached"

        # Now try bst artifact pull
        result = cli.run(project=project,
                         args=["artifact", "pull", "target.bst"])
        result.assert_success()

        # And assert that it's again in the local cache, without having built,
        # i.e. we found it in share2.
        assert cli.get_element_state(project, "target.bst") == "cached"
Example 15
def test_push_pull(cli, tmpdir, datafiles):
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare-parent")) as share, create_artifact_share(
        os.path.join(str(tmpdir), "artifactshare-base")
    ) as base_share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # Assert that we are now cached locally
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state == "cached"

        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)

        # Now try bst artifact push
        result = cli.run(project=project, args=["artifact", "push", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And finally assert that the artifacts are in the right shares
        #
        # In the parent project's cache
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_not_shared(cli, share, base_project, "base-element.bst", project_name="base")

        # In the junction project's cache
        assert_not_shared(cli, base_share, project, "target.bst", project_name="parent")
        assert_not_shared(cli, base_share, project, "app.bst", project_name="parent")
        assert_shared(cli, base_share, base_project, "base-element.bst", project_name="base")

        # Now we've pushed, delete the user's local artifact cache
        # directory and try to redownload it from the share
        #
        cas = os.path.join(cli.directory, "cas")
        shutil.rmtree(cas)
        artifact_dir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifact_dir)

        # Assert that nothing is cached locally anymore
        state = cli.get_element_state(project, "target.bst")
        assert state != "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"

        # Now try bst artifact pull
        result = cli.run(project=project, args=["artifact", "pull", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And assert that they are again in the local cache, without having built
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state == "cached"
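
`project_set_artifacts` is assumed to declare an artifact server directly in a project's project.conf. A minimal sketch, assuming the `servers` list form of the configuration used elsewhere in these examples:

import os
from buildstream import _yaml  # assumed import

def project_set_artifacts(project, url):
    # Hypothetical sketch: load project.conf, declare `url` as a push-enabled
    # artifact server, and write the configuration back.
    project_conf_file = os.path.join(project, "project.conf")
    project_conf = _yaml.roundtrip_load(project_conf_file)
    project_conf["artifacts"] = {"servers": [{"url": url, "push": True}]}
    _yaml.roundtrip_dump(project_conf, project_conf_file)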
Example 16
def test_build_remote_option(caplog, cli, tmpdir, datafiles, use_remote,
                             ignore_project):
    project = str(datafiles)
    caplog.set_level(1)

    with create_artifact_share(os.path.join(
            str(tmpdir),
            "artifactshare1")) as shareuser, create_artifact_share(
                os.path.join(
                    str(tmpdir),
                    "artifactshare2")) as shareproject, create_artifact_share(
                        os.path.join(str(tmpdir),
                                     "artifactshare3")) as sharecli:

        # Add shareproject repo url to project.conf
        with open(os.path.join(project, "project.conf"), "a") as projconf:
            projconf.write("artifacts:\n- url: {}\n  push: True".format(
                shareproject.repo))

        # Configure shareuser remote in user conf
        cli.configure({
            "artifacts": {
                "servers": [{
                    "url": shareuser.repo,
                    "push": True
                }]
            }
        })

        args = ["build", "target.bst"]
        if use_remote:
            args += ["--artifact-remote", sharecli.repo]
        if ignore_project:
            args += ["--ignore-project-artifact-remotes"]

        result = cli.run(project=project, args=args)

        # Check which remotes the artifacts were pushed to, depending on the options provided
        result.assert_success()
        all_elements = ["target.bst", "import-bin.bst", "compose-all.bst"]
        for element_name in all_elements:
            assert element_name in result.get_pushed_elements()

            # Test shared state of project recommended cache depending
            # on whether we decided to ignore project suggestions.
            #
            if ignore_project:
                assert_not_shared(cli, shareproject, project, element_name)
            else:
                assert_shared(cli, shareproject, project, element_name)

            # If we specified a remote on the command line, this replaces any remotes
            # specified in user configuration.
            #
            if use_remote:
                assert_not_shared(cli, shareuser, project, element_name)
                assert_shared(cli, sharecli, project, element_name)
            else:
                assert_shared(cli, shareuser, project, element_name)
                assert_not_shared(cli, sharecli, project, element_name)
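
This variant receives `use_remote` and `ignore_project`, so it is presumably parametrized over the combinations of the two flags; a plausible decorator (assumed, not captured in the example):

import itertools
import pytest

# Hypothetical parametrization: exercise every combination of passing
# --artifact-remote and --ignore-project-artifact-remotes.
@pytest.mark.parametrize(
    "use_remote, ignore_project",
    list(itertools.product([False, True], repeat=2)),
)
def test_build_remote_option(caplog, cli, tmpdir, datafiles, use_remote,
                             ignore_project):
    ...  # body as shown above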
Example 17
def test_push_fails_with_on_error_continue(cli, tmpdir, datafiles):
    project = str(datafiles)

    # Set up the share
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare")) as share:

        # First build the target (and its deps)
        result = cli.run(project=project, args=["build", "target.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "target.bst") == "cached"
        assert cli.get_element_state(project, "import-dev.bst") == "cached"

        # Now delete the artifact of a dependency and ensure it is not in the cache
        result = cli.run(project=project, args=["artifact", "delete", "import-dev.bst"])
        result.assert_success()
        assert cli.get_element_state(project, "import-dev.bst") != "cached"

        # Configure bst to be able to push to the share
        cli.configure({"artifacts": [{"url": share.repo, "push": True},]})

        # Now try to push the target with its deps using --on-error continue
        # and assert that the push failed, but that whatever could be pushed was pushed
        result = cli.run(
            project=project, args=["--on-error=continue", "artifact", "push", "--deps", "all", "target.bst"]
        )

        # The overall process should return as failed
        result.assert_main_error(ErrorDomain.STREAM, None)

        # We should still have pushed what we could
        assert_shared(cli, share, project, "import-bin.bst")
        assert_shared(cli, share, project, "compose-all.bst")
        assert_shared(cli, share, project, "target.bst")

        assert_not_shared(cli, share, project, "import-dev.bst")

        assert "Push failed: import-dev.bst is not cached" in result.stderr
Example 18
def test_push(cli, tmpdir, datafiles):
    project = str(datafiles)

    # First build the project without the artifact cache configured
    result = cli.run(project=project, args=["build", "target.bst"])
    result.assert_success()

    # Assert that we are now cached locally
    assert cli.get_element_state(project, "target.bst") == "cached"

    # Set up two artifact shares.
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare1")) as share1:

        with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:

            # Try pushing with no remotes configured. This should fail.
            result = cli.run(project=project, args=["artifact", "push", "target.bst"])
            result.assert_main_error(ErrorDomain.STREAM, None)

            # Configure bst to pull but not push from a cache and run `bst artifact push`.
            # This should also fail.
            cli.configure({"artifacts": {"servers": [{"url": share1.repo, "push": False}]}})
            result = cli.run(project=project, args=["artifact", "push", "target.bst"])
            result.assert_main_error(ErrorDomain.STREAM, None)

            # Configure bst to push to one of the caches and run `bst artifact push`. This works.
            cli.configure(
                {"artifacts": {"servers": [{"url": share1.repo, "push": False}, {"url": share2.repo, "push": True},]}}
            )
            result = cli.run(project=project, args=["artifact", "push", "target.bst"])
            result.assert_success()

            assert_not_shared(cli, share1, project, "target.bst")
            assert_shared(cli, share2, project, "target.bst")

        # Now try pushing to both

        with create_artifact_share(os.path.join(str(tmpdir), "artifactshare2")) as share2:
            cli.configure(
                {"artifacts": {"servers": [{"url": share1.repo, "push": True}, {"url": share2.repo, "push": True},]}}
            )
            result = cli.run(project=project, args=["artifact", "push", "target.bst"])
            result.assert_success()

            assert_shared(cli, share1, project, "target.bst")
            assert_shared(cli, share2, project, "target.bst")
Example 19
def test_ignore_junction_remotes(cli, tmpdir, datafiles):
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    # Load the junction element
    junction_element = os.path.join(project, "base.bst")
    junction_data = _yaml.roundtrip_load(junction_element)

    with create_artifact_share(
            os.path.join(
                str(tmpdir),
                "artifactshare-parent")) as share, create_artifact_share(
                    os.path.join(str(tmpdir),
                                 "artifactshare-base")) as base_share:

        # Immediately declare the artifact caches in the appropriate project configs
        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)

        # Build and populate the project remotes with their respective elements
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # And finally assert that the artifacts are in the right shares
        #
        # The parent project's cache should only contain project elements
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_not_shared(cli,
                          share,
                          base_project,
                          "base-element.bst",
                          project_name="base")

        # The junction project's cache should only contain elements in the junction project
        assert_not_shared(cli,
                          base_share,
                          project,
                          "target.bst",
                          project_name="parent")
        assert_not_shared(cli,
                          base_share,
                          project,
                          "app.bst",
                          project_name="parent")
        assert_shared(cli,
                      base_share,
                      base_project,
                      "base-element.bst",
                      project_name="base")

        # Ensure that, from now on, we ignore junction element remotes
        junction_data["config"] = {"ignore-junction-remotes": True}
        _yaml.roundtrip_dump(junction_data, junction_element)

        # Now delete everything from the local cache and try to
        # redownload from the shares.
        #
        cas = os.path.join(cli.directory, "cas")
        shutil.rmtree(cas)
        artifact_dir = os.path.join(cli.directory, "artifacts")
        shutil.rmtree(artifact_dir)

        # Assert that nothing is cached locally anymore
        state = cli.get_element_state(project, "target.bst")
        assert state != "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"

        # Now try bst artifact pull
        result = cli.run(
            project=project,
            args=["artifact", "pull", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And assert that they are again in the local cache, without having built
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"
        # We shouldn't be able to download base-element!
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state != "cached"
Example 20
def test_recently_pulled_artifact_does_not_expire(cli, datafiles, tmpdir):
    project = str(datafiles)
    element_path = "elements"

    # The artifact expiry logic relies on mtime changes; in real life, second
    # precision should be enough for this to work almost all the time, but test
    # cases run very quickly, which can leave all artifacts with the same mtime.
    #
    # This test requires subsecond mtime to be reliable.
    #
    if not have_subsecond_mtime(project):
        pytest.skip(
            "Filesystem does not support subsecond mtime precision: {}".format(
                project))

    # Create an artifact share (remote cache) in tmpdir/artifactshare
    # Set a 22 MB quota
    with create_artifact_share(os.path.join(str(tmpdir), "artifactshare"),
                               quota=int(22e6)) as share:

        # Configure bst to push to the cache
        cli.configure(
            {"artifacts": {
                "servers": [{
                    "url": share.repo,
                    "push": True
                }],
            }})

        # Create and build 2 elements, one 5 MB and one 15 MB.
        create_element_size("element1.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element1.bst"])
        result.assert_success()

        create_element_size("element2.bst", project, element_path, [],
                            int(15e6))
        result = cli.run(project=project, args=["build", "element2.bst"])
        result.assert_success()

        # Ensure they are cached locally
        states = cli.get_element_states(project,
                                        ["element1.bst", "element2.bst"])
        assert states == {
            "element1.bst": "cached",
            "element2.bst": "cached",
        }

        # Ensure that they have been pushed to the cache
        assert_shared(cli, share, project, "element1.bst")
        assert_shared(cli, share, project, "element2.bst")

        # Pull element1 from the remote cache (this should update its mtime).
        # Use a separate local cache for this to ensure the complete element is pulled.
        cli2_path = os.path.join(str(tmpdir), "cli2")
        os.mkdir(cli2_path)
        cli2 = Cli(cli2_path)
        result = cli2.run(project=project,
                          args=[
                              "artifact", "pull", "element1.bst",
                              "--artifact-remote", share.repo
                          ])
        result.assert_success()

        # Ensure element1 is cached locally
        assert cli2.get_element_state(project, "element1.bst") == "cached"

        wait_for_cache_granularity()

        # Create and build element3 (5 MB)
        create_element_size("element3.bst", project, element_path, [],
                            int(5e6))
        result = cli.run(project=project, args=["build", "element3.bst"])
        result.assert_success()

        # Make sure it's cached locally and remotely
        assert cli.get_element_state(project, "element3.bst") == "cached"
        assert_shared(cli, share, project, "element3.bst")

        # Ensure that element2 was deleted from the share and element1 remains
        assert_not_shared(cli, share, project, "element2.bst")
        assert_shared(cli, share, project, "element1.bst")
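
Given the subsecond-mtime caveat spelled out at the top of this test, `wait_for_cache_granularity` presumably just sleeps long enough for artifacts created before and after the call to receive distinguishable mtimes; a minimal sketch under that assumption:

import time

def wait_for_cache_granularity():
    # Hypothetical sketch: sleep slightly longer than the assumed filesystem
    # mtime granularity so consecutive artifacts cannot share a timestamp.
    time.sleep(0.1)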
Example 21
def test_caching_elements_ignoring_remotes(cli, tmpdir, datafiles):
    project = os.path.join(str(datafiles), "parent")
    base_project = os.path.join(str(project), "base")

    # Load the junction element
    junction_element = os.path.join(project, "base.bst")
    junction_data = _yaml.roundtrip_load(junction_element)

    # Configure to push everything to the project's remote and nothing to the junction's
    junction_data["config"] = {
        "cache-junction-elements": True,
        "ignore-junction-remotes": True
    }
    _yaml.roundtrip_dump(junction_data, junction_element)

    with create_artifact_share(
            os.path.join(
                str(tmpdir),
                "artifactshare-parent")) as share, create_artifact_share(
                    os.path.join(str(tmpdir),
                                 "artifactshare-base")) as base_share:

        # First build it without the artifact cache configured
        result = cli.run(project=project, args=["build", "target.bst"])
        assert result.exit_code == 0

        # Assert that we are now cached locally
        state = cli.get_element_state(project, "target.bst")
        assert state == "cached"
        state = cli.get_element_state(base_project, "base-element.bst")
        assert state == "cached"

        project_set_artifacts(project, share.repo)
        project_set_artifacts(base_project, base_share.repo)

        # Push to the remote(s)
        result = cli.run(
            project=project,
            args=["artifact", "push", "--deps", "all", "target.bst"])
        assert result.exit_code == 0

        # And finally assert that the artifacts are in the right shares
        #
        # The parent project's cache should *also* contain elements from the junction
        assert_shared(cli, share, project, "target.bst", project_name="parent")
        assert_shared(cli, share, project, "app.bst", project_name="parent")
        assert_shared(cli,
                      share,
                      base_project,
                      "base-element.bst",
                      project_name="base")

        # The junction project's cache should be empty
        assert_not_shared(cli,
                          base_share,
                          project,
                          "target.bst",
                          project_name="parent")
        assert_not_shared(cli,
                          base_share,
                          project,
                          "app.bst",
                          project_name="parent")
        assert_not_shared(cli,
                          base_share,
                          base_project,
                          "base-element.bst",
                          project_name="base")