def test_tar_build(cli, datafiles):
    """Build a tar element and verify the checked-out tarball's contents.

    The element produces hello.tar.gz; after checkout we confirm the
    archive exists and contains exactly hello.c.
    """
    project = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "tar_checkout")
    tarpath = os.path.join(checkout_dir, "hello.tar.gz")

    result = cli.run(project=project, args=["build", "tar-test.bst"])
    result.assert_success()

    result = cli.run(
        project=project,
        args=[
            "artifact",
            "checkout",
            "--directory",
            checkout_dir,
            "tar-test.bst",
        ],
    )
    result.assert_success()

    assert_contains(checkout_dir, ["/hello.tar.gz"])

    # Use a context manager so the tarfile handle is always closed,
    # even if the assertion fails (the original leaked the open handle).
    with tarfile.open(tarpath) as tar_hello:
        contents = tar_hello.getnames()
    assert contents == ["hello.c"]
def test_remote_autotools_build(cli, datafiles):
    """Build an autotools element against remote execution services and
    verify the installed file layout after checkout."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "checkout")
    target = "autotools/amhello.bst"

    # All three remote-execution services must be available before building.
    available = cli.ensure_services()
    assert set(available) == {"action-cache", "execution", "storage"}

    build_result = cli.run(project=project_dir, args=["build", target])
    build_result.assert_success()

    checkout_result = cli.run(
        project=project_dir,
        args=["artifact", "checkout", target, "--directory", checkout_dir],
    )
    checkout_result.assert_success()

    expected_paths = [
        "/usr",
        "/usr/lib",
        "/usr/bin",
        "/usr/share",
        "/usr/bin/hello",
        "/usr/share/doc",
        "/usr/share/doc/amhello",
        "/usr/share/doc/amhello/README",
    ]
    assert_contains(checkout_dir, expected_paths)
def test_autotools_confroot_build(cli, datafiles):
    """Build an autotools element configured with a conf-root and verify
    the expected files land in the checkout.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_name = "autotools/amhelloconfroot.bst"

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", element_name, "--directory", checkout],
    )
    result.assert_success()

    assert_contains(
        checkout,
        [
            "/usr",
            "/usr/lib",
            "/usr/bin",
            "/usr/share",
            "/usr/bin/hello",
            "/usr/share/doc",
            "/usr/share/doc/amhello",
            "/usr/share/doc/amhello/README",
        ],
    )
def test_remote_autotools_build(cli, datafiles):
    """Build and push an element, delete it locally, then rebuild to
    confirm the artifact is pulled from the remote instead of rebuilt."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "checkout")
    target = "autotools/amhello.bst"

    # The initial build should push the artifact to the remote cache.
    res = cli.run(project=project_dir, args=["build", target])
    res.assert_success()
    assert target in res.get_pushed_elements()

    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", target, "--directory", checkout_dir],
    )
    res.assert_success()

    expected_paths = [
        "/usr",
        "/usr/lib",
        "/usr/bin",
        "/usr/share",
        "/usr/bin/hello",
        "/usr/share/doc",
        "/usr/share/doc/amhello",
        "/usr/share/doc/amhello/README",
    ]
    assert_contains(checkout_dir, expected_paths)

    # then remove it locally
    res = cli.run(project=project_dir, args=["artifact", "delete", target])
    res.assert_success()

    # A fresh build should now pull the artifact rather than rebuild it.
    res = cli.run(project=project_dir, args=["build", target])
    res.assert_success()
    assert target in res.get_pulled_elements()
def test_autotools_build(cli, datafiles):
    """Build hello.bst and verify the installed autotools file layout."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "checkout")

    # Check that the project can be built correctly.
    res = cli.run(project=project_dir, args=["build", "hello.bst"])
    res.assert_success()

    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", "hello.bst", "--directory", checkout_dir],
    )
    res.assert_success()

    expected_paths = [
        "/usr",
        "/usr/lib",
        "/usr/bin",
        "/usr/share",
        "/usr/bin/hello",
        "/usr/share/doc",
        "/usr/share/doc/amhello",
        "/usr/share/doc/amhello/README",
    ]
    assert_contains(checkout_dir, expected_paths)
def test_autotools_build(cli, datafiles):
    """Build an autotools element, verify the installed layout, and check
    that the build log records "Making all in src" exactly once.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_name = "autotools/amhello.bst"

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", element_name, "--directory", checkout],
    )
    result.assert_success()

    assert_contains(
        checkout,
        [
            "/usr",
            "/usr/lib",
            "/usr/bin",
            "/usr/share",
            "/usr/bin/hello",
            "/usr/share/doc",
            "/usr/share/doc/amhello",
            "/usr/share/doc/amhello/README",
        ],
    )

    # Check the log
    result = cli.run(project=project, args=["artifact", "log", element_name])
    result.assert_success()
    log = result.output

    # Verify we get expected output exactly once
    assert log.count("Making all in src") == 1
def test_collect_integration(cli, datafiles):
    """Verify that the collect element assembles the integration commands
    of its dependencies into one script, in dependency order."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(project_dir, "checkout")
    target = "collect_integration/collect.bst"

    res = cli.run(project=project_dir, args=["build", target])
    res.assert_success()

    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", "--directory", checkout_dir, target],
    )
    res.assert_success()

    assert_contains(checkout_dir, ["/script.sh"])

    with open(os.path.join(checkout_dir, "script.sh"), "r") as script:
        actual_lines = script.readlines()

    expected_lines = [
        "#!/bin/sh\n",
        "set -e\n",
        "\n",
        "# integration commands from collect_integration/dep1.bst\n",
        "foo\n",
        "\n",
        "bar\n",
        "\n",
        "# integration commands from collect_integration/dep2.bst\n",
        "baz\n",
        "\n",
        "quuz\n",
        "\n",
    ]
    assert actual_lines == expected_lines
def test_autotools_build(cli, datafiles):
    """Build hello.bst (after applying a setuptools workaround to the
    project) and verify the installed autotools file layout.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    workaround_setuptools_bug(project)

    result = cli.run(project=project, args=["build", "hello.bst"])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", "hello.bst", "--directory", checkout],
    )
    result.assert_success()

    assert_contains(
        checkout,
        [
            "/usr",
            "/usr/lib",
            "/usr/bin",
            "/usr/share",
            "/usr/bin/hello",
            "/usr/share/doc",
            "/usr/share/doc/amhello",
            "/usr/share/doc/amhello/README",
        ],
    )
def test_pip_source_import_requirements_files(cli, datafiles, setup_pypi_repo):
    """Import pip packages via requirements-files and verify that exotically
    named packages are downloaded into .bst_pip_downloads.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_path = os.path.join(project, "elements")
    element_name = "pip/hello.bst"

    # check that exotically named packages are imported correctly
    myreqs_packages = "hellolib"
    dependencies = ["app2", "app.3", "app-4", "app_5", "app.no.6", "app-no-7", "app_no_8"]
    mock_packages = {myreqs_packages: {package: {} for package in dependencies}}

    # create mock pypi repository
    pypi_repo = os.path.join(project, "files", "pypi-repo")
    os.makedirs(pypi_repo, exist_ok=True)
    setup_pypi_repo(mock_packages, pypi_repo)

    element = {
        "kind": "import",
        "sources": [
            {"kind": "local", "path": "files/pip-source"},
            {
                "kind": "pip",
                "url": "file://{}".format(os.path.realpath(pypi_repo)),
                "requirements-files": ["myreqs.txt"],
            },
        ],
    }
    os.makedirs(os.path.dirname(os.path.join(element_path, element_name)), exist_ok=True)
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    result = cli.run(project=project, args=["source", "track", element_name])
    result.assert_success()

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", element_name, "--directory", checkout],
    )
    result.assert_success()

    assert_contains(
        checkout,
        [
            "/.bst_pip_downloads",
            "/.bst_pip_downloads/hellolib-0.1.tar.gz",
            "/.bst_pip_downloads/app2-0.1.tar.gz",
            "/.bst_pip_downloads/app.3-0.1.tar.gz",
            "/.bst_pip_downloads/app-4-0.1.tar.gz",
            "/.bst_pip_downloads/app_5-0.1.tar.gz",
            "/.bst_pip_downloads/app.no.6-0.1.tar.gz",
            "/.bst_pip_downloads/app-no-7-0.1.tar.gz",
            "/.bst_pip_downloads/app_no_8-0.1.tar.gz",
        ],
    )
def test_first_project_build_checkout(cli, datafiles):
    """Build hello.bst in a freshly-created project and verify the checked
    out artifact contains /hello.world.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")

    result = cli.run(project=project, args=["build", "hello.bst"])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", "hello.bst", "--directory", checkout],
    )
    result.assert_success()

    assert_contains(checkout, ["/hello.world"])
def test_pip_build(cli, datafiles):
    """Generate a pip element on the fly, build it, and verify the
    installed python package layout in the checkout.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_path = os.path.join(project, "elements")
    element_name = "pip/hello.bst"

    element = {
        "kind": "pip",
        "variables": {"pip": "pip3"},
        "depends": [{"filename": "base.bst"}],
        "sources": [
            {
                "kind": "tar",
                "url": "file://{}/files/piphello.tar.xz".format(project),
                "ref": "ad96570b552498807abec33c06210bf68378d854ced6753b77916c5ed517610d",
            }
        ],
    }
    os.makedirs(
        os.path.dirname(os.path.join(element_path, element_name)),
        exist_ok=True,
    )
    _yaml.roundtrip_dump(element, os.path.join(element_path, element_name))

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", element_name, "--directory", checkout],
    )
    result.assert_success()

    assert_contains(
        checkout,
        [
            "/usr",
            "/usr/lib",
            "/usr/bin",
            "/usr/bin/hello",
            "/usr/lib/python3.6",
        ],
    )
def test_cmake_confroot_build(cli, datafiles):
    """Build a cmake element configured with a conf-root and verify the
    installed binary appears in the checkout.

    Consistency fix: use ``result.assert_success()`` like the sibling
    tests instead of bare ``assert result.exit_code == 0`` — it reports
    the captured output when a run fails rather than only the exit code.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_name = "cmake/cmakeconfroothello.bst"

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    result = cli.run(
        project=project,
        args=["artifact", "checkout", element_name, "--directory", checkout],
    )
    result.assert_success()

    assert_contains(checkout, ["/usr", "/usr/bin", "/usr/bin/hello"])
def test_filter_pass_integration(datafiles, cli):
    """A filter element with passing integration commands builds, and its
    integration command runs during an integrated checkout."""
    project_dir = str(datafiles)

    # Passing integration commands should build nicely
    res = cli.run(project=project_dir, args=["build", "filter/filter.bst"])
    res.assert_success()

    # Checking out the element should work
    checkout_dir = os.path.join(project_dir, "filter")
    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", "--integrate", "--directory", checkout_dir, "filter/filter.bst"],
    )
    res.assert_success()

    # Check that the integration command was run
    assert_contains(checkout_dir, ["/foo"])
    shutil.rmtree(checkout_dir)
def test_build_dependency_partial_local_cas(cli, datafiles, pull_artifact_files, build_all):
    """Exercise remote execution with a partial local CAS: artifact files
    are checkout-able locally only when pull-artifact-files is enabled,
    and build dependencies are only available when dependencies are
    configured as "all"."""
    project_dir = str(datafiles)
    target = "no-runtime-deps.bst"
    builddep_target = "autotools/amhello.bst"
    checkout_dir = os.path.join(cli.directory, "checkout")
    builddep_checkout_dir = os.path.join(cli.directory, "builddep-checkout")

    available = cli.ensure_services()
    assert set(available) == {"action-cache", "execution", "storage"}

    # configure pull blobs
    if build_all:
        cli.configure({"build": {"dependencies": "all"}})
    cli.config["remote-execution"]["pull-artifact-files"] = pull_artifact_files

    res = cli.run(project=project_dir, args=["build", target])
    res.assert_success()

    # Checkout succeeds only when artifact files were pulled locally
    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", target, "--directory", checkout_dir],
    )
    if pull_artifact_files:
        res.assert_success()
        assert_contains(checkout_dir, ["/test"])
    else:
        res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")

    # Build dependencies are only pulled when building ALL dependencies
    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", builddep_target, "--directory", builddep_checkout_dir],
    )
    if build_all and pull_artifact_files:
        res.assert_success()
    else:
        res.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
def test_quilt_build(cli, datafiles):
    """Build the quilt element and verify that the patch series and the
    patched sources appear in the checkout."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "quilt_checkout")

    res = cli.run(project=project_dir, args=["build", "quilt-build-test.bst"])
    res.assert_success()

    res = cli.run(
        project=project_dir,
        args=[
            "artifact",
            "checkout",
            "--directory",
            checkout_dir,
            "quilt-build-test.bst",
        ],
    )
    res.assert_success()

    assert_contains(checkout_dir, ["/patches/series", "/patches/test", "/src/hello.c"])
def test_dpkg_build(cli, datafiles):
    """Build the dpkg element and verify the packaged files land in the
    checkout."""
    project_dir = str(datafiles)
    out_dir = os.path.join(cli.directory, "checkout")

    res = cli.run(project=project_dir, args=["build", "dpkg-build-test.bst"])
    res.assert_success()

    res = cli.run(
        project=project_dir,
        args=[
            "artifact",
            "checkout",
            "--directory",
            out_dir,
            "dpkg-build-test.bst",
        ],
    )
    res.assert_success()

    assert_contains(out_dir, ["/usr/share/foo", "/usr/share/doc/test/changelog.gz"])
def test_filter_pass_integration_uncached(datafiles, cli):
    """A built filter element remains usable for an integrated checkout
    even after its build dependency is deleted from the local cache."""
    project_dir = str(datafiles)

    # Passing integration commands should build nicely
    res = cli.run(project=project_dir, args=["build", "filter/filter.bst"])
    res.assert_success()

    # Delete the build dependency of the filter element.
    # The built filter element should be usable even if the build dependency
    # is not available in the local cache.
    res = cli.run(project=project_dir, args=["artifact", "delete", "filter/parent.bst"])
    res.assert_success()

    # Checking out the element should work
    checkout_dir = os.path.join(project_dir, "filter")
    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", "--integrate", "--directory", checkout_dir, "filter/filter.bst"],
    )
    res.assert_success()

    # Check that the integration command was run
    assert_contains(checkout_dir, ["/foo"])
    shutil.rmtree(checkout_dir)
def test_gen_ccimports(cli, datafiles):
    """Build the bazelize imports element and compare the generated BUILD
    file against cc_import rules rendered from the expected library data."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(project_dir, "checkout")
    rule_prefix = "bazelize-"
    target = "bazelize/imports.bst"
    build_file = "BUILD"  # default build file name

    # try to build
    res = cli.run(project=project_dir, args=["build", target])
    res.assert_success()

    # try to checkout
    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", "--directory", checkout_dir, target],
    )
    res.assert_success()

    # check for existence of the build file
    assert_contains(checkout_dir, [os.path.sep + build_file])

    def expected_import_rule(lib_type):
        # Render the cc_import entry we expect for one library flavour.
        libname = "ccimp_" + lib_type
        lib_map = {"interface": ".ifso", "shared": ".so", "static": ".a"}
        if lib_type == "multi":
            extensions = [lib_map["interface"], lib_map["shared"]]
        else:
            extensions = [lib_map[lib_type]]
        entry = {
            "rule": "cc_import",
            "name": rule_prefix + libname,
            "hdrs": sorted(get_hdrs(libname)),
        }
        # map each library file onto its <kind>_library attribute
        for fname in get_files(libname, extensions, LIB_PREFIX):
            for kind, ext in lib_map.items():
                if fname.endswith(ext):
                    entry["{}_library".format(kind)] = fname
        return entry

    # nb. current rules are sorted by name field in the plugin
    expected_lines = [
        'package(default_visibility = ["//visibility:public"])' + os.linesep,
        'load("@rules_cc//cc:defs.bzl", "cc_import")' + os.linesep,
    ]
    for lib_type in sorted(["shared", "static", "multi"]):
        expected_lines += render_entry(expected_import_rule(lib_type))

    with open(os.path.join(checkout_dir, build_file), "r") as generated:
        assert generated.readlines() == expected_lines
def test_gen_buildrules(cli, datafiles):
    """Build the bazelize empty element and compare its generated BUILD
    file against the expected cc_binary / cc_library rules."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(project_dir, "checkout")
    rule_prefix = "bazelize-"
    target = "bazelize/empty.bst"
    build_file = "BUILD"  # default build file name

    # try to build
    res = cli.run(project=project_dir, args=["build", target])
    res.assert_success()

    # try to checkout
    res = cli.run(
        project=project_dir,
        args=["artifact", "checkout", "--directory", checkout_dir, target],
    )
    res.assert_success()

    # check for existence of the build file
    assert_contains(checkout_dir, [os.path.sep + build_file])

    def expected_library_rule(num):
        # Render the cc_library entry expected for cclib<num>.
        libname = "cclib" + str(num)
        return {
            "rule": "cc_library",
            "name": rule_prefix + libname,
            "srcs": sorted(get_libs(libname) + get_srcs(libname)),
            "hdrs": sorted(get_hdrs(libname)),
        }

    # format expected binary data
    # FIXME: bin1_srcs are [glob(app/*)] or ["app/afile.cpp", "app/bfile.c"]
    # see #6
    binary_rule = {
        "rule": "cc_binary",
        "name": rule_prefix + "bazelize",
        "deps": sorted([rule_prefix + "cclib2", rule_prefix + "cclib1"]),
        "copts": sorted(["-I/lib/inc", "-I/include/someinc"]),
        "linkopts": sorted(["-lboost_thread", "-lboost_system"]),
    }

    # nb. current rules are sorted by name field in the plugin
    expected_lines = [
        'package(default_visibility = ["//visibility:public"])' + os.linesep,
        'load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")' + os.linesep,
    ]
    expected_lines += render_entry(binary_rule)
    expected_lines += render_entry(expected_library_rule(1))
    expected_lines += render_entry(expected_library_rule(2))

    with open(os.path.join(checkout_dir, build_file), "r") as generated:
        assert generated.readlines() == expected_lines
def test_workspace_build(cli, tmpdir, datafiles, modification):
    """Open a workspace, build with cached buildtrees, rebuild unchanged,
    then modify the workspace (by mtime or content) and verify the
    incremental rebuild only touches the expected buildtree files."""
    project_dir = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "checkout")
    workspace_dir = os.path.join(cli.directory, "workspace")
    target = "autotools/amhello.bst"

    # cli args
    checkout_args = ["artifact", "checkout", target, "--directory", checkout_dir]
    build_args = ["--cache-buildtrees", "always", "build", target]

    input_files, generated_files, artifacts = files()

    available = cli.ensure_services()
    assert set(available) == {"action-cache", "execution", "storage"}

    # open a workspace for the element in the workspace directory
    res = cli.run(project=project_dir, args=["workspace", "open", "--directory", workspace_dir, target])
    res.assert_success()

    # check that the workspace path exists
    assert os.path.exists(workspace_dir)

    # add a file (asserting later that this is in the buildtree)
    extra_name = "newfile.cfg"
    extra_path = os.path.join(workspace_dir, extra_name)
    with open(extra_path, "w", encoding="utf-8") as handle:
        handle.write("somestring")
    input_files.append(os.sep + extra_name)

    # check that the workspace *only* contains the expected input files
    assert_contains(workspace_dir, input_files, strict=True)
    # save the mtimes for later comparison
    workspace_times = get_mtimes(workspace_dir)

    # build the element and cache the buildtree
    res = cli.run(project=project_dir, args=build_args)
    res.assert_success()
    assert cli.get_element_state(project_dir, target) == "cached"
    first_key = cli.get_element_key(project_dir, target)

    # check that the local workspace is unchanged
    assert_contains(workspace_dir, input_files, strict=True)
    assert workspace_times == get_mtimes(workspace_dir)

    # check modified workspace dir was cached and save the time
    # build was run. Incremental build conditions do not apply since the workspace
    # was initially opened using magic timestamps.
    first_times = check_buildtree(cli, project_dir, target, input_files, generated_files, incremental=False)
    first_timemark = get_timemark(cli, project_dir, target, (os.sep + BLDMARK))

    # check that the artifacts are available
    res = cli.run(project=project_dir, args=checkout_args)
    res.assert_success()
    assert_contains(checkout_dir, artifacts)
    shutil.rmtree(checkout_dir)

    # rebuild the element
    res = cli.run(project=project_dir, args=build_args)
    res.assert_success()
    assert cli.get_element_state(project_dir, target) == "cached"
    second_key = cli.get_element_key(project_dir, target)
    assert second_key == first_key

    second_times = check_buildtree(cli, project_dir, target, input_files, generated_files, incremental=False)
    second_timemark = get_timemark(cli, project_dir, target, (os.sep + BLDMARK))

    # buildmark time should be the same
    assert first_timemark == second_timemark
    assert all(
        second_time == first_times[fname] for fname, second_time in second_times.items()
    ), "{}\n{}".format(second_times, first_times)

    # modify the open workspace and rebuild
    main_path = os.path.join(workspace_dir, MAIN)
    assert os.path.exists(main_path)

    if modification == "time":
        # touch a file in the workspace and save the mtime
        os.utime(main_path)
        touched_time = int(os.stat(main_path).st_mtime)
    elif modification == "content":
        # change a source file (there's a race here but it's not serious)
        with open(main_path, "r", encoding="utf-8") as handle:
            source_lines = handle.readlines()
        with open(main_path, "w", encoding="utf-8") as handle:
            for source_line in source_lines:
                handle.write(re.sub(r"Hello", "Goodbye", source_line))
        touched_time = int(os.stat(main_path).st_mtime)

    # refresh input times
    workspace_times = get_mtimes(workspace_dir)

    # rebuild the element
    res = cli.run(project=project_dir, args=build_args)
    res.assert_success()

    third_times = check_buildtree(cli, project_dir, target, input_files, generated_files, incremental=True)
    third_timemark = get_timemark(cli, project_dir, target, (os.sep + BLDMARK))
    assert third_timemark > first_timemark

    # check the times of the changed files
    assert third_times[os.sep + MAIN] == touched_time
    del third_times[os.sep + MAIN]
    del third_times[os.sep + MAINO]
    del third_times[os.sep + SRC + os.sep + "hello"]
    del third_times[os.sep + DEPS + os.sep + "main.Po"]
    del third_times[os.sep + BLDMARK]

    # check the times of the unmodified files
    assert all(
        third_time == first_times[fname] for fname, third_time in third_times.items()
    ), "{}\n{}".format(third_times, first_times)

    # Check workspace is unchanged
    assert_contains(workspace_dir, input_files, strict=True)
    assert workspace_times == get_mtimes(workspace_dir)