def test_remote_autotools_build(cli, datafiles):
    """Build amhello via remote execution, then check out and verify the result."""
    project = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "checkout")
    target = "autotools/amhello.bst"

    # All three remote-execution services must be reachable for this test.
    available = cli.ensure_services()
    assert set(available) == {"action-cache", "execution", "storage"}

    build_res = cli.run(project=project, args=["build", target])
    build_res.assert_success()

    checkout_res = cli.run(
        project=project,
        args=["artifact", "checkout", target, "--directory", checkout_dir],
    )
    checkout_res.assert_success()

    # The installed tree should contain the amhello binary and docs.
    expected_paths = [
        "/usr",
        "/usr/lib",
        "/usr/bin",
        "/usr/share",
        "/usr/bin/hello",
        "/usr/share/doc",
        "/usr/share/doc/amhello",
        "/usr/share/doc/amhello/README",
    ]
    assert_contains(checkout_dir, expected_paths)
def test_buildtree_remote(cli, tmpdir, datafiles):
    """Verify buildtree caching with a remote artifact share.

    After wiping the local cache, a plain pull must not bring the buildtree
    (so a build shell fails); pulling with --pull-buildtrees must restore it.
    """
    project = str(datafiles)
    target = "build-shell/buildtree.bst"
    share_path = os.path.join(str(tmpdir), "share")

    available = cli.ensure_services()
    assert set(available) == {"action-cache", "execution", "storage"}

    with create_artifact_share(share_path) as share:
        cli.configure(
            {
                "artifacts": {"servers": [{"url": share.repo, "push": True}]},
                "cache": {"pull-buildtrees": False},
            }
        )

        res = cli.run(project=project, args=["--cache-buildtrees", "always", "build", target])
        res.assert_success()

        # remove local cache
        shutil.rmtree(os.path.join(str(tmpdir), "cache", "cas"))
        shutil.rmtree(os.path.join(str(tmpdir), "cache", "artifacts"))

        # pull without buildtree
        res = cli.run(project=project, args=["artifact", "pull", "--deps", "all", target])
        res.assert_success()

        # check shell doesn't work
        res = cli.run(project=project, args=["shell", "--build", target, "--", "cat", "test"])
        res.assert_shell_error()

        # pull with buildtree
        res = cli.run(
            project=project,
            args=["--pull-buildtrees", "artifact", "pull", "--deps", "all", target],
        )
        res.assert_success()

        # check it works this time
        res = cli.run(
            project=project,
            args=["shell", "--build", target, "--use-buildtree", "--", "cat", "test"],
        )
        res.assert_success()
        assert "Hi" in res.output
def test_remote_autotools_build(cli, datafiles, remote_services):
    """Build with remote execution while using a remote cache.

    With a remote storage-service configured as the cache, build results are
    expected to stay remote: checkout must fail while the remote cache is
    temporarily disabled, and succeed once it is re-enabled.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    element_name = "autotools/amhello.bst"
    services = cli.ensure_services()
    assert set(services) == set(["action-cache", "execution", "storage"])

    # Enable remote cache and remove explicit remote execution CAS configuration.
    # NOTE: this snapshot is taken *before* cli.configure(), so it carries
    # neither the cache storage-service nor the deletion below.
    config_without_remote_cache = copy.deepcopy(cli.config)
    cli.configure({"cache": {"storage-service": {"url": remote_services.storage_service}}})
    del cli.config["remote-execution"]["storage-service"]
    # Plain reference (not a copy): restoring it later simply re-points cli.config.
    config_with_remote_cache = cli.config

    # Build element with remote execution.
    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    # Attempt checkout from local cache by temporarily disabling remote cache.
    # This should fail as the build result shouldn't have been downloaded to the local cache.
    cli.config = config_without_remote_cache
    result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
    result.assert_main_error(ErrorDomain.STREAM, "uncached-checkout-attempt")
    cli.config = config_with_remote_cache

    # Attempt checkout again with remote cache.
    result = cli.run(project=project, args=["artifact", "checkout", element_name, "--directory", checkout])
    result.assert_success()
    assert_contains(
        checkout,
        [
            "/usr",
            "/usr/lib",
            "/usr/bin",
            "/usr/share",
            "/usr/bin/hello",
            "/usr/share/doc",
            "/usr/share/doc/amhello",
            "/usr/share/doc/amhello/README",
        ],
    )
def test_remote_autotools_run(cli, datafiles):
    """Build amhello remotely, then run the installed binary in a shell.

    Fix: the original called ``cli.ensure_services()`` twice in a row;
    the redundant second call has been removed.
    """
    project = str(datafiles)
    element_name = "autotools/amhello.bst"

    # All remote-execution services must be reachable before building.
    services = cli.ensure_services()
    assert set(services) == set(["action-cache", "execution", "storage"])

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    # Run the freshly-built binary inside a bst shell and pin its output.
    result = cli.run(project=project, args=["shell", element_name, "/usr/bin/hello"])
    result.assert_success()
    assert result.output == "Hello World!\nThis is amhello 1.0.\n"
def get_timemark(cli, project, element_name, marker):
    """Return the integer timestamp stored in *marker* inside the cached
    buildtree of *element_name* (*marker* is given with a leading separator)."""
    shell_args = [
        "shell",
        "--build",
        element_name,
        "--use-buildtree",
        "--",
        "cat",
        marker[1:],  # strip the leading path separator for the in-tree path
    ]
    result = cli.run(project=project, args=shell_args)
    result.assert_success()
    return int(result.output)
def test_build_remote_failure(cli, datafiles):
    """A remotely-failed build still caches the partial install root."""
    project = str(datafiles)
    element_path = os.path.join(project, "elements", "element.bst")
    checkout_path = os.path.join(cli.directory, "checkout")

    # Write out our test target: it installs a file and then fails.
    element = {
        "kind": "script",
        "depends": [{"filename": "base.bst", "type": "build"}],
        "config": {"commands": ["touch %{install-root}/foo", "false"]},
    }
    _yaml.roundtrip_dump(element, element_path)

    available = cli.ensure_services()
    assert set(available) == {"action-cache", "execution", "storage"}

    # Try to build it, this should result in a failure that contains the content
    res = cli.run(project=project, args=["build", "element.bst"])
    res.assert_main_error(ErrorDomain.STREAM, None)

    res = cli.run(
        project=project,
        args=["artifact", "checkout", "element.bst", "--directory", checkout_path],
    )
    res.assert_success()

    # check that the file created before the failure exists
    assert os.path.isfile(os.path.join(checkout_path, "foo"))
def test_remote_autotools_build(cli, datafiles):
    """Build and push an artifact, delete it locally, then rebuild to pull it."""
    project = str(datafiles)
    checkout_dir = os.path.join(cli.directory, "checkout")
    target = "autotools/amhello.bst"

    # First build pushes the artifact to the configured remote.
    res = cli.run(project=project, args=["build", target])
    res.assert_success()
    assert target in res.get_pushed_elements()

    res = cli.run(
        project=project,
        args=["artifact", "checkout", target, "--directory", checkout_dir],
    )
    res.assert_success()
    assert_contains(
        checkout_dir,
        [
            "/usr",
            "/usr/lib",
            "/usr/bin",
            "/usr/share",
            "/usr/bin/hello",
            "/usr/share/doc",
            "/usr/share/doc/amhello",
            "/usr/share/doc/amhello/README",
        ],
    )

    # then remove it locally
    res = cli.run(project=project, args=["artifact", "delete", target])
    res.assert_success()

    # Rebuilding should now pull the artifact from the remote, not rebuild it.
    res = cli.run(project=project, args=["build", target])
    res.assert_success()
    assert target in res.get_pulled_elements()
def test_remote_autotools_build_no_cache(cli, datafiles):
    """An unreachable artifact server only warns; the build still succeeds.

    Fix: removed the unused local ``checkout`` (this test never checks
    anything out).
    """
    project = str(datafiles)
    element_name = "autotools/amhello.bst"

    # Point at a server whose DNS name cannot resolve.
    cli.configure({"artifacts": {"servers": [{"url": "http://fake.url.service", "push": True}]}})

    result = cli.run(project=project, args=["build", element_name])
    result.assert_success()

    # The failure to reach the remote must be reported as a warning only.
    assert """WARNING Failed to initialize remote""" in result.stderr
    assert """Remote initialisation failed with status UNAVAILABLE: DNS resolution failed""" in result.stderr
def test_workspace_build(cli, tmpdir, datafiles, modification):
    """Incremental workspace builds under remote execution.

    Builds an open workspace, rebuilds it unmodified (expecting identical
    buildtree timestamps and cache key), then modifies the workspace —
    either touching mtimes or changing content, per *modification* — and
    rebuilds incrementally, checking exactly which files were rebuilt and
    that the local workspace itself is never altered by the builds.
    """
    project = str(datafiles)
    checkout = os.path.join(cli.directory, "checkout")
    workspace = os.path.join(cli.directory, "workspace")
    element_name = "autotools/amhello.bst"

    # cli args
    artifact_checkout = ["artifact", "checkout", element_name, "--directory", checkout]
    build = ["--cache-buildtrees", "always", "build", element_name]

    # Expected file sets come from the module-level files() helper.
    input_files, generated_files, artifacts = files()

    services = cli.ensure_services()
    assert set(services) == set(["action-cache", "execution", "storage"])

    # open a workspace for the element in the workspace directory
    result = cli.run(project=project, args=["workspace", "open", "--directory", workspace, element_name])
    result.assert_success()

    # check that the workspace path exists
    assert os.path.exists(workspace)

    # add a file (asserting later that this is in the buildtree)
    newfile = "newfile.cfg"
    newfile_path = os.path.join(workspace, newfile)
    with open(newfile_path, "w", encoding="utf-8") as fdata:
        fdata.write("somestring")
    input_files.append(os.sep + newfile)

    # check that the workspace *only* contains the expected input files
    assert_contains(workspace, input_files, strict=True)

    # save the mtimes for later comparison
    ws_times = get_mtimes(workspace)

    # build the element and cache the buildtree
    result = cli.run(project=project, args=build)
    result.assert_success()
    assert cli.get_element_state(project, element_name) == "cached"
    build_key = cli.get_element_key(project, element_name)

    # check that the local workspace is unchanged
    assert_contains(workspace, input_files, strict=True)
    assert ws_times == get_mtimes(workspace)

    # check modified workspace dir was cached and save the time
    # build was run. Incremental build conditions do not apply since the workspace
    # was initially opened using magic timestamps.
    build_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=False)
    build_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))

    # check that the artifacts are available
    result = cli.run(project=project, args=artifact_checkout)
    result.assert_success()
    assert_contains(checkout, artifacts)
    shutil.rmtree(checkout)

    # rebuild the element
    result = cli.run(project=project, args=build)
    result.assert_success()
    assert cli.get_element_state(project, element_name) == "cached"
    rebuild_key = cli.get_element_key(project, element_name)
    assert rebuild_key == build_key
    rebuild_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=False)
    rebuild_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))

    # buildmark time should be the same
    assert build_timemark == rebuild_timemark
    assert all(
        rebuild_time == build_times[fname]
        for fname, rebuild_time in rebuild_times.items()
    ), "{}\n{}".format(rebuild_times, build_times)

    # modify the open workspace and rebuild
    main_path = os.path.join(workspace, MAIN)
    assert os.path.exists(main_path)

    if modification == "time":
        # touch a file in the workspace and save the mtime
        os.utime(main_path)
        touched_time = int(os.stat(main_path).st_mtime)
    elif modification == "content":
        # change a source file (there's a race here but it's not serious)
        with open(main_path, "r", encoding="utf-8") as fdata:
            data = fdata.readlines()
        with open(main_path, "w", encoding="utf-8") as fdata:
            for line in data:
                fdata.write(re.sub(r"Hello", "Goodbye", line))
        touched_time = int(os.stat(main_path).st_mtime)

    # refresh input times
    ws_times = get_mtimes(workspace)

    # rebuild the element
    result = cli.run(project=project, args=build)
    result.assert_success()

    # This rebuild must be incremental: only the modified source and its
    # downstream products get new timestamps.
    rebuild_times = check_buildtree(cli, project, element_name, input_files, generated_files, incremental=True)
    rebuild_timemark = get_timemark(cli, project, element_name, (os.sep + BLDMARK))
    assert rebuild_timemark > build_timemark

    # check the times of the changed files
    assert rebuild_times[os.sep + MAIN] == touched_time
    del rebuild_times[os.sep + MAIN]
    del rebuild_times[os.sep + MAINO]
    del rebuild_times[os.sep + SRC + os.sep + "hello"]
    del rebuild_times[os.sep + DEPS + os.sep + "main.Po"]
    del rebuild_times[os.sep + BLDMARK]

    # check the times of the unmodified files
    assert all(
        rebuild_time == build_times[fname]
        for fname, rebuild_time in rebuild_times.items()
    ), "{}\n{}".format(rebuild_times, build_times)

    # Check workspace is unchanged
    assert_contains(workspace, input_files, strict=True)
    assert ws_times == get_mtimes(workspace)
def check_buildtree(
    cli,
    project,
    element_name,
    input_files,
    generated_files,
    incremental=False,
):
    """Inspect the cached buildtree of *element_name* and return its mtimes.

    Runs a build shell over the buildtree and stats every file, returning a
    dict mapping in-tree path (with leading os.sep) -> int mtime.  All paths
    in *input_files* and *generated_files* must be present.  When
    *incremental* is True, additionally asserts the timestamp ordering
    expected of an incremental rebuild (see the assertions below).
    """
    # check modified workspace dir was cached
    # - generated files are present
    # - generated files are newer than inputs
    # - check the date recorded in the marker file
    # - check that the touched file mtime is preserved from before
    assert cli and project and element_name and input_files and generated_files
    result = cli.run(
        project=project,
        args=[
            "shell",
            "--build",
            element_name,
            "--use-buildtree",
            "--",
            "find",
            ".",
            "-mindepth",
            "1",
            "-exec",
            "stat",
            "-c",
            "%n::%Y",
            "{}",
            ";",
        ],
    )
    result.assert_success()

    buildtree = {}
    output = result.output.splitlines()
    # First-match sentinels: mtime of a "typical" input file (other than MAIN)
    # and of a "typical" generated file (other than MAINO).
    typ_inptime = None
    typ_gentime = None
    for line in output:
        assert "::" in line
        fname, mtime = line.split("::")
        # remove the symbolic dir
        fname = fname[1:]
        mtime = int(mtime)
        buildtree[fname] = mtime

        if incremental:
            # directory timestamps are not meaningful
            if fname in DIRS:
                continue
            if fname in input_files:
                if fname != os.sep + MAIN and not typ_inptime:
                    typ_inptime = mtime
            if fname in generated_files:
                if fname != os.sep + MAINO and not typ_gentime:
                    typ_gentime = mtime

    # all expected files should have been found
    for filename in input_files + generated_files:
        assert filename in buildtree

    if incremental:
        # the source file was changed so should be more recent than other input files
        # it should be older than the main object.
        # The main object should be more recent than generated files.
        assert buildtree[os.sep + MAIN] > typ_inptime
        assert buildtree[os.sep + MAINO] > buildtree[os.sep + MAIN]
        assert buildtree[os.sep + MAINO] > typ_gentime

    # Directories carry no useful timing information; drop them from the result.
    for fname in DIRS:
        del buildtree[fname]

    return buildtree
def test_junction_build_remote(cli, tmpdir, datafiles):
    """Remote-execution build of a compose target that reaches through a junction."""
    project = str(datafiles)
    subproject_path = os.path.join(project, "files", "sub-project")
    subproject_element_path = os.path.join(subproject_path, "elements")
    amhello_files_path = os.path.join(subproject_path, "files")
    element_path = os.path.join(project, "elements")
    junction_path = os.path.join(element_path, "junction.bst")

    # We need a repo for real trackable elements
    repo = create_repo("tar", str(tmpdir))
    ref = repo.create(amhello_files_path)

    # ensure that the correct project directory is also listed in the junction
    subproject_conf = os.path.join(subproject_path, "project.conf")
    with open(subproject_conf, encoding="utf-8") as conf_file:
        conf_text = conf_file.read()
    with open(subproject_conf, "w", encoding="utf-8") as conf_file:
        conf_file.write(conf_text.format(project_dir=subproject_path))

    # A trackable element inside the subproject; its ref is resolved already
    create_element(repo, "sub-target.bst", subproject_element_path, ["autotools/amhello.bst"], ref=ref)

    # A trackable element depending on the subproject element across the junction
    create_element(
        repo,
        "target.bst",
        element_path,
        [{"junction": "junction.bst", "filename": "sub-target.bst"}],
    )

    # Create a repo to hold the subproject and generate a junction element for it
    generate_junction(tmpdir, subproject_path, junction_path, store_ref=False)

    # Now create a compose element at the top level
    compose = {"kind": "compose", "depends": [{"filename": "target.bst", "type": "build"}]}
    _yaml.roundtrip_dump(compose, os.path.join(element_path, "composed.bst"))

    # We're doing remote execution so ensure services are available
    assert set(cli.ensure_services()) == {"action-cache", "execution", "storage"}

    # track the junction first to ensure we have refs
    res = cli.run(project=project, args=["source", "track", "junction.bst"])
    res.assert_success()

    # track target to ensure we have refs
    res = cli.run(project=project, args=["source", "track", "--deps", "all", "composed.bst"])
    res.assert_success()

    # build
    res = cli.run(project=project, silent=True, args=["build", "composed.bst"])
    res.assert_success()

    # Assert that the main target is cached as a result
    assert cli.get_element_state(project, "composed.bst") == "cached"