def dep_fetcher(dep, *, initial_load=False):
    if dep not in loaded_deps and in_sandbox:
        if not initial_load:
            raise BuildException(
                f"New dep {dep} found when rerunning rule, it's likely not deterministic!"
            )
        if not dep.startswith(":"):
            log(f"Loading dependency {dep} into sandbox")
            copy_helper(
                src_root=os.curdir,
                dest_root=scratch_path,
                src_names=[dep],
                symlink=not rule.do_not_symlink,
            )

    # check that these deps are built, since they may not have been checked by the PreviewExecution
    dep_rule = None
    if dep not in build_state.source_files:
        dep_rule = build_state.target_rule_lookup.lookup(build_state, dep)
        if dep_rule not in build_state.ready:
            raise MissingDependency(dep)

    loaded_deps.add(dep)

    return dep_rule
def dep_fetcher(input_path, *, get_hash=False) -> Union[str, bytes]:
    try:
        if input_path not in build_state.source_files:
            rule = build_state.target_rule_lookup.lookup(build_state, input_path)
            # this input may be stale / unbuilt
            # if so, do not read it, but instead throw MissingDependency
            if rule not in build_state.ready:
                raise MissingDependency(input_path)

        # so it's already ready for use!
        if get_hash:
            return hash_file(input_path)
        else:
            with open(input_path) as f:
                return f.read()
    except FileNotFoundError:
        raise MissingDependency(input_path)
def dep_fetcher(dep):
    if dep.startswith(":"):
        if dep not in direct_lookup:
            raise BuildException(f"Unable to find setup rule {dep}")
        dep_rule = direct_lookup[dep]
        log(f"Looking up setup rule {dep}")
        if dep_rule not in ready:
            raise MissingDependency(dep)
        return dep_rule
def dep_fetcher(input_path, type: str = "rule"):
    try:
        rule = None
        if input_path not in build_state.source_files:
            rule = build_state.target_rule_lookup.try_lookup(input_path)
            # this input may be stale / unbuilt / no longer exists
            # if so, do not read it, but instead throw MissingDependency
            if rule is None or rule not in build_state.ready:
                raise MissingDependency(input_path)

        # so it's already ready for use!
        if type == "hash":
            return hash_file(input_path)
        elif type == "rule":
            return rule
        else:
            raise Exception(f"Unknown dep type {type}")
    except FileNotFoundError:
        raise MissingDependency(input_path)
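# The dep_fetcher variants above all follow the same contract: given a dependency
# (a ":"-prefixed rule target or a source path), return its rule / hash / contents
# if it is already built, or raise MissingDependency so the caller can requeue the
# current rule and retry after the dependency is ready. Below is a minimal,
# self-contained sketch of that contract; make_dep_fetcher, rules, and ready are
# hypothetical stand-ins, not the build tool's real API.

class MissingDependency(Exception):
    def __init__(self, *paths):
        super().__init__(f"Missing dependencies: {', '.join(paths)}")
        self.paths = paths


def make_dep_fetcher(rules, ready):
    def dep_fetcher(dep):
        # unknown target: cannot be satisfied at all
        if dep not in rules:
            raise MissingDependency(dep)
        rule = rules[dep]
        # known target, but not built yet: caller should requeue and retry later
        if rule not in ready:
            raise MissingDependency(dep)
        return rule

    return dep_fetcher


if __name__ == "__main__":
    rules = {":lib": "lib-rule", ":app": "app-rule"}
    fetcher = make_dep_fetcher(rules, ready={"lib-rule"})
    print(fetcher(":lib"))        # ":lib" is ready, so its rule is returned
    try:
        fetcher(":app")           # ":app" has not been built yet
    except MissingDependency as e:
        print("requeue needed for:", e.paths)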
def input(self, sh: Optional[str] = None, *, env: Env = None):
    # we want the state *before* running the action
    state = self.hashstate.state()
    super().input(sh, env=env)
    if self.out_of_date_deps:
        raise MissingDependency(*self.out_of_date_deps)
    self.run_shell_queue()
    log("RUNNING", sh)
    out = run_shell(
        sh,
        shell=True,
        cwd=os.path.join(self.base, self.cwd),
        capture_output=True,
        quiet=True,
        inherit_env=False,
        env=self.normalize(env),
    ).decode("utf-8")
    self.memorize(state, HashState().record(sh, env).state(), out)
    return out
def load_deps(deps):
    deps = set(deps) - loaded_deps
    # check that these deps are built, since they have not been checked by the PreviewExecution
    missing_deps = []
    for dep in deps:
        if dep not in build_state.source_files:
            dep_rule = build_state.target_rule_lookup.lookup(build_state, dep)
            if dep_rule not in build_state.ready:
                missing_deps.append(dep)
    if missing_deps:
        raise MissingDependency(*missing_deps)
    loaded_deps.update(deps)
    if in_sandbox:
        log(f"Loading dependencies {deps} into sandbox")
        copy_helper(
            src_root=build_state.repo_root,
            dest_root=scratch_path,
            src_names=[dep for dep in deps if not dep.startswith(":")],
            symlink=not rule.do_not_symlink,
        )
def initialize_workspace(
    setup_rule_lookup: TargetLookup,
    setup_targets: List[str],
    state_directory: str,
    quiet: bool,
):
    # we don't need the indirect lookup as we only have rule and source deps
    direct_lookup: Dict[str, Rule] = setup_rule_lookup.direct_lookup
    work_queue = []
    for setup_target in setup_targets:
        if setup_target not in direct_lookup:
            raise BuildException(
                f"Unknown or unspecified setup target {setup_target}"
            )
        work_queue.append(direct_lookup[setup_target])

    rebuilt: Set[str] = set()
    ready: Set[str] = set()

    cache_load_string, _ = make_cache_load(state_directory)
    cache_store_string, _ = make_cache_store(state_directory)

    if work_queue:
        status_monitor = create_status_monitor(1, quiet)
        status_monitor.move(total=len(work_queue))

    def dep_fetcher(dep):
        if dep.startswith(":"):
            if dep not in direct_lookup:
                raise BuildException(f"Unable to find setup rule {dep}")
            dep_rule = direct_lookup[dep]
            log(f"Looking up setup rule {dep}")
            if dep_rule not in ready:
                raise MissingDependency(dep)
            return dep_rule

    while work_queue:
        todo = work_queue.pop()
        log(f"Popping setup rule {todo} off work queue")
        try:
            if todo.name is None:
                raise BuildException(
                    f"All setup rules must have names, but {todo} does not."
                )
            hashstate = HashState()
            ctx = WorkspaceExecutionContext(hashstate, dep_fetcher)
            unchecked_rules = []
            try:
                todo.set_provided_value(
                    todo.impl(ctx),
                    None,
                    ctx.inputs,
                    ctx.deferred_inputs,
                    [],  # todo: implement output providers for setup rules
                )
                if ctx.out_of_date_deps:
                    raise MissingDependency(*ctx.out_of_date_deps)
            except MissingDependency as e:
                unchecked_rules = [direct_lookup[x] for x in e.paths]

            if unchecked_rules:
                for dep in unchecked_rules:
                    if dep not in work_queue:
                        log(f"Setup rule {todo} is enqueuing {dep}")
                        status_monitor.move(total=1)
                        work_queue.append(dep)
                    else:
                        log(f"Setup rule {todo} is waiting on {dep}, which is already enqueued")
                    dep.runtime_dependents.add(todo)
                    todo.pending_rule_dependencies.add(dep)
            else:
                log(f"Setup rule {todo} ran with inputs {ctx.inputs + ctx.deferred_inputs}")
                for dep in ctx.inputs + ctx.deferred_inputs:
                    if dep.startswith(":"):
                        continue
                    try:
                        hashstate.record(dep)
                        hashstate.update(hash_file(dep))
                    except FileNotFoundError:
                        raise BuildException(f"Source file {dep} not found.")

                try:
                    ok = cache_load_string("workspace", todo.name) == hashstate.state()
                    if not ok:
                        log(f"State mismatch for rule {todo}, need to rerun")
                except CacheMiss:
                    log(f"State not found for rule {todo}, need to run for first time")
                    ok = False

                for dep in ctx.inputs + ctx.deferred_inputs:
                    if dep.startswith(":"):
                        if direct_lookup[dep] in rebuilt:
                            log(f"Dependency {dep} of setup rule {todo} was rebuilt, so we must rebuild {todo} as well")
                            ok = False

                for out in todo.outputs:
                    if not os.path.exists(out):
                        log(f"Output {out} is missing for setup rule {todo}, forcing rerun")
                        ok = False
                        break

                if not ok:
                    # we need to fully run
                    log(f"Fully running setup rule {todo}")
                    ctx.run_shell_queue()
                    rebuilt.add(todo)
                    cache_store_string("workspace", todo.name, hashstate.state())

                # either way, now we can trigger our dependents
                ready.add(todo)
                for dep in todo.runtime_dependents:
                    dep.pending_rule_dependencies.remove(todo)
                    if not dep.pending_rule_dependencies:
                        work_queue.append(dep)
                        status_monitor.move(total=1)

            status_monitor.move(curr=1)
        except Exception as e:
            if not isinstance(e, BuildException):
                suffix = f"\n{Style.RESET_ALL}" + traceback.format_exc()
            else:
                suffix = ""
            status_monitor.stop()
            raise BuildException(f"Error while executing rule {todo}: " + str(e) + suffix)
def build(
    build_state: BuildState,
    rule: Rule,
    *,
    precomputed_deps: Optional[List[str]] = None,
    scratch_path: Optional[Path],
    skip_cache_key: bool,
):
    """
    All the dependencies that can be determined from caches have been obtained.
    Now we need to run. Either we will successfully finish everything, or we
    will get a missing dependency and have to requeue.
    """
    cache_store_string, _ = make_cache_store(build_state.cache_directory)

    in_sandbox = scratch_path is not None

    loaded_deps = set()

    def dep_fetcher(dep, *, initial_load=False):
        if dep not in loaded_deps and in_sandbox:
            if not initial_load:
                raise BuildException(
                    f"New dep {dep} found when rerunning rule, it's likely not deterministic!"
                )
            if not dep.startswith(":"):
                log(f"Loading dependency {dep} into sandbox")
                copy_helper(
                    src_root=os.curdir,
                    dest_root=scratch_path,
                    src_names=[dep],
                    symlink=not rule.do_not_symlink,
                )

        # check that these deps are built, since they may not have been checked by the PreviewExecution
        dep_rule = None
        if dep not in build_state.source_files:
            dep_rule = build_state.target_rule_lookup.lookup(build_state, dep)
            if dep_rule not in build_state.ready:
                raise MissingDependency(dep)

        loaded_deps.add(dep)

        return dep_rule

    if precomputed_deps:
        assert in_sandbox
        for dep in precomputed_deps:
            dep_fetcher(dep, initial_load=True)

    hashstate = HashState()

    ctx = ExecutionContext(
        scratch_path if in_sandbox else os.curdir,
        rule.location,
        build_state.macros,
        hashstate,
        dep_fetcher,
        cache_store_string,
    )

    try:
        if not skip_cache_key:
            for out in rule.outputs:
                # needed so that if we ask for another output, we don't panic if it's not in the cache
                hashstate.record(out)
        provided_value = rule.impl(ctx)
        if ctx.out_of_date_deps:
            raise MissingDependency(*ctx.out_of_date_deps)
        if in_sandbox:
            ctx.run_shell_queue()
    except CalledProcessError as e:
        raise BuildException(
            "".join(
                [
                    str(e) + "\n",
                    Style.RESET_ALL,
                    f"Location: {scratch_path}\n",
                    f"Working Directory: {scratch_path}/{ctx.cwd}\n",
                    e.stdout.decode("utf-8"),
                    e.stderr.decode("utf-8"),
                ]
            )
        )

    if in_sandbox:
        try:
            copy_helper(
                src_root=scratch_path,
                src_names=rule.outputs,
                dest_root=os.curdir,
            )
        except FileNotFoundError as e:
            raise BuildException(
                f"Output file {e.filename} from rule {rule} was not generated."
            )

    if not skip_cache_key:
        for input_path in ctx.inputs:
            if input_path.startswith(":"):
                # don't hash rule deps
                continue
            hashstate.update(input_path.encode("utf-8"))
            hashstate.update(hash_file(input_path))
        hashstate.record("done")

    return provided_value, hashstate.state() if not skip_cache_key else None
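# build() derives its cache key by folding the rule's output names, each input
# path, and each input file's content hash into a running HashState digest. The
# sketch below illustrates that idea in isolation; cache_key, hashlib.md5, and
# the file names used are illustrative assumptions, not the tool's actual
# hashing scheme or API.
import hashlib
import os


def cache_key(output_names, input_paths):
    h = hashlib.md5()
    # record declared outputs so asking for a different output invalidates the key
    for out in output_names:
        h.update(out.encode("utf-8"))
    # record each input path along with a digest of its contents
    for path in sorted(input_paths):
        h.update(path.encode("utf-8"))
        with open(path, "rb") as f:
            h.update(hashlib.md5(f.read()).hexdigest().encode("utf-8"))
    h.update(b"done")
    return h.hexdigest()


if __name__ == "__main__":
    with open("example_input.txt", "w") as f:
        f.write("hello")
    # any change to the input file or the output list yields a different key,
    # which is what forces a rerun instead of a cache hit
    print("cache key:", cache_key(["out.bin"], ["example_input.txt"]))
    os.remove("example_input.txt")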