def callback(
    *,
    name: Optional[str] = None,
    deps: Sequence[str] = (),
    impl: Callable = lambda _: None,
    out: Union[str, Sequence[str]] = (),
    do_not_symlink: bool = False,
    do_not_cache: bool = False,
):
    """
    Register a build Rule from within a rules file and return its target string.

    Wraps `impl` so the static `deps` are attached to the execution context
    before the user impl runs, constructs the Rule, and registers every output
    path (plus the ":name" alias, when a name was given) as a buildable target.

    Raises BuildException when invoked outside rules-file evaluation (i.e. when
    make_callback.build_root has not been set).

    NOTE(review): the return value is f":{name}" even when name is None,
    yielding the literal ":None" — confirm callers only use the return value
    when they passed a name.
    """
    root = make_callback.build_root
    if root is None:
        raise BuildException(
            "Rules files can only define functions, not invoke callback()"
        )

    # A bare string is shorthand for a single output file.
    out_list = [out] if isinstance(out, str) else out

    def impl_with_deps(ctx):
        # Attach the statically-declared deps before the user impl executes.
        ctx.add_deps(deps)
        return impl(ctx)

    rule = Rule(
        name=name,
        location=root,
        impl=impl_with_deps,
        outputs=[normalize_path(root, path) for path in out_list],
        do_not_symlink=do_not_symlink,
        do_not_cache=do_not_cache,
    )

    # Every declared output becomes a lookupable target, as does ":name".
    for target in rule.outputs:
        add_target_rule(target, rule)
    if name is not None:
        add_target_rule(":" + name, rule)

    return f":{name}"
def callback(
    *,
    name: Optional[str] = None,
    deps: Sequence[str] = (),
    impl: Callable,
    out: Union[str, Sequence[str]] = (),
    do_not_symlink: bool = False,
):
    """
    Register a build Rule from within a rules file and return its target string.

    Static `deps` are normalized here (rule references pass through; file paths
    are resolved against the repo/build roots) and handed to the Rule directly,
    rather than being injected via a wrapper impl. Each output path — and the
    ":name" alias, when a name was given — is registered as a buildable target.

    Raises BuildException when invoked outside rules-file evaluation (i.e. when
    make_callback.build_root has not been set).

    NOTE(review): `repo_root` is a free name here — presumably a module-level
    global; confirm. Also, the return value is f":{name}" even when name is
    None, yielding the literal ":None".
    """
    root = make_callback.build_root
    if root is None:
        raise BuildException(
            "Rules files can only define functions, not invoke callback()"
        )

    # A bare string is shorthand for a single output file.
    out_list = [out] if isinstance(out, str) else out

    def qualify(dep):
        # ":target" names another rule and is kept verbatim; anything else is
        # a file path resolved relative to the repo and build roots.
        if dep.startswith(":"):
            return dep
        return normalize_path(repo_root, root, dep)

    rule = Rule(
        name=name,
        location=root,
        deps=[qualify(dep) for dep in deps],
        impl=impl,
        outputs=[normalize_path(repo_root, root, path) for path in out_list],
        do_not_symlink=do_not_symlink,
    )

    # Every declared output becomes a lookupable target, as does ":name".
    for target in rule.outputs:
        add_target_rule(target, rule)
    if name is not None:
        add_target_rule(":" + name, rule)

    return f":{name}"
def build(
    build_state: BuildState,
    rule: Rule,
    deps: Collection[str],
    *,
    scratch_path: Optional[Path],
):
    """
    Execute a rule's impl for real, after cache resolution.

    All the dependencies that can be determined from caches have been
    obtained. Now we need to run. Either we will successfully finish
    everything, or we will get a missing dependency and have to requeue.

    :param build_state: global build bookkeeping (source files, rule lookup,
        ready-set, cache directory, repo root).
    :param rule: the rule whose impl is to be executed.
    :param deps: dependencies to load before running the impl.
    :param scratch_path: sandbox directory; None means "run in the repo
        root directly" (no sandboxing).
    :return: the final cache-key hash state for this execution.
    :raises MissingDependency: if a needed dep's rule is not yet ready.
    :raises BuildException: if the impl's shell commands fail, or a declared
        output was not produced.
    """
    cache_memorize, _ = make_cache_memorize(build_state.cache_directory)

    in_sandbox = scratch_path is not None

    # Tracks deps already pulled into the sandbox so repeat requests are no-ops.
    loaded_deps = set()

    def load_deps(deps):
        # Only consider deps we have not loaded yet.
        deps = set(deps) - loaded_deps
        # check that these deps are built! Since they have not been checked
        # by the PreviewExecution.
        missing_deps = []
        for dep in deps:
            if dep not in build_state.source_files:
                dep_rule = build_state.target_rule_lookup.lookup(
                    build_state, dep)
                if dep_rule not in build_state.ready:
                    missing_deps.append(dep)
        if missing_deps:
            # Caller requeues this build once the missing deps are ready.
            raise MissingDependency(*missing_deps)
        loaded_deps.update(deps)
        if in_sandbox:
            log(f"Loading dependencies {deps} into sandbox")
            # Rule references (":name") have no file to copy — only real paths.
            copy_helper(
                src_root=build_state.repo_root,
                dest_root=scratch_path,
                src_names=[dep for dep in deps if not dep.startswith(":")],
                symlink=not rule.do_not_symlink,
            )

    # Load the statically-known deps up front; impl may load more via ctx.
    load_deps(deps)

    hashstate = HashState()
    ctx = ExecutionContext(
        scratch_path if in_sandbox else build_state.repo_root,
        scratch_path.joinpath(rule.location)
        if in_sandbox
        else Path(build_state.repo_root).joinpath(rule.location),
        hashstate,
        load_deps,
        cache_memorize,
    )

    for dep in rule.deps:
        dep_rule = build_state.target_rule_lookup.try_lookup(dep)
        if dep.startswith(":"):
            # Rule dep: expose the provider's value as an attribute on ctx.deps.
            setattr(ctx.deps, dep[1:], dep_rule.provided_value)
        else:
            # File dep: fold its path and content hash into the cache key.
            hashstate.update(dep.encode("utf-8"))
            hashstate.update(hash_file(dep))
            if dep not in build_state.source_files:
                # Generated file: also expose its producing rule's value.
                ctx.deps[dep] = dep_rule.provided_value

    try:
        rule.provided_value = rule.impl(ctx)
        for out in rule.outputs:
            # needed so that if we ask for another output, we don't panic
            # if it's not in the cache
            hashstate.record(out)
        if in_sandbox:
            # Shell commands queued by the impl run only in sandbox mode.
            ctx.run_shell_queue()
    except CalledProcessError as e:
        # Surface the failed command's output in the build error.
        raise BuildException("".join([
            str(e) + "\n",
            Style.RESET_ALL,
            f"Location: {scratch_path}\n",
            f"Working Directory: {ctx.cwd}\n",
            e.stdout.decode("utf-8"),
            e.stderr.decode("utf-8"),
            traceback.format_exc(),
        ]))

    if in_sandbox:
        # Copy declared outputs back from the sandbox into the repo.
        try:
            copy_helper(
                src_root=scratch_path,
                src_names=rule.outputs,
                dest_root=build_state.repo_root,
            )
        except FileNotFoundError as e:
            raise BuildException(
                f"Output file {e.filename} from rule {rule} was not generated."
            )

    # Fold all dynamically-discovered file inputs into the cache key.
    for input_path in ctx.inputs:
        if input_path.startswith(":"):
            # don't hash rule deps
            continue
        hashstate.update(input_path.encode("utf-8"))
        hashstate.update(hash_file(input_path))

    return hashstate.state()
def get_deps(build_state: BuildState, rule: Rule):
    """
    Use static dependencies and caches to try and identify as *many*
    needed dependencies as possible, without *any* spurious dependencies.

    Runs the rule's impl in preview mode (PreviewContext) when all static
    deps are ready, so dynamically-requested inputs can be discovered
    without performing the real build.

    :return: a pair (cache_key_or_None, deps) — the hash state when every
        static and dynamic dependency could be resolved and hashed, else
        None; plus the list of known dependencies.
    """
    hashstate = HashState()
    cache_fetcher, _ = make_cache_fetcher(build_state.cache_directory)
    dep_fetcher = make_dep_fetcher(build_state)
    ctx = PreviewContext(
        build_state.repo_root,
        rule.location,
        hashstate,
        dep_fetcher,
        cache_fetcher,
    )

    log(f"Looking for static dependencies of {rule}")
    for dep in rule.deps:
        if dep not in build_state.source_files:
            dep_rule = build_state.target_rule_lookup.lookup(build_state, dep)
            if dep_rule not in build_state.ready:
                log(f"Static dependency {dep} of {dep_rule} is not ready, skipping impl"
                    )
                # static deps are not yet ready
                break
            ctx.deps[dep] = dep_rule.provided_value
            if dep.startswith(":"):
                # Rule dep: expose provider value by attribute; no file to hash.
                setattr(ctx.deps, dep[1:], dep_rule.provided_value)
                continue
        # File dep (source or generated): fold path + content hash into key.
        hashstate.update(dep.encode("utf-8"))
        try:
            hashstate.update(dep_fetcher(dep, get_hash=True))
        except MissingDependency:
            # get static deps before running the impl!
            # this means that a source file is *missing*, but the error will
            # be thrown in enqueue_deps
            break
    else:
        # for-else: only runs the impl when no static dep caused a break.
        ok = False
        try:
            log(f"Running impl of {rule} to discover dynamic dependencies")
            rule.provided_value = rule.impl(ctx)
            log(f"Impl of {rule} completed with discovered deps: {ctx.inputs}")
            for out in rule.outputs:
                # needed so that if we ask for another output, we don't panic
                # if it's not in the cache
                hashstate.record(out)
            ok = True
        except CacheMiss:
            log(f"Cache miss while running impl of {rule}")
            pass  # stops context execution
        except MissingDependency as e:
            log(f"Dependencies {e.paths} were unavailable while running impl of {rule}"
                )
            pass  # dep already added to ctx.inputs
        except Exception as e:
            # Preview execution is best-effort: a stale cached file can cause
            # arbitrary failures here, so log and fall through with ok=False.
            print(
                "Error occurred during PreviewExecution. This may be normal, if a cached file that has not "
                "yet been reported / processed has been changed. However, it may also be an internal error, so "
                "it is being logged here. If it is an internal error, please contact the maintainer."
            )
            print(repr(e))
        # if `ok`, hash loaded dynamic dependencies
        if ok:
            log(f"Runtime dependencies resolved for {rule}, now checking dynamic dependencies"
                )
            for input_path in ctx.inputs:
                if input_path.startswith(":"):
                    if input_path not in build_state.ready:
                        ok = False
                        log(f"Dynamic rule dependency {input_path} is not yet ready"
                            )
                        break
                else:
                    hashstate.update(input_path.encode("utf-8"))
                    try:
                        data = dep_fetcher(input_path, get_hash=True)
                    except MissingDependency as e:
                        # this dependency was not needed for deps calculation
                        # but is not verified to be up-to-date
                        ok = False
                        log(f"Dynamic dependencies {e.paths} were not needed for the impl, but are not up to date"
                            )
                        break
                    else:
                        hashstate.update(data)
        return (
            hashstate.state() if ok else None,
            ctx.inputs + rule.deps,
        )
    # Reached via break above: some static dep was missing or not ready.
    return None, rule.deps
def build(
    build_state: BuildState,
    rule: Rule,
    *,
    precomputed_deps: Optional[List[str]] = None,
    scratch_path: Optional[Path],
    skip_cache_key: bool,
):
    """
    Execute a rule's impl for real, after cache resolution.

    All the dependencies that can be determined from caches have been
    obtained. Now we need to run. Either we will successfully finish
    everything, or we will get a missing dependency and have to requeue.

    :param build_state: global build bookkeeping (source files, rule lookup,
        ready-set, macros, cache directory).
    :param rule: the rule whose impl is to be executed.
    :param precomputed_deps: deps already discovered by a previous preview
        run; loaded eagerly into the sandbox before the impl runs.
    :param scratch_path: sandbox directory; None means "run in the current
        directory directly" (no sandboxing).
    :param skip_cache_key: when True, skip all cache-key hashing.
    :return: (provided_value from the impl, cache key or None when
        skip_cache_key is set).
    :raises MissingDependency: for deps whose rules are not ready, or deps
        found to be out of date during execution.
    :raises BuildException: on nondeterministic dep discovery, failed shell
        commands, or missing declared outputs.
    """
    cache_store_string, _ = make_cache_store(build_state.cache_directory)

    in_sandbox = scratch_path is not None

    # Tracks deps already pulled into the sandbox so repeat requests are no-ops.
    loaded_deps = set()

    def dep_fetcher(dep, *, initial_load=False):
        if dep not in loaded_deps and in_sandbox:
            if not initial_load:
                # A dep first seen during the *re*-run (not in precomputed_deps)
                # means the impl asked for different inputs than last time.
                raise BuildException(
                    f"New dep {dep} found when rerunning rule, it's likely not deterministic!"
                )
            if not dep.startswith(":"):
                log(f"Loading dependency {dep} into sandbox")
                copy_helper(
                    src_root=os.curdir,
                    dest_root=scratch_path,
                    src_names=[dep],
                    symlink=not rule.do_not_symlink,
                )

        # check that these deps are built! Since they may not have been
        # checked by the PreviewExecution.
        dep_rule = None
        if dep not in build_state.source_files:
            dep_rule = build_state.target_rule_lookup.lookup(build_state, dep)
            if dep_rule not in build_state.ready:
                raise MissingDependency(dep)

        loaded_deps.add(dep)
        # Returns the producing rule (None for plain source files).
        return dep_rule

    if precomputed_deps:
        # Preloading only makes sense when there is a sandbox to load into.
        assert in_sandbox
        for dep in precomputed_deps:
            dep_fetcher(dep, initial_load=True)

    hashstate = HashState()
    ctx = ExecutionContext(
        scratch_path if in_sandbox else os.curdir,
        rule.location,
        build_state.macros,
        hashstate,
        dep_fetcher,
        cache_store_string,
    )

    try:
        if not skip_cache_key:
            for out in rule.outputs:
                # needed so that if we ask for another output, we don't panic
                # if it's not in the cache
                hashstate.record(out)
        provided_value = rule.impl(ctx)
        if ctx.out_of_date_deps:
            # Requeue: some deps consulted during the run were stale.
            raise MissingDependency(*ctx.out_of_date_deps)
        if in_sandbox:
            # Shell commands queued by the impl run only in sandbox mode.
            ctx.run_shell_queue()
    except CalledProcessError as e:
        # Surface the failed command's output in the build error.
        raise BuildException(
            "".join(
                [
                    str(e) + "\n",
                    Style.RESET_ALL,
                    f"Location: {scratch_path}\n",
                    f"Working Directory: {scratch_path}/{ctx.cwd}\n",
                    e.stdout.decode("utf-8"),
                    e.stderr.decode("utf-8"),
                ]
            )
        )

    if in_sandbox:
        # Copy declared outputs back from the sandbox into the working tree.
        try:
            copy_helper(
                src_root=scratch_path,
                src_names=rule.outputs,
                dest_root=os.curdir,
            )
        except FileNotFoundError as e:
            raise BuildException(
                f"Output file {e.filename} from rule {rule} was not generated."
            )

    if not skip_cache_key:
        # Fold all dynamically-discovered file inputs into the cache key.
        for input_path in ctx.inputs:
            if input_path.startswith(":"):
                # don't hash rule deps
                continue
            hashstate.update(input_path.encode("utf-8"))
            hashstate.update(hash_file(input_path))
        # Sentinel marking the end of hashed inputs.
        hashstate.record("done")

    return provided_value, hashstate.state() if not skip_cache_key else None
def get_deps(build_state: BuildState, rule: Rule, *, skip_cache_key: bool):
    """
    Use static dependencies and caches to try and identify as *many*
    needed dependencies as possible, without *any* spurious dependencies.

    Runs the rule's impl in preview mode (PreviewContext) so its inputs
    can be discovered without performing the real build.

    :param skip_cache_key: when True, skip all cache-key hashing and only
        resolve dependencies.
    :return: (cache key or None when any input is unready/stale,
        provided_value from the impl, ctx.inputs, ctx.deferred_inputs,
        ctx.uses_dynamic_inputs).
    """
    hashstate = HashState()
    cache_load_string, _ = make_cache_load(build_state.cache_directory)
    dep_fetcher = make_dep_fetcher(build_state)
    ctx = PreviewContext(
        rule.location,
        build_state.macros,
        hashstate,
        dep_fetcher,
        cache_load_string,
    )

    ok = False
    provided_value = None
    try:
        log(f"Running impl of {rule} to discover dependencies")
        if not skip_cache_key:
            for out in rule.outputs:
                # needed so that if we ask for another output, we don't panic
                # if it's not in the cache
                hashstate.record(out)
        provided_value = rule.impl(ctx)
        log(f"Impl of {rule} completed with deps: {ctx.inputs}")
        ok = True
    except CacheMiss:
        log(f"Cache miss while running impl of {rule}")
        pass  # stops context execution
    except MissingDependency as e:
        log(f"Dependencies {e.paths} were unavailable while running impl of {rule}"
            )
        pass  # dep already added to ctx.inputs
    except Exception as e:
        # Preview execution is best-effort: a stale cached file can cause
        # arbitrary failures here, so log and fall through with ok=False.
        print(
            "Error occurred during PreviewExecution. This may be normal, if a cached file that has not "
            "yet been reported / processed has been changed. However, it may also be an internal error, so "
            "it is being logged here. If it is an internal error, please contact the maintainer."
        )
        print(repr(e))
        if not ctx.uses_dynamic_inputs:
            # Static-input rules should never fail in preview — re-raise.
            raise

    # if `ok`, hash loaded dynamic dependencies
    if ok:
        log(f"Inputs and dependencies resolved for {rule}")
        for input_path in ctx.inputs:
            if input_path.startswith(":"):
                # Rule dep: must exist and already be ready.
                input_dep = build_state.target_rule_lookup.try_lookup(
                    input_path)
                if input_dep is None or input_dep not in build_state.ready:
                    ok = False
                    log(f"Rule dependency {input_path} is not yet ready (or does not exist)"
                        )
                    break
            else:
                # File dep: fold path + fetched hash into the cache key
                # (hashing skipped entirely under skip_cache_key).
                if not skip_cache_key:
                    hashstate.update(input_path.encode("utf-8"))
                try:
                    data = dep_fetcher(input_path,
                                       "rule" if skip_cache_key else "hash")
                except MissingDependency as e:
                    # this dependency was not needed for deps calculation
                    # but is not verified to be up-to-date
                    ok = False
                    log(f"Dependencies {e.paths} were not needed for the impl, but are not up to date"
                        )
                    break
                else:
                    if not skip_cache_key:
                        hashstate.update(data)
        # Sentinel marking the end of hashed inputs.
        hashstate.record("done")

    return (
        hashstate.state() if ok else None,
        provided_value,
        ctx.inputs,
        ctx.deferred_inputs,
        ctx.uses_dynamic_inputs,
    )