def display(ctx, task, reverse=None, show_cache=False):
    """ Display a task and its dependencies visually. """
    registry = TaskRegistry.get()
    gb = graph.GraphBuilder(registry, ctx.obj["manifest"])
    dag = gb.build(task, influence=show_cache)
    options = JoltOptions()
    acache = cache.ArtifactCache.get(options)

    if reverse:
        def iterator(task):
            return list(dag.predecessors(task))
        reverse = utils.as_list(reverse)
        tasklist = dag.select(
            lambda graph, node:
            node.short_qualified_name in reverse or
            node.qualified_name in reverse)
    else:
        def iterator(task):
            return task.children
        tasklist = dag.requested_goals

    if dag.has_tasks():
        def _display(task, indent=0, last=None):
            # Build the tree prefix from the ancestry: a pipe for each
            # ancestor that still has siblings below it, blanks otherwise.
            header = ""
            if indent > 0:
                for pipe in last[:-1]:
                    if pipe:
                        header += "\u2502 "
                    else:
                        header += "  "
                if last[-1]:
                    header += "\u251c\u2574"
                else:
                    header += "\u2514\u2574"
            if not show_cache:
                colorize = str
            elif task.is_cacheable() and not acache.is_available(task):
                colorize = colors.red
            else:
                colorize = colors.green
            print(header + colorize(task.short_qualified_name))
            children = iterator(task)
            for i in range(0, len(children)):
                _display(children[i], indent + 1,
                         last=(last or []) + [i + 1 != len(children)])
        for task in tasklist:
            _display(task)
    else:
        log.info("no tasks to display")
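# Example usage (a hedged sketch; the command name and option spellings are
# assumed from the function and parameter names, since the Click decorators
# are not shown in this excerpt):
#
#   $ jolt display mytask
#   mytask
#   ├╴dependency-a
#   │ └╴dependency-c
#   └╴dependency-b
#
# With show_cache enabled, names render green when the artifact is available
# in a cache and red when a cacheable artifact is missing.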
def clean(ctx, task, deps, expired):
    """ Delete task artifacts and intermediate files.

    When run without arguments, this command removes all task artifacts
    from the local cache. No intermediate files are removed.

    When TASK is specified, the task clean() method is invoked to remove
    any intermediate files still present in persistent build directories.
    The task artifact is then removed from the local cache. Global caches
    are not affected. The --deps parameter can be used to also clean all
    dependencies of the specified TASK.

    By default, task artifacts are removed without considering any artifact
    expiration metadata. To only remove artifacts that have expired, use
    the --expired parameter. Artifacts typically expire immediately after
    creation unless explicitly configured not to.
    """
    acache = cache.ArtifactCache.get()

    if task:
        task = [utils.stable_task_name(t) for t in task]
        registry = TaskRegistry.get()
        dag = graph.GraphBuilder(registry, ctx.obj["manifest"]).build(task)
        if deps:
            tasks = dag.tasks
        else:
            tasks = dag.goals
        for task in tasks:
            task.clean(acache, expired)
    else:
        acache.discard_all(expired)
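# Example usage (hedged; --deps and --expired are documented above, the
# command name is assumed from the function name):
#
#   $ jolt clean                      # discard all artifacts in the local cache
#   $ jolt clean --expired            # discard only artifacts that have expired
#   $ jolt clean mytask:param=value   # clean one task and its cached artifact
#   $ jolt clean --deps mytask        # also clean the task's dependencies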
def _list(ctx, task=None, all=False, reverse=None):
    """ List all tasks, or dependencies of a task.

    By default, when no TASK is specified, all known task names are listed
    in alphabetical order.

    When a TASK is specified, only direct dependencies of that task are
    listed. Use -a to also list its indirect dependencies. Multiple TASK
    names are allowed.
    """
    raise_error_if(not task and reverse, "TASK required with --reverse")
    registry = TaskRegistry.get()

    if not task:
        classes = registry.get_task_classes()
        for task in sorted(classes, key=lambda x: x.name):
            if task.name:
                print(task.name)
        return

    task = [utils.stable_task_name(t) for t in task]
    reverse = [utils.stable_task_name(t) for t in utils.as_list(reverse or [])]

    try:
        dag = graph.GraphBuilder(registry, ctx.obj["manifest"]).build(task, influence=False)
    except JoltError as e:
        raise e
    except Exception:
        raise_error(
            "an exception occurred during task dependency evaluation, see log for details")

    task = reverse or task
    nodes = dag.select(
        lambda graph, node:
        node.short_qualified_name in task or
        node.qualified_name in task)
    nodes = list(nodes)

    iterator = dag.predecessors if reverse else dag.successors
    tasklist = set()
    while nodes:
        node = nodes.pop()
        for task in iterator(node):
            # With -a, keep traversing transitively; otherwise only the
            # immediate neighbors are collected.
            if all and task.short_qualified_name not in tasklist:
                new_node = dag.get_task(task.qualified_name)
                nodes.append(new_node)
            tasklist.add(task.short_qualified_name)

    for task in sorted(list(tasklist)):
        print(task)
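# Example usage (hedged; -a and --reverse are documented above, the command
# name is assumed from the function name):
#
#   $ jolt list                        # all known task names, alphabetically
#   $ jolt list mytask                 # direct dependencies of mytask
#   $ jolt list -a mytask              # direct and indirect dependencies
#   $ jolt list --reverse=dep mytask   # consumers of dep within mytask's tree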
def _export(ctx, task):
    """ Render a shell script exporting the environment of a task's artifacts. """
    acache = cache.ArtifactCache.get()
    task = [utils.stable_task_name(t) for t in task]
    registry = TaskRegistry.get()
    executors = scheduler.ExecutorRegistry.get()
    strategy = scheduler.LocalStrategy(executors, acache)
    gb = graph.GraphBuilder(registry, ctx.obj["manifest"])
    dag = gb.build(task)
    gp = graph.GraphPruner(strategy)
    dag = gp.prune(dag)

    class Export(object):
        """ Visitor collecting environment variables exported by an artifact. """

        def __init__(self):
            self.environ = {}
            self.prepend_environ = {}

        def setenv(self, name, value):
            self.environ[name] = value

    class Context(object):
        def __init__(self, tasks):
            self.tasks = tasks
            self.environ = set()
            self.exports = {}

        def add_export(self, task, visitor):
            self.exports[task] = visitor
            self.environ.update(set(visitor.environ.keys()))

    tasks = list(filter(lambda t: t.is_cacheable(), reversed(dag.topological_nodes)))
    context = Context(tasks)

    for task in context.tasks:
        artifact = acache.get_artifact(task)
        raise_task_error_if(
            artifact.is_temporary(), task,
            "Task artifact not found in local cache, build first")
        visitor = Export()
        cache.visit_artifact(task, artifact, visitor)
        context.add_export(task, visitor)

    script = utils.render("export.sh.template", ctx=context)
    print(script)
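# Example usage (a hedged sketch; the command name is assumed from the
# function name): the rendered export.sh.template can be sourced to import
# the environment variables set by the cached artifacts:
#
#   $ jolt export mytask > env.sh
#   $ source env.sh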
def _list(ctx, task=None, reverse=None):
    """ List all tasks, or dependencies of a task. """
    raise_error_if(not task and reverse, "TASK required with --reverse")
    registry = TaskRegistry.get()

    if not task:
        classes = registry.get_task_classes()
        classes += registry.get_test_classes()
        for task in sorted(classes, key=lambda x: x.name):
            if task.name:
                print(task.name)
        return

    task = [utils.stable_task_name(t) for t in task]
    reverse = [utils.stable_task_name(t) for t in utils.as_list(reverse or [])]

    try:
        dag = graph.GraphBuilder(registry, ctx.obj["manifest"]).build(task, influence=False)
    except JoltError as e:
        raise e
    except Exception:
        raise_error(
            "an exception occurred during task dependency evaluation, see log for details")

    task = reverse or task
    nodes = dag.select(
        lambda graph, node:
        node.short_qualified_name in task or
        node.qualified_name in task)

    tasklist = set()
    iterator = dag.predecessors if reverse else dag.successors
    for node in nodes:
        for task in iterator(node):
            tasklist.add(task.short_qualified_name)

    for task in sorted(list(tasklist)):
        print(task)
def freeze(ctx, task, default, output, remove):
    """ Freeze the identity of a task.

    <WIP>
    """
    manifest = ctx.obj["manifest"]
    options = JoltOptions(default=default)
    acache = cache.ArtifactCache.get(options)
    scheduler.ExecutorRegistry.get(options)
    registry = TaskRegistry.get()

    for params in default:
        registry.set_default_parameters(params)

    gb = graph.GraphBuilder(registry, manifest)
    dag = gb.build(task)

    available_in_cache = [
        (t.is_available_locally(acache) or
         (t.is_available_remotely(acache) and acache.download_enabled()), t)
        for t in dag.tasks if t.is_cacheable()
    ]

    for available, task in available_in_cache:
        raise_task_error_if(
            not remove and not available, task,
            "task artifact is not available in any cache, build it first")

    for task in dag.tasks:
        if task.is_resource() or not task.is_cacheable():
            continue
        manifest_task = manifest.find_task(task)
        if remove and manifest_task:
            manifest.remove_task(manifest_task)
            continue
        if not remove:
            if not manifest_task:
                manifest_task = manifest.create_task()
            manifest_task.name = task.qualified_name
            manifest_task.identity = task.identity

    manifest.write(fs.path.join(JoltLoader.get().joltdir, output))
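# Example usage (a hedged sketch; option spellings are assumed from the
# parameter names and the manifest filename is hypothetical):
#
#   $ jolt freeze --output pinned.jolt.xml mytask            # pin identities
#   $ jolt freeze --remove --output pinned.jolt.xml mytask   # unpin them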
def inspect(ctx, task, influence=False, artifact=False, salt=None):
    """ View information about a task.

    This command displays information about a task, such as its class
    documentation, parameters and their accepted values, requirements,
    task class origin (file/line), influence attributes, artifact identity,
    cache status, and more. Default parameter values, if any, are
    highlighted.
    """
    task_name = task
    task_cls_name, task_params = utils.parse_task_name(task_name)
    task_registry = TaskRegistry.get()
    task = task_registry.get_task_class(task_cls_name)
    raise_task_error_if(not task, task_name, "no such task")

    from jolt import inspection

    print()
    print("  {0}".format(task.name))
    print()
    if task.__doc__:
        print("  {0}".format(task.__doc__.strip()))
        print()

    print("  Parameters")
    has_param = False
    params = {
        key: getattr(task, key) for key in dir(task)
        if isinstance(utils.getattr_safe(task, key), Parameter)
    }
    for item, param in params.items():
        has_param = True
        print("    {0:<15}   {1}".format(item, param.help or ""))
    if not has_param:
        print("    None")

    print()
    print("  Definition")
    print("    {0:<15}   {1} ({2})".format(
        "File",
        fs.path.relpath(inspection.getfile(task), JoltLoader.get().joltdir),
        inspection.getlineno(task)))

    print()
    print("  Requirements")
    manifest = ctx.obj["manifest"]
    try:
        task = task_registry.get_task(task_name, manifest=manifest)
        for req in sorted(utils.as_list(utils.call_or_return(task, task.requires))):
            print("    {0}".format(task.tools.expand(req)))
        if not task.requires:
            print("    None")
        print()
    except Exception as e:
        log.exception()
        if "has not been set" in str(e):
            print("    Unavailable (parameters must be set)")
            print()
            return
        print("    Unavailable (exception during evaluation)")
        print()
        return

    if salt:
        task.taint = salt

    if artifact:
        acache = cache.ArtifactCache.get()
        builder = graph.GraphBuilder(task_registry, manifest)
        dag = builder.build([task.qualified_name])
        tasks = dag.select(lambda graph, node: node.task is task)
        assert len(tasks) == 1, "graph produced multiple tasks, one expected"
        proxy = tasks[0]
        task = proxy.task

        print("  Cache")
        print("    Identity          {0}".format(proxy.identity))
        if acache.is_available_locally(proxy):
            with acache.get_artifact(proxy) as artifact:
                print("    Location          {0}".format(artifact.path))
            print("    Local             True ({0})".format(
                utils.as_human_size(acache.get_artifact(proxy).get_size())))
        else:
            print("    Local             False")
        print("    Remote            {0}".format(acache.is_available_remotely(proxy)))
        print()

    if influence:
        print("  Influence")
        for string in HashInfluenceRegistry.get().get_strings(task):
            string = string.split(":", 1)
            print("    {:<18}{}".format(string[0][10:], string[1].strip()))
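# Example usage (hedged; option spellings are assumed from the parameter
# names influence, artifact and salt):
#
#   $ jolt inspect mytask                # docs, parameters, requirements
#   $ jolt inspect --influence mytask    # hash influence strings
#   $ jolt inspect --artifact mytask     # identity, location and cache status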
def build(ctx, task, network, keep_going, default, local,
          no_download, no_upload, download, upload, worker,
          force, salt, copy, debug, result, jobs):
    """ Build task artifact.

    TASK is the name of the task to execute. It is optionally followed by
    a colon and parameter value assignments. Assignments are separated by
    commas. Example:

       taskname:param1=value1,param2=value2

    Default parameter values can be overridden for any task in the
    dependency tree with --default. DEFAULT is a qualified task name, just
    like TASK, but parameter assignments change default values.

    By default, a task is executed locally and the resulting artifact is
    stored in the local artifact cache. If an artifact is already available
    in the cache, no execution takes place. Artifacts are identified with a
    hash digest, constructed from hashing task attributes.

    When remote cache providers are configured, artifacts may be downloaded
    from and/or uploaded to the remote cache as execution progresses.
    Several options exist to control the behavior, such as --local which
    disables all remote caches.

    Distributed task execution is enabled by passing the --network option.
    Tasks are then distributed to and executed by a pool of workers, if one
    has been configured.

    Rebuilds can be forced with either --force or --salt. --force rebuilds
    the requested task, but not its dependencies. --salt affects the entire
    dependency tree. Both add an extra attribute to the task hash
    calculation in order to taint the identity and induce a cache miss. In
    both cases, existing intermediate files in build directories are
    removed before execution starts.
    """
    raise_error_if(network and local, "The -n and -l flags are mutually exclusive")
    raise_error_if(network and debug, "The -g and -n flags are mutually exclusive")
    raise_error_if(no_download and download,
                   "The --download and --no-download flags are mutually exclusive")
    raise_error_if(no_upload and upload,
                   "The --upload and --no-upload flags are mutually exclusive")

    duration = utils.duration()

    task = list(task)
    task = [utils.stable_task_name(t) for t in task]

    # Download/upload defaults depend on whether the build is distributed,
    # and may be overridden by the flags handled below.
    if network:
        _download = config.getboolean("network", "download", True)
        _upload = config.getboolean("network", "upload", True)
    else:
        _download = config.getboolean("jolt", "download", True)
        _upload = config.getboolean("jolt", "upload", True)

    if local:
        _download = False
        _upload = False
    else:
        if no_download:
            _download = False
        if no_upload:
            _upload = False
        if download:
            _download = True
        if upload:
            _upload = True

    options = JoltOptions(network=network,
                          local=local,
                          download=_download,
                          upload=_upload,
                          keep_going=keep_going,
                          default=default,
                          worker=worker,
                          debug=debug,
                          salt=salt,
                          jobs=jobs)

    acache = cache.ArtifactCache.get(options)
    executors = scheduler.ExecutorRegistry.get(options)

    if worker:
        log.set_worker()
        log.verbose("Local build as a worker")
        strategy = scheduler.WorkerStrategy(executors, acache)
    elif network:
        log.verbose("Distributed build as a user")
        strategy = scheduler.DistributedStrategy(executors, acache)
    else:
        log.verbose("Local build as a user")
        strategy = scheduler.LocalStrategy(executors, acache)

    hooks.TaskHookRegistry.get(options)
    registry = TaskRegistry.get(options)

    for params in default:
        registry.set_default_parameters(params)

    manifest = ctx.obj["manifest"]

    for mb in manifest.builds:
        for mt in mb.tasks:
            task.append(mt.name)
        for mt in mb.defaults:
            registry.set_default_parameters(mt.name)

    if force:
        for goal in task:
            registry.get_task(goal, manifest=manifest).taint = uuid.uuid4()

    gb = graph.GraphBuilder(registry, manifest, options, progress=True)
    dag = gb.build(task)

    gp = graph.GraphPruner(strategy)
    dag = gp.prune(dag)

    goal_tasks = dag.goals
    goal_task_duration = 0

    queue = scheduler.TaskQueue(strategy)

    try:
        if not dag.has_tasks():
            return

        progress = log.progress(
            "Progress",
            dag.number_of_tasks(filterfn=lambda t: not t.is_resource()),
            " tasks",
            estimates=False,
            debug=debug)

        with progress:
            while dag.has_tasks():
                # Find all tasks ready to be executed
                leafs = dag.select(lambda graph, task: task.is_ready())

                # Order the tasks by their weights to improve build times
                leafs.sort(key=lambda x: x.weight)

                while leafs:
                    task = leafs.pop()
                    queue.submit(acache, task)

                task, error = queue.wait()
                if not task:
                    dag.debug()
                    break
                elif task.is_goal() and task.duration_running:
                    goal_task_duration += task.duration_running.seconds

                if not task.is_resource():
                    progress.update(1)

                if not keep_going and error is not None:
                    queue.abort()
                    raise error

        if dag.failed:
            log.error("List of failed tasks")
            for failed in dag.failed:
                log.error("- {}", failed.log_name.strip("()"))
            raise_error("no more tasks could be executed")

        for goal in goal_tasks:
            if acache.is_available_locally(goal):
                with acache.get_artifact(goal) as artifact:
                    log.info("Location: {0}", artifact.path)
                    if copy:
                        artifact.copy(
                            "*",
                            utils.as_dirpath(
                                fs.path.join(workdir, click.format_filename(copy))),
                            symlinks=True)
    except KeyboardInterrupt:
        print()
        log.warning("Interrupted by user")
        try:
            queue.abort()
            sys.exit(1)
        except KeyboardInterrupt:
            print()
            log.warning("Interrupted again, exiting")
            os._exit(1)
    finally:
        log.info("Total execution time: {0} {1}",
                 str(duration),
                 str(queue.duration_acc) if network else '')
        if result:
            with report.update() as manifest:
                manifest.duration = str(goal_task_duration)
                manifest.write(result)
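# Example usage (the -n/-l short flags, --default, --force and --salt are all
# referenced in the docstring and error messages above; the command name is
# assumed from the function name):
#
#   $ jolt build taskname:param1=value1,param2=value2
#   $ jolt build --default othertask:param=value taskname
#   $ jolt build -n taskname          # distribute execution to a worker pool
#   $ jolt build --force taskname     # taint the goal task, forcing a rebuild
#   $ jolt build --salt=xyz taskname  # taint the entire dependency tree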
def compdb(ctx, task, default):
    """ Generate a compilation database for a task.

    Aggregates compilation databases found in artifacts of the specified
    task and its dependencies. The commands are then post-processed and
    localized to the current workspace.

    All task artifacts are sandboxed and their directory trees are
    recreated using symlinks pointing to the origin of collected files.
    When opening a file, an IDE can then follow the symlinks into the
    workspace instead of opening files in the artifact cache.

    The database must be regenerated if dependencies or the directory tree
    of an artifact change.
    """
    manifest = ctx.obj["manifest"]
    options = JoltOptions(default=default)
    acache = cache.ArtifactCache.get(options)
    TaskHookRegistry.get(options)
    executors = scheduler.ExecutorRegistry.get(options)
    registry = TaskRegistry.get()
    strategy = scheduler.DownloadStrategy(executors, acache)
    queue = scheduler.TaskQueue(strategy)

    for params in default:
        registry.set_default_parameters(params)

    gb = graph.GraphBuilder(registry, manifest, options, progress=True)
    dag = gb.build(task)

    try:
        with log.progress("Progress", dag.number_of_tasks(), " tasks",
                          estimates=False, debug=False) as p:
            while dag.has_tasks():
                leafs = dag.select(lambda graph, task: task.is_ready())

                # Order the tasks by their weights to improve build times
                leafs.sort(key=lambda x: x.weight)

                while leafs:
                    task = leafs.pop()
                    queue.submit(acache, task)

                task, error = queue.wait()
                p.update(1)
    except KeyboardInterrupt:
        print()
        log.warning("Interrupted by user")
        try:
            queue.abort()
            sys.exit(1)
        except KeyboardInterrupt:
            print()
            log.warning("Interrupted again, exiting")
            os._exit(1)

    for goal in dag.goals:
        artifact, deps = get_task_artifacts(goal)
        db = CompDB("all_compile_commands.json", artifact)
        db.read()
        db.relocate(goal, sandboxes=True)
        outdir = goal.tools.builddir("compdb", incremental=True)
        dbpath = fs.path.join(outdir, "all_compile_commands.json")
        db.write(dbpath, force=True)
        stage_artifacts(deps + [artifact], goal.tools)
        log.info("Compilation DB: {}", dbpath)
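# Example usage (a hedged sketch; the command name is assumed from the
# function name): the path of each aggregated database is logged as
# "Compilation DB: <path>" once post-processing completes:
#
#   $ jolt compdb mytask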