Example 1
    def import_manifest(self, manifest):
        loader = JoltLoader.get()
        loader.set_joltdir(manifest.joltdir)

        for recipe in manifest.recipes:
            recipe = Recipe(recipe.path, source=recipe.source)
            recipe.save()

        for project in manifest.projects:
            for recipe in project.recipes:
                loader._add_project_recipe(project.name, recipe.joltdir,
                                           recipe.src)

            for resource in project.resources:
                loader._add_project_resource(project.name, resource.name,
                                             resource.text)

                # Acquire resource immediately
                task = TaskRegistry.get().get_task(resource.text,
                                                   manifest=manifest)
                raise_task_error_if(
                    not isinstance(task, WorkspaceResource), task,
                    "only workspace resources are allowed in manifest")
                task.acquire_ws()

            for module in project.modules:
                loader._add_project_module(project.name, module.src)
                sys.path.append(fs.path.join(manifest.joltdir, module.src))
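
The guard above only admits workspace resources, which are acquired as soon
as the manifest is imported. A minimal sketch of such a resource, with
hypothetical names and an assumed import path:

from jolt.tasks import WorkspaceResource  # import path assumed

class CheckedOutTree(WorkspaceResource):
    name = "src/tree"  # hypothetical

    def acquire_ws(self):
        # Called by import_manifest() right after the resource is
        # registered; prepare the workspace here, e.g. check out sources.
        pass
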
Example 2
    def shorten_task_names(self, report):
        # Use the short qualified task names in the report
        for mtask in report.tasks:
            with utils.ignore_exception():
                # mtask.name is a fully qualified name
                task = TaskRegistry.get().get_task(mtask.name)
                mtask.name = task.short_qualified_name
Example 3
File: cli.py Project: srand/jolt
def display(ctx, task, reverse=None, show_cache=False):
    """
    Display a task and its dependencies visually.

    """
    registry = TaskRegistry.get()
    gb = graph.GraphBuilder(registry, ctx.obj["manifest"])
    dag = gb.build(task, influence=show_cache)

    options = JoltOptions()
    acache = cache.ArtifactCache.get(options)

    if reverse:

        def iterator(task):
            return list(dag.predecessors(task))

        reverse = utils.as_list(reverse)
        tasklist = dag.select(lambda graph, node: node.short_qualified_name in
                              reverse or node.qualified_name in reverse)
    else:

        def iterator(task):
            return task.children

        tasklist = dag.requested_goals

    if dag.has_tasks():

        def _display(task, indent=0, last=None):
            header = ""
            if indent > 0:
                for pipe in last[:-1]:
                    if pipe:
                        header += "\u2502 "
                    else:
                        header += "  "
                if last[-1]:
                    header += "\u251c\u2574"
                else:
                    header += "\u2514\u2574"

            if not show_cache:
                colorize = str
            elif task.is_cacheable() and not acache.is_available(task):
                colorize = colors.red
            else:
                colorize = colors.green

            print(header + colorize(task.short_qualified_name))
            children = iterator(task)
            for i in range(0, len(children)):
                _display(children[i],
                         indent + 1,
                         last=(last or []) + [i + 1 != len(children)])

        for task in tasklist:
            _display(task)
    else:
        log.info("no tasks to display")
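
For reference, the box-drawing markers above (U+251C/U+2514 followed by
U+2574, and U+2502 for continuation) render the dependency tree roughly
like this, with hypothetical task names:

  taskname
  ├╴dep/a
  │ └╴dep/c
  └╴dep/b
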
Example 4
def clean(ctx, task, deps, expired):
    """
    Delete task artifacts and intermediate files.

    When run without arguments, this command removes all task artifacts
    from the local cache, but no intermediate files are removed.

    When TASK is specified, the task's clean() method is invoked to remove
    any intermediate files still present in persistent build directories.
    The task artifact is then removed from the local cache.
    Global caches are not affected. The --deps parameter can be used to also
    clean all dependencies of the specified TASK.

    By default, task artifacts are removed without considering any
    artifact expiration metadata. To only remove artifacts which have expired,
    use the --expired parameter. Artifacts typically expire immediately after
    creation unless explicitly configured not to.
    """
    acache = cache.ArtifactCache.get()
    if task:
        task = [utils.stable_task_name(t) for t in task]
        registry = TaskRegistry.get()
        dag = graph.GraphBuilder(registry, ctx.obj["manifest"]).build(task)
        if deps:
            tasks = dag.tasks
        else:
            tasks = dag.goals
        for task in tasks:
            task.clean(acache, expired)
    else:
        acache.discard_all(expired)
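
Hypothetical invocations corresponding to the docstring above:

  $ jolt clean                   # discard all artifacts in the local cache
  $ jolt clean taskname --deps   # clean taskname and all its dependencies
  $ jolt clean --expired         # only discard artifacts that have expired
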
Example 5
File: cli.py Project: srand/jolt
def _list(ctx, task=None, all=False, reverse=None):
    """
    List all tasks, or dependencies of a task.

    By default, when no TASK is specified, all known task names
    are listed in alphabetical order.

    When a TASK is specified, only direct dependencies of that task
    are listed. Use -a to also list its indirect dependencies.

    Multiple TASK names are allowed.
    """

    raise_error_if(not task and reverse, "TASK required with --reverse")

    registry = TaskRegistry.get()

    if not task:
        classes = registry.get_task_classes()
        for task in sorted(classes, key=lambda x: x.name):
            if task.name:
                print(task.name)
        return

    task = [utils.stable_task_name(t) for t in task]
    reverse = [utils.stable_task_name(t) for t in utils.as_list(reverse or [])]

    try:
        dag = graph.GraphBuilder(registry,
                                 ctx.obj["manifest"]).build(task,
                                                            influence=False)
    except JoltError as e:
        raise e
    except Exception:
        raise_error(
            "an exception occurred during task dependency evaluation, see log for details"
        )

    task = reverse or task
    nodes = dag.select(lambda graph, node: node.short_qualified_name in task or
                       node.qualified_name in task)
    nodes = list(nodes)
    iterator = dag.predecessors if reverse else dag.successors

    tasklist = set()
    while nodes:
        node = nodes.pop()
        for task in iterator(node):
            if all and task.short_qualified_name not in tasklist:
                new_node = dag.get_task(task.qualified_name)
                nodes.append(new_node)
            tasklist.add(task.short_qualified_name)

    for task in sorted(list(tasklist)):
        print(task)
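
Hypothetical invocations, assuming the command is exposed as "jolt list":

  $ jolt list              # all known task names, in alphabetical order
  $ jolt list taskname     # direct dependencies of taskname
  $ jolt list -a taskname  # direct and indirect dependencies
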
Example 6
    def get_parameters(self, task):
        registry = TaskRegistry()
        registry.add_task_class(Jolt)
        acache = ArtifactCache.get()
        env = JoltEnvironment(cache=acache)
        gb = GraphBuilder(registry, JoltManifest())
        dag = gb.build(["jolt"])
        task = dag.select(lambda graph, task: True)
        assert len(task) == 1, "too many selfdeploy tasks found"
        task = task[0]
        if not acache.is_available_remotely(task):
            factory = LocalExecutorFactory()
            executor = LocalExecutor(factory, task, force_upload=True)
            executor.run(env)
        jolt_url = acache.location(task)
        raise_error_if(not jolt_url, "failed to deploy jolt to a remote cache")
        return {
            "jolt_url": jolt_url,
            "jolt_identity": task.identity[:8],
            "jolt_requires": config.get("selfdeploy", "requires", "")
        }
Example 7
File: cli.py Project: srand/jolt
def _export(ctx, task):
    acache = cache.ArtifactCache.get()
    task = [utils.stable_task_name(t) for t in task]
    registry = TaskRegistry.get()
    executors = scheduler.ExecutorRegistry.get()
    strategy = scheduler.LocalStrategy(executors, acache)

    dag = graph.GraphBuilder(registry, ctx.obj["manifest"])
    dag = dag.build(task)

    gp = graph.GraphPruner(strategy)
    dag = gp.prune(dag)

    class Export(object):
        def __init__(self):
            self.environ = {}
            self.prepend_environ = {}

        def setenv(self, name, value):
            self.environ[name] = value

    class Context(object):
        def __init__(self, tasks):
            self.tasks = tasks
            self.environ = set()
            self.exports = {}

        def add_export(self, task, visitor):
            self.exports[task] = visitor
            self.environ.update(set(visitor.environ.keys()))

    tasks = list(
        filter(lambda t: t.is_cacheable(), reversed(dag.topological_nodes)))
    context = Context(tasks)

    for task in context.tasks:
        artifact = acache.get_artifact(task)
        raise_task_error_if(
            artifact.is_temporary(), task,
            "Task artifact not found in local cache, build first")

        visitor = Export()
        cache.visit_artifact(task, artifact, visitor)
        context.add_export(task, visitor)

    script = utils.render("export.sh.template", ctx=context)

    print(script)
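
The rendered script exports the environment variables collected by the
Export visitor from each cached artifact. A hypothetical session, assuming
the command is exposed as "jolt export":

  $ jolt export taskname > env.sh
  $ source env.sh
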
Example 8
def _list(ctx, task=None, reverse=None):
    """
    List all tasks, or dependencies of a task.

    """

    raise_error_if(not task and reverse, "TASK required with --reverse")

    registry = TaskRegistry.get()

    if not task:
        classes = registry.get_task_classes()
        classes += registry.get_test_classes()
        for task in sorted(classes, key=lambda x: x.name):
            if task.name:
                print(task.name)
        return

    task = [utils.stable_task_name(t) for t in task]
    reverse = [utils.stable_task_name(t) for t in utils.as_list(reverse or [])]

    try:
        dag = graph.GraphBuilder(registry,
                                 ctx.obj["manifest"]).build(task,
                                                            influence=False)
    except JoltError as e:
        raise e
    except Exception:
        raise_error(
            "an exception occurred during task dependency evaluation, see log for details"
        )

    task = reverse or task
    nodes = dag.select(lambda graph, node: node.short_qualified_name in task or
                       node.qualified_name in task)

    tasklist = set()
    iterator = dag.predecessors if reverse else dag.successors

    for node in nodes:
        for task in iterator(node):
            tasklist.add(task.short_qualified_name)

    for task in sorted(list(tasklist)):
        print(task)
Example 9
File: cli.py Project: srand/jolt
def freeze(ctx, task, default, output, remove):
    """
    Freeze the identity of a task.

    <WIP>
    """
    manifest = ctx.obj["manifest"]

    options = JoltOptions(default=default)
    acache = cache.ArtifactCache.get(options)
    scheduler.ExecutorRegistry.get(options)
    registry = TaskRegistry.get()

    for params in default:
        registry.set_default_parameters(params)

    gb = graph.GraphBuilder(registry, manifest)
    dag = gb.build(task)

    available_in_cache = [
        (t.is_available_locally(acache)
         or (t.is_available_remotely(acache) and acache.download_enabled()), t)
        for t in dag.tasks if t.is_cacheable()
    ]

    for available, task in available_in_cache:
        raise_task_error_if(
            not remove and not available, task,
            "task artifact is not available in any cache, build it first")

    for task in dag.tasks:
        if task.is_resource() or not task.is_cacheable():
            continue
        manifest_task = manifest.find_task(task)
        if remove and manifest_task:
            manifest.remove_task(manifest_task)
            continue
        if not remove:
            if not manifest_task:
                manifest_task = manifest.create_task()
            manifest_task.name = task.qualified_name
            manifest_task.identity = task.identity

    manifest.write(fs.path.join(JoltLoader.get().joltdir, output))
Example 10
    url = Parameter(help="URL to the Gerrit git repo to be cloned. Required.")
    sha = Parameter(required=False,
                    help="Specific commit or tag to be checked out. Optional.")
    path = Parameter(required=False,
                     help="Local path where the repository should be cloned.")
    _revision = Export(
        value=lambda self: self._get_revision() or self.git.head())

    def __init__(self, *args, **kwargs):
        refspec1 = '+refs/changes/*:refs/remotes/origin/changes/*'
        super(GerritSrc, self).__init__(*args, refspecs=[refspec1], **kwargs)


class Gerrit(git.Git):
    name = "gerrit"
    url = Parameter(help="URL to the Gerrit git repo to be cloned. Required.")
    sha = Parameter(required=False,
                    help="Specific commit or tag to be checked out. Optional.")
    path = Parameter(required=False,
                     help="Local path where the repository should be cloned.")
    _revision = Export(
        value=lambda self: self._get_revision() or self.git.head())

    def __init__(self, *args, **kwargs):
        refspec1 = '+refs/changes/*:refs/remotes/origin/changes/*'
        super(Gerrit, self).__init__(*args, refspecs=[refspec1], **kwargs)


TaskRegistry.get().add_task_class(GerritSrc)
TaskRegistry.get().add_task_class(Gerrit)
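
Once registered, the classes can be referenced like any other task. A
hypothetical consumer that fills in the required url parameter:

from jolt import Task

class App(Task):
    # "gerrit" resolves to the Gerrit class registered above.
    requires = ["gerrit:url=https://gerrit.example.com/project"]
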
Example 11
File: cli.py Project: srand/jolt
def build(ctx, task, network, keep_going, default, local, no_download,
          no_upload, download, upload, worker, force, salt, copy, debug,
          result, jobs):
    """
    Build task artifact.

    TASK is the name of the task to execute. It is optionally followed by a colon and
    parameter value assignments. Assignments are separated by commas. Example:

       taskname:param1=value1,param2=value2

    Default parameter values can be overridden for any task in the dependency tree
    with --default. DEFAULT is a qualified task name, just like TASK, but parameter
    assignments change default values.

    By default, a task is executed locally and the resulting artifact is stored
    in the local artifact cache. If an artifact is already available in the cache,
    no execution takes place. Artifacts are identified with a hash digest,
    constructed from hashing task attributes.

    When remote cache providers are configured, artifacts may be downloaded from and/or
    uploaded to the remote cache as execution progresses. Several options exist to control
    the behavior, such as --local which disables all remote caches.

    Distributed task execution is enabled by passing the --network option. Tasks are then
    distributed to and executed by a pool of workers, if one has been configured.

    Rebuilds can be forced with either --force or --salt. --force rebuilds the requested
    task, but not its dependencies. --salt affects the entire dependency tree. Both add
    an extra attribute to the task hash calculation in order to taint the identity and
    induce a cache miss. In both cases, existing intermediate files in build directories
    are removed before execution starts.

    """
    raise_error_if(network and local,
                   "The -n and -l flags are mutually exclusive")

    raise_error_if(network and debug,
                   "The -g and -n flags are mutually exclusive")

    raise_error_if(
        no_download and download,
        "The --download and --no-download flags are mutually exclusive")

    raise_error_if(
        no_upload and upload,
        "The --upload and --no-upload flags are mutually exclusive")

    duration = utils.duration()

    task = list(task)
    task = [utils.stable_task_name(t) for t in task]

    if network:
        _download = config.getboolean("network", "download", True)
        _upload = config.getboolean("network", "upload", True)
    else:
        _download = config.getboolean("jolt", "download", True)
        _upload = config.getboolean("jolt", "upload", True)

    if local:
        _download = False
        _upload = False
    else:
        if no_download:
            _download = False
        if no_upload:
            _upload = False
        if download:
            _download = True
        if upload:
            _upload = True

    options = JoltOptions(network=network,
                          local=local,
                          download=_download,
                          upload=_upload,
                          keep_going=keep_going,
                          default=default,
                          worker=worker,
                          debug=debug,
                          salt=salt,
                          jobs=jobs)

    acache = cache.ArtifactCache.get(options)

    executors = scheduler.ExecutorRegistry.get(options)
    if worker:
        log.set_worker()
        log.verbose("Local build as a worker")
        strategy = scheduler.WorkerStrategy(executors, acache)
    elif network:
        log.verbose("Distributed build as a user")
        strategy = scheduler.DistributedStrategy(executors, acache)
    else:
        log.verbose("Local build as a user")
        strategy = scheduler.LocalStrategy(executors, acache)

    hooks.TaskHookRegistry.get(options)
    registry = TaskRegistry.get(options)

    for params in default:
        registry.set_default_parameters(params)

    manifest = ctx.obj["manifest"]

    for mb in manifest.builds:
        for mt in mb.tasks:
            task.append(mt.name)
        for mt in mb.defaults:
            registry.set_default_parameters(mt.name)

    if force:
        for goal in task:
            registry.get_task(goal, manifest=manifest).taint = uuid.uuid4()

    gb = graph.GraphBuilder(registry, manifest, options, progress=True)
    dag = gb.build(task)

    gp = graph.GraphPruner(strategy)
    dag = gp.prune(dag)

    goal_tasks = dag.goals
    goal_task_duration = 0

    queue = scheduler.TaskQueue(strategy)

    try:
        if not dag.has_tasks():
            return

        progress = log.progress(
            "Progress",
            dag.number_of_tasks(filterfn=lambda t: not t.is_resource()),
            " tasks",
            estimates=False,
            debug=debug)

        with progress:
            while dag.has_tasks():
                # Find all tasks ready to be executed
                leafs = dag.select(lambda graph, task: task.is_ready())

                # Order the tasks by their weights to improve build times
                leafs.sort(key=lambda x: x.weight)

                while leafs:
                    task = leafs.pop()
                    queue.submit(acache, task)

                task, error = queue.wait()

                if not task:
                    dag.debug()
                    break
                elif task.is_goal() and task.duration_running:
                    goal_task_duration += task.duration_running.seconds

                if not task.is_resource():
                    progress.update(1)

                if not keep_going and error is not None:
                    queue.abort()
                    raise error

        if dag.failed:
            log.error("List of failed tasks")
            for failed in dag.failed:
                log.error("- {}", failed.log_name.strip("()"))
            raise_error("no more tasks could be executed")

        for goal in goal_tasks:
            if acache.is_available_locally(goal):
                with acache.get_artifact(goal) as artifact:
                    log.info("Location: {0}", artifact.path)
                    if copy:
                        artifact.copy("*",
                                      utils.as_dirpath(
                                          fs.path.join(
                                              workdir,
                                              click.format_filename(copy))),
                                      symlinks=True)
    except KeyboardInterrupt:
        print()
        log.warning("Interrupted by user")
        try:
            queue.abort()
            sys.exit(1)
        except KeyboardInterrupt:
            print()
            log.warning("Interrupted again, exiting")
            _exit(1)
    finally:
        log.info("Total execution time: {0} {1}", str(duration),
                 str(queue.duration_acc) if network else '')
        if result:
            with report.update() as manifest:
                manifest.duration = str(goal_task_duration)
                manifest.write(result)
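
Hypothetical invocations exercising the options described in the docstring
(the value syntax for --salt is assumed):

  $ jolt build taskname:param1=value1,param2=value2
  $ jolt build --force taskname    # rebuild taskname, but not its dependencies
  $ jolt build --salt 42 taskname  # taint the entire dependency tree
  $ jolt build -n taskname         # distribute execution to a worker pool
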
Example 12
File: debian.py Project: srand/jolt
    @property
    def joltdir(self):
        return loader.JoltLoader.get().joltdir

    def run_build(self, deps, tools):
        super().run(deps, tools)

    def run_download(self, deps, tools):
        with tools.cwd(tools.builddir()):
            assert tools.download(
                self.url, "debstrap.tgz"), "Failed to download mmdebstrap"
            tools.extract("debstrap.tgz", ".")
            tools.unlink("debstrap.tgz")


TaskRegistry.get().add_task_class(MMDebstrap)


class DebianPkg(DebianPkgBase):
    abstract = True
    requires = ["debian/mmdebstrap"]

    def publish(self, artifact, tools):
        super().publish(artifact, tools)
        artifact.debian.chroot.append(".")


class DebianEssential(DebianPkg):
    name = "debian/essential"
    variant = "essential"
Example 13
File: cli.py Project: srand/jolt
def cli(ctx, verbose, extra_verbose, config_file, debugger, profile, force,
        salt, debug, network, local, keep_going, jobs):
    """
    A task execution tool.

    When invoked without any commands and arguments, Jolt by default tries
    to execute and build the artifact of a task called `default`. To build
    artifacts of other tasks use the build subcommand.

    The Jolt command line interface is hierarchical. One set of options
    can be passed to the top-level command and a different set of options
    to the subcommands, simultaneously. For example, verbose output is
    a top-level option while forced rebuild is a build command option.
    They may be combined like this:

      $ jolt --verbose build --force taskname

    Most build command options are available also at the top-level when
    build is invoked implicitly for the default task.

    """

    global debug_enabled
    debug_enabled = debugger

    log.verbose("Jolt command: {}",
                " ".join([fs.path.basename(sys.argv[0])] + sys.argv[1:]))
    log.verbose("Jolt host: {}", environ.get("HOSTNAME", "localhost"))
    log.verbose("Jolt install path: {}", fs.path.dirname(__file__))

    if ctx.invoked_subcommand in ["config"]:
        # Don't attempt to load any task recipes as they might require
        # plugins that are not yet configured.
        return

    if ctx.invoked_subcommand is None:
        build = ctx.command.get_command(ctx, "build")

    manifest = JoltManifest()
    utils.call_and_catch(manifest.parse)
    manifest.process_import()
    ctx.obj["manifest"] = manifest

    if manifest.version:
        from jolt.version_utils import requirement, version
        req = requirement(manifest.version)
        ver = version(__version__)
        raise_error_if(not req.satisfied(ver),
                       "this project requires Jolt version {} (running {})",
                       req, __version__)

    loader = JoltLoader.get()
    tasks = loader.load()
    for cls in tasks:
        TaskRegistry.get().add_task_class(cls)

    if ctx.invoked_subcommand in ["build", "clean"] and loader.joltdir:
        ctx.obj["workspace_lock"] = utils.LockFile(
            fs.path.join(loader.joltdir, "build"), log.info,
            "Workspace is locked by another process, please wait...")
        atexit.register(ctx.obj["workspace_lock"].close)

    # If no command is given, we default to building the default task.
    # If the default task doesn't exist, help is printed inside build().
    if ctx.invoked_subcommand is None:
        task = config.get("jolt", "default", "default")
        taskname, _ = utils.parse_task_name(task)
        if TaskRegistry.get().get_task_class(taskname) is not None:
            ctx.invoke(build,
                       task=[task],
                       force=force,
                       salt=salt,
                       debug=debug,
                       network=network,
                       local=local,
                       keep_going=keep_going,
                       jobs=jobs)
        else:
            print(cli.get_help(ctx))
            sys.exit(1)
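
The implicit build at the end resolves its goal from the jolt.default
configuration key, falling back to a task literally named "default". A
minimal sketch of the corresponding configuration entry (file location
assumed):

  [jolt]
  default = myapp
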
Example 14
def compdb(ctx, task, default):
    """
    Generate a compilation database for a task.

    Aggregates compilation databases found in artifacts of the specified task and
    its dependencies. The commands are then post-processed and localized to the
    current workspace.

    All task artifacts are sandboxed and their directory trees are recreated
    using symlinks pointing to the origin of collected files. When opening a
    file, an IDE can then follow the symlinks into the workspace instead of
    opening files in the artifact cache.

    The database must be regenerated if dependencies or the directory tree
    of an artifact change.

    """

    manifest = ctx.obj["manifest"]
    options = JoltOptions(default=default)
    acache = cache.ArtifactCache.get(options)
    TaskHookRegistry.get(options)
    executors = scheduler.ExecutorRegistry.get(options)
    registry = TaskRegistry.get()
    strategy = scheduler.DownloadStrategy(executors, acache)
    queue = scheduler.TaskQueue(strategy)

    for params in default:
        registry.set_default_parameters(params)

    gb = graph.GraphBuilder(registry, manifest, options, progress=True)
    dag = gb.build(task)

    try:
        with log.progress("Progress",
                          dag.number_of_tasks(),
                          " tasks",
                          estimates=False,
                          debug=False) as p:
            while dag.has_tasks():
                leafs = dag.select(lambda graph, task: task.is_ready())

                # Order the tasks by their weights to improve build times
                leafs.sort(key=lambda x: x.weight)

                while leafs:
                    task = leafs.pop()
                    queue.submit(acache, task)

                task, error = queue.wait()
                p.update(1)

    except KeyboardInterrupt:
        print()
        log.warning("Interrupted by user")
        try:
            queue.abort()
            sys.exit(1)
        except KeyboardInterrupt:
            print()
            log.warning("Interrupted again, exiting")
            os._exit(1)

    for goal in dag.goals:
        artifact, deps = get_task_artifacts(goal)
        db = CompDB("all_compile_commands.json", artifact)
        db.read()
        db.relocate(goal, sandboxes=True)
        outdir = goal.tools.builddir("compdb", incremental=True)
        dbpath = fs.path.join(outdir, "all_compile_commands.json")
        db.write(dbpath, force=True)
        stage_artifacts(deps + [artifact], goal.tools)
        log.info("Compilation DB: {}", dbpath)
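
A hypothetical session: build the task first so that its artifacts are
present in the local cache, then generate the database and point the IDE
or language server at the path printed on the last line:

  $ jolt build taskname
  $ jolt compdb taskname
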
Example 15
File: docker.py Project: srand/jolt
        raise_task_error_if(not self._user(tools), self,
                            "Username has not been configured")
        raise_task_error_if(not self._password(tools), self,
                            "Password has not been configured")

        with tools.cwd(tools.builddir()):
            tools.write_file("docker-credential", self._password(tools))
            tools.run(
                "cat docker-credential | docker login -u {user} --password-stdin {server}",
                user=self._user(tools))

    def release(self, artifact, deps, tools, owner):
        tools.run("docker logout {server}")


TaskRegistry.get().add_task_class(DockerClient)
TaskRegistry.get().add_task_class(DockerLogin)


class _Tarfile(tarfile.TarFile):
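    # Docker/OCI image layers mark files deleted in lower layers with
    # ".wh."-prefixed whiteout entries; extracting such an entry removes
    # the shadowed file instead of creating one.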
    def without(self, targetpath):
        dirname, filename = fs.path.split(targetpath)
        if not filename.startswith(".wh."):
            return None
        fs.unlink(fs.path.join(dirname, filename[4:]),
                  ignore_errors=True,
                  tree=True)
        return True

    def makedev(self, tarinfo, targetpath):
        if self.without(targetpath):
Example 16
File: docker.py Project: srand/jolt
    def _image(self):
        registry = TaskRegistry.get()
        tool = tools.Tools(self)
        if registry.get_task_class(tool.expand(self.image)):
            return [self.image]
        return []
Example 17
File: nodejs.py Project: srand/jolt
@jolt_attributes.system
@jolt_attributes.attribute("bin", "bin_{system}")
@jolt_attributes.attribute("url", "url_{system}")
class NodeJS(Download):
    name = "nodejs"
    version = Parameter("16.15.1", help="NodeJS version.")
    url_linux = "https://nodejs.org/dist/v{version}/node-v{version}-linux-{arch}.tar.gz"
    url_windows = "https://nodejs.org/dist/v{version}/node-v{version}-win-{arch}.zip"
    bin_linux = "node-v{version}-linux-{arch}/bin"
    bin_windows = "node-v{version}-win-{arch}/bin"

    @property
    def arch(self):
        arch = platform.machine()
        if arch == "x86_64":
            return "x64"
        if arch in ["i386", "i486", "i586", "i686"]:
            return "x86"
        if arch in ["arm", "armv7l"]:
            return "armv7l"
        if arch in ["arm64", "armv8l", "aarch64"]:
            return "arm64"
        return None

    def publish(self, artifact, tools):
        super().publish(artifact, tools)
        artifact.environ.PATH.append(self.bin)


TaskRegistry.get().add_task_class(NodeJS)
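
A hypothetical consumer: since publish() appends the unpacked bin directory
to artifact.environ.PATH, depending on nodejs is enough to put node on PATH
while the task runs:

from jolt import Task

class Frontend(Task):
    requires = ["nodejs:version=18.16.0"]

    def run(self, deps, tools):
        # node is provided by the nodejs artifact via its PATH export.
        tools.run("node --version")
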
Example 18
File: cli.py Project: srand/jolt
def inspect(ctx, task, influence=False, artifact=False, salt=None):
    """
    View information about a task.

    This command displays information about a task, such as its class
    documentation, parameters and their accepted values, requirements,
    task class origin (file/line), influence attributes, artifact identity,
    cache status, and more. Default parameter values, if any, are highlighted.

    """
    task_name = task
    task_cls_name, task_params = utils.parse_task_name(task_name)
    task_registry = TaskRegistry.get()
    task = task_registry.get_task_class(task_cls_name)
    raise_task_error_if(not task, task_name, "no such task")

    from jolt import inspection

    print()
    print("  {0}".format(task.name))
    print()
    if task.__doc__:
        print("  {0}".format(task.__doc__.strip()))
        print()
    print("  Parameters")
    has_param = False
    params = {
        key: getattr(task, key)
        for key in dir(task)
        if isinstance(utils.getattr_safe(task, key), Parameter)
    }
    for item, param in params.items():
        has_param = True
        print("    {0:<15}   {1}".format(item, param.help or ""))
    if not has_param:
        print("    None")

    print()
    print("  Definition")
    print("    {0:<15}   {1} ({2})".format(
        "File",
        fs.path.relpath(inspection.getfile(task),
                        JoltLoader.get().joltdir), inspection.getlineno(task)))

    print()
    print("  Requirements")
    manifest = ctx.obj["manifest"]
    try:
        task = task_registry.get_task(task_name, manifest=manifest)
        for req in sorted(
                utils.as_list(utils.call_or_return(task, task.requires))):
            print("    {0}".format(task.tools.expand(req)))
        if not task.requires:
            print("    None")
        print()
    except Exception as e:
        log.exception()
        if "has not been set" in str(e):
            print("    Unavailable (parameters must be set)")
            print()
            return
        print("    Unavailable (exception during evaluation)")
        print()
        return

    if salt:
        task.taint = salt

    if artifact:
        acache = cache.ArtifactCache.get()
        builder = graph.GraphBuilder(task_registry, manifest)
        dag = builder.build([task.qualified_name])
        tasks = dag.select(lambda graph, node: node.task is task)
        assert len(tasks) == 1, "graph produced multiple tasks, one expected"
        proxy = tasks[0]
        task = proxy.task

        print("  Cache")
        print("    Identity          {0}".format(proxy.identity))
        if acache.is_available_locally(proxy):
            with acache.get_artifact(proxy) as artifact:
                print("    Location          {0}".format(artifact.path))
            print("    Local             True ({0})".format(
                utils.as_human_size(acache.get_artifact(proxy).get_size())))
        else:
            print("    Local             False")
        print("    Remote            {0}".format(
            acache.is_available_remotely(proxy)))
        print()

    if influence:
        print("  Influence")
        for string in HashInfluenceRegistry.get().get_strings(task):
            string = string.split(":", 1)
            print("    {:<18}{}".format(string[0][10:], string[1].strip()))
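
Hypothetical invocations; the flag names are assumed from the function
signature:

  $ jolt inspect taskname              # docs, parameters, requirements
  $ jolt inspect --artifact taskname   # plus artifact identity and cache status
  $ jolt inspect --influence taskname  # plus hash influence attributes
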
Example 19
@jolt_attributes.attribute("url", "url_{system}")
class Golang(Download):
    name = "golang"
    version = Parameter("1.18.3", help="Go version.")
    url_linux = "https://go.dev/dl/go{version}.linux-{arch}.tar.gz"
    url_windows = "https://go.dev/dl/go{version}.windows-{arch}.zip"

    @property
    def arch(self):
        arch = platform.machine()
        if arch == "x86_64":
            return "amd64"
        if arch in ["i386", "i486", "i586", "i686"]:
            return "i386"
        if arch in ["arm", "armv6l", "armv7l"]:
            return "armv6l"
        if arch in ["arm64", "armv8l", "aarch64"]:
            return "arm64"
        return None

    def publish(self, artifact, tools):
        super().publish(artifact, tools)
        artifact.environ.PATH.append("go/bin")
        artifact.paths.goroot = "go"

    def unpack(self, artifact, tools):
        artifact.environ.GOROOT = str(artifact.paths.goroot)


TaskRegistry.get().add_task_class(Golang)
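
As with NodeJS above, a dependent task receives the toolchain through the
PATH export, and unpack() additionally sets GOROOT when the artifact is
unpacked into the local cache. A hypothetical consumer:

from jolt import Task

class Server(Task):
    requires = ["golang:version=1.20.5"]

    def run(self, deps, tools):
        # go is provided by the golang artifact via its PATH export.
        tools.run("go version")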