def run(self, env):
    """Execute the task and its extensions, reporting lifecycle events.

    Fires started/finished/failed callbacks on the task and each of its
    extensions (TYPE is presumably a module-level executor-type constant —
    confirm at file top). The actual work is delegated to self._run(env)
    inside the hooks.task_run() context.

    Returns the task object. Re-raises any exception from _run(); AMQP
    connection losses are converted via raise_error().
    """
    try:
        self.task.started(TYPE)
        for extension in self.task.extensions:
            extension.started(TYPE)
        # Run the task with pre/post hooks wrapping task + extensions.
        with hooks.task_run([self.task] + self.task.extensions):
            self._run(env)
        # Success path: extensions are finished before the task itself.
        for extension in self.task.extensions:
            extension.finished(TYPE)
        self.task.finished(TYPE)
    except (ConnectionError, AMQPConnectionError):
        # Broker connection lost: mark everything failed and surface a
        # dedicated error message instead of the raw exception.
        log.exception()
        for extension in self.task.extensions:
            extension.failed(TYPE)
        self.task.failed(TYPE)
        raise_error("Lost connection to AMQP server")
    except Exception as e:
        # Any other failure: mark everything failed, then propagate.
        log.exception()
        for extension in self.task.extensions:
            extension.failed(TYPE)
        self.task.failed(TYPE)
        raise e
    finally:
        # Best-effort close of the connection; errors here are swallowed.
        if self.connection is not None:
            utils.call_and_catch(self.connection.close)
    return self.task
def task_postrun(self, task, deps, tools):
    """After a C++ ninja task finishes, emit a compile_commands.json.

    Non-CXX tasks are ignored. Generation is best-effort: any error from
    the ninja invocation is caught and discarded by call_and_catch.
    """
    # Only ninja C++ projects carry a build.ninja to derive a
    # compilation database from.
    if isinstance(task.task, ninja.CXXProject):
        compdb_cmd = "ninja -f build.ninja -t compdb > compile_commands.json"
        with tools.cwd(task.task.outdir):
            utils.call_and_catch(tools.run, compdb_cmd)
def _autocomplete_tasks(ctx, args, incomplete):
    """Shell-completion callback: names of loaded tasks matching a prefix.

    Parses the manifest best-effort, loads all task recipes, and returns
    the sorted task names that start with ``incomplete`` (or all names
    when no prefix was typed yet).
    """
    manifest = JoltManifest()
    utils.call_and_catch(manifest.parse)
    manifest.process_import()

    prefix = incomplete or ''
    matches = []
    for candidate in JoltLoader.get().load():
        if candidate.name.startswith(prefix):
            matches.append(candidate.name)
    matches.sort()
    return matches
def run(self, deps, tools):
    """Build a Docker image, optionally push/extract/save it.

    Builds from self.dockerfile (path, or literal Dockerfile content as a
    fallback), applies all configured tags, then — guarded by task
    attributes — pushes the tags, extracts the container rootfs, and/or
    saves the image to a (possibly compressed) file. When self.cleanup is
    set, the tags are removed from the local daemon afterwards.
    """
    buildargs = " ".join(["--build-arg " + tools.expand(arg) for arg in self.buildargs])
    context = tools.expand_relpath(self.context, self.joltdir)
    dockerfile = tools.expand_path(self.dockerfile)
    self._imagefile = tools.expand(self.imagefile) if self.imagefile else None
    self._autoload = self._imagefile and self.autoload
    pull = " --pull" if self.pull else ""
    tags = [tools.expand(tag) for tag in self.tags]

    # If dockerfile is not relative to joltdir, look for it in context
    if not path.exists(dockerfile):
        with tools.cwd(context):
            dockerfile = tools.expand_path(self.dockerfile)

    # Still not found: treat self.dockerfile as inline Dockerfile content.
    if not path.exists(dockerfile):
        with tools.cwd(tools.builddir()):
            tools.write_file("Dockerfile", self.dockerfile)
            dockerfile = tools.expand_path("Dockerfile")

    self.info("Building image from {} in {}",
              tools.expand_relpath(dockerfile),
              tools.expand_relpath(context))

    with tools.cwd(context):
        tools.run("docker build . -f {} -t {} {}{}", dockerfile, tags[0], buildargs, pull)

    # Additional tags alias the first (primary) tag.
    for tag in tags[1:]:
        tools.run("docker tag {} {}", tags[0], tag)

    try:
        if self.push:
            self.info("Pushing image")
            for tag in tags:
                tools.run("docker push {}", tag)

        if self.extract:
            # Export the container filesystem via a throw-away container.
            self.info("Extracting image")
            tools.run("docker create --name {canonical_name}.{identity} {}", tags[0])
            try:
                with tools.cwd(tools.builddir("rootfs")):
                    tools.run("docker export {canonical_name}.{identity} -o rootfs.tar")
                    tools.extract("rootfs.tar", "rootfs/")
            finally:
                tools.run("docker rm {canonical_name}.{identity}")

        if self._imagefile:
            self.info("Saving image to file")
            with tools.cwd(tools.builddir()):
                tools.run("docker image save {} -o {_imagefile}", tags[0])
                if self.compression is not None:
                    tools.compress("{_imagefile}", "{_imagefile}.{compression}")
    finally:
        if self.cleanup:
            self.info("Removing image from Docker daemon")
            for tag in tags:
                # BUGFIX: pass the callable and its arguments to
                # call_and_catch instead of invoking tools.run eagerly;
                # the previous form ran the command outside the catch and
                # then tried to "call" its string result.
                utils.call_and_catch(tools.run, "docker image rm {}", tag)
def relocate(self, task, sandboxes=False):
    """Rewrite recorded paths in each compile command to this workspace.

    Replaces the original joltdir and cachedir (taken from self.attribs,
    falling back to the current values) with the current joltdir/cachedir
    in both the "command" and "directory" fields. With ``sandboxes`` set,
    sandbox paths are additionally redirected to their reflected variants.
    All patches are best-effort via call_and_catch.
    """
    for cmd in self.commands:
        # (field, old, new) replacements, applied in order.
        patches = [
            ("command", self.attribs.get("joltdir", joltdir()), joltdir()),
            ("command", self.attribs.get("cachedir", config.get_cachedir()), config.get_cachedir()),
        ]
        if sandboxes:
            patches.append(("command", "sandbox-", "sandbox-reflect-"))
        patches.append(("directory", self.attribs.get("joltdir", joltdir()), joltdir()))

        for field, old, new in patches:
            utils.call_and_catch(patch, cmd, field, old, new)
def cli(ctx, verbose, extra_verbose, config_file, debugger, profile, force, salt, debug, network, local, keep_going, jobs):
    """ A task execution tool.

    When invoked without any commands and arguments, Jolt by default tries
    to execute and build the artifact of a task called `default`. To build
    artifacts of other tasks use the build subcommand.

    The Jolt command line interface is hierarchical. One set of options
    can be passed to the top-level command and a different set of options
    to the subcommands, simultaneously. For example, verbose output is a
    top-level option while forced rebuild is a build command option.
    They may be combined like this:

      $ jolt --verbose build --force taskname

    Most build command options are available also at the top-level when
    build is invoked implicitly for the default task.

    """
    global debug_enabled
    debug_enabled = debugger

    log.verbose("Jolt command: {}", " ".join([fs.path.basename(sys.argv[0])] + sys.argv[1:]))
    log.verbose("Jolt host: {}", environ.get("HOSTNAME", "localhost"))
    log.verbose("Jolt install path: {}", fs.path.dirname(__file__))

    if ctx.invoked_subcommand in ["config"]:
        # Don't attempt to load any task recipes as they might require
        # plugins that are not yet configured.
        return

    # Resolve the build command up-front; used below when no subcommand
    # was given and we fall through to building the default task.
    if ctx.invoked_subcommand is None:
        build = ctx.command.get_command(ctx, "build")

    # Manifest parse is best-effort (a missing manifest is tolerated).
    manifest = JoltManifest()
    utils.call_and_catch(manifest.parse)
    manifest.process_import()
    ctx.obj["manifest"] = manifest

    # Enforce the project's minimum Jolt version requirement, if declared.
    if manifest.version:
        from jolt.version_utils import requirement, version
        req = requirement(manifest.version)
        ver = version(__version__)
        raise_error_if(not req.satisfied(ver), "this project requires Jolt version {} (running {})", req, __version__)

    # Load all task recipes and register their classes.
    loader = JoltLoader.get()
    tasks = loader.load()
    for cls in tasks:
        TaskRegistry.get().add_task_class(cls)

    # Serialize build/clean commands per workspace with a lock file;
    # released automatically at process exit.
    if ctx.invoked_subcommand in ["build", "clean"] and loader.joltdir:
        ctx.obj["workspace_lock"] = utils.LockFile(
            fs.path.join(loader.joltdir, "build"), log.info,
            "Workspace is locked by another process, please wait...")
        atexit.register(ctx.obj["workspace_lock"].close)

    # If no command is given, we default to building the default task.
    # If the default task doesn't exist, help is printed inside build().
    if ctx.invoked_subcommand is None:
        task = config.get("jolt", "default", "default")
        taskname, _ = utils.parse_task_name(task)
        if TaskRegistry.get().get_task_class(taskname) is not None:
            ctx.invoke(build, task=[task], force=force, salt=salt, debug=debug, network=network, local=local, keep_going=keep_going, jobs=jobs)
        else:
            print(cli.get_help(ctx))
            sys.exit(1)
def run(self):
    """Execute one build request received from the AMQP broker.

    Writes the request body as the default manifest, removes stale recipe
    files, self-deploys a matching Jolt version, and runs it as a worker.
    Whatever the outcome, self.response is populated with a formatted
    result manifest (result FAILED or SUCCESS) and the completion
    callback is registered on the consumer.
    """
    # The request body is the serialized build manifest.
    with open("default.joltxmanifest", "wb") as f:
        f.write(self.body)
    log.info("Manifest written")
    tools = Tools()
    # Remove leftover recipes from a previous job in this workspace.
    for recipe in tools.glob("*.jolt"):
        tools.unlink(recipe)
    try:
        # selfdeploy() presumably provides the jolt executable matching
        # the client's version — confirm in the class definition.
        jolt = self.selfdeploy()
        config_file = config.get("amqp", "config", "")
        if config_file:
            config_file = "-c " + config_file
        log.info("Running jolt")
        tools.run(
            "{} -vv {} build --worker --result result.joltxmanifest",
            jolt, config_file, output_stdio=True)
    except JoltCommandError as e:
        # The worker ran but the build failed; include its output in the
        # response manifest.
        self.response = ""
        try:
            manifest = JoltManifest()
            try:
                manifest.parse("result.joltxmanifest")
            except Exception:
                # No result manifest was produced; report zero duration.
                manifest.duration = "0"
            manifest.result = "FAILED"
            manifest.stdout = "\n".join(e.stdout)
            manifest.stderr = "\n".join(e.stderr)
            self.response = manifest.format()
        except Exception:
            log.exception()
        log.error("Task failed")
    except Exception:
        # Unexpected failure (e.g. deploy error); respond FAILED without
        # captured output.
        log.exception()
        self.response = ""
        try:
            manifest = JoltManifest()
            try:
                manifest.parse("result.joltxmanifest")
            except Exception:
                manifest.duration = "0"
            manifest.result = "FAILED"
            self.response = manifest.format()
        except Exception:
            log.exception()
        log.error("Task failed")
    else:
        # Build succeeded; respond SUCCESS with the result manifest.
        self.response = ""
        try:
            manifest = JoltManifest()
            try:
                manifest.parse("result.joltxmanifest")
            except Exception:
                manifest.duration = "0"
            manifest.result = "SUCCESS"
            self.response = manifest.format()
        except Exception:
            log.exception()
        log.info("Task succeeded")
    # Best-effort cleanup of the result file, then hand the response
    # back on the consumer's connection thread.
    utils.call_and_catch(tools.unlink, "result.joltxmanifest")
    self.consumer.add_on_job_completed_callback(self)
def run(self, deps, tools):
    """Build a Docker image, optionally push it, save it, or extract its layers.

    Builds from self.dockerfile (path, or literal Dockerfile content as a
    fallback), then — guarded by task attributes — pushes all tags, saves
    the image archive, extracts every layer into a rootfs tree, and/or
    compresses the saved image file. When self.cleanup is set, the tags
    are removed from the local daemon afterwards.
    """
    context = tools.expand_relpath(self.context, self.joltdir)
    dockerfile = tools.expand_path(self.dockerfile)
    self._imagefile = tools.expand(self.imagefile) if self.imagefile else None
    self._autoload = self._imagefile and self.autoload
    self.tags = [self.tools.expand(tag) for tag in self.tags]
    pull = " --pull" if self.pull else ""
    squash = " --squash" if self.squash else ""

    # If dockerfile is not relative to joltdir, look for it in context
    if not path.exists(dockerfile):
        with tools.cwd(context):
            dockerfile = tools.expand_path(self.dockerfile)

    # Still not found: treat self.dockerfile as inline Dockerfile content.
    if not path.exists(dockerfile):
        with tools.cwd(tools.builddir()):
            tools.write_file("Dockerfile", self.dockerfile)
            dockerfile = tools.expand_path("Dockerfile")

    self.info("Building image from {} in {}",
              tools.expand_relpath(dockerfile),
              tools.expand_relpath(context))

    with tools.cwd(context):
        tools.run(
            "docker build {_platform} . -f {} {_buildargs} {_labels} {_tags} {pull}{squash}",
            utils.quote(dockerfile), pull=pull, squash=squash)

    try:
        if self.push:
            self.info("Pushing image")
            for tag in self.tags:
                tools.run("docker push {}", tag)

        # A saved archive is needed both for the imagefile artifact and
        # as the source for layer extraction.
        if self._imagefile or self.extract:
            self.info("Saving image to file")
            with tools.cwd(tools.builddir()):
                tools.run("docker image save {} -o {}",
                          self.tags[0], self._imagefile or "image.tar")

        if self.extract:
            # Unpack the archive and apply each layer listed in the image
            # manifest onto a single rootfs tree.
            with tools.cwd(tools.builddir()):
                tools.extract(self._imagefile or "image.tar", "layers/")
                manifest = json.loads(tools.read_file("layers/manifest.json"))
                for image in manifest:
                    for layer in image.get("Layers", []):
                        self.info("Extracting layer {}", fs.path.dirname(layer))
                        self._extract_layer(tools, fs.path.join("layers", layer), "rootfs/")
        elif self._imagefile:
            with tools.cwd(tools.builddir()):
                if self.compression is not None:
                    tools.compress("{_imagefile}", "{_imagefile}.{compression}")
    finally:
        if self.cleanup:
            self.info("Removing image from Docker daemon")
            for tag in self.tags:
                # BUGFIX: pass the callable and its arguments to
                # call_and_catch instead of invoking tools.run eagerly;
                # the previous form ran the command outside the catch and
                # then tried to "call" its string result.
                utils.call_and_catch(tools.run, "docker image rm {}", tag)