Example #1
    def on_connection_open_error(self, _unused_connection, err):
        """This method is called by pika if the connection to RabbitMQ
        can't be established.

        :param pika.SelectConnection _unused_connection: The connection
        :param Exception err: The error

        """
        log.error('Connection open failed: {}', err)
        self.reconnect()
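The handler above is one of pika's asynchronous connection callbacks. As a minimal, hypothetical sketch (the class name, the companion callbacks, and the reconnect() stub are assumptions, not part of the example above), it would be registered when the pika.SelectConnection is created:

import logging

import pika

log = logging.getLogger(__name__)


class Consumer(object):
    """Hypothetical consumer, modeled on pika's asynchronous examples."""

    def __init__(self, amqp_url):
        self._url = amqp_url

    def connect(self):
        # pika invokes on_open_error_callback when the connection to
        # RabbitMQ cannot be established, which is when the handler
        # above runs.
        return pika.SelectConnection(
            parameters=pika.URLParameters(self._url),
            on_open_callback=self.on_connection_open,
            on_open_error_callback=self.on_connection_open_error,
            on_close_callback=self.on_connection_closed)

    def on_connection_open(self, _unused_connection):
        log.info("Connection opened")

    def on_connection_open_error(self, _unused_connection, err):
        log.error("Connection open failed: %s", err)
        self.reconnect()

    def on_connection_closed(self, _unused_connection, reason):
        log.warning("Connection closed: %s", reason)

    def reconnect(self):
        # A real implementation would stop the ioloop and schedule a new
        # connection attempt; omitted here.
        pass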
Example #2
def build(ctx, task, network, keep_going, default, local, no_download,
          no_upload, download, upload, worker, force, salt, copy, debug,
          result, jobs):
    """
    Build task artifact.

    TASK is the name of the task to execute. It is optionally followed by a colon and
    parameter value assignments. Assignments are separated by commas. Example:

       taskname:param1=value1,param2=value2

    Default parameter values can be overridden for any task in the dependency tree
    with --default. DEFAULT is a qualified task name, just like TASK, but parameter
    assignments change default values.

    By default, a task is executed locally and the resulting artifact is stored
    in the local artifact cache. If an artifact is already available in the cache,
    no execution takes place. Artifacts are identified with a hash digest,
    constructed from hashing task attributes.

    When remote cache providers are configured, artifacts may be downloaded from and/or
    uploaded to the remote cache as execution progresses. Several options exist to control
    the behavior, such as --local which disables all remote caches.

    Distributed task execution is enabled by passing the --network option. Tasks are then
    distributed to and executed by a pool of workers, if one has been configured.

    Rebuilds can be forced with either --force or --salt. --force rebuilds the requested
    task, but not its dependencies. --salt affects the entire dependency tree. Both add
    an extra attribute to the task hash calculation in order to taint the identity and
    induce a cache miss. In both cases, existing intermediate files in build directories
    are removed before execution starts.

    """
    raise_error_if(network and local,
                   "The -n and -l flags are mutually exclusive")

    raise_error_if(network and debug,
                   "The -g and -n flags are mutually exclusive")

    raise_error_if(
        no_download and download,
        "The --download and --no-download flags are mutually exclusive")

    raise_error_if(
        no_upload and upload,
        "The --upload and --no-upload flags are mutually exclusive")

    duration = utils.duration()

    task = list(task)
    task = [utils.stable_task_name(t) for t in task]

    if network:
        _download = config.getboolean("network", "download", True)
        _upload = config.getboolean("network", "upload", True)
    else:
        _download = config.getboolean("jolt", "download", True)
        _upload = config.getboolean("jolt", "upload", True)

    if local:
        _download = False
        _upload = False
    else:
        if no_download:
            _download = False
        if no_upload:
            _upload = False
        if download:
            _download = True
        if upload:
            _upload = True

    options = JoltOptions(network=network,
                          local=local,
                          download=_download,
                          upload=_upload,
                          keep_going=keep_going,
                          default=default,
                          worker=worker,
                          debug=debug,
                          salt=salt,
                          jobs=jobs)

    acache = cache.ArtifactCache.get(options)

    executors = scheduler.ExecutorRegistry.get(options)
    if worker:
        log.set_worker()
        log.verbose("Local build as a worker")
        strategy = scheduler.WorkerStrategy(executors, acache)
    elif network:
        log.verbose("Distributed build as a user")
        strategy = scheduler.DistributedStrategy(executors, acache)
    else:
        log.verbose("Local build as a user")
        strategy = scheduler.LocalStrategy(executors, acache)

    hooks.TaskHookRegistry.get(options)
    registry = TaskRegistry.get(options)

    for params in default:
        registry.set_default_parameters(params)

    manifest = ctx.obj["manifest"]

    # Import tasks and default parameters requested by the build manifest.
    for mb in manifest.builds:
        for mt in mb.tasks:
            task.append(mt.name)
        for mt in mb.defaults:
            registry.set_default_parameters(mt.name)

    # --force: taint the requested tasks with a random UUID to change their
    # hashes and force a cache miss.
    if force:
        for goal in task:
            registry.get_task(goal, manifest=manifest).taint = uuid.uuid4()

    gb = graph.GraphBuilder(registry, manifest, options, progress=True)
    dag = gb.build(task)

    # Prune tasks that the selected strategy considers already satisfied,
    # e.g. because their artifacts are present in a cache.
    gp = graph.GraphPruner(strategy)
    dag = gp.prune(dag)

    goal_tasks = dag.goals
    goal_task_duration = 0

    queue = scheduler.TaskQueue(strategy)

    try:
        if not dag.has_tasks():
            return

        progress = log.progress(
            "Progress",
            dag.number_of_tasks(filterfn=lambda t: not t.is_resource()),
            " tasks",
            estimates=False,
            debug=debug)

        with progress:
            while dag.has_tasks():
                # Find all tasks ready to be executed
                leafs = dag.select(lambda graph, task: task.is_ready())

                # Order the tasks by their weights to improve build times
                leafs.sort(key=lambda x: x.weight)

                while leafs:
                    task = leafs.pop()
                    queue.submit(acache, task)

                task, error = queue.wait()

                if not task:
                    dag.debug()
                    break
                elif task.is_goal() and task.duration_running:
                    goal_task_duration += task.duration_running.seconds

                if not task.is_resource():
                    progress.update(1)

                if not keep_going and error is not None:
                    queue.abort()
                    raise error

        if dag.failed:
            log.error("List of failed tasks")
            for failed in dag.failed:
                log.error("- {}", failed.log_name.strip("()"))
            raise_error("no more tasks could be executed")

        for goal in goal_tasks:
            if acache.is_available_locally(goal):
                with acache.get_artifact(goal) as artifact:
                    log.info("Location: {0}", artifact.path)
                    if copy:
                        # 'workdir' is defined in the surrounding module and
                        # is not part of this excerpt.
                        artifact.copy("*",
                                      utils.as_dirpath(
                                          fs.path.join(
                                              workdir,
                                              click.format_filename(copy))),
                                      symlinks=True)
    except KeyboardInterrupt:
        print()
        log.warning("Interrupted by user")
        try:
            queue.abort()
            sys.exit(1)
        except KeyboardInterrupt:
            print()
            log.warning("Interrupted again, exiting")
            _exit(1)
    finally:
        log.info("Total execution time: {0} {1}", str(duration),
                 str(queue.duration_acc) if network else '')
        if result:
            with report.update() as manifest:
                manifest.duration = str(goal_task_duration)
                manifest.write(result)
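The docstring above describes the TASK syntax and the build flags. As a hedged illustration only, those invocations could be exercised with Click's test runner roughly as follows; the jolt.cli.cli import path and the registration of the command as "build" are assumptions and may differ:

from click.testing import CliRunner

from jolt.cli import cli  # assumed location of the Click group


runner = CliRunner()

# A task with parameter assignments: taskname:param1=value1,param2=value2
result = runner.invoke(cli, ["build", "taskname:param1=value1,param2=value2"])

# Override a default parameter somewhere in the dependency tree and force
# a rebuild of the requested task (its dependencies stay cached).
result = runner.invoke(cli, [
    "build", "taskname",
    "--default", "dependency:param=value",
    "--force",
])

# Taint the entire dependency tree instead, inducing cache misses everywhere.
result = runner.invoke(cli, ["build", "taskname", "--salt", "2024"])

# --network and --local are mutually exclusive; the command is expected to fail.
result = runner.invoke(cli, ["build", "taskname", "--network", "--local"])
assert result.exit_code != 0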
Example #3
            def run(self):
                with open("default.joltxmanifest", "wb") as f:
                    f.write(self.body)

                log.info("Manifest written")

                tools = Tools()
                # Remove any existing recipes before running the build.
                for recipe in tools.glob("*.jolt"):
                    tools.unlink(recipe)

                try:
                    jolt = self.selfdeploy()
                    config_file = config.get("amqp", "config", "")
                    if config_file:
                        config_file = "-c " + config_file

                    log.info("Running jolt")
                    tools.run(
                        "{} -vv {} build --worker --result result.joltxmanifest",
                        jolt,
                        config_file,
                        output_stdio=True)
                # The jolt command failed: report FAILED together with the
                # captured stdout and stderr.
                except JoltCommandError as e:
                    self.response = ""
                    try:
                        manifest = JoltManifest()
                        try:
                            manifest.parse("result.joltxmanifest")
                        except Exception:
                            manifest.duration = "0"
                        manifest.result = "FAILED"
                        manifest.stdout = "\n".join(e.stdout)
                        manifest.stderr = "\n".join(e.stderr)
                        self.response = manifest.format()
                    except Exception:
                        log.exception()
                    log.error("Task failed")
                # Unexpected error, e.g. during self-deployment: report FAILED
                # without command output.
                except Exception:
                    log.exception()
                    self.response = ""
                    try:
                        manifest = JoltManifest()
                        try:
                            manifest.parse("result.joltxmanifest")
                        except Exception:
                            manifest.duration = "0"
                        manifest.result = "FAILED"
                        self.response = manifest.format()
                    except Exception:
                        log.exception()
                    log.error("Task failed")
                # The build succeeded: report SUCCESS with the parsed result.
                else:
                    self.response = ""
                    try:
                        manifest = JoltManifest()
                        try:
                            manifest.parse("result.joltxmanifest")
                        except Exception:
                            manifest.duration = "0"
                        manifest.result = "SUCCESS"
                        self.response = manifest.format()
                    except Exception:
                        log.exception()
                    log.info("Task succeeded")

                # Remove the result manifest and register this handler with
                # the consumer's on-job-completed callbacks.
                utils.call_and_catch(tools.unlink, "result.joltxmanifest")
                self.consumer.add_on_job_completed_callback(self)