Example #1
def store_logfile(payload, current_file, full_log_file):
    if (payload is None or current_file is None or full_log_file is None):
        log.error("Cannot store log file (missing parameters)")
        return

    pr_full_name = github.pr_full_name(payload)
    pr_number = github.pr_number(payload)
    pr_id = github.pr_id(payload)
    pr_sha1 = github.pr_sha1(payload)

    log_file_dir = "{p}/{fn}/{n}/{i}/{s}".format(p=settings.log_dir(),
                                                 fn=pr_full_name,
                                                 n=pr_number,
                                                 i=pr_id,
                                                 s=pr_sha1)

    # Create the per-PR log directory (including parents) if it does not exist.
    os.makedirs(log_file_dir, exist_ok=True)

    source = current_file
    dest = "{d}/{f}".format(d=log_file_dir, f=full_log_file)

    try:
        # Append the log to the archive; the context manager guarantees the
        # zip file is closed (and its central directory written) on exit.
        with zipfile.ZipFile(dest, mode='a',
                             compression=zipfile.ZIP_DEFLATED) as zf:
            zf.write(source)
    except FileNotFoundError:
        log.error("Couldn't find file {}".format(source))
Example #2
def get_logs(pr_full_name, pr_number, pr_id, pr_sha1):
    """The function returns a dictionary with dictionaries where the high level
       dictionary have 'key' corresponding job definition and the inner
       dictionaries corresponds to each individual log files."""
    if (pr_full_name is None or pr_number is None or pr_id is None
            or pr_sha1 is None):
        log.error("Cannot store log file (missing parameters)")
        return

    log_file_dir = "{p}/{fn}/{n}/{i}/{s}".format(p=settings.log_dir(),
                                                 fn=pr_full_name,
                                                 n=pr_number,
                                                 i=pr_id,
                                                 s=pr_sha1)

    log.debug("Getting logs from folder: {}".format(log_file_dir))

    all_logs = OrderedDict()
    for zf in sorted(glob.glob("{}/*.zip".format(log_file_dir))):
        logs = OrderedDict()
        log.debug("Unpacking zip-file: {}".format(zf))
        for logtype in log2str.values():
            filename = "{}.log".format(logtype)
            logs[logtype] = read_log(filename, zf)
        # Use job definition as key when returning logs from multi definition
        # jobs.
        jd = Path(zf).name.replace(".zip", "")
        all_logs[jd] = logs
    return all_logs
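read_log is referenced above but is not part of this excerpt. A minimal sketch of what such a helper could look like, assuming it simply reads one named file out of the zip archive and returns its text (empty string when the entry is missing or the archive is unreadable):

import zipfile

def read_log(filename, zip_path):
    # Hypothetical helper: return the contents of `filename` inside the zip
    # archive at `zip_path`, or an empty string if it cannot be read.
    try:
        with zipfile.ZipFile(zip_path) as archive:
            return archive.read(filename).decode("utf-8", errors="replace")
    except (KeyError, FileNotFoundError, zipfile.BadZipFile):
        return ""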
Example #3
def clear_logfiles(payload):
    if payload is None:
        log.error("Cannot clear log file (missing parameters)")
        return

    pr_full_name = github.pr_full_name(payload)
    pr_number = github.pr_number(payload)
    pr_id = github.pr_id(payload)
    pr_sha1 = github.pr_sha1(payload)

    log_file_dir = "{p}/{fn}/{n}/{i}/{s}".format(p=settings.log_dir(),
                                                 fn=pr_full_name,
                                                 n=pr_number,
                                                 i=pr_id,
                                                 s=pr_sha1)

    for zf in glob.glob("{}/*.zip".format(log_file_dir)):
        if os.path.isfile(zf):
            os.remove(zf)
Example #4
    def start_job(self):
        jobdefs = get_job_definitions()

        # Just local to save some typing further down
        payload = self.job.payload

        # To prevent old logs from showing up on the web-page, start by
        # removing all of them.
        ibl.clear_logfiles(payload)

        for jd in jobdefs:
            log.info("Start clone, build ... sequence for {}".format(self.job))

            # Replace .yaml with .zip
            full_log_file = Path(jd).name.replace(".yaml", ".zip")

            log.debug("full_log_file: {}".format(full_log_file))

            with open(jd, 'r') as yml:
                yml_config = yaml.safe_load(yml)

            # Loop all defined values
            for logtype in ibl.log2str.values():
                try:
                    yml_iter = yml_config[logtype]
                except KeyError:
                    continue

                child = spawn_pexpect_child(self.job)
                current_log_file = "{}/{}.log".format(settings.log_dir(),
                                                      logtype)
                with open(current_log_file, 'w') as f:
                    child.logfile_read = f

                    if yml_iter is None:
                        ibl.store_logfile(payload, current_log_file,
                                          full_log_file)
                        continue

                    for i in yml_iter:
                        log.debug("")
                        c, e, cr, to = get_yaml_cmd(i)

                        if not do_pexpect(child, c, e, cr, to):
                            terminate_child(child)
                            run_teardown(yml_config)
                            log.error("job type: {} failed!".format(logtype))
                            ibl.store_logfile(payload, current_log_file,
                                              full_log_file)
                            github.update_state(payload, "failure", "Stage {} "
                                                "failed!".format(logtype))
                            return status.FAIL

                        if self.stopped():
                            terminate_child(child)
                            run_teardown(yml_config)
                            log.debug("job type: {} cancelled!".format(
                                      logtype))
                            ibl.store_logfile(payload, current_log_file,
                                              full_log_file)
                            github.update_state(payload, "failure", "Job was "
                                                "stopped by user (stage {})!"
                                                "".format(logtype))
                            return status.CANCEL

                ibl.store_logfile(payload, current_log_file, full_log_file)
            run_teardown(yml_config)

        github.update_state(payload, "success", "All good!")
        return status.SUCCESS
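get_yaml_cmd and do_pexpect are also outside this excerpt. Judging only from the unpacking c, e, cr, to = get_yaml_cmd(i), each entry in a stage's YAML list presumably carries a command, an expected string, a return-code check flag and a timeout. A sketch under that assumption; the key names and defaults are guesses, not the project's actual schema:

def get_yaml_cmd(yml_item):
    # Hypothetical helper: extract command, expected output, return-code
    # check flag and timeout from one YAML list entry, with lenient defaults.
    cmd = yml_item.get('cmd')
    expect = yml_item.get('exp')
    check_return = yml_item.get('chk', True)
    timeout = yml_item.get('timeout', 60)
    return cmd, expect, check_return, timeout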