def cancel(self, pr_id, pr_sha1):
    """Cancel the job identified by pr_id/pr_sha1.

    Removes it from the pending queue if queued, stops it if it is the
    currently running job, and otherwise just marks it as cancelled in
    the database and reports failure back to GitHub.
    """
    force_update = True

    # Stop pending jobs.
    # Bug fix: the original did `del self.q[i]` while iterating self.q
    # with enumerate(), which skips the element following each deletion.
    # Iterate over a snapshot and remove from the live list instead.
    for elem in list(self.q):
        job_in_queue = self.job_dict[elem]
        if (job_in_queue.pr_id() == pr_id and
                job_in_queue.pr_sha1() == pr_sha1):
            log.debug("Got a stop from web {}/{}".format(pr_id, pr_sha1))
            self.q.remove(elem)
            db.update_job(job_in_queue.pr_id(), job_in_queue.pr_sha1(),
                          status.d[status.CANCEL], "N/A")
            force_update = False

    # Stop the running job
    if self.jt is not None:
        if (self.jt.job.pr_id() == pr_id and
                self.jt.job.pr_sha1() == pr_sha1):
            log.debug("Got a stop from web {}/{}".format(pr_id, pr_sha1))
            self.jt.stop()
            force_update = False

    # If it wasn't in the queue nor running, then just update the status
    if force_update:
        db.update_job(pr_id, pr_sha1, status.d[status.CANCEL], "N/A")
        payload = db.get_payload_from_pr_id(pr_id, pr_sha1)
        github.update_state(payload, "failure", "Job cancelled!")
def add(self, payload):
    """Add a new GitHub-initiated job to the job queue.

    Non user initiated jobs (queued or running) touching the same PR are
    cancelled first, since this newer payload supersedes them. The new
    job is recorded in the database and reported as pending to GitHub.
    """
    if payload is None:
        log.error("Missing payload when trying to add job")
        return

    pr_id = github.pr_id(payload)
    pr_number = github.pr_number(payload)
    pr_sha1 = github.pr_sha1(payload)
    pr_full_name = github.pr_full_name(payload)

    with self.lock:
        log.info("Got GitHub initiated add {}/{} --> PR#{}".format(
            pr_id, pr_sha1, pr_number))

        # Check whether the jobs in the current queue touches the same PR
        # number as this incoming request does.
        # Bug fix: iterate over a snapshot — the original did
        # `del self.q[i]` while enumerating self.q, which skips the
        # element following each deletion.
        for elem in list(self.q):
            job_in_queue = self.job_dict[elem]
            # Remove existing jobs as long as they are not user initiated
            # jobs.
            if (job_in_queue.pr_number() == pr_number and
                    job_in_queue.pr_full_name() == pr_full_name):
                if not job_in_queue.user_initiated:
                    log.debug("Non user initiated job found in queue, "
                              "removing {}".format(elem))
                    self.q.remove(elem)
                    db.update_job(job_in_queue.pr_id(),
                                  job_in_queue.pr_sha1(),
                                  status.d[status.CANCEL], "N/A")
                    github.update_state(job_in_queue.payload, "failure",
                                        "Job cancelled!")

        # Check whether current job also should be stopped (i.e, same
        # PR, but _not_ user initiated).
        # Bug fix: pr_full_name is a method and must be called — the
        # original compared the bound method itself to a string, which is
        # always False, so the running job was never stopped here.
        if (self.jt is not None and
                self.jt.job.pr_number() == pr_number and
                self.jt.job.pr_full_name() == pr_full_name and
                not self.jt.job.user_initiated):
            log.debug("Non user initiated job found running, "
                      "stopping {}".format(self.jt.job))
            self.jt.stop()

        pr_id_sha1 = "{}-{}".format(pr_id, pr_sha1)
        self.q.append(pr_id_sha1)
        new_job = job.Job(payload, False)
        self.job_dict[pr_id_sha1] = new_job
        db.add_build_record(new_job.payload)
        db.update_job(pr_id, pr_sha1, status.d[status.PENDING], "N/A")
        github.update_state(payload, "pending", "Job added to queue")
def user_add(self, pr_id, pr_sha1):
    """Queue a user initiated job for the given pr_id/pr_sha1.

    The payload is looked up from the database (it must have been stored
    by an earlier GitHub-initiated run); the job is then appended to the
    queue, marked pending in the database and reported to GitHub.
    """
    if pr_id is None or pr_sha1 is None:
        log.error("Missing pr_id or pr_sha1 when trying to submit user "
                  "job")
        return

    with self.lock:
        log.info("Got user initiated add {}/{}".format(pr_id, pr_sha1))

        payload = db.get_payload_from_pr_id(pr_id, pr_sha1)
        if payload is None:
            log.error("Didn't find payload for ID:{}".format(pr_id))
            return

        # Queue key is the same "<pr_id>-<pr_sha1>" format used by add().
        key = "{}-{}".format(pr_id, pr_sha1)
        self.q.append(key)
        # True marks this job as user initiated, protecting it from being
        # auto-cancelled when a newer payload for the same PR arrives.
        self.job_dict[key] = job.Job(payload, True)
        db.update_job(pr_id, pr_sha1, status.d[status.PENDING], "N/A")
        github.update_state(payload, "pending", "Job added to queue")
def run(self):
    """This is the main function for running a complete clone, build,
    flash and test job."""
    global export_history

    state = status.d[status.RUNNING]
    log.debug("Job/{} : {}".format(state, self.job))

    started_at = time.time()
    pr_id = self.job.pr_id()
    pr_sha1 = self.job.pr_sha1()

    # Mark the job as running both in the database and on GitHub.
    db.update_job(pr_id, pr_sha1, state, "N/A")
    github.update_state(self.job.payload, "pending", "Job running!")

    # start_job() returns a status key (SUCCESS/FAIL/CANCEL).
    state = status.d[self.start_job()]
    export_history.clear()

    elapsed = utils.get_running_time(started_at)
    log.debug("Job/{} : {} --> {}".format(state, self.job, elapsed))
    db.update_job(pr_id, pr_sha1, state, elapsed)
def start_job(self):
    """Run every job definition (clone, build, flash, test ...) for the
    current job.

    Returns status.SUCCESS when all stages pass, status.FAIL on the
    first failing stage and status.CANCEL if the job was stopped by the
    user. Per-stage logs are stored via ibl and the final state is
    reported back to GitHub.
    """
    jobdefs = get_job_definitions()

    # Just local to save some typing further down
    payload = self.job.payload

    # To prevent old logs from showing up on the web-page, start by
    # removing all of them.
    ibl.clear_logfiles(payload)

    for jd in jobdefs:
        log.info("Start clone, build ... sequence for {}".format(self.job))

        # Replace .yaml with .zip
        full_log_file = Path(jd).name.replace(".yaml", ".zip")
        log.debug("full_log_file: {}".format(full_log_file))

        with open(jd, 'r') as yml:
            # Bug fix: use safe_load — yaml.load() without an explicit
            # Loader is deprecated (PyYAML >= 5.1) and can construct
            # arbitrary Python objects; job definitions are plain data.
            yml_config = yaml.safe_load(yml)

        # Loop all defined values
        for k, logtype in ibl.log2str.items():
            try:
                yml_iter = yml_config[logtype]
            except KeyError:
                # This stage is not defined in the job definition.
                continue

            child = spawn_pexpect_child(self.job)
            current_log_file = "{}/{}.log".format(settings.log_dir(),
                                                  logtype)
            # Keep the log file open while the child runs so pexpect can
            # stream everything it reads into it.
            with open(current_log_file, 'w') as f:
                child.logfile_read = f

                if yml_iter is None:
                    # Stage present but empty: just store the (empty) log.
                    ibl.store_logfile(payload, current_log_file,
                                      full_log_file)
                    continue

                for i in yml_iter:
                    log.debug("")
                    c, e, cr, to = get_yaml_cmd(i)

                    if not do_pexpect(child, c, e, cr, to):
                        terminate_child(child)
                        run_teardown(yml_config)
                        log.error("job type: {} failed!".format(logtype))
                        ibl.store_logfile(payload, current_log_file,
                                          full_log_file)
                        github.update_state(payload, "failure", "Stage {} "
                                            "failed!".format(logtype))
                        return status.FAIL

                    if self.stopped():
                        terminate_child(child)
                        run_teardown(yml_config)
                        log.debug("job type: {} cancelled!".format(
                            logtype))
                        ibl.store_logfile(payload, current_log_file,
                                          full_log_file)
                        github.update_state(payload, "failure", "Job was "
                                            "stopped by user (stage {})!"
                                            "".format(logtype))
                        return status.CANCEL

                ibl.store_logfile(payload, current_log_file, full_log_file)

    # NOTE(review): yml_config here is whatever the *last* job definition
    # loaded — presumably teardown is shared/identical across them; verify.
    run_teardown(yml_config)
    github.update_state(payload, "success", "All good!")
    return status.SUCCESS