def store_logfile(payload, current_file, full_log_file):
    """Archive *current_file* into a zip under the per-PR log directory.

    The directory is derived from the payload as
    <log_dir>/<full_name>/<pr_number>/<pr_id>/<sha1> and created on demand.

    Parameters
    ----------
    payload : dict
        GitHub webhook payload used to derive the PR identifiers.
    current_file : str
        Path of the log file to add to the archive.
    full_log_file : str
        File name of the destination zip archive.
    """
    if payload is None or current_file is None or full_log_file is None:
        log.error("Cannot store log file (missing parameters)")
        return

    pr_full_name = github.pr_full_name(payload)
    pr_number = github.pr_number(payload)
    pr_id = github.pr_id(payload)
    pr_sha1 = github.pr_sha1(payload)

    log_file_dir = "{p}/{fn}/{n}/{i}/{s}".format(p=settings.log_dir(),
                                                 fn=pr_full_name,
                                                 n=pr_number,
                                                 i=pr_id,
                                                 s=pr_sha1)
    # Idempotent directory creation; replaces the os.stat/FileNotFoundError
    # probe, which was racy between the stat and the makedirs.
    os.makedirs(log_file_dir, exist_ok=True)

    source = current_file
    dest = "{d}/{f}".format(d=log_file_dir, f=full_log_file)
    try:
        # Context manager guarantees the archive is flushed and closed even
        # if write() raises (the original leaked the open ZipFile handle).
        with zipfile.ZipFile(dest, mode='a',
                             compression=zipfile.ZIP_DEFLATED) as zf:
            zf.write(source)
    except FileNotFoundError:
        # NOTE(review): with mode='a' the missing file is normally the
        # *source*, not dest — message kept as-is to preserve log output.
        log.error("Couldn't find file {}".format(dest))
def add_build_record(payload):
    """Insert a job row for the payload's PR/sha1 unless one already exists.

    Stores pr_id, pr_number, full_name, sha1, a timestamp and the raw JSON
    payload in the ``job`` table. Duplicate (pr_id, sha1) pairs are skipped.
    """
    pr_id = github.pr_id(payload)
    pr_sha1 = github.pr_sha1(payload)
    log.debug("Adding record for {}/{}".format(pr_id, pr_sha1))

    if pr_id == 0 or pr_sha1 == 0:
        # Fixed typo in the original message ("add s record").
        log.error("Trying to add a record with no pr_id or pr_sha1!")
        return

    con = db_connect()
    cur = con.cursor()

    # Parameterized query: the original interpolated the values straight
    # into the SQL string, which is injection-prone (and inconsistent with
    # the placeholder-based INSERT below).
    cur.execute("SELECT pr_id FROM job WHERE pr_id = ? AND sha1 = ?",
                (pr_id, pr_sha1))
    if cur.fetchall():
        log.debug("Record for pr_id/sha1 {}/{} is already in the "
                  "database".format(pr_id, pr_sha1))
        con.commit()
        con.close()
        return

    pr_number = github.pr_number(payload)
    pr_full_name = github.pr_full_name(payload)
    sql = ("INSERT INTO job (pr_id, pr_number, full_name, sha1, date, payload)"
           " VALUES(?, ?, ?, ?, datetime('now'), ?)")
    data = (pr_id, pr_number, pr_full_name, pr_sha1, json.dumps(payload))
    cur.execute(sql, data)
    con.commit()
    con.close()
def add(self, payload):
    """Add a new job to the job queue.

    Before enqueueing, cancels any queued — and stops any running —
    non user-initiated job that targets the same PR, so only the latest
    push for a PR is built.
    """
    if payload is None:
        log.error("Missing payload when trying to add job")
        return

    pr_id = github.pr_id(payload)
    pr_number = github.pr_number(payload)
    pr_sha1 = github.pr_sha1(payload)
    pr_full_name = github.pr_full_name(payload)

    with self.lock:
        log.info("Got GitHub initiated add {}/{} --> PR#{}".format(
            pr_id, pr_sha1, pr_number))

        # Check whether the jobs in the current queue touch the same PR
        # number as this incoming request does. Collect first, mutate
        # after: the original did `del self.q[i]` while enumerating
        # self.q, which skips the element following each deletion.
        stale = [elem for elem in self.q
                 if (self.job_dict[elem].pr_number() == pr_number and
                     self.job_dict[elem].pr_full_name() == pr_full_name and
                     not self.job_dict[elem].user_initiated)]

        for elem in stale:
            log.debug("Non user initiated job found in queue, "
                      "removing {}".format(elem))
            self.q.remove(elem)
            job_in_queue = self.job_dict[elem]
            db.update_job(job_in_queue.pr_id(), job_in_queue.pr_sha1(),
                          status.d[status.CANCEL], "N/A")
            github.update_state(job_in_queue.payload, "failure",
                                "Job cancelled!")

        # Check whether the currently running job also should be stopped
        # (i.e., same PR, but _not_ user initiated).
        # BUGFIX: the original compared the bound method `pr_full_name`
        # (no parentheses) against a string — always False, so a running
        # job was never stopped. It is now called like pr_number() above.
        if (self.jt is not None and
                self.jt.job.pr_number() == pr_number and
                self.jt.job.pr_full_name() == pr_full_name and
                not self.jt.job.user_initiated):
            log.debug("Non user initiated job found running, "
                      "stopping {}".format(self.jt.job))
            self.jt.stop()

        pr_id_sha1 = "{}-{}".format(pr_id, pr_sha1)
        self.q.append(pr_id_sha1)
        new_job = job.Job(payload, False)
        self.job_dict[pr_id_sha1] = new_job
        db.add_build_record(new_job.payload)
        db.update_job(pr_id, pr_sha1, status.d[status.PENDING], "N/A")
        github.update_state(payload, "pending", "Job added to queue")
def clear_logfiles(payload):
    """Remove every zip archive from the payload's per-PR log directory."""
    if payload is None:
        log.error("Cannot clear log file (missing parameters)")
        return

    # Directory layout: <log_dir>/<full_name>/<pr_number>/<pr_id>/<sha1>
    log_file_dir = "{p}/{fn}/{n}/{i}/{s}".format(
        p=settings.log_dir(),
        fn=github.pr_full_name(payload),
        n=github.pr_number(payload),
        i=github.pr_id(payload),
        s=github.pr_sha1(payload))

    for archive in glob.glob("{}/*.zip".format(log_file_dir)):
        if os.path.isfile(archive):
            os.remove(archive)
def pr_number(self):
    """Return the pull-request number extracted from this job's payload."""
    payload = self.payload
    return github.pr_number(payload)