def run(self):
    """Serve one accepted client connection until EOF or error.

    Reads newline-delimited ASCII commands from the socket, dispatches
    each through the daemon, and writes the JSON-encoded result back.
    The thread registers with the daemon for its lifetime and is always
    deregistered on exit.
    """
    ident = self.minion_daemon.add_thread()
    logger.meta.prefix = '[%s] ' % ident
    info = None
    try:
        # SO_PEERCRED on an AF_UNIX socket yields the peer's (pid, uid, gid).
        creds = self.sock.getsockopt(socket.SOL_SOCKET, socket.SO_PEERCRED,
                                     struct.calcsize('3i'))
        info = 'pid=%d, uid=%d, gid=%d' % struct.unpack('3i', creds)
        logger.info('accepting connection from %s' % (info,))
        buf = b''
        while True:
            data = self.sock.recv(4096)
            if not data:
                break
            buf += data
            if b'\n' not in buf:
                continue
            # Everything before the last newline is complete commands;
            # the tail stays buffered until more data arrives.
            cmds = buf.split(b'\n')
            buf = cmds[-1]
            for cmd in cmds[:-1]:
                cmd = cmd.decode('ascii')
                logger.debug('received %r' % (cmd,))
                output = self.minion_daemon.dispatch(self.sock, cmd)
                self.sock.sendall(json.dumps(output).encode('utf8'))
        self.sock.shutdown(socket.SHUT_RDWR)
    except BrokenPipeError:
        # Client disappeared mid-write; nothing useful to report.
        pass
    except Exception:
        logger.exception('error processing request')
    finally:
        # BUGFIX: close the socket on every exit path, not only after a
        # clean EOF, so descriptors are not leaked on errors.
        try:
            self.sock.close()
        except OSError:
            pass
        self.minion_daemon.rm_thread(ident)
        logger.info('finished')
def dispatch_add_blob(self, sock, cmd):
    """Handle the add-blob command: store each named blob in the blob store."""
    args = minion.cmd.add_blob(MinionThrowingArgumentParser()).parse_args(cmd)
    for blob in args.blobs:
        digest = self.blobs.add(blob)
        logger.info('manually added %s as %s...' % (blob, digest[:8]))
    return {'status': 'success'}
def run(self):
    """Drive a single process build through the docker pipeline."""
    try:
        logger.meta.prefix = '[build %s/%s] ' % (self.controller.name, self.proc.name)
        if not self.controller.wait_for(self.proc):
            logger.info('aborting because of prior failure')
            return
        srcs, arts = self.controller.get_inputs()
        # A "released" stub (image placeholder '-') short-circuits the build.
        released_stub = self.stub(srcs, arts, '-')
        if self.controller.is_cached(released_stub):
            logger.debug('finishing with released copy')
            self.controller.finish_cached(self.proc, released_stub, released=True)
            return
        img = self.build_image(self.proc.path)
        logger.debug('docker image is %r' % img)
        cache_stub = self.stub(srcs, arts, img)
        if self.controller.is_cached(cache_stub):
            success, _, _ = self.controller.get_cached(self.proc, cache_stub)
            # Reuse the cached result unless it failed and retries are enabled.
            if success or not self.controller.retry_failures:
                logger.debug('finishing with cached copy')
                self.controller.finish_cached(self.proc, cache_stub)
                return
        success, log, arts = self.run_image(srcs, arts, cache_stub, img)
        if success:
            self.controller.finish_success(self.proc, cache_stub, log, arts)
        else:
            self.controller.finish_error(self.proc, cache_stub, log)
    except Exception:
        logger.exception('docker worker failed')
    finally:
        self.controller.abort_if_not_finished(self.proc)
def create_socket(self):
    """Bind and return the daemon's listening UNIX socket, replacing any stale file."""
    logger.info("creating socket at %r" % self.SOCKET)
    if os.path.exists(self.SOCKET):
        # A previous daemon run may have left its socket file behind.
        logger.debug('socket already exists; erasing it')
        os.unlink(self.SOCKET)
    listener = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    listener.bind(self.SOCKET)
    listener.listen(8)
    return listener
def dispatch_del_target(self, sock, cmd):
    """Handle the del-target command: remove an existing target directory."""
    args = minion.cmd.del_target(MinionThrowingArgumentParser()).parse_args(cmd)
    with self._heads_mtx:
        target_dir = self.TARGET(args.target)
        if not os.path.exists(target_dir):
            raise MinionException("target %r doesn't exist" % args.target)
        logger.info('deleting target %r' % (args.target,))
        shutil.rmtree(target_dir)
    return {'status': 'success'}
def dispatch_forget_build_failure(self, sock, cmd):
    """Handle the forget-build-failure command.

    Drops the cached failure record for one process within a saved build
    report so a subsequent build will retry it.
    """
    parser = minion.cmd.forget_build_failure(MinionThrowingArgumentParser())
    args = parser.parse_args(cmd)
    buildname = self.get_build(args.target, args.name)
    # BUGFIX: close the report file instead of leaking the handle.
    with open(os.path.join(self.BUILDS, buildname)) as f:
        report = json.loads(f.read())
    for x in report['reports']:
        if x['name'] == args.process:
            self.processes.forget(x['inputs'])
            logger.info('removed failed build %s from build %s' % (args.process, buildname))
    # BUGFIX: report success like every other dispatch_* handler;
    # previously this fell off the end and returned None.
    return {'status': 'success'}
def finish_cached(self, proc, stub, released=False):
    """Record a cache hit for *proc*, reusing the previously stored output."""
    iid = self.minion.blobs.cat(stub)
    oid = self.minion.processes.lookup(iid)
    # A cache hit implies the output mapping must already exist.
    assert oid is not None
    success, log, artifacts = self.minion.read_output(proc, oid)
    with self.mtx:
        logger.info('cached: success' if success else 'cached: failure')
        self._report(proc, success, iid, oid,
                     artifacts=artifacts, cached=True, released=released)
def dispatch_new_target(self, sock, cmd):
    """Handle the new-target command: create a target seeded from global HEADS."""
    args = minion.cmd.new_target(MinionThrowingArgumentParser()).parse_args(cmd)
    with self._heads_mtx:
        target_dir = self.TARGET(args.target)
        if os.path.exists(target_dir):
            raise MinionException('target %r already exists' % args.target)
        if not os.path.exists(self.HEADS):
            raise MinionException('heads missing; run update-heads and retry')
        os.makedirs(target_dir)
        logger.info('creating target %r' % (args.target,))
        # Both the automatic and pinned head sets start as copies of HEADS.
        for filename in ('AUTO', 'HEADS'):
            shutil.copyfile(self.HEADS, os.path.join(target_dir, filename))
    return {'status': 'success'}
def run(self):
    """Consume queued builds forever, persisting each finished report."""
    while True:
        md = self.minion_daemon
        output, jc = md._builds_queue.get()
        logger.info('[build %s] starting' % output['name'])
        jc.run()
        for rep in jc.reports:
            entry = dict(rep._asdict())
            entry['name'] = str(entry['name'])
            output['reports'].append(entry)
        report_name = output['name']
        rblob = md.blobs.cat(json.dumps(output).encode('utf8'))
        md.blobs.copy(rblob, os.path.join(md.BUILDS, report_name))
        with md._builds_mtx:
            # The build stops being "in flight" once its report is on disk.
            md._builds_set.remove(report_name)
        logger.info('[build %s] finished' % output['name'])
def dispatch_status(self, sock, cmd):
    """Handle the status command: render a report of a finished build.

    Returns a status dict whose 'output' is the formatted report, or a
    failure when the reporter or the build does not exist.
    """
    parser = minion.cmd.status(MinionThrowingArgumentParser())
    args = parser.parse_args(cmd)
    if args.name is not None:
        display_name = '%s:%s' % (args.target, args.name)
    else:
        display_name = '%s' % args.target
    logger.info('checking build status of %s' % display_name)
    build = self.get_build(args.target, args.name)
    # CLI report names use dashes; reporter method names use underscores.
    reporter = args.report.replace('-', '_')
    reporter = getattr(self, 'report_' + reporter, None)
    if reporter is None:
        return {'status': 'failure', 'output': 'no such reporter'}
    if build is None:
        return {'status': 'failure', 'output': 'no such build as %s' % display_name}
    logger.info('generating %s report of %s' % (args.report, display_name))
    # BUGFIX: close the report file instead of leaking the handle.
    with open(os.path.join(self.BUILDS, build)) as f:
        report = json.loads(f.read())
    return {'status': 'success', 'output': reporter(report)}
def dispatch_set_refspec(self, sock, cmd):
    """Handle the set-refspec command: pin a git source's head in a target."""
    args = minion.cmd.set_refspec(MinionThrowingArgumentParser()).parse_args(cmd)
    with self._heads_mtx:
        target_dir = self.TARGET(args.target)
        if not os.path.exists(target_dir):
            raise MinionException("target %r doesn't exist" % args.target)
        parsed, sources = self.parsed_sources([args.source])
        assert len(sources) == 1
        source = sources[0]
        heads_path = os.path.join(target_dir, 'HEADS')
        heads = self.sources_load(heads_path)
        # Refspecs only make sense for git-backed sources.
        if not isgit(source):
            raise MinionException('cannot set refspec for non-git source %s' % source)
        heads[source.name.normal] = self.get_source_git(source, args.refspec)
        logger.info('updating head %r in target %r to %r' %
                    (source.name.normal, args.target, args.refspec))
        self.sources_save(heads_path, heads, [p.name.normal for p in parsed])
    return {'status': 'success'}
def dispatch_build(self, sock, cmd):
    """Handle the build command: snapshot inputs and enqueue a build job.

    Builds a JobController from the current minionfile and the target's
    HEADS snapshot, then hands it to the background build queue.  Returns
    a success status once the job is queued (not when it completes).
    """
    parser = minion.cmd.build(MinionThrowingArgumentParser())
    args = parser.parse_args(cmd)
    chosen_procs = None
    if args.processes:
        # Restrict the build to a comma-separated subset of processes.
        args.processes = tuple(args.processes.split(','))
        chosen_procs = self.parse_subset(args.processes)
    # Default report name is a UTC timestamp, namespaced by target.
    report_name = args.name or datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S')
    report_name = args.target + ':' + report_name
    logger.info('running build process for %s; results will be saved to %s' % (args.target, report_name))
    path = self.TARGET(args.target)
    if not os.path.exists(path):
        raise MinionException("target %r doesn't exist" % args.target)
    with self._heads_mtx:
        # Snapshot the minionfile and the target's heads into the blob
        # store under the heads lock so the pair is consistent.
        # NOTE(review): the collapsed source makes the exact extent of
        # this lock ambiguous — confirm against revision history.
        mblob = self.blobs.add(self.MINIONFILE)
        sblob = self.blobs.add(os.path.join(path, 'HEADS'))
        minionfile = self.blobs.path(mblob)
        logger.debug('using minionfile %s' % minionfile)
        sources = self.blobs.path(sblob)
        logger.debug('using sources %s' % sources)
        jc = JobController(self, self.sources_load(sources), report_name)
        jc.retry_failures = args.retry_failures
        for proc in self.parse(minionfile):
            if not isprocess(proc):
                continue
            # No subset given means build everything.
            if not args.processes or proc.name in chosen_procs:
                logger.debug('adding %s' % (proc,))
                jc.add(proc)
        output = {}
        output['name'] = report_name
        output['minionfile'] = mblob
        output['sources'] = sblob
        output['reports'] = []
        with self._builds_mtx:
            # Guard against a name collision with a finished build on
            # disk or one still in the queue.
            path = os.path.join(self.BUILDS, report_name)
            if os.path.exists(path) or report_name in self._builds_set:
                raise MinionException('build %r already exists' % report_name)
            self._builds_set.add(report_name)
            self._builds_queue.put((output, jc))
    return {'status': 'success'}
def sync_target(self, parsed, sources, target, HEADS):
    """Fold the global HEADS into one target's AUTO and HEADS files.

    A head in the target is advanced only when the target has no record
    of it yet, or when it was still tracking the automatic value.
    """
    path = self.TARGET(target)
    auto = self.sources_load(os.path.join(path, 'AUTO'))
    heads = self.sources_load(os.path.join(path, 'HEADS'))
    # Drop entries for sources that no longer exist globally.
    for stale in [k for k in auto if k not in HEADS]:
        del auto[stale]
    for stale in [k for k in heads if k not in HEADS]:
        del heads[stale]
    for src in sources:
        name = src.name.normal
        assert name in HEADS
        unseen = name not in heads or name not in auto
        tracking = (not unseen and auto[name] == heads[name]
                    and auto[name] != HEADS[name])
        if unseen or tracking:
            heads[name] = HEADS[name]
            logger.info('updating head %r in target %r' % (name, target))
        auto[name] = HEADS[name]
    parsed_names = [p.name.normal for p in parsed]
    self.sources_save(os.path.join(path, 'AUTO'), auto, parsed_names)
    self.sources_save(os.path.join(path, 'HEADS'), heads, parsed_names)
def wait_for(self, proc):
    """Block until *proc* can start (or must abort); return whether to proceed.

    Waits on the controller's condition variable until either all of
    proc's artifact dependencies are available, progress has caught up,
    or a failure makes waiting pointless.

    NOTE(review): the exact meaning of self.processes[...][0] and
    self.failed (counters? sequence numbers?) is not visible here —
    confirm before relying on the comments below.
    """
    with self.mtx:
        # Position/priority value recorded for this process at add() time.
        proc_count = self.processes[proc.name][0]
        def done():
            # funky if <pred>: return True just to improve readability of
            # disjunction
            # All unfinished processes have reached at least our position.
            if min([c for (name, (c, t)) in self.processes.items() if name not in self.finished]) >= proc_count:
                return True
            # All artifact dependencies of proc have been produced.
            d = set([d for d in proc.dependencies if isinstance(d, ArtifactIdentifier)])
            a = set(self.artifacts.keys())
            if a.issuperset(d):
                return True
            # NOTE(review): done() wakes on self.failed < proc_count but the
            # final return requires self.failed > proc_count — the strict
            # inequalities leave self.failed == proc_count unhandled and the
            # directions look inconsistent; verify intended semantics.
            if self.failed < proc_count:
                return True
            return False
        while not done():
            self.cnd.wait()
        d = set([d for d in proc.dependencies if isinstance(d, ArtifactIdentifier)])
        a = set(self.artifacts.keys())
        logger.info('starting')
        # Proceed only if no blocking failure and every dependency is present.
        return self.failed > proc_count and (d & a) == d
def finish_bool(self, success, proc, stub, log, artifacts):
    """Record the outcome of a finished process run.

    Builds a human-readable record blob (status, log blob id, and on
    success each produced artifact), maps the input id to it in the
    process cache, and reports the result to the controller.
    """
    # Normalize the log to text, tolerating undecodable bytes.
    # Idiom fix: use isinstance() rather than `type(log) == bytes`.
    if isinstance(log, bytes):
        log = log.decode('utf8', 'ignore')
    record = ''
    if success:
        record += 'Status: success\n'
    else:
        record += 'Status: failure\n'
    record += 'Log: %s\n' % self.minion.blobs.cat(log.encode('utf8', 'ignore'))
    if success:
        # Every declared artifact must have been produced.
        for a in proc.artifacts:
            assert a in artifacts
            sha256, path = artifacts[a]
            record += 'Artifact %s: %s %s\n' % (a, sha256, path)
    iid = self.minion.blobs.cat(stub)
    oid = self.minion.blobs.cat(record.encode('utf8'))
    with self.mtx:
        if success:
            logger.info('finished: success')
        else:
            logger.info('finished: failure')
        self.minion.processes.insert(iid, oid)
        self._report(proc, success, iid, oid, artifacts=artifacts)
def dispatch_sync_target(self, sock, cmd):
    """Handle the sync-target command: pull fresh global heads into a target."""
    args = minion.cmd.sync_target(MinionThrowingArgumentParser()).parse_args(cmd)
    with self._heads_mtx:
        parsed, sources = self.parsed_sources(args.sources)
        if not os.path.exists(self.HEADS):
            raise MinionException('heads missing; run update-heads and retry')
        target_dir = self.TARGET(args.target)
        if not os.path.exists(target_dir):
            raise MinionException("target %r doesn't exist" % args.target)
        old_auto = self.sources_load(os.path.join(target_dir, 'AUTO'))
        new_auto = self.sources_load(self.HEADS)
        heads = self.sources_load(os.path.join(target_dir, 'HEADS'))
        for src in sources:
            name = src.name.normal
            if name not in new_auto:
                raise MinionException('head %s missing; run update-heads and retry' % src.name)
            # Advance unless the target pinned a head the auto value still matches.
            if name not in heads or name not in old_auto or old_auto[name] != new_auto[name]:
                heads[name] = new_auto[name]
                logger.info('updating head %r in target %r' % (name, args.target))
        parsed_names = [p.name.normal for p in parsed]
        self.sources_save(os.path.join(target_dir, 'AUTO'), new_auto, parsed_names)
        self.sources_save(os.path.join(target_dir, 'HEADS'), heads, parsed_names)
    return {'status': 'success'}
def dispatch_build_status(self, sock, cmd):
    """Handle the build-status command: report on a possibly-running build.

    Returns a status dict whose 'output' is either a progress message or
    the formatted report of the finished build.
    """
    parser = minion.cmd.build_status(MinionThrowingArgumentParser())
    args = parser.parse_args(cmd)
    if args.name is not None:
        logger.info('checking build status of %s:%s' % (args.target, args.name))
    else:
        logger.info('checking build status of %s' % (args.target,))
    build = self.get_build(args.target, args.name)
    reporter = getattr(self, 'report_' + args.report, None)
    if reporter is None:
        raise MinionException('unimplemented report type %r' % args.report)
    # get_build signals these two states with sentinel strings.
    if build == 'in-progress':
        return {'status': 'success', 'output': '%s is still running' % args.target}
    if build == 'not found':
        return {'status': 'success', 'output': 'no such build as %s' % args.target}
    logger.info('generating %s report of %s' % (args.report, build))
    # BUGFIX: close the report file instead of leaking the handle.
    with open(os.path.join(self.BUILDS, build)) as f:
        report = json.loads(f.read())
    return {'status': 'success', 'output': reporter(report)}
def configure_logging(self):
    """Route all logging to the daemon's logfile at INFO level."""
    logging.basicConfig(filename=self.LOGFILE,
                        format='%(asctime)s %(levelname)-8s %(message)s',
                        datefmt='%Y-%m-%dT%H:%M:%S',
                        level=logging.INFO)
    logger.info('starting new minion-daemon: pid=%d' % os.getpid())