def stop_task(self, ctx, uuid):
    print "[LOG] stopping task: %s" % uuid
    task = db_api.task_get(uuid)
    for pid in task.pids:
        print "[LOG] interrupt process %s" % pid
        p = psutil.Process(int(pid))
        p.send_signal(signal.SIGINT)
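
# Hedged variant (not in the original): psutil raises NoSuchProcess when a
# recorded pid has already exited, so the interrupt loop can be hardened as
# sketched below. The helper name interrupt_task_pids is hypothetical.
def interrupt_task_pids(uuid):
    task = db_api.task_get(uuid)
    for pid in task.pids:
        try:
            psutil.Process(int(pid)).send_signal(signal.SIGINT)
        except psutil.NoSuchProcess:
            print "[LOG] process %s already gone, skipping" % pid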
def run(config):
    uuid = config.get("uuid")
    last = config.get("last")
    if last and uuid:
        raise ValueError("can't pass both last and uuid")
    elif not last and not uuid:
        task = get_last_task()
    elif last:
        task = get_last_task()
    elif uuid and len(uuid) < LOWEST:
        print "need at least %d characters of a uuid to find a task" % LOWEST
        return
    else:  # len(uuid) >= LOWEST
        task = db_api.task_get(uuid, fuzzy=True)
    print "command report: %s" % config
    print "task: <%s>" % task.uuid
    rets = []
    for ret_uuid in task.results:
        ret = db_api.result_get(ret_uuid)
        rets.append(ret)
    if config.get("html"):
        generate_multiple_result_html(rets)
    else:
        for ret in rets:
            pprint_result(ret)
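
# Hedged usage sketch (not from the original source): config is a plain dict
# and LOWEST is the minimum uuid prefix length defined elsewhere in the module.
# The uuid prefix "3fa8" below is purely illustrative.
#
#   run({"last": True})                  # report on the most recent task
#   run({"uuid": "3fa8", "html": True})  # fuzzy-match a task by uuid prefix, render HTML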
        while True:
            t = worker.stdout.readline()
            if not len(t):
                break
            _t = (time.time(), t.strip())
            out.append(_t)
    except KeyboardInterrupt:
        pass

    # Use psutil instead of os.kill: this signal goes to the worker script
    # process so it can shut down cleanly.
    worker_p = psutil.Process(worker.pid)
    worker_p.send_signal(signal.SIGINT)

    parse_func = getattr(base, "parse_%s" % ag)

    # TODO: a file lock is fine on localhost; a distributed lock backend
    # (e.g. redis) is needed instead once this runs on multiple hosts.
    co = coordination.get_coordinator("file:///tmp", b"localhost")
    co.start()
    lock = co.get_lock("task_update_lock")
    with lock:
        task = db_api.task_get(task_uuid)
        results = copy(task.results)
        for ret in parse_func(out):
            ret = db_api.result_create(**ret)
            results.append(ret.uuid)
        db_api.task_update(task_uuid, results=results)
    time.sleep(2)
    co.stop()
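
# Hedged sketch (not part of the original source) for the TODO above: tooz also
# ships a redis backend, so the same lock pattern becomes distributed by changing
# only the coordinator URL. The redis address, member id, and the helper name
# _distributed_lock_example are assumptions for illustration.
def _distributed_lock_example(task_uuid):
    co = coordination.get_coordinator("redis://127.0.0.1:6379", b"agent-1")
    co.start()
    lock = co.get_lock("task_update_lock")
    with lock:
        # critical section: read-modify-write the task record, as above
        task = db_api.task_get(task_uuid)
        db_api.task_update(task_uuid, results=copy(task.results))
    co.stop()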
def get_task(self, ctx, uuid, fuzzy):
    print "[LOG] reading task: %s" % uuid
    task = db_api.task_get(uuid, fuzzy)
    # TODO(kun): return a task object instead of a plain dict
    return {"uuid": task.uuid, "results": task.results}