Example #1
    def start_tracers(self, ctx, tracers):
        # Collect the names of tracers that are already running so they are
        # not started twice (a set also survives repeated membership checks,
        # unlike a lazy map/filter chain).
        all_tr = self.tracer_list(ctx)
        running_tr = {t["name"] for t in all_tr if t["running"]}
        task = db_api.task_create(results=[], pids=[])

        pids = []
        for tr in tracers:
            if tr in running_tr:
                LOG.info("%s is running, skipped", tr)
            else:
                # Launch an agent for this tracer under the new task and
                # remember its pid.
                pid = base.run_agent(task.uuid, tr)
                LOG.debug("saving pid %s", pid)
                self.set_tracer_pid(ctx, tr, pid)
                pids.append(pid)

        task = db_api.task_update(task.uuid, pids=pids)
        LOG.info("task <%s> started successfully", task.uuid)
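The interesting part of this example is the running-tracer filter: only tracers that are not already running get an agent started for them. A minimal standalone sketch of that check, using hypothetical tracer dicts in place of a real tracer_list() result, looks like this:

    # Sketch of the running-tracer filter; the tracer dicts below are
    # hypothetical sample data, not output of a real tracer_list() call.
    all_tr = [
        {"name": "cpu", "running": True},
        {"name": "net", "running": False},
    ]
    running_tr = {t["name"] for t in all_tr if t["running"]}

    for tr in ("cpu", "net"):
        if tr in running_tr:
            print("%s is running, skipped" % tr)
        else:
            print("would start %s" % tr)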
Example #2
        # Read the worker's stdout line by line, timestamping each line,
        # until the stream closes.
        while True:
            t = worker.stdout.readline()
            if not t:
                break
            out.append((time.time(), t.strip()))
    except KeyboardInterrupt:
        pass

    # Send SIGINT to the worker's script process via psutil so it can shut
    # down cleanly.
    worker_p = psutil.Process(worker.pid)
    worker_p.send_signal(signal.SIGINT)

    # Look up the parser that matches this agent type, e.g. base.parse_<ag>.
    parse_func = getattr(base, "parse_%s" % ag)

    # TODO: a file-based lock is fine on a single host; a Redis backend is
    # needed for a distributed lock instead.
    co = coordination.get_coordinator("file:///tmp", b"localhost")
    co.start()
    lock = co.get_lock("task_update_lock")
    with lock:
        # Under the lock, append the newly parsed results to the task's
        # result list so concurrent updates do not clobber each other.
        task = db_api.task_get(task_uuid)
        results = copy(task.results)
        for ret in parse_func(out):
            ret = db_api.result_create(**ret)
            results.append(ret.uuid)
        db_api.task_update(task_uuid, results=results)
        time.sleep(2)
    co.stop()
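The TODO above comes down to the coordinator's backend URL: tooz can back the same get_lock()/with-lock pattern with Redis so the lock holds across hosts. A minimal sketch, assuming a Redis server on localhost:6379 and that tooz's Redis driver is installed (the member id and the lock body are placeholders):

    import time

    from tooz import coordination

    # Same locking pattern as above, but with a Redis-backed coordinator so
    # the lock is shared by workers on different hosts. The URL and member
    # id are illustrative assumptions.
    co = coordination.get_coordinator("redis://localhost:6379", b"worker-1")
    co.start()
    lock = co.get_lock(b"task_update_lock")
    with lock:
        # Critical section: only one worker holds the lock at a time.
        time.sleep(1)
    co.stop()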