Beispiel #1
0
def run(config):
    print "command start: %s" % config
    agents = _parse_agents_from_args(config)
    agents |= _parse_agents_from_file(config)
    running_agents = []
    for ag in agents:
        ag_exec = agents_map.get(ag)
        if ag_exec:
            ag_p = subprocess.Popen(ag_exec.split(), stdout=subprocess.PIPE)
            running_agents.append(ag_p)
    time.sleep(15)
    data = []
    for ag_p in running_agents:
        # shell scripts has depend child which can't be killed by subprocess' API
        # it should be ag_p.kill()
        #os.system("pkill -P %s" % ag_p.pid)
        ag_p.send_signal(signal.SIGINT)
        stdout = ag_p.stdout.read()
        data.append(stdout)
    rets = []
    ret = db_api.result_create(data)
    rets.append(ret.uuid)
    task = db_api.task_create(rets)
    print "task: %s runs successfully!" % task.uuid
    return
Beispiel #2
0
def save_result_to_task():
    global task_uuid
    global out
    parse_func = getattr(base, "parse_%s" % ag)

    # TODO(kun) file lock is okay in localhost, here need redis for distributed
    # lock istead
    co = coordination.get_coordinator("file:///tmp", b"localhost")
    co.start()
    lock = co.get_lock("task_update_lock")
    with lock:
        for ret in parse_func(out):
            ret = db_api.result_create(**ret)
            print "[LOG] appending result with id %s" % ret.uuid
            db_api.task_append_result(task_uuid, ret.uuid)
            db_api.tracer_append_result(ag, ret.uuid)
        print "[LOG] update tas with result %s" % task_uuid
        time.sleep(2)
    co.stop()
Beispiel #3
0
        # Stream the worker's stdout line by line, timestamping each line;
        # Ctrl-C simply ends collection early with whatever was gathered.
        while True:
            t = worker.stdout.readline()
            # readline() returns an empty string only at EOF, i.e. the
            # worker closed its stdout.
            if not len(t):
                break
            # Record (capture time, stripped line) pairs for later parsing.
            _t = (time.time(), t.strip())
            out.append(_t)
    except KeyboardInterrupt:
        pass

    # psutil is much more professional... I have to use it instead
    # this kill is to script process
    # NOTE(review): this signals only the worker process itself; children
    # spawned by a shell-script worker are presumably not signalled -- confirm.
    worker_p = psutil.Process(worker.pid)
    worker_p.send_signal(signal.SIGINT)

    # Resolve the agent-specific parser, i.e. base.parse_<agent name>.
    parse_func = getattr(base, "parse_%s" % ag)

    # TODO file lock is okay in localhost, here need redis for distributed
    # lock instead
    co = coordination.get_coordinator("file:///tmp", b"localhost")
    co.start()
    lock = co.get_lock("task_update_lock")
    with lock:
        # Read-modify-write of the task's result list: copy the current
        # uuids, append one per parsed result, then write the whole list
        # back in a single update (hence the cross-process lock).
        task = db_api.task_get(task_uuid)
        results = copy(task.results)
        for ret in parse_func(out):
            ret = db_api.result_create(**ret)
            results.append(ret.uuid)
        db_api.task_update(task_uuid, results=results)
        time.sleep(2)
    co.stop()