Example No. 1
    def _pipeline_fast_slow_execution(self):
        # Fast producer feeding a slow consumer
        # (assumes: import random, time, plus the project's utils module).
        print('fast_slow execution test begin')

        def func1():
            # Head worker: a generator emitting five random ints, one per second.
            i = 0
            while i < 5:
                ret = random.randint(1, 10)
                print('func1', ret)
                yield ret
                i += 1
                time.sleep(1)

        def func2(i):
            # Tail worker: takes two seconds per item, twice the producer's pace.
            try:
                time.sleep(2)
                print('      func2', i * 2)
            except Exception as err:
                print(err)

        pipeline = utils.Pipeline(1)
        pipeline.add_worker(func1)
        pipeline.add_worker(func2, tail=True)
        pipeline.start()
        #time.sleep(3)
        pipeline.join()
        print('fast_slow execution test end')
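These examples exercise a utils.Pipeline helper whose source is not included here. As orientation only, the following is a minimal sketch of a class compatible with the calls the tests make (Pipeline(queue_size), add_worker(func, workers=..., tail=...), start, stop, join, and the gears/workers attributes); the project's real implementation is not shown and will certainly differ in detail:

# Hypothetical sketch of a Pipeline matching the usage in these examples.
# Stages are joined by bounded queues; the head worker must be a generator,
# and a later worker may return a value or be a generator itself, in which
# case each yielded item is fanned out to the next stage.
import inspect
import queue
import threading

_STOP = object()  # sentinel telling a worker thread to shut down


class Pipeline(object):
    def __init__(self, queue_size=1):
        self._queue_size = queue_size
        self._lock = threading.Lock()
        self._stopping = threading.Event()
        self.gears = []    # one dict per stage
        self.workers = []  # one Thread per worker, across all stages

    def add_worker(self, func, workers=1, tail=False):
        inq = self.gears[-1]["outq"] if self.gears else None
        outq = None if tail else queue.Queue(self._queue_size)
        self.gears.append({"func": func, "n": workers, "live": workers,
                           "idx": len(self.gears), "inq": inq, "outq": outq})

    def _put(self, q, item):
        # Bounded put that gives up once the pipeline is being stopped.
        while not self._stopping.is_set():
            try:
                q.put(item, timeout=0.1)
                return
            except queue.Full:
                pass

    def _forward(self, gear, ret):
        if gear["outq"] is None or ret is None:
            return
        if inspect.isgenerator(ret):  # generator stage: fan out each item
            for item in ret:
                self._put(gear["outq"], item)
        else:
            self._put(gear["outq"], ret)

    def _emit_stop(self, gear):
        # The last live worker of a stage wakes every worker downstream.
        nxt = gear["idx"] + 1
        if gear["outq"] is not None and nxt < len(self.gears):
            for _ in range(self.gears[nxt]["n"]):
                self._put(gear["outq"], _STOP)

    def _produce(self, gear):
        for item in gear["func"]():   # the head stage is a generator
            if self._stopping.is_set():
                break
            self._put(gear["outq"], item)
        self._emit_stop(gear)

    def _consume(self, gear):
        while not self._stopping.is_set():
            try:
                item = gear["inq"].get(timeout=0.1)
            except queue.Empty:
                continue
            if item is _STOP:
                break
            self._forward(gear, gear["func"](item))
        with self._lock:
            gear["live"] -= 1
            last = gear["live"] == 0
        if last:
            self._emit_stop(gear)

    def start(self):
        for gear in self.gears:
            target = self._produce if gear["idx"] == 0 else self._consume
            for _ in range(gear["n"]):
                t = threading.Thread(target=target, args=(gear,))
                t.daemon = True
                t.start()
                self.workers.append(t)

    def stop(self):
        self._stopping.set()

    def join(self):
        for t in self.workers:
            t.join()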
Example No. 2
    def _pipeline_serialize_execution(self):
        # Single-item pipeline stopped explicitly instead of joined
        # (assumes: import random, time, plus the project's utils module).
        print('serialize execution test begin')

        def func1():
            # Head worker: yields exactly one random int.
            yield random.randint(1, 10)

        def func2(i):
            # Tail worker: prints whatever arrives.
            print(i)

        pipeline = utils.Pipeline(1)
        pipeline.add_worker(func1)
        pipeline.add_worker(func2, tail=True)
        pipeline.start()
        time.sleep(1)
        pipeline.stop()
        print('serialize execution test end')
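The first two examples differ in how they shut the pipeline down: Example No. 1 blocks in pipeline.join() until the head generator is exhausted, while this one lets the pipeline run briefly and then calls pipeline.stop(). Using the hypothetical Pipeline sketched above, the two styles side by side would look roughly like this:

# Two shutdown styles, exercised against the Pipeline sketch above.
import time


def numbers():
    for i in range(3):
        yield i
        time.sleep(0.2)


p = Pipeline(1)
p.add_worker(numbers)
p.add_worker(print, tail=True)
p.start()
p.join()              # style 1: wait until the head generator runs dry

p2 = Pipeline(1)
p2.add_worker(numbers)
p2.add_worker(print, tail=True)
p2.start()
time.sleep(0.3)
p2.stop()             # style 2: cut it short after a fixed delay
p2.join()             # then wait for the worker threads to exit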
Example No. 3
    def test_pipeline_slow_fast_execution(self):
        # Three-stage pipeline whose slow middle stage runs with two workers
        # (assumes: import random, signal, time, plus the project's utils module).
        print('slow_fast execution test begin')

        def func1():
            # Head worker: emits six random ints, one per second.
            i = 0
            while i < 6:
                ret = random.randint(1, 10)
                print('func1', ret)
                yield ret
                i += 1
                time.sleep(1)

        def func2(i):
            # Middle worker: spends two seconds per item, so two workers are
            # needed to keep pace with the one-item-per-second producer.
            try:
                time.sleep(2)
                print('      func2', i * 2)
                return i * 2
            except Exception as err:
                print(err)

        def func3(i):
            # Tail worker: consumes the doubled values immediately.
            try:
                #time.sleep(0)
                print('                func3', i * 2)
            except Exception as err:
                print(err)

        pipeline = utils.Pipeline(1)
        pipeline.add_worker(func1)
        pipeline.add_worker(func2, workers=2)
        pipeline.add_worker(func3, tail=True)

        def kill(signum, frame):
            # SIGINT handler: Ctrl-C stops the pipeline instead of the process.
            pipeline.stop()

        signal.signal(signal.SIGINT, kill)
        pipeline.start()
        print("gears: %d.. " % len(pipeline.gears))
        print("workers: %d.. " % len(pipeline.workers))
        #time.sleep(3)
        pipeline.join()
        print('slow_fast execution test end')
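The workers=2 on the middle stage is not arbitrary: func1 emits one item per second while func2 spends two seconds per item, so a single func2 worker would fall behind by one item per second. A quick sanity check of that sizing (hypothetical helper, not part of the test suite):

import math


def workers_needed(produce_interval_s, per_item_cost_s):
    # Minimum parallelism for a stage to keep pace with its producer.
    return math.ceil(per_item_cost_s / produce_interval_s)


print(workers_needed(1.0, 2.0))  # -> 2, matching workers=2 above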
Example No. 4
    def test_parser(self):
        # Receives three sFlow datagrams over UDP and runs them through the
        # parser (assumes: import logging, signal; from socket import socket,
        # AF_INET, SOCK_DGRAM; plus the project's parser and utils modules).
        logging.info("test parser begin.")

        def func1():
            # Head worker: listens on the standard sFlow port and yields
            # three raw datagrams, then stops.
            listen_addr = ("0.0.0.0", 6343)
            sock = socket(AF_INET, SOCK_DGRAM)
            sock.bind(listen_addr)
            i = 0
            while True:
                data, addr = sock.recvfrom(65535)
                sflow_datagram = {}
                sflow_datagram["addr"] = addr
                sflow_datagram["data"] = data
                yield sflow_datagram
                i += 1
                if i >= 3:
                    break

        def func3(item):
            # Tail worker: prints every parsed record.
            for rec in item:
                print(rec)
                #stdout.flush()

        pipeline = utils.Pipeline(1)
        pipeline.add_worker(func1)
        pipeline.add_worker(parser.parse)
        pipeline.add_worker(func3, tail=True)

        def kill(signum, frame):
            # SIGINT handler: Ctrl-C stops the pipeline instead of the process.
            pipeline.stop()

        signal.signal(signal.SIGINT, kill)
        pipeline.start()
        print("gears: %d.. " % len(pipeline.gears))
        print("workers: %d.. " % len(pipeline.workers))
        #time.sleep(3)
        pipeline.join()
        logging.info("test parser end.")
Example No. 5
def main():
    # Agent entry point (assumes: import json, logging, os, signal, time;
    # from socket import socket, AF_INET, SOCK_DGRAM; plus the project's
    # api, client, config, log, models, parser and utils modules).
    log.init_log("sflow_agent")
    config.init(_CONF_FILE)
    utils.security_start(config.CONF)
    sflow_client = client.SflowClient(config.CONF.yunhai)
    logging.info("Sflow agent start.")

    # Per-uuid baseline: uuid -> (first sflow_entry seen, its timestamp),
    # used by the velocity check in func4.
    sflow_entry_cache = {}

    def func1():
        # Head worker: receives raw sFlow datagrams on UDP port 6343 forever.
        listen_addr = ("0.0.0.0", 6343)
        sock = socket(AF_INET, SOCK_DGRAM)
        sock.bind(listen_addr)
        while True:
            data, addr = sock.recvfrom(65535)
            sflow_datagram = {}
            sflow_datagram["addr"] = addr
            sflow_datagram["data"] = data
            yield sflow_datagram

    def func3(item):
        # Middle worker: converts each IfCounters record to an sflow entry,
        # pushes it to the client, and yields it on to the next stage.
        #logging.info("Emit sflow entry begin")
        for rec in item:
            for counter_record in rec:
                counter_data = counter_record.data
                sflow_entry = utils.IfCounters_to_sflow_entry(counter_data)
                if sflow_entry is not None:
                    logging.info("Sflow entry added: %s" % sflow_entry)
                    sflow_client.add_sflow_entry(sflow_entry)
                    yield sflow_entry
        #logging.info("Emit sflow entry end.")

    def func4(sflow_entry):
        # Tail worker: compares the entry against the cached baseline and
        # records an abnormal event when in_pps grows faster than the
        # configured threshold.
        uuid = sflow_entry["uuid"]
        if uuid not in sflow_entry_cache:
            # First sample for this uuid becomes the baseline.
            sflow_entry_cache[uuid] = (sflow_entry, int(time.time()))
        else:
            curr_time = int(time.time())
            last_sflow_entry = sflow_entry_cache[uuid][0]
            last_time = sflow_entry_cache[uuid][1]
            if curr_time <= last_time:
                return  # avoid a zero or negative interval
            in_pps_diff = int(sflow_entry["in_pps"] -
                              last_sflow_entry["in_pps"])
            velocity = int(in_pps_diff / (curr_time - last_time))
            if velocity > int(config.CONF.alarm.pps_threshold):
                record = models.AbnormalRecord()
                record.uuid = uuid
                record.start = time.strftime("%Y-%m-%d %H:%M:%S",
                                             time.localtime(curr_time))
                record.stats = json.dumps(sflow_entry)
                api.abnormal_record_insert(record)

    pipeline = utils.Pipeline(1)
    pipeline.add_worker(func1)
    pipeline.add_worker(parser.parse)
    pipeline.add_worker(func3)
    # func3 yields each entry onward, so the alarm check is the tail stage.
    pipeline.add_worker(func4, tail=True)

    def kill(signum, frame):
        # Graceful shutdown: drop the status file, then stop the pipeline.
        logging.info("meet signal: %s" % str(signum))
        logging.info("sigterm/sigint received. remove status file and exit")
        prog_status_path = config.CONF.default.prog_status_path
        if os.path.exists(prog_status_path):
            utils.remove_status_file(prog_status_path)
        pipeline.stop()

    signal.signal(signal.SIGINT, kill)
    signal.signal(signal.SIGTERM, kill)
    pipeline.start()
    pipeline.join()
    logging.info("Sflow agent end.")