def main():
    """Run the sflow agent.

    Receives raw sFlow datagrams on UDP port 6343, parses them through a
    worker pipeline and posts the resulting per-VM counter entries to the
    yunhai service.  Installs SIGINT/SIGTERM handlers that remove the
    program status file and stop the pipeline cleanly.
    """
    log.init_log("sflow_agent")
    config.init(_CONF_FILE)
    utils.security_start(config.CONF)
    sflow_client = client.SflowClient(config.CONF.yunhai)
    logging.info("Sflow agent start.")

    # uuid -> (last seen sflow_entry, unix timestamp when cached)
    sflow_entry_cache = {}

    def func1():
        # Source worker: yield one dict per raw sFlow UDP datagram.
        listen_addr = ("0.0.0.0", 6343)
        sock = socket(AF_INET, SOCK_DGRAM)
        sock.bind(listen_addr)
        while True:
            data, addr = sock.recvfrom(65535)
            sflow_datagram = {}
            sflow_datagram["addr"] = addr
            sflow_datagram["data"] = data
            yield sflow_datagram

    def func3(item):
        # Tail worker: convert parsed counter records into sflow entries
        # and post each non-empty entry to the yunhai service.
        for rec in item:
            for counter_record in rec:
                counter_data = counter_record.data
                sflow_entry = utils.IfCounters_to_sflow_entry(counter_data)
                if sflow_entry is not None:
                    logging.info("Sflow entry added: %s" % sflow_entry)
                    sflow_client.add_sflow_entry(sflow_entry)
                    yield sflow_entry

    def func4(sflow_entry):
        # Alarm check: compare this entry's in_pps against the cached
        # baseline for the same VM and persist an AbnormalRecord when the
        # growth rate exceeds the configured pps threshold.
        # NOTE(review): func4 is never added to the pipeline below, so it
        # currently never runs — wire it in if alarming is wanted.
        uuid = sflow_entry["uuid"]
        curr_time = int(time.time())
        if uuid not in sflow_entry_cache:
            sflow_entry_cache[uuid] = (sflow_entry, curr_time)
            return
        last_sflow_entry, last_time = sflow_entry_cache[uuid]
        elapsed = curr_time - last_time
        if elapsed <= 0:
            # Two samples within the same second would divide by zero;
            # skip this sample and keep the existing baseline.
            return
        in_pps_diff = int(sflow_entry["in_pps"] - last_sflow_entry["in_pps"])
        velocity = int(in_pps_diff / elapsed)
        # Refresh the baseline so the rate is computed per interval
        # instead of against the very first sample ever seen.
        sflow_entry_cache[uuid] = (sflow_entry, curr_time)
        if velocity > int(config.CONF.alarm.pps_threshold):
            record = models.AbnormalRecord()
            record.uuid = uuid
            record.start = time.strftime("%Y-%m-%d %H:%M:%S",
                                         time.localtime(curr_time))
            record.stats = json.dumps(sflow_entry)
            api.abnormal_record_insert(record)

    pipeline = utils.Pipeline(1)
    pipeline.add_worker(func1)
    pipeline.add_worker(parser.parse)
    pipeline.add_worker(func3, tail=True)

    def kill(signum, frame):
        # Signal handler: log, remove the status file if present, and
        # ask the pipeline to stop so main() can return.
        logging.info("meet signal: %s" % str(signum))
        # fixed typo: "exist" -> "exit"
        logging.info("sigterm/sigint received. remove status file and exit")
        prog_status_path = config.CONF.default.prog_status_path
        if os.path.exists(prog_status_path):
            utils.remove_status_file(prog_status_path)
        pipeline.stop()

    signal.signal(signal.SIGINT, kill)
    signal.signal(signal.SIGTERM, kill)
    pipeline.start()
    pipeline.join()
    logging.info("Sflow agent end.")
class SflowTestCase(unittest.TestCase): _CONF_FILE = os.path.join( os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'etc/test.conf') def setUp(self): conf = config.Conf() conf.init(self.__class__._CONF_FILE) self.conf = conf self.pcap_eth_dev_name = self.conf.default.eth_dev self.pcap_packet_num = self.conf.default.packet_num self.pcap_file_name = str(int( time.time())) + "_" + self.pcap_eth_dev_name + ".pcap" status, output = commands.getstatusoutput("sudo tcpdump -i %s -c %s -w %s"\ % (self.pcap_eth_dev_name, \ self.pcap_packet_num, \ self.pcap_file_name)) self.pcap_tool = pcap.Pcap(self.pcap_file_name) def test_pcap_parse(self): ethernet_packets = self.pcap_tool.parse() for pac in ethernet_packets: print pac.data.data.__class__.__name__ if __name__ == "__main__": log.init_log("test_pcap") unittest.main()
except Exception as err: print err def func3(i): try: #time.sleep(0) print ' func3', i * 2 except Exception as err: print err pipeline = utils.Pipeline(1) pipeline.add_worker(func1) pipeline.add_worker(func2, workers=2) pipeline.add_worker(func3, tail=True) def kill(signum, frame): pipeline.stop() signal.signal(signal.SIGINT, kill) pipeline.start() print "gears: %d.. " % len(pipeline.gears) print "workers: %d.. " % len(pipeline.workers) #time.sleep(3) pipeline.join() print 'fast_slow execution test end' if __name__ == "__main__": log.init_log("test_PC_gear") unittest.main()
#api.init(self.conf.db) config.init(self.__class__._CONF_FILE) def test_abnormal_record_get_set(self): #TODO ar = models.AbnormalRecord() letter_digits = string.ascii_letters + string.digits ar.uuid = ''.join([letter_digits[random.randint(0,35)] for i in range(0,36)]) ar.start = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime()) ar.end = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime()) ar.stats = 'test,this is just a test' api.abnormal_record_insert(ar) ret = api.abnormal_record_get_all() uuid_set = set(map(lambda x:x.uuid, ret)) assert(ar.uuid in uuid_set) #import pdb #pdb.set_trace() api.abnormal_record_delete(ar.uuid) ret = api.abnormal_record_get_all() uuid_set = set(map(lambda x:x.uuid, ret)) assert(ar.uuid not in uuid_set) if __name__ == "__main__": log.init_log("test_db_api") unittest.main()
except Exception as err: print err def func3(i): try: # time.sleep(0) print " func3", i * 2 except Exception as err: print err pipeline = utils.Pipeline(1) pipeline.add_worker(func1) pipeline.add_worker(func2, workers=2) pipeline.add_worker(func3, tail=True) def kill(signum, frame): pipeline.stop() signal.signal(signal.SIGINT, kill) pipeline.start() print "gears: %d.. " % len(pipeline.gears) print "workers: %d.. " % len(pipeline.workers) # time.sleep(3) pipeline.join() print "fast_slow execution test end" if __name__ == "__main__": log.init_log("test_PC_gear") unittest.main()
config.init(self.__class__._CONF_FILE) def test_abnormal_record_get_set(self): #TODO ar = models.AbnormalRecord() letter_digits = string.ascii_letters + string.digits ar.uuid = ''.join( [letter_digits[random.randint(0, 35)] for i in range(0, 36)]) ar.start = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) ar.end = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) ar.stats = 'test,this is just a test' api.abnormal_record_insert(ar) ret = api.abnormal_record_get_all() uuid_set = set(map(lambda x: x.uuid, ret)) assert (ar.uuid in uuid_set) #import pdb #pdb.set_trace() api.abnormal_record_delete(ar.uuid) ret = api.abnormal_record_get_all() uuid_set = set(map(lambda x: x.uuid, ret)) assert (ar.uuid not in uuid_set) if __name__ == "__main__": log.init_log("test_db_api") unittest.main()
import unittest
import time

sys.path.append(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from sflow_agent import virt
from sflow_agent import log

# Fixed uuid for the test suite; not referenced in this chunk.
UUID = 'sflow_agent_test'


class VirtTestCase(unittest.TestCase):
    """Tests for virt.ifindex_to_uuid()."""

    _CONF_FILE = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                              "etc/test.conf")

    def setUp(self):
        pass

    def test_ifindex_to_uuid(self):
        # Collect the ifindex of every qvo* interface on this host and
        # translate each one to a VM uuid, printing timestamps around the
        # loop to eyeball how long the translation takes.
        # NOTE(review): needs the `ip` tool and qvo interfaces (an
        # OpenStack compute node) to exercise anything meaningful.
        status, output = commands.getstatusoutput("ip link|grep qvo|awk -F':' '{print $1}'")
        ifindex_list = output.strip("\n").split("\n")
        print 'before'
        print time.time()
        for ifindex in ifindex_list:
            print virt.ifindex_to_uuid(ifindex)
        print 'after'
        print time.time()


if __name__ == "__main__":
    log.init_log("test_virt")
    unittest.main()
def main():
    """Run the sflow agent.

    Receives raw sFlow datagrams on UDP port 6343, parses them through a
    worker pipeline and posts the resulting per-VM counter entries to the
    yunhai service.  Installs SIGINT/SIGTERM handlers that remove the
    program status file and stop the pipeline cleanly.
    """
    log.init_log("sflow_agent")
    config.init(_CONF_FILE)
    utils.security_start(config.CONF)
    sflow_client = client.SflowClient(config.CONF.yunhai)
    logging.info("Sflow agent start.")

    # uuid -> (last seen sflow_entry, unix timestamp when cached)
    sflow_entry_cache = {}

    def func1():
        # Source worker: yield one dict per raw sFlow UDP datagram.
        listen_addr = ("0.0.0.0", 6343)
        sock = socket(AF_INET, SOCK_DGRAM)
        sock.bind(listen_addr)
        while True:
            data, addr = sock.recvfrom(65535)
            sflow_datagram = {}
            sflow_datagram["addr"] = addr
            sflow_datagram["data"] = data
            yield sflow_datagram

    def func3(item):
        # Tail worker: convert parsed counter records into sflow entries
        # and post each non-empty entry to the yunhai service.
        for rec in item:
            for counter_record in rec:
                counter_data = counter_record.data
                sflow_entry = utils.IfCounters_to_sflow_entry(counter_data)
                if sflow_entry is not None:
                    logging.info("Sflow entry added: %s" % sflow_entry)
                    sflow_client.add_sflow_entry(sflow_entry)
                    yield sflow_entry

    def func4(sflow_entry):
        # Alarm check: compare this entry's in_pps against the cached
        # baseline for the same VM and persist an AbnormalRecord when the
        # growth rate exceeds the configured pps threshold.
        # NOTE(review): func4 is never added to the pipeline below, so it
        # currently never runs — wire it in if alarming is wanted.
        uuid = sflow_entry["uuid"]
        curr_time = int(time.time())
        if uuid not in sflow_entry_cache:
            sflow_entry_cache[uuid] = (sflow_entry, curr_time)
            return
        last_sflow_entry, last_time = sflow_entry_cache[uuid]
        elapsed = curr_time - last_time
        if elapsed <= 0:
            # Two samples within the same second would divide by zero;
            # skip this sample and keep the existing baseline.
            return
        in_pps_diff = int(sflow_entry["in_pps"] - last_sflow_entry["in_pps"])
        velocity = int(in_pps_diff / elapsed)
        # Refresh the baseline so the rate is computed per interval
        # instead of against the very first sample ever seen.
        sflow_entry_cache[uuid] = (sflow_entry, curr_time)
        if velocity > int(config.CONF.alarm.pps_threshold):
            record = models.AbnormalRecord()
            record.uuid = uuid
            record.start = time.strftime("%Y-%m-%d %H:%M:%S",
                                         time.localtime(curr_time))
            record.stats = json.dumps(sflow_entry)
            api.abnormal_record_insert(record)

    pipeline = utils.Pipeline(1)
    pipeline.add_worker(func1)
    pipeline.add_worker(parser.parse)
    pipeline.add_worker(func3, tail=True)

    def kill(signum, frame):
        # Signal handler: log, remove the status file if present, and
        # ask the pipeline to stop so main() can return.
        logging.info("meet signal: %s" % str(signum))
        # fixed typo: "exist" -> "exit"
        logging.info("sigterm/sigint received. remove status file and exit")
        prog_status_path = config.CONF.default.prog_status_path
        if os.path.exists(prog_status_path):
            utils.remove_status_file(prog_status_path)
        pipeline.stop()

    signal.signal(signal.SIGINT, kill)
    signal.signal(signal.SIGTERM, kill)
    pipeline.start()
    pipeline.join()
    logging.info("Sflow agent end.")
from sflow_agent import pcap
from sflow_agent import config


class SflowTestCase(unittest.TestCase):
    """Capture packets with tcpdump and check pcap.Pcap can parse them."""

    _CONF_FILE = os.path.join(os.path.dirname(
        os.path.dirname(os.path.abspath(__file__))), 'etc/test.conf')

    def setUp(self):
        # Read the test config and record packet_num packets from the
        # configured device into a timestamped capture file; the capture
        # is parsed by the test below.
        conf = config.Conf()
        conf.init(self.__class__._CONF_FILE)
        self.conf = conf
        self.pcap_eth_dev_name = self.conf.default.eth_dev
        self.pcap_packet_num = self.conf.default.packet_num
        self.pcap_file_name = str(int(time.time())) + "_" + self.pcap_eth_dev_name + ".pcap"
        # NOTE(review): requires sudo and tcpdump on the test host.
        status, output = commands.getstatusoutput("sudo tcpdump -i %s -c %s -w %s"
                                                  % (self.pcap_eth_dev_name,
                                                     self.pcap_packet_num,
                                                     self.pcap_file_name))
        self.pcap_tool = pcap.Pcap(self.pcap_file_name)

    def test_pcap_parse(self):
        # Print the class name of the innermost payload of each packet.
        ethernet_packets = self.pcap_tool.parse()
        for pac in ethernet_packets:
            print pac.data.data.__class__.__name__


if __name__ == "__main__":
    log.init_log("test_pcap")
    unittest.main()
yield sflow_datagram i += 1 if i >= 3: break def func3(item): for rec in item: print(rec) #stdout.flush() pipeline = utils.Pipeline(1) pipeline.add_worker(func1) pipeline.add_worker(parser.parse) pipeline.add_worker(func3, tail=True) def kill(signum, frame): pipeline.stop() signal.signal(signal.SIGINT, kill) pipeline.start() print "gears: %d.. " % len(pipeline.gears) print "workers: %d.. " % len(pipeline.workers) #time.sleep(3) pipeline.join() logging.info("test parser end.") if __name__ == "__main__": log.init_log("test_parser") unittest.main()
from sflow_agent import net_flow from sflow_agent import config from sflow_agent.db.sqlalchemy import models from sflow_agent.db import api class NetFlowTestCase(unittest.TestCase): _CONF_FILE = os.path.join( os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'etc/test.conf') def setUp(self): config.init(self.__class__._CONF_FILE) self.flow_extractor = net_flow.FlowExtractor(config.CONF.default) def test_flow_summary_persist(self): for pac_summary in self.flow_extractor.extract( config.CONF.default.eth_dev, pcap_keep=False): nfs = models.VMNetworkFlowSummary() nfs.uuid = ''.join( chr(random.randint(97, 122)) for i in range(0, 36)) nfs.ctime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) nfs.summary = json.dumps(pac_summary) api.network_flow_summary_insert(nfs) print api.network_flow_summary_get_by_uuid_ctime( nfs.uuid, nfs.ctime) if __name__ == "__main__": log.init_log("test_net_flow") unittest.main()
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from sflow_agent import log
from sflow_agent import net_flow
from sflow_agent import config
from sflow_agent.db.sqlalchemy import models
from sflow_agent.db import api


class NetFlowTestCase(unittest.TestCase):
    """Extract flow summaries from a device and persist them via db api."""

    _CONF_FILE = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
                              "etc/test.conf")

    def setUp(self):
        # Initialise config and build the extractor under test.
        config.init(self.__class__._CONF_FILE)
        self.flow_extractor = net_flow.FlowExtractor(config.CONF.default)

    def test_flow_summary_persist(self):
        # For each extracted summary: insert it with a random 36-char
        # lowercase uuid and the current timestamp, then read it back by
        # (uuid, ctime) and print the result.
        for pac_summary in self.flow_extractor.extract(config.CONF.default.eth_dev,
                                                       pcap_keep=False):
            nfs = models.VMNetworkFlowSummary()
            nfs.uuid = "".join(chr(random.randint(97, 122)) for i in range(0, 36))
            nfs.ctime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
            nfs.summary = json.dumps(pac_summary)
            api.network_flow_summary_insert(nfs)
            print api.network_flow_summary_get_by_uuid_ctime(nfs.uuid, nfs.ctime)


if __name__ == "__main__":
    log.init_log("test_net_flow")
    unittest.main()
class SflowTestCase(unittest.TestCase):
    """Post a placeholder sflow entry through client.SflowClient."""

    _CONF_FILE = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        "etc/test.conf")

    def setUp(self):
        # Build a client against the yunhai section of the test config.
        conf = config.Conf()
        conf.init(self.__class__._CONF_FILE)
        self.conf = conf
        self.sflow_client = client.SflowClient(conf.yunhai)
        self.uuid = 'zs-test'

    def test_post_sflow_entry(self):
        # All counters are -1 sentinels; only checks the post succeeds.
        data = {
            "uuid": str(self.uuid),
            "host": "test.baidu.com",
            "in_discard": -1,
            "in_error": -1,
            "in_bps": -1,
            "in_pps": -1,
            "out_discard": -1,
            "out_error": -1,
            "out_bps": -1,
            "out_pps": -1,
        }
        self.sflow_client.add_sflow_entry(data)


if __name__ == "__main__":
    log.init_log("test_client")
    unittest.main()
class SflowTestCase(unittest.TestCase):
    """Post a placeholder sflow entry through client.SflowClient."""

    _CONF_FILE = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        "etc/test.conf")

    def setUp(self):
        # Build a client against the yunhai section of the test config.
        conf = config.Conf()
        conf.init(self.__class__._CONF_FILE)
        self.conf = conf
        self.sflow_client = client.SflowClient(conf.yunhai)
        self.uuid = 'zs-test'

    def test_post_sflow_entry(self):
        # All counters are -1 sentinels; only checks the post succeeds.
        data = {
            "uuid": str(self.uuid),
            "host": "test.baidu.com",
            "in_discard": -1,
            "in_error": -1,
            "in_bps": -1,
            "in_pps": -1,
            "out_discard": -1,
            "out_error": -1,
            "out_bps": -1,
            "out_pps": -1
        }
        self.sflow_client.add_sflow_entry(data)


if __name__ == "__main__":
    log.init_log("test_client")
    unittest.main()