Ejemplo n.º 1
0
def start(config, basedir, db, is_compressed_file_present, updater):
    """Transform uploaded log files and stream them to the collector.

    Args:
        config: collector config dict; must contain "repo".
        basedir: directory holding the extracted log files.
        db: database handle passed through to device_router.add.
        is_compressed_file_present: when True, the uploaded archive and its
            extraction directory are removed after the transform finishes.
        updater: status object; update_stat() is called with 'running' and
            'finished'.
    """
    # warn() is deprecated in the logging module; warning() is the
    # supported spelling.
    logging.warning("transforming logs")
    repo = config["repo"]
    zmq_context = zmq.Context()

    col_out = wiring.Wire('collector_out', zmq_context=zmq_context)
    grouped_files, ips, estimator = get_grouped_files(basedir)
    device_router.add(db, ips, config)

    updater.update_stat('running')

    # Progress responder runs alongside the transform; spawn_link_exception
    # propagates its failures back to us.
    gevent.spawn_link_exception(progress.responder, estimator, zmq_context,
                                repo)

    gevent.sleep(
        2
    )  # allow config-regeneration to complete successfully called by device_router.add
    transform(grouped_files, col_out, estimator, config, updater)

    # delete uploaded compressed file and extracted dir
    if is_compressed_file_present:
        dir_path = os.path.dirname(basedir)
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)

    # stopping immediately will remove sid in norm_front and store_handler
    # so sleep 1 hour, because events may be in queue of norm_front and store_handler
    time.sleep(3600)
    updater.update_stat('finished')
Ejemplo n.º 2
0
def main():
    """Receive SNMP traps over IPv4 and IPv6 and forward them on the
    collector_out wire; runs the dispatcher forever."""
    app_config = _parse_args()

    trap_port = app_config["port"]
    ipv4_endpoint = ('0.0.0.0', trap_port)
    ipv6_endpoint = ('::1', trap_port)

    # Outgoing wire towards the collector pipeline.
    snmptrapd_out = wiring.Wire(
        'collector_out',
        zmq_context=zmq.Context(),
        conf_path=app_config.get('wiring_conf_path') or None)

    engine = getSNMPEngine(ipv4_endpoint, ipv6_endpoint,
                           app_config["snmpv12_agents"],
                           app_config["SNMPv3_users"])

    # Context dict handed to cbFun on every received notification.
    callback_ctx = {
        "out_wire": snmptrapd_out,
        "MIBView": getMIBViewer(),
        "app_config": app_config,
        "last_col_ts": 0,
        "log_counter": 0,
    }
    ntfrcv.NotificationReceiver(engine, cbFun, callback_ctx)
    engine.transportDispatcher.jobStarted(1)  # this job would never finish
    engine.transportDispatcher.runDispatcher()
Ejemplo n.º 3
0
def main():
    """main function

    ZMQ DEALER client: connects to the ROUTER at tcp://localhost:5570,
    sends numbered requests forever and prints any replies that arrive.
    (Python 2 code: uses print statements.)
    """
    context = zmq.Context()
    socket = context.socket(zmq.DEALER)
    identity = u'client-3'
    # A DEALER socket identity must be bytes.
    socket.identity = identity.encode('ascii')
    socket.connect('tcp://localhost:5570')
    print 'Client %s started' % (identity)
    poll = zmq.Poller()
    poll.register(socket, zmq.POLLIN)
    reqs = 0   # requests sent
    count = 0  # replies received
    begin = time.time()
    while True:
        reqs = reqs + 1
        #print 'Req #%d sent..' % (reqs)
        socket.send_string(u'request #%d' % (reqs))

        # Wait up to 10 ms for a reply before sending the next request.
        sockets = dict(poll.poll(10))
        if socket in sockets:
            msg = socket.recv()
            count = count + 1
            print 'Client %s received: %s count %d time %d' % (
                identity, msg, count, time.time() - begin)

    # NOTE(review): unreachable — the loop above never exits.
    socket.close()
    context.term()
Ejemplo n.º 4
0
def test():
    """Benchmark harness: pre-load 5000 canned netflow datagrams (DATA)
    and process them with one `work` process per CPU core.
    (Python 2 code: Queue, xrange.)
    """
    config = _parse_args()
    port = config['port']
    expire_time = config['expire_time']
    col_type = config['col_type']
    collected_at = config["loginspect_name"]

    zmq_context = zmq.Context()

    netflow_out = wiring.Wire('collector_out',
                              zmq_context=zmq_context,
                              conf_path=config.get('wiring_conf_path') or None)

    # Pre-fill the work queue with identical sample datagrams.
    # NOTE(review): Queue.Queue is thread-local; it is NOT shared across
    # multiprocessing.Process children — multiprocessing.Queue is probably
    # intended here. Confirm against `work`.
    q = Queue.Queue()
    for i in range(5000):
        q.put(DATA)

    #sock = start_udp_server(port)

    # One decoder instance per supported netflow version, shared by workers.
    netflow1 = netflow.Netflow1()
    netflow5 = netflow.Netflow5()
    netflow6 = netflow.Netflow6()
    netflow7 = netflow.Netflow7()

    num_of_process = multiprocessing.cpu_count()
    start = time.time()
    benchmark_file = open("multiprocess.benchmark", "w")
    netflow_proc = [multiprocessing.Process(target=work,
                    args=(i, q, config, netflow_out, col_type, collected_at, expire_time, start, \
                          netflow1, netflow5, netflow6, netflow7, benchmark_file)) for i in xrange(num_of_process)]
    for nf in netflow_proc:
        nf.start()
Ejemplo n.º 5
0
class test_syslog_collector(unittest.TestCase):
    """Unit tests for the syslog collector's helper functions."""
    # Shared ZMQ context and output wire for the whole test case.
    zmq_context = zmq.Context()
    syslog_out = wiring.Wire('collector_out', zmq_context=zmq_context)

    def setUp(self):
        # Load the sibling test config and cache frequently used values.
        config_path = disk.get_sibling(__file__, "test-config.json")
        self.config = conf.load(config_path)
        self.port = self.config["port"]
        self.ssl_port = self.config["ssl_port"]
        self.collected_at = self.config["loginspect_name"]

    def tearDown(self):
        pass

    def test__create_listener(self):
        """_create_listener returns the same socket type inet would create."""
        result = _create_listener(self.port)
        sock, sockaddr = inet.create_external_address(self.port)
        expected = sock
        eq_(type(result), type(expected))

    def test__get_profile_info(self):
        """_get_profile_info yields ip, sid, parser, device name, collector."""
        addr = '127.0.0.1', self.port
        ip = addr[0]
        sid = 'syslog|127.0.0.1'
        device_name = 'localhost'
        result = _get_profile_info(addr, self.config)
        result = list(result)
        parser = result.pop(2)  # parser object is not value-comparable; drop it
        expected = [ip, sid, device_name, self.collected_at]
        eq_(result, expected)

    def test__get_sid_parser(self):
        """sid is derived from the client profile as 'syslog|<ip>'."""
        profile = self.config["client_map"]['127.0.0.1']
        sid, parser = _get_sid_parser(self.config, '127.0.0.1', profile)
        eq_(sid, 'syslog|127.0.0.1')
Ejemplo n.º 6
0
def main():
    """main function

    ZMQ proxy: shuttles multipart messages between a ROUTER frontend
    (clients on port 5570) and a DEALER backend (workers on port 5571).
    (Python 2 code: uses print statements.)
    """
    context = zmq.Context()
    frontend = context.socket(zmq.ROUTER)
    frontend.bind('tcp://*:5570')

    backend = context.socket(zmq.DEALER)
    backend.bind('tcp://*:5571')

    poll = zmq.Poller()
    poll.register(frontend, zmq.POLLIN)
    poll.register(backend, zmq.POLLIN)

    while True:
        sockets = dict(poll.poll())
        if frontend in sockets:
            # Client -> worker: forward payload with its routing identity.
            ident, msg = frontend.recv_multipart()
            print 'Server received %s id %s' % (msg, ident)
            backend.send_multipart([ident, msg])
        if backend in sockets:
            # Worker -> client.
            ident, msg = backend.recv_multipart()
            print 'Sending to frontend %s id %s' % (msg, ident)
            frontend.send_multipart([ident, msg])

    # NOTE(review): unreachable — the loop above never exits.
    frontend.close()
    backend.close()
    context.term()
Ejemplo n.º 7
0
 def __create_context(self, wire_name):
     """
     Create a wiring context for sending events to the normalizer.

     :param wire_name: name of the wire to create (e.g. 'collector_out').
     :returns: the connected wiring.Wire instance.
     """
     # NOTE(review): indentation of this snippet looks mangled (1-space
     # method indent) — presumably extracted from a class body; confirm.
     zmq_context = zmq.Context()
     collector_out = wiring.Wire(wire_name, zmq_context=zmq_context,
                                     conf_path=self.__config.get('wiring_conf_path', None))
     return collector_out
Ejemplo n.º 8
0
def _create_context(config):
    """Build and return the collector_out wire used to push events to the
    normalizer.  conf_path is taken from config["wiring_conf_path"] when
    present."""
    out_wire = wiring.Wire(
        "collector_out",
        zmq_context=zmq.Context(),
        conf_path=config.get("wiring_conf_path"))
    return out_wire
Ejemplo n.º 9
0
class TestFTPCollector(unittest.TestCase):
    """End-to-end tests for the FTP collector subprocess."""
    # Shared ZMQ context (currently unused; see comment in setUp).
    zmq_context = zmq.Context()

    def setUp(self):
        # Launch the collector against the test config.
        self.ftp_server = subprocess.Popen(
            ['python', 'ftp_collector.py', 'tests/data/test-config.json'])

        self.batch_processor_in = wiring.Wire('batch_processor_in',
                                              use_gevent=True)
        # dont know why it fails when zmq_context reused
        #self.batch_processor_in = wiring.Wire('batch_processor_in',
        #                                      zmq_context=self.zmq_context)

        # Allow to prepare for serving
        time.sleep(0.5)

    def tearDown(self):
        self.ftp_server.kill()
        self.batch_processor_in.close()
        time.sleep(0.5)

    def login(self):
        """Connect and log in to the collector's FTP server."""
        ftp = FTP()
        ftp.connect('0.0.0.0', 2021)
        ftp.login('alpha', 'alpha')
        return ftp

    def test_login(self):
        self.login()

    def test_bad_authentication(self):
        """Wrong credentials must be rejected."""
        ftp = FTP()
        ftp.connect('0.0.0.0', 2021)
        self.assertRaises(Exception, ftp.login, 'sujan', 'asdf')

    def test_normal_logfile(self):
        """Uploading a plain logfile produces a batch-processor job."""
        ftp = self.login()
        logfile = 'tests/data/1308216000'
        f = open(logfile)
        ftp.storbinary('STOR test_logfile_%s' % time.time(), f)

        event = gevent.with_timeout(5,
                                    self.batch_processor_in.recv,
                                    timeout_value=None)
        eq_(event['parser'], 'SyslogParser')
        eq_(event['sid'], 'ftpc|127.0.0.1-ubuntu')

    def test_gz_logfile(self):
        """Uploading a gzipped logfile is handled the same way."""
        ftp = self.login()
        logfile = 'tests/data/1308216000.gz'
        f = open(logfile)
        ftp.storbinary('STOR test_logfile_%s.gz' % time.time(), f)

        event = gevent.with_timeout(5,
                                    self.batch_processor_in.recv,
                                    timeout_value=None)
        eq_(event['parser'], 'SyslogParser')
        eq_(event['sid'], 'ftpc|127.0.0.1-ubuntu')
def main():
    """Run the filesystem collector.

    Opens (creating if needed) the checksum shelve that remembers per-file
    cursors, then hands control to watcher.monitor, which does the actual
    directory watching.  The shelve is closed on the way out so cached
    cursor entries are flushed to disk.
    """
    config = _parse_args()
    zmq_context = zmq.Context()

    db_file = homing.home_join('storage/col/filesystem_collector', 'checksums.shelve')
    disk.prepare_path(db_file)

    cursor_shelve = shelve.open(db_file, protocol=2)
    try:
        watcher.monitor(config, cursor_shelve, zmq_context)
    finally:
        # Flush and close the shelve even if monitor() raises.
        cursor_shelve.close()
Ejemplo n.º 11
0
def main():
    """Start the OPSEC fetcher loop, pushing fetched events out on the
    collector_out wire."""
    config = _parse_args()

    # Wire used to hand fetched events over to the collector pipeline.
    opsecfetcher_out = wiring.Wire(
        "collector_out",
        zmq_context=zmq.Context(),
        conf_path=config.get('wiring_conf_path') or None)

    log.info('opsec fetcher starting..')
    fetcherloop.start(config, opsecfetcher_out)
Ejemplo n.º 12
0
def main():
    """Keep the set of running OPSEC fetch jobs in sync with the config.

    Blocks on the config-reload hook with a 1-second timeout and re-syncs
    jobs whenever it fires; exits when the module-level STOPIT flag is set.
    """
    config = _parse_args()

    opsecfetcher_out = wiring.Wire(
        "collector_out",
        zmq_context=zmq.Context(),
        conf_path=config.get("wiring_conf_path") or None)

    running_opsecf_jobs = {}

    while not STOPIT:
        if config["_onreload"](timeout=1):
            update_jobs(config, running_opsecf_jobs, opsecfetcher_out)
Ejemplo n.º 13
0
def main():
    """Launch the snare collector's TCP and UDP listeners on the configured
    port, both feeding the collector_out wire."""
    config = _parse_args()
    listen_port = config["port"]

    snare_out = wiring.Wire(
        'collector_out',
        zmq_context=zmq.Context(),
        conf_path=config.get('wiring_conf_path') or None)

    # The same port is served over both transports.
    start_tcp_server(listen_port, config, snare_out)
    start_udp_server(listen_port, config, snare_out)
Ejemplo n.º 14
0
def main():
    """Entry point for the LogPoint agent collector: parse config, open the
    collector_out wire and run the collector loop."""
    config = _parse_args()

    fi_out = wiring.Wire(
        'collector_out',
        zmq_context=zmq.Context(),
        conf_path=config.get('wiring_conf_path') or None)

    log.info('LogPoint_agent_collector starting...')

    fi_collector.main(config, fi_out)
Ejemplo n.º 15
0
def main():
    """main function

    ROUTER worker on port 5570 plus a PULL receiver on port 5503: messages
    pulled from the receiver are buffered in a local queue and handed out,
    one per request, to whichever client asks next.
    (Python 2 code: uses print statements.)
    """
    context = zmq.Context()
    worker = context.socket(zmq.ROUTER)
    worker.bind('tcp://*:5570')
    print 'Worker started'

    receiver = context.socket(zmq.PULL)
    receiver.bind('tcp://*:5503')

    poll = zmq.Poller()
    poll.register(worker, zmq.POLLIN)
    poll.register(receiver, zmq.POLLIN)
    identifier = ""  # NOTE(review): unused
    queue = Queue(10000)  # buffer between the receiver and client requests
    count = 0             # messages handed to clients
    received_count = 0    # messages pulled from the receiver
    begin = time.time()
    while True:
        sockets = dict(poll.poll(10))
        if receiver in sockets:
            msg = receiver.recv()
            queue.put(msg)
            received_count = received_count + 1
            print 'message received %d time %d' % (received_count,
                                                   time.time() - begin)
        if worker in sockets:
            # A client request arrived; reply with a queued message, if any.
            ident, req_msg = worker.recv_multipart()
            #print 'Worker received %s from %s' % (req_msg, ident)
            # replies = randint(0,4)
            # for i in range(replies):
            #     time.sleep(1. / (randint(1,10)))
            if not queue.empty():
                msg = queue.get()
                queue.task_done()
                worker.send_multipart([ident, msg])
                count = count + 1

    # NOTE(review): unreachable — the loop above never exits.
    worker.close()
Ejemplo n.º 16
0
def main():
    """
    Benchmark sender: pushes 100000 JSON-encoded messages to a PUSH socket
    connected to tcp://127.0.0.1:5503.

    NOTE(review): `msg` is already a str, so json.dumps(msg) produces a
    quoted JSON *string*, not an object — confirm this is intended.
    (Python 2 code: uses a print statement.)
    """

    zmq_context = zmq.Context()

    client_address = "PUSH:connect:tcp://127.0.0.1:5503"

    client_1 = wiring.create_wire(zmq_context,
                                  dict(format="json", socket=client_address))
    count = 1
    msg = "{'msg':'Hello World - 2'}"
    while True:

        client_1.send_raw(json.dumps(msg))
        if count == 100000:
            break
        count = count + 1
        #print json.loads(raw_data)
        print count
Ejemplo n.º 17
0
class test_sflow_collector(unittest.TestCase):
    """End-to-end test for the sflow collector subprocess.

    Starts sflow_collector.py against a test config, sends it a canned
    sflow v5 datagram and checks the decoded event arriving on the
    norm_front_in wire.  (Python 2 code: long literals in the expected dict.)
    """
    # Shared ZMQ context for the wires created by this test case.
    zmq_context = zmq.Context()

    def setUp(self):
        # starting sflow collector
        config_path = disk.get_sibling(__file__, "test-config.json")
        config = conf.load(config_path)
        self.port = config["port"]
        self.sflow_out = wiring.Wire('norm_front_in',
                                     zmq_context=self.zmq_context)
        self.sflow_collector = Popen(
            ['python', 'sflow_collector.py', config_path])
        time.sleep(0.5)  # give the subprocess time to bind its sockets

    def tearDown(self):
        self.sflow_collector.kill()
        self.sflow_out.close()
        time.sleep(0.5)  # let the port be released before the next test

    def send_message(self, address=None, message=None):
        """Send an sflow v5 datagram to the collector and verify the decoded
        event, field by field."""
        address = address or ('127.0.0.1', self.port)
        host, port = address

        # Canned sflow v5 counter-sample datagram.
        data_file_path = disk.get_sibling(__file__, "sflow-data-v5.txt")
        message = message or open(data_file_path, "rb").read()

        client, sockaddr = inet.create_address(host, port, socket.SOCK_DGRAM)
        client.sendto(message, sockaddr)

        event = gevent.with_timeout(5, self.sflow_out.recv, timeout_value=None)

        # mid embeds collector name, col_type, source ip and a timestamp.
        mid = event.pop('mid')
        assert re.match(r'^LogInspect500\|sflow\|(127.0.0.1|::1)\|\d+\|1$',
                        mid)

        device_name = event.pop('device_name')
        eq_(device_name, 'localhost')

        # Full expected decode of the canned datagram.
        expected = dict(
            col_type='sflow',
            switch_uptime=240000L,
            _type_num=
            'switch_uptime samples_count sample_source_id_index sub_agent_id sample_sequence_number sample_source_id_type version datagram_sequence_number version',
            samples_count=1L,
            sub_agent_id=100000L,
            device_ip=address[0],
            sample_type='COUNTER_SAMPLE',
            _p__raw_msg_b=
            'AAAABQAAAAHAqAIoAAGGoAAAAAwAA6mAAAAAAQAAAAIAAAGMAAAADAIAAAEAAAAGAAAH0QAAACQAAAACAAAAAQAAAAEAAAAAAAAAAAAAAAIAAAABCAAnPETIAAAAAAfVAAAANAAAAAHdFswAAAAAAY+B4AAAAAZmAAAiyAAAAAAKiYQAABA0sAAACKYAAAAAAOuQAAAAE9wAAAfUAAAASAAAAAAt99AAAAAAACl0AAAAAAAAAAAAAAAAAAAA/mAAAAAAAAHBIAAAAAAAGD/gAAAAAAAYP+AAAAEMRAAAE6YAAAAAAAAAAAAAB9MAAABEAAAAADwj1woAAAAAAAAAAAAAAEwAAAABAAAJZgAAAU0AAAWgAAAAAAAAEoQABN6aAAAeAAAAADwAAAAeAAAyogAAaOoAAAfWAAAAKAAAAAAAAk54AAAECgAAAAAAAAAAAAAAAAAAlKAAAACMAAAAAAAAAAAAAAfQAAAARAAAAAlyaXR1YnVudHUAAAAfQisf1SlOi4jO6A5NVlugAAAAAgAAAAIAAAAVMi42LjMyLTI4LWdlbmVyaWMtcGFlAAAA\n',
            sample_sequence_number=12L,
            sample_source_id_index=1L,
            sample_source_id_type=2L,
            version=5L,
            address_type='IP_V4',
            datagram_sequence_number=12L,
            collected_at='LogInspect500',
            msg='',
            ip_address='192.168.2.40',
            _type_ip='ip_address device_ip',
            _type_str=
            'sample_type address_type ip_address msg col_type device_name collected_at',
        )

        eq_(event, expected)

    def test_udp_flow(self):
        """Datagram over IPv4 loopback."""
        self.send_message(('127.0.0.1', self.port))

    def test_udp6_flow(self):
        """Datagram over IPv6 loopback."""
        self.send_message(('::1', self.port))
Ejemplo n.º 18
0
def main():
    """Batch processor for collector apps.

    Receives file-job descriptors on the batch_processor_in wire, splits
    each file into events via file_processor, annotates every event (mid,
    device fields, msgfilling types, normalizer/repo routing) and forwards
    it on collector_out.  The source file is deleted (best effort) once
    fully processed.  Runs forever, throttled to MAX_SPEED events/second.
    """
    log.info('Batch Processor for Collector apps starting...')

    config = _parse_args()
    zmq_context = zmq.Context()

    processor_in = wiring.Wire('batch_processor_in',
                               zmq_context=zmq_context,
                               conf_path=config.get('wiring_conf_path')
                               or None)
    collector_out = wiring.Wire('collector_out',
                                zmq_context=zmq_context,
                                conf_path=config.get('wiring_conf_path')
                                or None)

    last_timestamp = 0

    throttler = timing.speed_throttler(MAX_SPEED)
    while True:
        file_info = processor_in.recv()

        # Unpack the job descriptor produced by the collector apps.
        sid = file_info['sid']
        col_type = file_info['col_type']
        col_ts = file_info['col_ts']
        parser = file_info['parser']
        file_path = file_info['file']  # renamed from `file` (shadowed builtin)
        cursor = file_info['cursor']
        charset = file_info['charset']
        device_name = file_info['device_name']
        device_ip = file_info['device_ip']
        regex_pattern = file_info['regex_pattern']
        regexparser_name = file_info['regexparser_name']
        normalizer = file_info['normalizer']
        repo = file_info['repo']
        source_name = file_info['source_name']

        # Reset the per-second event counter when the wall clock advances.
        current_timestamp = int(time.time())
        if current_timestamp > last_timestamp:
            last_timestamp = current_timestamp
            counter = 0

        for event in file_processor(file_path, parser, sid, charset, cursor,
                                    regex_pattern, regexparser_name):
            throttler.next()
            counter += 1

            loginspect_name = config["loginspect_name"]
            # mid uniquely identifies the event within this batch.
            event['mid'] = '%s|%s|%s|%d' % (loginspect_name, sid, col_ts,
                                            counter)
            event['collected_at'] = loginspect_name

            event['device_name'] = device_name
            msgfilling.add_types(event, '_type_str',
                                 'device_name collected_at col_type')

            event['col_ts'] = col_ts
            event['_counter'] = counter
            event['col_type'] = col_type
            msgfilling.add_types(event, '_type_num', 'col_ts')

            if device_ip is not None:
                event['device_ip'] = device_ip
                msgfilling.add_types(event, '_type_str', 'device_ip')
                msgfilling.add_types(event, '_type_ip', 'device_ip')

            event['normalizer'] = normalizer
            event['repo'] = repo

            if source_name:
                # Attach the source name to the normalized fields, creating
                # the dict if the parser did not already produce one.
                if event.get('_normalized_fields'):
                    event['_normalized_fields']['source_name'] = source_name
                else:
                    event['_normalized_fields'] = {'source_name': source_name}
                msgfilling.add_types(event, '_type_str', 'source_name')

            collector_out.send_with_norm_policy_and_repo(event)

        # Best-effort cleanup, narrowed from a bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        try:
            os.unlink(file_path)
        except OSError:
            pass
class test_netflow_collector(unittest.TestCase):
    """End-to-end tests for the netflow collector subprocess.

    Starts netflow_collector.py against a test config, sends canned v5/v9
    datagrams and checks the decoded events arriving on norm_front_in.
    """
    # Shared ZMQ context for the wires created by this test case.
    zmq_context = zmq.Context()

    def setUp(self):
        # netflow collector forwards the received msg to normalizer_in
        # starting netflow collector
        config_path = disk.get_sibling(__file__, "test-config-netflow.json")
        config = conf.load(config_path)
        self.port = config['port']

        self.normalizer = wiring.Wire('norm_front_in',
                                      zmq_context=self.zmq_context)

        self.netflow_collector = Popen(
            ['python', 'netflow_collector.py', config_path])
        # Allow to prepare for serving
        time.sleep(0.5)

    def tearDown(self):
        self.netflow_collector.kill()
        self.normalizer.close()
        time.sleep(0.5)

    def send_message(self, address=None, message=None, version=5):
        """Send a canned netflow datagram of the given version (5 or 9) and
        verify the decoded event, field by field."""
        address = address or ('127.0.0.1', self.port)
        host, port = address

        client, sockaddr = inet.create_address(host, port, socket.SOCK_DGRAM)

        if version == 5:
            file_path = disk.get_sibling(__file__, "v5-data.txt")
            msg = open(file_path, "rb").read()
            message = message or msg

            client.sendto(message, sockaddr)

            event = gevent.with_timeout(5,
                                        self.normalizer.recv,
                                        timeout_value=None)

            # mid embeds collector name, col_type, source ip and timestamp.
            mid = event.pop('mid')
            assert re.match(
                r'^LogInspect500\|netflow\|(127.0.0.1|::1)\|\d+\|1$', mid)
            #'mid': u'LogInspect500|netflow|192.168.2.0/24|1353399814|1',

            eq_(
                event,
                dict(
                    #msg=message.rstrip('\n'),
                    destination_address='10.0.0.3',
                    protocol_name='UDP',
                    _p__raw_msg_b=
                    'CgAAAgoAAAMAAAAAAAMABQAAAAEAAABAAbw5vQG9JB0QkgBQAAARAQACAAMgHwAA\n',
                    version=5,
                    msg='',
                    source_address='10.0.0.2',
                    current_unix_sec=1026403152,
                    bytes_count=64,
                    end_uptime_ms=29172765,
                    types_of_service=1,
                    destination_port=80,
                    interface_index=3,
                    start_uptime_ms=29112765,
                    device_name='localhost',
                    packet_count=1,
                    col_type='netflow',
                    source_port=4242,
                    device_ip=address[0],
                    collected_at='LogInspect500',
                    _type_num=
                    'interface_index start_uptime_ms end_uptime_ms source_port destination_port packet_count bytes_count types_of_service version current_unix_sec',
                    _type_str=
                    'protocol_name source_address destination_address msg col_type device_name collected_at',
                    _type_ip='source_address destination_address device_ip',
                ))

        elif version == 9:
            # The v9 sample's first packet is a template definition, so the
            # expected event is the decoded template, not a flow record.
            file_path = disk.get_sibling(__file__, "v9-data.txt")
            msg = open(file_path, "rb").read()
            message = message or msg

            client.sendto(message, sockaddr)

            event = gevent.with_timeout(5,
                                        self.normalizer.recv,
                                        timeout_value=None)

            mid = event.pop('mid')
            assert re.match(
                r'^LogInspect500\|netflow\|(127.0.0.1|::1)\|\d+\|1$', mid)
            #'mid': u'LogInspect500|netflow|192.168.2.0/24|1353399814|1',

            expected = {
                '_type_str':
                'packet_type msg col_type device_name collected_at',
                '_type_num': 'template_id',
                'template_id': 300,
                'device_ip': '127.0.0.1',
                'device_name': u'localhost',
                'packet_type': 'template',
                'col_type': u'netflow',
                'collected_at': u'LogInspect500',
                'msg': '',
                '_p___raw_msg_b':
                'ASwAEgAIAAQADAAEAA8ABAAKAAQADgAEAAIABAABAAQABwACAAsAAgAGAAEABAABAAUAAQARAAIAEAACAAkAAQANAAEAFQAEABYABA==\n',
                '_type_ip': 'device_ip'
            }
            eq_(event, expected)
            #eq_(event, dict(
            #    protocol=17,
            #    first_switched=29074919,
            #    unix_secs=0,
            #    sys_uptime_ms=29134919,
            #    package_sequence=111,
            #    destination_address='10.0.0.3',
            #    protocol_name='UDP',
            #    _p__raw_msg_b='CgAAAgoAAAMAAAAAAAAAAwAAAAUAAAABAAAAQBCSAFAAEQEAAwACIB8BvJBHAbul5wAAAA==\n',
            #    version=9,
            #    msg='',
            #    source_address='10.0.0.2',
            #    bytes_count=64,
            #    destination_mask=31,
            #    source_mask=32,
            #    next_hop='0.0.0.0',
            #    source_as=2,
            #    output_interface_index=5,
            #    source_id=0,
            #    last_switched=29134919,
            #    tcp_flag=0,
            #    destination_as=3,
            #    types_of_service=1,
            #    destination_port=80,
            #    input_interface_index=3,
            #    device_name='localhost',
            #    packet_count=1,
            #    col_type='netflow',
            #    source_port=4242,
            #    device_ip=address[0],
            #    collected_at='LogInspect500',
            #    _type_num='bytes_count packet_count protocol types_of_service tcp_flag source_port source_mask input_interface_index destination_port destination_mask output_interface_index source_as destination_as last_switched first_switched version sys_uptime_ms unix_secs package_sequence source_id',
            #    _type_str='protocol_name source_address destination_address next_hop msg col_type device_name collected_at',
            #    _type_ip='source_address destination_address next_hop device_ip',
            #    ))

        else:
            raise ValueError("Unknown netflow version type: %r" % version)

    def test_udp_version5(self):
        self.send_message(version=5)

    def test_udp_version9(self):
        self.send_message(version=9)

    def test_udp6_version5(self):
        self.send_message(address=('::1', self.port), version=5)
Ejemplo n.º 20
0
def main():
    """Netflow UDP collector: receive datagrams, decode v1/5/6/7 records and
    forward each parsed record on collector_out.

    NOTE(review): the outer `try:` below has no matching `except` in this
    snippet — the source appears truncated; confirm against the original
    module.  (Python 2 code: `except Exception, e` syntax.)
    """
    config = _parse_args()
    log_level = config['core']['log_level']  # NOTE(review): unused here
    port = config['port']
    expire_time = config['expire_time']  # NOTE(review): unused here
    col_type = config['col_type']
    collected_at = config["loginspect_name"]

    zmq_context = zmq.Context()

    netflow_out = wiring.Wire('collector_out',
                              zmq_context=zmq_context,
                              conf_path=config.get('wiring_conf_path') or None)

    sock = start_udp_server(port)
    while True:
        data, addr = sock.recvfrom(9216)

        if not data:
            continue
        log.debug('udp collector; from ip=%s, got msg=%s;', addr, data)

        # Only accept datagrams from configured client IPs/networks.
        ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(ip, config)
        if not config_ip:
            continue
        try:
            # Netflow header: version (bytes 0-1), record count (bytes 2-3),
            # export timestamp (bytes 8-11).
            version = get_netflow_packet_version(data[0:2])
            count = socket.ntohs(struct.unpack('H', data[2:4])[0])
            current_unix_sec = (struct.unpack('I', data[8:12])[0])

            log.debug("Version: %s", version)
            log.debug("Count of no. of records: %s", count)
            log.debug("Count of no. of seconds since 0000 UTC 1970: %s",
                      current_unix_sec)

            # Fresh decoder instances per datagram.
            netflow1 = netflow.Netflow1()
            netflow5 = netflow.Netflow5()
            netflow6 = netflow.Netflow6()
            netflow7 = netflow.Netflow7()

            global VERSION
            global netflowdata

            if ((version == 1) or (version == 5) or (version == 6)
                    or (version == 7)):

                if version == 1:
                    log.info("version 1 unpacking...")
                    VERSION = 1
                    netflow1.unpack(data)
                    netflowdata = netflow1.data
                elif version == 5:
                    log.info("version 5 unpacking...")
                    VERSION = 5
                    netflow5.unpack(data)
                    netflowdata = netflow5.data
                elif version == 6:
                    log.info("version 6 unpacking...")
                    VERSION = 6
                    netflow6.unpack(data)
                    netflowdata = netflow6.data
                elif version == 7:
                    log.info("version 7 unpacking...")
                    VERSION = 7
                    netflow7.unpack(data)
                    netflowdata = netflow7.data

                i = 1
                if not netflowdata:
                    continue

                # One event per flow record in the datagram.
                for netflow_record in netflowdata:
                    try:
                        i = i + 1
                        try:
                            parsed_msg_dict = parse_record(netflow_record)
                        except Exception, e:
                            log.error("Could not parse the given record. %s",
                                      repr(e))
                        parsed_msg_dict['_p__raw_msg_b'] = binascii.b2a_base64(
                            str(netflow_record))
                        parsed_msg_dict['version'] = VERSION
                        parsed_msg_dict['current_unix_sec'] = current_unix_sec
                        msgfilling.add_types(parsed_msg_dict, '_type_num',
                                             'version current_unix_sec')

                        sid = _get_sid(config_ip, config)
                        device_name = config['client_map'][config_ip][
                            "device_name"]
                        log.debug("device: %s", device_name)
                        log.debug("descrete ip: %s", ip)
                        try:
                            _handle_data(parsed_msg_dict, sid, netflow_out,
                                         device_name, col_type, ip,
                                         collected_at)
                        except Exception, e:
                            log.error("Device name not found. %s", repr(e))
                    except Exception, e:
                        log.error(
                            "Error in constructing message, Necessary field not supplied in Netflow"
                        )
                        log.error(repr(e))
Ejemplo n.º 21
0
class test_snare_collector(unittest.TestCase):
    """End-to-end tests for the snare collector subprocess."""
    # Pin the timezone so the log_ts assertion below is deterministic.
    os.environ["TZ"] = "UTC"
    zmq_context = zmq.Context()

    def setUp(self):
        # snare collector forwards the received msg to normalizer_in
        # starting snare collector
        config_path = disk.get_sibling(__file__, 'test-config.json')
        config = conf.load(config_path)
        self.port = config['port']

        self.normalizer = wiring.Wire('norm_front_in',
                zmq_context=self.zmq_context)

        self.snare_collector = Popen(['python', 'snare_collector.py',
                                       config_path])
        # Allow to prepare for serving
        time.sleep(0.5)

    def tearDown(self):
        self.snare_collector.kill()
        self.normalizer.close()
        time.sleep(0.5)

    def send_message(self, address=None, message=None, flow='udp'):
        """Send one syslog line over tcp/ssl/udp and assert the parsed
        event arriving on the normalizer wire."""
        address = address or ('127.0.0.1', self.port)
        message = message or "<124> May 06 2012 15:02:24 [emerg] (17)File exists: Couldn't create accept lock (/private/var/log/apache2/accept.lock.19) (5)\n"

        host, port = address
        if flow == 'tcp':
            client, sockaddr = inet.create_address(host, port)
            client.connect(sockaddr)
            client.send(message)
        elif flow == 'ssl':
            client, sockaddr = inet.create_address(host, port)
            client = ssl.wrap_socket(client)
            client.connect(sockaddr)
            client.send(message)
        elif flow == 'udp':
            client, sockaddr = inet.create_address(host, port,
                    socket.SOCK_DGRAM)
            client.sendto(message, sockaddr)
        else:
            raise ValueError('Unknown flow type: %r' % flow)

        event = gevent.with_timeout(5, self.normalizer.recv, timeout_value=None)

        # mid embeds collector name, col_type, source ip and a timestamp.
        mid = event.pop('mid')
        assert re.match(r'^LogInspect500\|snare\|(127.0.0.1|::1)\|\d+\|1$', mid)
        eq_(event, dict(
            msg=message.rstrip('\n'),
            severity=4,
            facility=15,
            log_ts=1336316544,
            device_ip=address[0],
            device_name='localhost',
            collected_at='LogInspect500',
            _type_num='log_ts severity facility',
            _type_str='msg device_name collected_at',
            _type_ip='device_ip',
            ))

    def test_tcp_basic_flow(self):
        self.send_message(flow='tcp')

    def test_udp_basic_flow(self):
        self.send_message(flow='udp')

    def test_tcp6_flow(self):
        self.send_message(('::1', self.port), flow='tcp')

    def test_udp6_flow(self):
        self.send_message(('::1', self.port), flow='udp')
Ejemplo n.º 22
0
from netflow_collector import parse_record
from netflow_collector import _handle_data
from netflow_collector import _netflow9
from netflow_collector import _New_Netflow_v9
from netflow_collector import msgfill_parsed_record_v9

import unittest

# Raw NetFlow datagrams used as fixtures by the parser tests below.
# sample_v5: a NetFlow v5 export packet; sample_v9: a v9 export containing a
# template flowset plus one data record (presumably captured off the wire --
# TODO confirm provenance).
sample_v5 = '\x00\x05\x00\x01\x01\xbd$\x1dP\xab-=\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\x00\n\x00\x00\x02\n\x00\x00\x03\x00\x00\x00\x00\x00\x03\x00\x05\x00\x00\x00\x01\x00\x00\x00@\x01\xbc9\xbd\x01\xbd$\x1d\x10\x92\x00P\x00\x00\x11\x01\x00\x02\x00\x03 \x1f\x00\x00'
sample_v9 = '\x00\t\x00\x02\x01\xbc\x90G\x00\x00\x00\x00\x00\x00\x00o\x00\x00\x00\x00\x00\x00\x00P\x01,\x00\x12\x00\x08\x00\x04\x00\x0c\x00\x04\x00\x0f\x00\x04\x00\n\x00\x04\x00\x0e\x00\x04\x00\x02\x00\x04\x00\x01\x00\x04\x00\x07\x00\x02\x00\x0b\x00\x02\x00\x06\x00\x01\x00\x04\x00\x01\x00\x05\x00\x01\x00\x11\x00\x02\x00\x10\x00\x02\x00\t\x00\x01\x00\r\x00\x01\x00\x15\x00\x04\x00\x16\x00\x04\x01,\x008\n\x00\x00\x02\n\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00\x01\x00\x00\x00@\x10\x92\x00P\x00\x11\x01\x00\x03\x00\x02 \x1f\x01\xbc\x90G\x01\xbb\xa5\xe7\x00\x00\x00'

# Minimal collector configuration stubs shared by the tests.
config = {"col_type": "netflow"}
collected_at = "Loginspect500"
col_type = "netflowc"

# NOTE(review): creating the ZMQ context and wire at import time is a
# module-level side effect; presumably the test runner tolerates an open
# 'collector_out' wire for the whole session -- confirm.
zmq_context = zmq.Context()
netflow_out = wiring.Wire('collector_out',
                          zmq_context=zmq_context,
                          conf_path=config.get('wiring_conf_path') or None)
ip = '127.0.0.1'
# source id in the "<col_type>|<device ip>" form used by the collectors
sid = "%s|%s" % (col_type, ip)
device_name = "device_name"


def test_test():
    """Smoke test: the eq_ assertion helper is importable and callable."""
    value = 'test'
    eq_(value, value, msg="Checking test")


def test_start_udp_server():
    """start_udp_server must return a usable socket bound for the given port."""
    port = 9001
    sock = start_udp_server(port)
    try:
        # fix: the returned socket was previously discarded without any
        # assertion, so the test could never fail on a bad return value
        assert sock is not None
    finally:
        # fix: close the socket -- it was leaked before, leaving port 9001
        # bound for the rest of the test run
        sock.close()
Ejemplo n.º 23
0
def main():
    #print "This is my sflow_collector 1 ."
    log.debug("Started.")
    config = _parse_args()
    log_level = config['core']['log_level']
    port = config['port']
    log.debug("This is log level set to %s.", log_level)
    col_type = config['col_type']
    log.debug("Col_type : %s", col_type)

    zmq_context = zmq.Context()
    sflow_out = wiring.Wire('collector_out',
                            zmq_context=zmq_context,
                            conf_path=config.get('wiring_conf_path') or None)
    sock = start_udp_server(port)

    while True:
        global data
        data, addr = sock.recvfrom(9216)
        log.info("data: %s, addr: %s", data, addr)
        if not data:
            log.debug("no data")
            continue

        ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(ip, config)
        if not config_ip:
            continue

        try:
            _p__raw_msg_b = data
            # Datagram
            version = get_data32(data)
            address_type = get_data32(data)
            if address_type == Address_type['IPV4']:
                log.debug("IPV4 agent found.")
                address_type = 'IP_V4'
                ip_address = get_data32_addr(data)
            elif address_type == Address_type['IPV6']:
                address_type = 'IP_V6'
                ip_address = get_data128_addr(data)
            else:
                address_type = None

            sub_agent_id = get_data32(data)
            datagram_sequence_number = get_data32(data)
            switch_uptime = get_data32(data)  #in ms
            samples_count = get_data32(data)

            datagram_dict = dict(
                _p__raw_msg_b=binascii.b2a_base64(str(_p__raw_msg_b)),
                version=version,
                address_type=address_type,
                ip_address=ip_address,
                sub_agent_id=sub_agent_id,
                datagram_sequence_number=datagram_sequence_number,
                switch_uptime=switch_uptime,
                samples_count=samples_count)
            EVENT.clear()
            EACH_EVENT.clear()
            EVENT.update(datagram_dict)
            EACH_EVENT.update(datagram_dict)

            log.info("Version: %s", version)

            # samples
            if version == Versions['VERSION5'] or address_type is not None:
                log.info("Version %s unpacking...", version)
                try:
                    for i in range(samples_count):
                        log.debug("datagram samples : %s", i)
                        try:
                            parse_sample(data)  #Parse the obtained datagram
                        except Exception, e:
                            log.error("Unable to parse the data: %s", repr(e))
                        complete_event_dict = _fill_msg_types(EVENT)

                        sid = _get_sid(config_ip, config)
                        device_name = config['client_map'][config_ip][
                            "device_name"]
                        collected_at = config["loginspect_name"]
                        _handle_data(complete_event_dict, sid, sflow_out,
                                     device_name, col_type, ip, collected_at)
                        EVENT.clear()
                        EVENT.update(EACH_EVENT)
                except Exception, e:
                    log.error(
                        "Error in constructing sflow message, Necessary field not supplied in Sflow"
                    )
                    log.error(repr(e))
            else:
                #we donot accept this agent
                log.error(
                    "Datagram from Unknown agent: %s. Or incorrect version type.",
                    address_type)