def __init__(self, conn, server, config, db_file, parser_name_only):
    """Set up an FTP control connection for one collector client.

    Rejects the connection when the peer's IP is not present in the
    ftp_collector client map; otherwise registers every configured
    user/profile pair with a DummyAuthorizer rooted under the profile's
    home directory.

    :param conn: accepted control socket (passed through to FTPHandler)
    :param server: owning FTP server instance
    :param config: collector configuration dict (utf8-normalized here)
    :param db_file: path to the collector's state database
    :param parser_name_only: stored for later parser lookup
    """
    ftpserver.FTPHandler.__init__(self, conn, server)
    self.config = config = textual.utf8(config)
    self.ip = inet.get_ip(conn.getpeername())
    self.db_file = db_file
    self.parser_name_only = parser_name_only
    self.config_ip = config_reader.get_config_ip(self.ip, config)
    if not self.config_ip:
        # Unknown device: tell the client why and drop the connection.
        conn.send("Please add your device %s to ftp_collector in LogInspect to send logs.\n" % self.ip)
        self.close()
        return
    self.profiles = config["client_map"][self.config_ip]
    self.authorizer = ftpserver.DummyAuthorizer()
    for user, profile in self.profiles.iteritems():
        # BUG FIX: original read `outself.get_decrypted_password(...)`,
        # which raises NameError; `self` is the intended receiver.
        password = self.get_decrypted_password(profile["password"])
        permission = profile["permission"]
        basedir = config["basedir"].replace("$LOGINSPECT_HOME", homing.LOGINSPECT_HOME)
        home = profile["home"].lstrip("/")  # let home not be absolute path
        user_home = os.path.join(basedir, home)
        disk.prepare_path(user_home + "/")
        self.authorizer.add_user(user, password, user_home, permission)
def __init__(self, conn, server, config, db_file):
    """Initialize an FTP handler for a configured collector client.

    Unknown peers (not in the config's client_map) are sent an
    explanatory message and disconnected. Known peers get one FTP
    account per configured profile, each rooted at its own home
    directory under the collector base directory.
    """
    ftpserver.FTPHandler.__init__(self, conn, server)
    config = textual.utf8(config)
    self.config = config
    self.ip = inet.get_ip(conn.getpeername())
    self.db_file = db_file
    self.config_ip = config_reader.get_config_ip(self.ip, config)
    if not self.config_ip:
        # Peer is not a configured device -- refuse service politely.
        conn.send('Please add your device %s to ftp_collector in LogInspect to send logs.\n' % self.ip)
        self.close()
        return
    self.profiles = config['client_map'][self.config_ip]
    # TODO use hashed password in config file
    self.authorizer = ftpserver.DummyAuthorizer()
    basedir = config['basedir'].replace('$LOGINSPECT_HOME', homing.LOGINSPECT_HOME)
    for username, details in self.profiles.iteritems():
        # Strip leading "/" so the home stays relative to basedir.
        relative_home = details['home'].lstrip('/')
        account_home = os.path.join(basedir, relative_home)
        disk.prepare_path(account_home + '/')
        self.authorizer.add_user(username, details['password'],
                                 account_home, details['permission'])
def __handle_tcp_client(self, sock, addr, protocol="TCP"):
    """Serve one TCP (or SSL) log client until it disconnects.

    Re-resolves the client's profile whenever the set of configured
    client IPs changes, so a device can be (re)configured without the
    connection being dropped. Each received chunk is handed to the
    protocol-appropriate data handler together with the active parser.
    """
    logging.debug("tcp collector; %s connected;" % str(addr))
    ip = inet.get_ip(addr)
    try:
        # config_ip can be changed if any device whose cidr belong to this ip is added
        old_config_ips = None
        old_parser_name = None
        old_charset = None
        parser = None
        while True:
            config_ips = self.__config["client_map"].keys()
            if config_ips != old_config_ips:
                # Client map changed (device added/removed): re-resolve this peer.
                old_config_ips = config_ips
                config_ip = config_reader.get_config_ip(ip, self.__config)
                if not config_ip:
                    # Peer no longer maps to a configured device; stop serving it.
                    return
                sid = "%s|%s" % (self.__config["col_type"], config_ip)
                profile = self.__config["client_map"][config_ip]
                parser_name = profile.get("parser") or None
                charset = profile["charset"]
                # Swap parsers only when the parser or charset actually changed.
                if parser_name and (parser_name != old_parser_name or charset != old_charset):
                    if old_parser_name and old_charset:
                        logging.warn("settings changed for ip %s, old_parser=%s, new_parser=%s, "
                                     "old_charset=%s, new_charset=%s",
                                     ip, old_parser_name, parser_name, old_charset, charset)
                    old_parser_name = parser_name
                    old_charset = charset
                    new_parser = GetParser(parser_name, sid, charset,
                                           profile.get('regex_pattern'),
                                           profile.get('regexparser_name'))
                    # Carry any partially-buffered bytes into the new parser so
                    # no log data is lost across the swap.
                    if parser and parser.buffer:
                        new_parser.write(parser.buffer)
                    parser = new_parser
            data = sock.recv(4096)
            if not data:
                # Peer closed the connection.
                break
            if protocol == "SSL":
                self.handle_tcp_ssl_data(data, ip=ip, sid=sid, parser=parser,
                                         device_name=profile["device_name"],
                                         normalizer=profile["normalizer"],
                                         repo=profile["repo"])
            else:
                self.handle_tcp_data(data, ip=ip, sid=sid, parser=parser,
                                     device_name=profile["device_name"],
                                     normalizer=profile["normalizer"],
                                     repo=profile["repo"])
    except Exception, e:
        logging.warn('Exception receiving message: %s' % str(e))
def __get_profile_info(self, addr):
    """Resolve the sending peer's collector profile.

    Returns a tuple (ip, sid, parser, device_name, normalizer, repo),
    or None when the peer's IP is not in the configured client map.
    """
    ip = inet.get_ip(addr)
    config_ip = config_reader.get_config_ip(ip, self.__config)
    if not config_ip:
        # Not a configured device.
        return
    profile = self.__config["client_map"][config_ip]
    sid, parser = self.__get_sid_parser(config_ip, profile)
    return (ip, sid, parser,
            profile["device_name"], profile["normalizer"], profile["repo"])
def _get_profile_info(addr, config):
    """Look up the profile details for a sending address.

    Returns (ip, sid, parser, device_name, collected_at) or None when
    either the address is not configured or no parser is available.
    """
    ip = inet.get_ip(addr)
    config_ip = config_reader.get_config_ip(ip, config)
    if not config_ip:
        return
    client_map = config["client_map"]
    profile = client_map.get(config_ip)
    sid, parser = _get_sid_parser(config, config_ip, profile)
    if not parser:
        return
    return (ip, sid, parser,
            client_map[config_ip]["device_name"], config["loginspect_name"])
def _handle_tcp_client(sock, addr, config, snare_out):
    """Serve one TCP log client until EOF, forwarding data downstream.

    Re-resolves the peer's profile whenever the configured client-IP set
    changes, swapping parsers (and preserving any buffered bytes) when
    the parser or charset settings change. The socket is always closed
    on exit.
    """
    log.debug("tcp collector; %s connected;" % str(addr))
    ip = inet.get_ip(addr)
    try:
        # config_ip can be changed if any device whose cidr belong to this ip is added
        old_config_ips = None
        old_parser_name = None
        old_charset = None
        parser = None
        while True:
            config_ips = config["client_map"].keys()
            if config_ips != old_config_ips:
                # Client map changed: re-resolve this peer's profile.
                old_config_ips = config_ips
                config_ip = config_reader.get_config_ip(ip, config)
                if not config_ip:
                    # Device was removed from the configuration.
                    return
                sid = '%s|%s' % (config['col_type'], config_ip)
                profile = config["client_map"][config_ip]
                parser_name = profile["parser"]
                charset = profile["charset"]
                if parser_name != old_parser_name or charset != old_charset:
                    if old_parser_name and old_charset:
                        log.warn('settings changed for ip %s, old_parser=%s, new_parser=%s, '
                                 'old_charset=%s, new_charset=%s',
                                 ip, old_parser_name, parser_name, old_charset, charset)
                    old_parser_name = parser_name
                    old_charset = charset
                    new_parser = GetParser(parser_name, sid, charset,
                                           profile.get('regex_pattern'),
                                           profile.get('regexparser_name'))
                    # Hand over partially-buffered input so no bytes are dropped
                    # across the parser swap.
                    if parser and parser.buffer:
                        new_parser.write(parser.buffer)
                    parser = new_parser
            data = sock.recv(4096)
            log.debug("tcp collector; ip=%s, got data=%s", config_ip, data)
            if not data:
                # Peer closed the connection.
                break
            device_name = profile["device_name"]
            collected_at = config["loginspect_name"]
            _handle_data(data, sid, parser, snare_out, device_name, ip, collected_at)
    finally:
        sock.close()
def _handle_message_request(sock, addr, config, fi_out):
    """Receive pickled log messages from a LogPoint agent and forward
    each normalized event downstream.

    Protocol: the agent sends a pickled dict per request; this side acks
    with a pickled ``{'received': bool}``. Events are stamped with a
    monotonically increasing (col_ts, counter) pair via the module
    globals LAST_COL_TS / LOG_COUNTER.

    NOTE(review): cPickle.loads on raw socket data is unsafe if the peer
    is untrusted -- pickle can execute arbitrary code. Presumably agents
    are trusted internal hosts; confirm before exposing this port.
    """
    global LAST_COL_TS
    global LOG_COUNTER
    log.debug("tcp collector; %s connected;" % str(addr))
    try:
        client_ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(client_ip, config)
        sid, parser = _get_sid_parser(client_ip, config, config_ip)
        if not parser:
            # No parser means the peer is not (fully) configured.
            return
        device_name = config['client_map'][config_ip]["device_name"]
        normalizer = config['client_map'][config_ip]["normalizer"]
        repo = config['client_map'][config_ip]["repo"]
        while True:
            data = sock.recv(4096)
            if not data:
                break
            try:
                message = cPickle.loads(data)
            except:
                #in case if complete data is not received
                try:
                    data += sock.recv(4096)
                    message = cPickle.loads(data)
                except:
                    # Still undecodable after one extra read -- give up on it.
                    log.warn("Dropping the log; log is more than 4 KB")
                    sock.send(cPickle.dumps({'received': False}))
                    continue
            client_id = message['id']
            if message.get('message') and message.get('app_name'):
                app_name = message['app_name']
                extra_info = message.get('extra_info') or {}
                fi_out.start_benchmarker_processing()
                if app_name == "windows_eventlog_reader":
                    # Windows event logs bypass the line parser; the message is
                    # wrapped directly as a single event.
                    event = {
                        "msg": textual.utf8(message["message"]),
                        "_type_str": "msg"
                    }
                    if extra_info.get("_is_event_xml"):
                        extra_info.pop("_is_event_xml")
                        try:
                            more_info = _get_extra_key_values_from_xml(message["message"])
                        except:
                            more_info = {}
                            log.warn("Couldnot parse windows xml event log sent from LogPoint Agent")
                        if more_info:
                            extra_info.update(more_info)
                    parser_data = [event]
                else:
                    # Feed the raw message through the configured parser and
                    # collect every complete event it yields.
                    parser.write(textual.utf8(message['message']), old_parser=True)
                    parser_data = []
                    if parser:
                        for event in parser:
                            if event:
                                parser_data.append(event)
                for event in parser_data:
                    col_ts = int(time.time())
                    if col_ts > LAST_COL_TS:
                        # New second: restart the per-second counter.
                        LAST_COL_TS = col_ts
                        LOG_COUNTER = 0
                    col_type = "lpagent"
                    mid_prefix = '%s|%s|%s|%s|' % (config['loginspect_name'],
                                                   col_type, config_ip, col_ts)
                    LOG_COUNTER += 1
                    event['mid'] = mid_prefix + "%d" % LOG_COUNTER
                    event['col_ts'] = col_ts
                    event['_counter'] = LOG_COUNTER
                    event['col_type'] = col_type
                    msgfilling.add_types(event, '_type_num', 'col_ts')
                    msgfilling.add_types(event, '_type_str', 'col_type')
                    event['app_name'] = message['app_name']
                    event['fi_client_id'] = client_id
                    event['device_name'] = device_name
                    event['device_ip'] = client_ip
                    event['collected_at'] = config['loginspect_name']
                    if extra_info:
                        event.update(extra_info)
                        # Type-tag every extra field so search indexing works.
                        for key, value in extra_info.iteritems():
                            if type(value) is int:
                                msgfilling.add_types(event, '_type_num', key)
                            else:
                                msgfilling.add_types(event, '_type_str', key)
                    msgfilling.add_types(event, '_type_str', 'app_name')
                    msgfilling.add_types(event, '_type_str', 'device_name')
                    msgfilling.add_types(event, '_type_str', 'fi_client_id')
                    msgfilling.add_types(event, '_type_ip', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'collected_at')
                    log.debug('sending message to normalizer: %s' % event)
                    event['normalizer'] = normalizer
                    event['repo'] = repo
                    fi_out.send_with_norm_policy_and_repo(event)
                sock.send(cPickle.dumps({'received': True}))
            else:
                # Malformed request (no message/app_name): negative ack.
                sock.send(cPickle.dumps({'received': False}))
    except Exception, e:
        log.warn('fileinspect collector exception: %s' % str(e))
def _handle_heartbeat_request(sock, addr, config, db):
    """Answer heartbeat/new-start requests from a LogPoint agent.

    Replies (pickled) with one of three message types:
      type 1 -- application config for the agent,
      type 2 -- "no applications configured" notice,
      type 0 -- nothing to send (plain heartbeat ack).
    Agent state (config_changed flag) is tracked in db.fileinspectclients.

    NOTE(review): cPickle.loads on socket data assumes a trusted peer --
    pickle deserialization can execute arbitrary code; confirm agents are
    internal hosts.
    """
    log.debug("tcp collector; %s connected;" % str(addr))
    client_ip = inet.get_ip(addr)
    config_ip = None
    try:
        log.warn('LogPoint agent %s started' % client_ip)
        while True:
            data = sock.recv(4096)
            if not data:
                break
            message = cPickle.loads(data)
            client_id = message['id']
            client_map = config['client_map']
            log.warn('LogPoint agent; received request; client_ip=%s; client_id=%s'
                     % (client_ip, client_id))
            config_changed = _check_config_changes(db, client_ip)
            if message.get('new_start') or config_changed:
                if message.get('new_start'):
                    log.debug('New Start request from client %s' % client_ip)
                config_ip = config_reader.get_config_ip(client_ip, config)
                if config_ip:
                    # Config is available for this agent: mark it delivered
                    # and send the application list.
                    db.fileinspectclients.update(
                        {'ip': client_ip},
                        {'$set': {
                            'config_changed': False
                        }})
                    log.debug('sending application lists to LogPoint agent %s' % config_ip)
                    client_config = _get_client_config(client_map[config_ip])
                    sock.send(cPickle.dumps({
                        'type': 1,
                        'config': client_config
                    }))
                else:
                    # Unknown agent: remember it so it can be configured later.
                    if not db.fileinspectclients.find_one({'ip': client_ip}):
                        db.fileinspectclients.insert(
                            {
                                'ip': client_ip,
                                'client_id': client_id,
                                'config_changed': False
                            }, safe=True)
                    log.warn('No applications found for client %s' % client_ip)
                    sock.send(cPickle.dumps({
                        'type': 2,
                        'message': 'No applications added for this LogPoint agent in LogPoint'
                    }))
            else:
                # Plain heartbeat; nothing changed since last contact.
                log.debug('Nothing to send to client %s' % client_ip)
                sock.send(cPickle.dumps({'type': 0}))
    except Exception, e:
        log.warn('LogPoint agent collector exception: %s' % str(e))
def main(): #print "This is my sflow_collector 1 ." log.debug("Started.") config = _parse_args() log_level = config['core']['log_level'] port = config['port'] log.debug("This is log level set to %s.", log_level) col_type = config['col_type'] log.debug("Col_type : %s", col_type) zmq_context = zmq.Context() sflow_out = wiring.Wire('collector_out', zmq_context=zmq_context, conf_path=config.get('wiring_conf_path') or None) sock = start_udp_server(port) while True: global data data, addr = sock.recvfrom(9216) log.info("data: %s, addr: %s", data, addr) if not data: log.debug("no data") continue ip = inet.get_ip(addr) config_ip = config_reader.get_config_ip(ip, config) if not config_ip: continue try: _p__raw_msg_b = data # Datagram version = get_data32(data) address_type = get_data32(data) if address_type == Address_type['IPV4']: log.debug("IPV4 agent found.") address_type = 'IP_V4' ip_address = get_data32_addr(data) elif address_type == Address_type['IPV6']: address_type = 'IP_V6' ip_address = get_data128_addr(data) else: address_type = None sub_agent_id = get_data32(data) datagram_sequence_number = get_data32(data) switch_uptime = get_data32(data) #in ms samples_count = get_data32(data) datagram_dict = dict( _p__raw_msg_b=binascii.b2a_base64(str(_p__raw_msg_b)), version=version, address_type=address_type, ip_address=ip_address, sub_agent_id=sub_agent_id, datagram_sequence_number=datagram_sequence_number, switch_uptime=switch_uptime, samples_count=samples_count) EVENT.clear() EACH_EVENT.clear() EVENT.update(datagram_dict) EACH_EVENT.update(datagram_dict) log.info("Version: %s", version) # samples if version == Versions['VERSION5'] or address_type is not None: log.info("Version %s unpacking...", version) try: for i in range(samples_count): log.debug("datagram samples : %s", i) try: parse_sample(data) #Parse the obtained datagram except Exception, e: log.error("Unable to parse the data: %s", repr(e)) complete_event_dict = _fill_msg_types(EVENT) sid = _get_sid(config_ip, 
config) device_name = config['client_map'][config_ip][ "device_name"] collected_at = config["loginspect_name"] _handle_data(complete_event_dict, sid, sflow_out, device_name, col_type, ip, collected_at) EVENT.clear() EVENT.update(EACH_EVENT) except Exception, e: log.error( "Error in constructing sflow message, Necessary field not supplied in Sflow" ) log.error(repr(e)) else: #we donot accept this agent log.error( "Datagram from Unknown agent: %s. Or incorrect version type.", address_type)
def main(): config = _parse_args() log_level = config['core']['log_level'] port = config['port'] expire_time = config['expire_time'] col_type = config['col_type'] collected_at = config["loginspect_name"] zmq_context = zmq.Context() netflow_out = wiring.Wire('collector_out', zmq_context=zmq_context, conf_path=config.get('wiring_conf_path') or None) sock = start_udp_server(port) while True: data, addr = sock.recvfrom(9216) if not data: continue log.debug('udp collector; from ip=%s, got msg=%s;', addr, data) ip = inet.get_ip(addr) config_ip = config_reader.get_config_ip(ip, config) if not config_ip: continue try: version = get_netflow_packet_version(data[0:2]) count = socket.ntohs(struct.unpack('H', data[2:4])[0]) current_unix_sec = (struct.unpack('I', data[8:12])[0]) log.debug("Version: %s", version) log.debug("Count of no. of records: %s", count) log.debug("Count of no. of seconds since 0000 UTC 1970: %s", current_unix_sec) netflow1 = netflow.Netflow1() netflow5 = netflow.Netflow5() netflow6 = netflow.Netflow6() netflow7 = netflow.Netflow7() global VERSION global netflowdata if ((version == 1) or (version == 5) or (version == 6) or (version == 7)): if version == 1: log.info("version 1 unpacking...") VERSION = 1 netflow1.unpack(data) netflowdata = netflow1.data elif version == 5: log.info("version 5 unpacking...") VERSION = 5 netflow5.unpack(data) netflowdata = netflow5.data elif version == 6: log.info("version 6 unpacking...") VERSION = 6 netflow6.unpack(data) netflowdata = netflow6.data elif version == 7: log.info("version 7 unpacking...") VERSION = 7 netflow7.unpack(data) netflowdata = netflow7.data i = 1 if not netflowdata: continue for netflow_record in netflowdata: try: i = i + 1 try: parsed_msg_dict = parse_record(netflow_record) except Exception, e: log.error("Could not parse the given record. 
%s", repr(e)) parsed_msg_dict['_p__raw_msg_b'] = binascii.b2a_base64( str(netflow_record)) parsed_msg_dict['version'] = VERSION parsed_msg_dict['current_unix_sec'] = current_unix_sec msgfilling.add_types(parsed_msg_dict, '_type_num', 'version current_unix_sec') sid = _get_sid(config_ip, config) device_name = config['client_map'][config_ip][ "device_name"] log.debug("device: %s", device_name) log.debug("descrete ip: %s", ip) try: _handle_data(parsed_msg_dict, sid, netflow_out, device_name, col_type, ip, collected_at) except Exception, e: log.error("Device name not found. %s", repr(e)) except Exception, e: log.error( "Error in constructing message, Necessary field not supplied in Netflow" ) log.error(repr(e))
def _handle_message_request(sock, addr, config, fi_out, db):
    """Serve one LogPoint agent connection: app-file downloads, heartbeat
    and config requests, and log message ingestion.

    All payloads are zlib-compressed pickles in both directions. Log
    events are stamped with a per-second counter via the module globals
    LAST_COL_TS / LOG_COUNTER and forwarded through fi_out.

    NOTE(review): cPickle.loads on socket data assumes a trusted peer --
    pickle deserialization can execute arbitrary code; confirm agents are
    internal hosts before exposing this port.
    """
    global LAST_COL_TS
    global LOG_COUNTER
    log.debug("tcp collector; %s connected;" % str(addr))
    try:
        client_map = config["client_map"]
        client_ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(client_ip, config)
        sid, parser = _get_sid_parser(client_ip, config, config_ip)
        if not parser:
            # Peer is not (fully) configured; nothing we can do.
            return
        device_name = config["client_map"][config_ip]["device_name"]
        while True:
            data = sock.recv(4096)
            if not data:
                break
            try:
                message = cPickle.loads(zlib.decompress(data))
            except:
                #in case if complete data is not received
                try:
                    data += sock.recv(4096)
                    message = cPickle.loads(zlib.decompress(data))
                except:
                    # Still undecodable after one extra read -- drop it.
                    log.warn("Dropping the log; log is more than 4 KB")
                    sock.send(zlib.compress(cPickle.dumps({"received": False})))
                    continue
            if message.get("send_app_file"):
                # Agent is requesting an application bundle; stream it back
                # as "<length>\n<content>".
                app_name = message["app_name"]
                app_content = open(
                    homing.home_join("storage/col/logpointagent/%s.fi" % app_name),
                    "rb").read()
                sock.send(str(len(app_content)) + "\n" + app_content)
                log.warn("Application file for %s sent to client %s" % (app_name, client_ip))
                continue
            if message.get("heartbeat_request"):
                client_id = message["client_id"]
                db_fi_client = db.fileinspectclients.find_one({"ip": client_ip})
                if not db_fi_client:
                    # First contact: remember the agent and tell it no apps
                    # are configured yet.
                    log.warn("Received first request from LogPoint agent with ip=%s and id=%s"
                             % (client_ip, client_id))
                    db.fileinspectclients.insert(
                        {
                            "ip": client_ip,
                            "client_id": client_id,
                            "config_changed": True
                        }, safe=True)
                    sock.send(zlib.compress(cPickle.dumps({
                        "type": 1,
                        "message": "No applications added for this LogPoint Agent in LogPoint",
                        "pdict_using_apps": ["file_system_collector"]
                    })))
                elif db_fi_client and not db_fi_client.get("applications"):
                    # Known agent but still no applications assigned.
                    log.warn("Add applciations for LogPoint Agent with ip=%s and id=%s"
                             % (client_ip, client_id))
                    sock.send(zlib.compress(cPickle.dumps({
                        "type": 1,
                        "message": "No applications added for this LogPoint Agent in LogPoint",
                        "pdict_using_apps": ["file_system_collector"]
                    })))
                elif db_fi_client.get("applications") and (
                        message.get("first_fetch") or db_fi_client["config_changed"]):
                    # Config needs (re)delivery: either the agent just started
                    # or the stored config changed since last contact.
                    log.warn("Received config request from LogPoint agent with ip=%s and id=%s"
                             % (client_ip, client_id))
                    client_config = _get_client_config(db_fi_client["applications"])
                    if not client_config.get("apps"):
                        sock.send(zlib.compress(cPickle.dumps({
                            "type": 1,
                            "message": "No applications added for this LogPoint Agent in LogPoint",
                            "pdict_using_apps": ["file_system_collector"]
                        })))
                    else:
                        sock.send(zlib.compress(cPickle.dumps({
                            "type": 2,
                            "config": client_config
                        })))
                        # Config delivered: clear the pending-change flag.
                        db.fileinspectclients.update({"ip": client_ip}, {
                            "$set": {
                                "client_id": client_id,
                                "config_changed": False
                            }
                        })
                else:
                    # Plain heartbeat; nothing to send.
                    log.warn("Received heartbeat request from LogPoint agent with ip=%s and id=%s"
                             % (client_ip, client_id))
                    sock.send(zlib.compress(cPickle.dumps({"type": 0})))
                continue
            client_id = message['id']
            if message.get('message') and message.get('app_name'):
                app_name = message['app_name']
                extra_info = message.get('extra_info') or {}
                fi_out.start_benchmarker_processing()
                if app_name == "windows_eventlog_reader":
                    # Windows event logs bypass the line parser; the message
                    # is wrapped directly as a single event.
                    event = {
                        "msg": textual.utf8(message["message"]),
                        "_type_str": "msg"
                    }
                    if extra_info.get("_is_event_xml"):
                        extra_info.pop("_is_event_xml")
                        #try:
                        #    more_info = _get_extra_key_values_from_xml(message["message"])
                        #except:
                        #    more_info = {}
                        #    log.warn("Couldnot parse windows xml event log sent from LogPoint Agent")
                        #if more_info:
                        #    extra_info.update(more_info)
                    parser_data = [event]
                else:
                    # Feed the raw message through the configured parser and
                    # collect every complete event it yields.
                    parser.write(textual.utf8(message['message']), old_parser=True)
                    parser_data = []
                    if parser:
                        for event in parser:
                            if event:
                                parser_data.append(event)
                for event in parser_data:
                    col_ts = int(time.time())
                    if col_ts > LAST_COL_TS:
                        # New second: restart the per-second counter.
                        LAST_COL_TS = col_ts
                        LOG_COUNTER = 0
                    mid_prefix = '%s|%s|%s|%s|' % (config['loginspect_name'],
                                                   config['col_type'],
                                                   config_ip, col_ts)
                    LOG_COUNTER += 1
                    event['mid'] = mid_prefix + "%d" % LOG_COUNTER
                    event['device_name'] = device_name
                    event['device_ip'] = client_ip
                    event['collected_at'] = config['loginspect_name']
                    event['col_ts'] = col_ts
                    event['_counter'] = LOG_COUNTER
                    event['col_type'] = config['col_type']
                    msgfilling.add_types(event, '_type_str', 'device_name')
                    msgfilling.add_types(event, '_type_ip', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'collected_at')
                    msgfilling.add_types(event, '_type_num', 'col_ts')
                    msgfilling.add_types(event, '_type_str', 'col_type')
                    event['_normalized_fields'] = {}
                    event['_normalized_fields']['app_name'] = message['app_name']
                    event['_normalized_fields']['lp_agent_id'] = client_id
                    msgfilling.add_types(event, '_type_str', 'app_name')
                    msgfilling.add_types(event, '_type_str', 'lp_agent_id')
                    if extra_info:
                        #event.update(extra_info)
                        # Extra fields go into _normalized_fields, type-tagged
                        # so search indexing works.
                        for key, value in extra_info.iteritems():
                            if type(value) is int:
                                msgfilling.add_types(event, '_type_num', key)
                            else:
                                msgfilling.add_types(event, '_type_str', key)
                            event['_normalized_fields'][key] = value
                    log.debug('sending message to normalizer: %s' % event)
                    event['repo'] = config['client_map'][config_ip]['repo']
                    event['normalizer'] = config['client_map'][config_ip]['normalizer']
                    fi_out.send_with_mid(event)
                sock.send(zlib.compress(cPickle.dumps({'received': True})))
            else:
                # Malformed request (no message/app_name): negative ack.
                sock.send(zlib.compress(cPickle.dumps({'received': False})))
    except Exception, e:
        log.warn('logpooint agent collector exception: %s' % str(e))
def work(id, que, config, netflow_out, col_type, collected_at, expire_time, start, netflow1, netflow5, netflow6, netflow7, benchmark_file): global counter while True: #not que.qsize() == 0: log.warn("speed: %s %s" % (time.time() - start, counter)) benchmark_file.write("%d task, total time: %s, counter: %s\n" % (id, time.time() - start, counter)) #log.warn("%d task:" % id) counter += 1 #data, addr = sock.recvfrom(9216) addr = ('::ffff:192.168.2.4', 62826, 0, 0) data = que.get() if not data: break #continue #log.debug('udp collector; from ip=%s, got msg=%s;', addr, data) ip = inet.get_ip(addr) config_ip = config_reader.get_config_ip(ip, config) if not config_ip: continue sid = _get_sid(config_ip, config) device_name = config['client_map'][config_ip]["device_name"] try: version = get_netflow_packet_version(data[0:2]) count = socket.ntohs(struct.unpack('H', data[2:4])[0]) current_unix_sec = socket.ntohl(struct.unpack('I', data[8:12])[0]) global VERSION global netflowdata if ((version == 1) or (version == 5) or (version == 6) or (version == 7)): if version == 1: log.info("version 1 unpacking...") VERSION = 1 netflow1.unpack(data) netflowdata = netflow1.data elif version == 5: log.info("version 5 unpacking...") VERSION = 5 netflow5.unpack(data) netflowdata = netflow5.data elif version == 6: log.info("version 6 unpacking...") VERSION = 6 netflow6.unpack(data) netflowdata = netflow6.data elif version == 7: log.info("version 7 unpacking...") VERSION = 7 netflow7.unpack(data) netflowdata = netflow7.data if not netflowdata: continue for netflow_record in netflowdata: try: try: parsed_msg_dict = parse_record(netflow_record) except Exception, e: log.error("Could not parse the given record. 
%s", repr(e)) parsed_msg_dict['_p__raw_msg_b'] = binascii.b2a_base64( str(netflow_record)) parsed_msg_dict['version'] = VERSION parsed_msg_dict['current_unix_sec'] = current_unix_sec msgfilling.add_types(parsed_msg_dict, '_type_num', 'version current_unix_sec') _handle_data(parsed_msg_dict, sid, netflow_out, device_name, col_type, ip, collected_at) except Exception, e: log.error( "Error in constructing message, Necessary field not supplied in Netflow" ) log.error(repr(e))