def __init__(self, conn, server, config, db_file):
    """Per-connection FTP handler: registers one FTP account per client
    profile found in the collector config for the peer's IP.

    Rejects (and closes) connections from IPs that have no entry in the
    ftp_collector configuration.
    """
    ftpserver.FTPHandler.__init__(self, conn, server)
    config = textual.utf8(config)
    self.config = config
    self.ip = inet.get_ip(conn.getpeername())
    self.db_file = db_file
    self.config_ip = config_reader.get_config_ip(self.ip, config)
    if not self.config_ip:
        # Unknown device: tell the client why and drop the connection.
        conn.send(
            'Please add your device %s to ftp_collector in LogInspect to send logs.\n' % self.ip)
        self.close()
        return
    self.profiles = config['client_map'][self.config_ip]
    # TODO use hashed password in config file
    self.authorizer = ftpserver.DummyAuthorizer()
    for username, profile in self.profiles.iteritems():
        plain_password = profile['password']
        perms = profile['permission']
        base_directory = config['basedir'].replace('$LOGINSPECT_HOME',
                                                   homing.LOGINSPECT_HOME)
        # Strip leading '/' so the home stays relative to base_directory.
        relative_home = profile['home'].lstrip('/')
        home_directory = os.path.join(base_directory, relative_home)
        disk.prepare_path(home_directory + '/')
        self.authorizer.add_user(username, plain_password, home_directory, perms)
def __init__(self, conn, server, config, db_file, parser_name_only):
    """Per-connection FTP handler: registers one FTP account per client
    profile found in the collector config for the peer's IP.

    Unlike the plain variant, this one carries a ``parser_name_only`` flag
    and stores encrypted passwords in the config, decrypting them before
    registering users.  Connections from unconfigured IPs are rejected.
    """
    ftpserver.FTPHandler.__init__(self, conn, server)
    self.config = config = textual.utf8(config)
    self.ip = inet.get_ip(conn.getpeername())
    self.db_file = db_file
    self.parser_name_only = parser_name_only
    self.config_ip = config_reader.get_config_ip(self.ip, config)
    if not self.config_ip:
        # Unknown device: tell the client why and drop the connection.
        conn.send("Please add your device %s to ftp_collector in LogInspect to send logs.\n" % self.ip)
        self.close()
        return
    self.profiles = config["client_map"][self.config_ip]
    self.authorizer = ftpserver.DummyAuthorizer()
    for user, profile in self.profiles.iteritems():
        # BUG FIX: original read `outself.get_decrypted_password(...)`;
        # `outself` is not defined anywhere, which would raise NameError on
        # the first profile.  `get_decrypted_password` is assumed to be a
        # method of this handler — TODO confirm against the full class.
        password = self.get_decrypted_password(profile["password"])
        permission = profile["permission"]
        basedir = config["basedir"].replace("$LOGINSPECT_HOME",
                                            homing.LOGINSPECT_HOME)
        # Strip leading '/' so the home stays relative to basedir.
        home = profile["home"].lstrip("/")
        user_home = os.path.join(basedir, home)
        disk.prepare_path(user_home + "/")
        self.authorizer.add_user(user, password, user_home, permission)
def main():
    """Drive the collect/extract/transform pipeline.

    When an ``upload_channel`` is configured, logs arrive as an uploaded
    archive via FTP and are extracted first; otherwise the configured
    ``path`` is processed directly.
    """
    config = textual.utf8(_parse_args())
    db = mongo.get_makalu()
    updater = status_updater.Updater(db, config["repo"])
    has_uploaded_archive = config.get("upload_channel") is not None
    if has_uploaded_archive:
        downloaded_file = ftp_server.listen(db, config, updater)
        base_dir = extracter.extract(downloaded_file, db, config, updater)
    else:
        base_dir = config["path"]
    transformer.start(config, base_dir, db, has_uploaded_archive, updater)
def main():
    """Bootstrap and run the fileinspect FTP server on all interfaces."""
    config = textual.utf8(_parse_args())
    _prepare_application_directory(config)
    listen_port = config['port']
    bind_address = ('0.0.0.0', listen_port)
    logging.warn('starting fileinspect ftp server')
    handler_factory = lambda conn, server: FTPHandler(conn, server, config)
    ftp_daemon = ftpserver.FTPServer(bind_address, handler_factory)
    FTPHandler.use_sendfile = False
    make_inet6_compatible(ftp_daemon, listen_port)
    # Connection caps: total and per source IP.
    ftp_daemon.max_cons = 256
    ftp_daemon.max_cons_per_ip = 5
    ftp_daemon.serve_forever()
def _handle_message_request(sock, addr, config, fi_out):
    """Serve one TCP client connection: read pickled messages, enrich each
    parsed event with collection metadata, and forward it to the normalizer
    via ``fi_out``.

    Protocol (as visible here): each recv chunk is a cPickle-dumped dict
    with at least ``id``; log messages also carry ``message``/``app_name``
    and optional ``extra_info``.  An ack dict ``{'received': bool}`` is
    pickled back after every message.
    """
    # Module-level counters used to build unique message IDs per second.
    global LAST_COL_TS
    global LOG_COUNTER
    log.debug("tcp collector; %s connected;" % str(addr))
    try:
        client_ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(client_ip, config)
        sid, parser = _get_sid_parser(client_ip, config, config_ip)
        if not parser:
            # No parser configured for this client — nothing we can do.
            return
        device_name = config['client_map'][config_ip]["device_name"]
        normalizer = config['client_map'][config_ip]["normalizer"]
        repo = config['client_map'][config_ip]["repo"]
        while True:
            data = sock.recv(4096)
            if not data:
                # Peer closed the connection.
                break
            try:
                message = cPickle.loads(data)
            except:
                # in case if complete data is not received: read one more
                # chunk and retry once; beyond ~8 KB the log is dropped.
                try:
                    data += sock.recv(4096)
                    message = cPickle.loads(data)
                except:
                    log.warn("Dropping the log; log is more than 4 KB")
                    sock.send(cPickle.dumps({'received': False}))
                    continue
            client_id = message['id']
            if message.get('message') and message.get('app_name'):
                app_name = message['app_name']
                extra_info = message.get('extra_info') or {}
                fi_out.start_benchmarker_processing()
                if app_name == "windows_eventlog_reader":
                    # Windows event logs bypass the parser; the raw message
                    # becomes a single event.
                    event = {
                        "msg": textual.utf8(message["message"]),
                        "_type_str": "msg"
                    }
                    if extra_info.get("_is_event_xml"):
                        extra_info.pop("_is_event_xml")
                        try:
                            more_info = _get_extra_key_values_from_xml(
                                message["message"])
                        except:
                            more_info = {}
                            log.warn(
                                "Couldnot parse windows xml event log sent from LogPoint Agent"
                            )
                        if more_info:
                            extra_info.update(more_info)
                    parser_data = [event]
                else:
                    # Feed the raw message to the configured parser and
                    # drain whatever events it yields.
                    parser.write(textual.utf8(message['message']), old_parser=True)
                    parser_data = []
                    if parser:
                        for event in parser:
                            if event:
                                parser_data.append(event)
                for event in parser_data:
                    col_ts = int(time.time())
                    if col_ts > LAST_COL_TS:
                        # New second: restart the per-second counter.
                        LAST_COL_TS = col_ts
                        LOG_COUNTER = 0
                    col_type = "lpagent"
                    # mid format: <loginspect_name>|<col_type>|<config_ip>|<col_ts>|<counter>
                    mid_prefix = '%s|%s|%s|%s|' % (config['loginspect_name'],
                                                   col_type, config_ip, col_ts)
                    LOG_COUNTER += 1
                    event['mid'] = mid_prefix + "%d" % LOG_COUNTER
                    event['col_ts'] = col_ts
                    event['_counter'] = LOG_COUNTER
                    event['col_type'] = col_type
                    msgfilling.add_types(event, '_type_num', 'col_ts')
                    msgfilling.add_types(event, '_type_str', 'col_type')
                    event['app_name'] = message['app_name']
                    event['fi_client_id'] = client_id
                    event['device_name'] = device_name
                    event['device_ip'] = client_ip
                    event['collected_at'] = config['loginspect_name']
                    if extra_info:
                        # Merge agent-supplied fields and register their types.
                        event.update(extra_info)
                        for key, value in extra_info.iteritems():
                            if type(value) is int:
                                msgfilling.add_types(event, '_type_num', key)
                            else:
                                msgfilling.add_types(event, '_type_str', key)
                    msgfilling.add_types(event, '_type_str', 'app_name')
                    msgfilling.add_types(event, '_type_str', 'device_name')
                    msgfilling.add_types(event, '_type_str', 'fi_client_id')
                    # device_ip is indexed both as IP and as string.
                    msgfilling.add_types(event, '_type_ip', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'collected_at')
                    log.debug('sending message to normalizer: %s' % event)
                    event['normalizer'] = normalizer
                    event['repo'] = repo
                    fi_out.send_with_norm_policy_and_repo(event)
                sock.send(cPickle.dumps({'received': True}))
            else:
                sock.send(cPickle.dumps({'received': False}))
    except Exception, e:
        # Top-level boundary: keep the collector alive on any per-client error.
        log.warn('fileinspect collector exception: %s' % str(e))
def _handle_message_request(sock, addr, config, fi_out, db):
    """Serve one LogPoint-agent TCP connection.

    Wire format: zlib-compressed cPickle dicts in both directions.  Three
    request kinds are handled per received message:
      * ``send_app_file``      — ship an application ``.fi`` file back;
      * ``heartbeat_request``  — register/refresh the agent in Mongo
        (``db.fileinspectclients``) and push config when it changed;
      * log messages (``message`` + ``app_name``) — enrich and forward to
        the normalizer via ``fi_out``.
    """
    # Module-level counters used to build unique message IDs per second.
    global LAST_COL_TS
    global LOG_COUNTER
    log.debug("tcp collector; %s connected;" % str(addr))
    try:
        client_map = config["client_map"]
        client_ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(client_ip, config)
        sid, parser = _get_sid_parser(client_ip, config, config_ip)
        if not parser:
            # No parser configured for this client — nothing we can do.
            return
        device_name = config["client_map"][config_ip]["device_name"]
        while True:
            data = sock.recv(4096)
            if not data:
                # Peer closed the connection.
                break
            try:
                message = cPickle.loads(zlib.decompress(data))
            except:
                # in case if complete data is not received: read one more
                # chunk and retry once; beyond ~8 KB the log is dropped.
                try:
                    data += sock.recv(4096)
                    message = cPickle.loads(zlib.decompress(data))
                except:
                    log.warn("Dropping the log; log is more than 4 KB")
                    sock.send(zlib.compress(cPickle.dumps({"received": False})))
                    continue
            if message.get("send_app_file"):
                # Agent asked for an application bundle; reply is
                # "<length>\n<content>" (uncompressed).
                app_name = message["app_name"]
                app_content = open(
                    homing.home_join("storage/col/logpointagent/%s.fi" % app_name),
                    "rb").read()
                sock.send(str(len(app_content)) + "\n" + app_content)
                log.warn("Application file for %s sent to client %s" %
                         (app_name, client_ip))
                continue
            if message.get("heartbeat_request"):
                client_id = message["client_id"]
                db_fi_client = db.fileinspectclients.find_one(
                    {"ip": client_ip})
                if not db_fi_client:
                    # First contact: create the client record, flagged so the
                    # next config request triggers a push.
                    log.warn(
                        "Received first request from LogPoint agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    db.fileinspectclients.insert(
                        {
                            "ip": client_ip,
                            "client_id": client_id,
                            "config_changed": True
                        }, safe=True)
                    # type 1 = "no applications configured" response.
                    sock.send(
                        zlib.compress(
                            cPickle.dumps({
                                "type": 1,
                                "message":
                                "No applications added for this LogPoint Agent in LogPoint",
                                "pdict_using_apps": ["file_system_collector"]
                            })))
                elif db_fi_client and not db_fi_client.get("applications"):
                    log.warn(
                        "Add applciations for LogPoint Agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    sock.send(
                        zlib.compress(
                            cPickle.dumps({
                                "type": 1,
                                "message":
                                "No applications added for this LogPoint Agent in LogPoint",
                                "pdict_using_apps": ["file_system_collector"]
                            })))
                elif db_fi_client.get("applications") and (
                        message.get("first_fetch")
                        or db_fi_client["config_changed"]):
                    # Agent needs (re)configuration: build and send it.
                    log.warn(
                        "Received config request from LogPoint agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    client_config = _get_client_config(
                        db_fi_client["applications"])
                    if not client_config.get("apps"):
                        sock.send(
                            zlib.compress(
                                cPickle.dumps({
                                    "type": 1,
                                    "message":
                                    "No applications added for this LogPoint Agent in LogPoint",
                                    "pdict_using_apps": ["file_system_collector"]
                                })))
                    else:
                        # type 2 = config payload; clear the changed flag
                        # once delivered.
                        sock.send(
                            zlib.compress(
                                cPickle.dumps({
                                    "type": 2,
                                    "config": client_config
                                })))
                        db.fileinspectclients.update({"ip": client_ip}, {
                            "$set": {
                                "client_id": client_id,
                                "config_changed": False
                            }
                        })
                else:
                    # Plain heartbeat, nothing to push (type 0).
                    log.warn(
                        "Received heartbeat request from LogPoint agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    sock.send(zlib.compress(cPickle.dumps({"type": 0})))
                continue
            client_id = message['id']
            if message.get('message') and message.get('app_name'):
                app_name = message['app_name']
                extra_info = message.get('extra_info') or {}
                fi_out.start_benchmarker_processing()
                if app_name == "windows_eventlog_reader":
                    # Windows event logs bypass the parser; the raw message
                    # becomes a single event.
                    event = {
                        "msg": textual.utf8(message["message"]),
                        "_type_str": "msg"
                    }
                    if extra_info.get("_is_event_xml"):
                        extra_info.pop("_is_event_xml")
                        # XML enrichment deliberately disabled here (present
                        # in the older collector variant):
                        #try:
                        #    more_info = _get_extra_key_values_from_xml(message["message"])
                        #except:
                        #    more_info = {}
                        #    log.warn("Couldnot parse windows xml event log sent from LogPoint Agent")
                        #if more_info:
                        #    extra_info.update(more_info)
                    parser_data = [event]
                else:
                    # Feed the raw message to the configured parser and
                    # drain whatever events it yields.
                    parser.write(textual.utf8(message['message']), old_parser=True)
                    parser_data = []
                    if parser:
                        for event in parser:
                            if event:
                                parser_data.append(event)
                for event in parser_data:
                    col_ts = int(time.time())
                    if col_ts > LAST_COL_TS:
                        # New second: restart the per-second counter.
                        LAST_COL_TS = col_ts
                        LOG_COUNTER = 0
                    # mid format: <loginspect_name>|<col_type>|<config_ip>|<col_ts>|<counter>
                    mid_prefix = '%s|%s|%s|%s|' % (config['loginspect_name'],
                                                   config['col_type'],
                                                   config_ip, col_ts)
                    LOG_COUNTER += 1
                    event['mid'] = mid_prefix + "%d" % LOG_COUNTER
                    event['device_name'] = device_name
                    event['device_ip'] = client_ip
                    event['collected_at'] = config['loginspect_name']
                    event['col_ts'] = col_ts
                    event['_counter'] = LOG_COUNTER
                    event['col_type'] = config['col_type']
                    msgfilling.add_types(event, '_type_str', 'device_name')
                    # device_ip is indexed both as IP and as string.
                    msgfilling.add_types(event, '_type_ip', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'collected_at')
                    msgfilling.add_types(event, '_type_num', 'col_ts')
                    msgfilling.add_types(event, '_type_str', 'col_type')
                    # Agent-supplied fields go under _normalized_fields
                    # instead of the event top level (unlike the older
                    # collector variant).
                    event['_normalized_fields'] = {}
                    event['_normalized_fields']['app_name'] = message[
                        'app_name']
                    event['_normalized_fields']['lp_agent_id'] = client_id
                    msgfilling.add_types(event, '_type_str', 'app_name')
                    msgfilling.add_types(event, '_type_str', 'lp_agent_id')
                    if extra_info:
                        #event.update(extra_info)
                        for key, value in extra_info.iteritems():
                            if type(value) is int:
                                msgfilling.add_types(event, '_type_num', key)
                            else:
                                msgfilling.add_types(event, '_type_str', key)
                            event['_normalized_fields'][key] = value
                    log.debug('sending message to normalizer: %s' % event)
                    event['repo'] = config['client_map'][config_ip]['repo']
                    event['normalizer'] = config['client_map'][config_ip][
                        'normalizer']
                    fi_out.send_with_mid(event)
                sock.send(zlib.compress(cPickle.dumps({'received': True})))
            else:
                sock.send(zlib.compress(cPickle.dumps({'received': False})))
    except Exception, e:
        # Top-level boundary: keep the collector alive on any per-client error.
        log.warn('logpooint agent collector exception: %s' % str(e))
def pre_create(self):
    """Build (or rebuild) the 'newtestemail' notification entry for an
    alert rule from the submitted form parameters.

    Returns ``{"notification": [...]}`` on success, or an error tuple:
    ``((0, 800), {})`` for a bad Jinja-style template,
    ``((0, 801), ...)`` for missing/invalid e-mail addresses.

    NOTE(review): if ``alert_id`` is absent/invalid, or no alert document
    is found, ``notifications`` (and ``alert``) are never bound, so the
    trailing ``notifications.append``/``return`` would raise NameError —
    presumably callers always pass a valid alert_id; verify.
    """
    id = is_mongokit_objectid(self.params.get("alert_id"))
    if id:
        alert = dboperation.read("AlertRules", {"_id": id}, True)
        if alert:
            notifications = alert.get("notification", [])
            if notifications:
                # Drop any existing 'newtestemail' entry; it is re-added
                # below from the fresh form values.
                for notification in notifications:
                    if notification.get("type") == "newtestemail":
                        notifications.remove(notification)
                        break
    if self.params.get("notify_newtestemail") == "on":
        email_template = textual.utf8(
            self.params.get('email_template'))
        # Validate template syntax up front; 800 = bad template.
        try:
            template = Template(email_template)
        except TemplateSyntaxError:
            return ((0, 800), {})
        email_emails = self.params.get('email_emails')
        if email_emails:
            # JSON-encoded list of recipient addresses.
            email_emails = json.loads(email_emails)
            email_pattern = re.compile(
                r"^[-!#$%&'*+/0-9=?A-Z^_a-z{|}~](\.?[-!#$%&'*+/0-9=?A-Z^_a-z{|}~])*@[a-zA-Z](-?[a-zA-Z0-9])*(\.[a-zA-Z](-?[a-zA-Z0-9])*)*$"
            )
            invalid_emails = []
            for email in email_emails:
                if not bool(email_pattern.match(email)):
                    invalid_emails.append(email)
            if invalid_emails:
                # 801 with the offending addresses listed.
                return ((0, 801), {
                    "errors": {
                        "invalid_emails": invalid_emails
                    }
                })
        else:
            # 801 with no detail: recipients are required.
            return ((0, 801), {})
        email_threshold_option = self.params.get(
            "email_threshold_option")
        email_threshold_value = self.params.get(
            "email_threshold_value")
        if email_threshold_value:
            email_threshold_value = int(email_threshold_value)
        template_file = ""
        if email_template:
            # Persist the template to disk, rewriting the custom
            # |readable / |date / |time / |datetime filters via
            # self._regex_replacer before writing.
            disk.prepare_path(ALERT_TEMPLATES_PATH)
            user_id = dboperation.read(
                "User", {'username': self.user.get_user_name()}, True)
            template_file = 'alert_%s_%s.tmp' % (str(
                user_id['_id']), base64.b32encode(alert["name"]))
            template_file_path = os.path.join(
                ALERT_TEMPLATES_PATH, template_file)
            email_template = Markup(
                email_template.decode('utf-8')).unescape()
            with open(template_file_path, 'w') as f:
                email_template = email_template.encode('utf-8')
                format_template = re.sub(
                    '\|\s*(readable|date|time|datetime)\s*}}',
                    self._regex_replacer, email_template)
                f.write(format_template)
        else:
            # Empty template: store a minimal placeholder body.
            email_template = "<br>"
            disk.prepare_path(ALERT_TEMPLATES_PATH)
            # NOTE(review): `name` is undefined in this scope — likely
            # meant alert["name"]; this branch would raise NameError as
            # written. TODO confirm against the full file.
            template_file = 'alert_%s_%s.tmp' % (
                self.user.get_user_name().encode(
                    'ascii', 'ignore'), base64.b32encode(name))
            template_file_path = os.path.join(
                ALERT_TEMPLATES_PATH, template_file)
            email_template = Markup(
                email_template.decode('utf-8')).unescape()
            with open(template_file_path, 'w') as f:
                email_template = email_template.encode('utf-8')
                format_template = re.sub(
                    '\|\s*(readable|date|time|datetime)\s*}}',
                    self._regex_replacer, email_template)
                f.write(format_template)
        notifications.append({'template_file':template_file,'type':'newtestemail', 'notify_newtestemail':True, 'email_emails':email_emails,\
            'email_template':email_template, 'threshold':email_threshold_value, 'threshold_option':email_threshold_option})
    return {"notification": notifications}