def init_logger():
    """Create the application logger according to Config and cache it.

    Config.use_graylog selects the sinks:
      "0" -> console only
      "1" -> Graylog GELF only (requires host, source and an int port)
      "2" -> Graylog GELF plus console

    The configured logger is stored on Logger.logger and returned.
    """
    use_graylog = Config.use_graylog
    source = Config.source
    graylog_host = Config.graylog_host
    graylog_port = Config.graylog_port
    log_level = Config.log_level
    log_facility_name = Config.log_facility_name

    temp_logger = logging.getLogger(log_facility_name)
    temp_logger.setLevel(log_level)

    def _console_handler():
        # Shared console handler setup (was duplicated verbatim in the
        # "0" and "2" branches of the original).
        handler = logging.StreamHandler()
        handler.setFormatter(logging.Formatter(
            '%(asctime)s %(filename)s:%(lineno)d %(levelname)s:\n"%(message)s"'
        ))
        return handler

    # isinstance(x, int) is already False for None, so the original's extra
    # "graylog_port is not None" check was redundant.
    gelf_ready = bool(graylog_host and source and isinstance(graylog_port, int))

    log_handlers = []
    if use_graylog == "0":
        log_handlers.append(_console_handler())
    elif use_graylog == "1" and gelf_ready:
        log_handlers.append(graypy.GELFHandler(
            host=graylog_host, port=graylog_port, localname=source))
    elif use_graylog == "2" and gelf_ready:
        log_handlers.append(graypy.GELFHandler(
            host=graylog_host, port=graylog_port, localname=source))
        log_handlers.append(_console_handler())

    for log_handler in log_handlers:
        temp_logger.addHandler(log_handler)
    Logger.logger = temp_logger
    return Logger.logger
def configure_logging(self):
    """Set up root logging (console, optional Graylog) and a per-bot logger.

    Reads LOG_FORMAT (optional), LOG_LEVEL and the optional GRAYLOG_SERVER /
    GRAYLOG_PORT options from the [logging] section of self.config.
    """
    fmt = '%(asctime)s [%(processName)-17.17s] [%(levelname)-5.5s] %(message)s'
    if self.config.has_option('logging', 'LOG_FORMAT'):
        # raw=True: the format string contains '%' placeholders that must
        # not be interpolated by ConfigParser.
        fmt = self.config.get('logging', 'LOG_FORMAT', raw=True)

    root = logging.getLogger()
    root.setLevel(self.config.get('logging', 'LOG_LEVEL'))

    console = logging.StreamHandler()
    console.setFormatter(logging.Formatter(fmt))
    root.addHandler(console)

    # Graylog is opt-in: only wired when a server is configured.
    if self.config.has_option('logging', 'GRAYLOG_SERVER'):
        root.addHandler(graypy.GELFHandler(
            self.config.get('logging', 'GRAYLOG_SERVER'),
            int(self.config.get('logging', 'GRAYLOG_PORT')),
            facility=self.bot_id))

    self.logger = logging.getLogger(__name__ + "_" + self.bot_id)
def main():
    """Send one message (plus custom fields) to Graylog, or preview it.

    Argument handling, template parsing and error reporting are delegated to
    check_args / parse_log_string / bailout defined elsewhere in this module.
    """
    try:
        args = check_args()
        gelf_logger = logging.getLogger(args['facility'])
        gelf_logger.setLevel(args['level'])
        gelf_logger.addHandler(graypy.GELFHandler(
            args['server'], args['port'], debugging_fields=False))

        extra_fields = args['data']
        if args['template'] is not None:
            # Merge fields extracted from the message into the extras.
            extra_fields.update(
                parse_log_string(args['template'], args['message']))

        if args['nolog'] is True:
            # Dry run: show what would have been sent.
            print(u'Simulation mode:')
            print(u'Log level: {0}'.format(args['level']))
            print(u'Facility: {0}'.format(args['facility']))
            print(u'Server: {0}'.format(args['server']))
            print(u'Message: {0}'.format(args['message']))
            print(u'Custom fields: {0}'.format(json.dumps(extra_fields)))
        else:
            gelf_logger.log(args['level'], args['message'], extra=extra_fields)
    except Exception as e:
        bailout("Exception during log operation: {0}".format(e))
def main():
    """Relay supervisor events to Graylog as GELF log records.

    GRAYLOG_SERVER and GRAYLOG_PORT must be set in the environment; each
    event body is split into level/file/line/message by
    split_msg_and_get_log_level (defined elsewhere in this module).
    """
    try:
        host = os.environ['GRAYLOG_SERVER']
        port = int(os.environ['GRAYLOG_PORT'])
    except KeyError:
        sys.exit("GRAYLOG_SERVER and GRAYLOG_PORT are required.")

    sys.stderr.write("Starting with host: %s, port: %d" % (host, port))
    sys.stderr.flush()

    gelf = graypy.GELFHandler(host, port)
    for headers, payload in supervisor_events(sys.stdin, sys.stdout):
        level, filename, lineno, body = split_msg_and_get_log_level(
            payload, split_regex)
        record = logging.LogRecord(
            name=headers['processname'],
            level=level,
            pathname=filename,
            lineno=lineno,
            msg=body,
            args=(),
            exc_info=None,
        )
        # Report the supervised child's pid rather than this relay's.
        record.process = int(headers['pid'])
        gelf.handle(record)
def main():
    """Relay supervisor events to Graylog, deriving the level via regex.

    GRAYLOG_SERVER and GRAYLOG_PORT must be set in the environment;
    level_match_expr / supervisor_events / split_msg_and_get_log_level are
    defined elsewhere in this module.
    """
    try:
        host = os.environ['GRAYLOG_SERVER']
        port = int(os.environ['GRAYLOG_PORT'])
    except KeyError:
        sys.exit("GRAYLOG_SERVER and GRAYLOG_PORT are required.")

    sys.stderr.write("Starting with host: %s, port: %d" % (host, port))
    sys.stderr.flush()

    gelf = graypy.GELFHandler(host, port)
    # Compile once; reused for every event.
    level_match = re.compile(level_match_expr)
    for headers, payload in supervisor_events(sys.stdin, sys.stdout):
        level, body = split_msg_and_get_log_level(payload, level_match)
        record = logging.LogRecord(
            name=headers['processname'],
            level=level,
            pathname=None,
            lineno=0,
            msg=body,
            args=(),
            exc_info=None,
        )
        record.process = int(headers['pid'])
        # Use the supervisor 'program:name' group as the process name.
        record.processName = headers['groupname']
        gelf.handle(record)
def setup(logger: logging.Logger, filename: Optional[str]=None) -> logging.Logger:
    """Attach optional file, console and Graylog handlers to *logger*.

    All handlers and the logger itself are set to DEBUG; the same logger is
    returned for convenience.
    """
    formatter = logging.Formatter(
        '%(asctime)s [%(levelname)s] %(name)s (%(message)s)')

    # Optional file sink, capturing everything down to debug level.
    if filename is not None:
        file_handler = logging.FileHandler(filename)
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    console.setFormatter(formatter)
    logger.addHandler(console)

    # Graylog sink. Note: no formatter is set on the GELF handler.
    gelf = graypy.GELFHandler('graylog.pi', 12201)
    gelf.setLevel(logging.DEBUG)
    logger.addHandler(gelf)

    logger.setLevel(logging.DEBUG)
    return logger
def setup_logger(self):
    """Setups logger for the spider

       Arguments:
           project_conf -- ConfigParser Object that contains the
                           configuration data in conf/alascrapy.conf
    """
    logging.basicConfig(
        format="alaScrapy Schedulers: %(asctime)s %(levelname)s [%(name)s] %(message)s",
        datefmt='%Y-%m-%d %H:%M:%S')

    # Graylog sink, INFO and above.
    gelf_handler = graypy.GELFHandler(
        self.project_conf.get("LOGGING", "graylog_host"),
        self.project_conf.getint("LOGGING", "graylog_port"))
    gelf_handler.setLevel(logging.INFO)

    # Daily-rotated file sink, keeping 10 backups.
    rotating_handler = logging.handlers.TimedRotatingFileHandler(
        "/var/log/alaScrapy/scheduler.log", 'midnight', 10)
    rotating_handler.suffix = "%Y-%m-%d"
    rotating_handler.setLevel(logging.INFO)

    #TODO: separate the logger name so we can separate spider messages
    #from scheduler messages to a different stream
    self.logger = logging.getLogger('alascrapy_scheduler')
    self.logger.addHandler(gelf_handler)
    self.logger.addHandler(rotating_handler)
    sys.excepthook = self.log_uncaught_exception
def init_logging():
    """Initializes logging: file, console, database, and optional Graylog."""
    cfg = Config()

    formatter = logging.Formatter("%(asctime)s [%(name)s] %(levelname)s: %(message)s")

    fh = logging.handlers.WatchedFileHandler(os.path.join(CUCKOO_ROOT, "log", "cuckoo.log"))
    fh.setFormatter(formatter)
    log.addHandler(fh)

    ch = ConsoleHandler()
    ch.setFormatter(formatter)
    log.addHandler(ch)

    # The database sink only records errors.
    dh = DatabaseHandler()
    dh.setLevel(logging.ERROR)
    log.addHandler(dh)

    if cfg.graylog.enabled:
        if HAVE_GRAYPY:
            gray = graypy.GELFHandler(cfg.graylog.host, cfg.graylog.port)
            # BUG FIX: logging.getLevelName never raises ValueError — for an
            # unknown name it returns the string "Level <name>", which the
            # original try/except could not catch and which would then make
            # setLevel fail. Validate the result type instead.
            level = logging.getLevelName(cfg.graylog.level.upper())
            if not isinstance(level, int):
                level = logging.ERROR
            gray.setLevel(level)
            log.addHandler(gray)
        else:
            raise CuckooDependencyError("Graypy is not installed")

    log.setLevel(logging.INFO)
def _setup_logger():
    """Wire the module-level audit LOGGER to file, Graylog and stderr."""
    global LOGGER
    LOGGER.setLevel(logging.DEBUG)

    audit_log_path = os.path.realpath(CTX.config['logging']['audit_path'])
    # Touch the log file so the FileHandler can open it on first run.
    if not os.path.isfile(audit_log_path):
        open(audit_log_path, 'a').close()

    # message format
    # TODO: Make this pure json?
    fmtr = logging.Formatter(fmt='%(asctime)s - Audit Log - %(message)s')

    # Same formatter on all three sinks: file, GELF, stderr.
    for sink in (logging.FileHandler(audit_log_path),
                 graypy.GELFHandler(LOGSTASH_HOST, LOGSTASH_PORT),
                 logging.StreamHandler()):
        sink.setFormatter(fmtr)
        LOGGER.addHandler(sink)
def setup_logger(self):
    """Setups logger for the spider

       Arguments:
           project_conf -- ConfigParser Object that contains the
                           configuration data in conf/alascrapy.conf
    """
    logging.basicConfig(
        format=self.name + ": %(asctime)s %(levelname)s [%(name)s] %(message)s",
        datefmt='%Y-%m-%d %H:%M:%S')

    # Graylog sink, INFO and above.
    gelf_handler = graypy.GELFHandler(
        self.project_conf.get("LOGGING", "graylog_host"),
        self.project_conf.getint("LOGGING", "graylog_port"))
    gelf_handler.setLevel(logging.INFO)

    # Daily-rotated file sink, keeping 10 backups.
    rotating_handler = logging.handlers.TimedRotatingFileHandler(
        "/var/log/alaScrapy/spiders.log", when='midnight', backupCount=10)
    rotating_handler.suffix = "%Y-%m-%d"
    rotating_handler.setLevel(logging.INFO)

    # TODO: separate the logger name so we can separate spider messages
    # from scheduler messages to a different stream
    self._logger = logging.getLogger('')  # get root logger
    self._logger.addHandler(gelf_handler)
    self._logger.addHandler(rotating_handler)
    sys.excepthook = self.log_uncaught_exception

    # suppress expected 'error messages' when using fake user agent
    logging.getLogger('fake_useragent').addFilter(FakeUserAgentFilter())
def __init__(self):
    """Configure the root logger from the 'logger' section of the config
    file: log level plus a Graylog GELF handler."""
    log_cfg = Config().getConfigFile()["logger"]
    root_logger = logging.getLogger()
    logging.basicConfig(format='%(levelname)s:%(message)s')
    root_logger.setLevel(log_cfg["level"])
    root_logger.addHandler(
        graypy.GELFHandler(log_cfg["host"], log_cfg["port"]))
def _init_graylog(app): if app.config['LOGGING']: gelf_handler = graypy.GELFHandler( app.config['GRAYLOG_HOST'], chunk_size=graypy.LAN_CHUNK) gelf_handler.addFilter(RequestInfoFilter()) app.logger.setLevel(logging.INFO) app.logger.addHandler(gelf_handler)
def add_handler(self):
    """Add a handler for Graylog; no-op (with a dev log line) when the
    target address is missing or graypy is unavailable."""
    configured = self.ip and self.port and graypy  # pylint: disable=no-member
    if not configured:
        self.log('Graylog not configured! Disabling it', 'dev')
        return
    gelf = graypy.GELFHandler(self.ip, self.port, debugging_fields=False)  # pylint: disable=no-member
    self.logger.addHandler(gelf)
def setup_graylog(self):
    """Attach a formatted GELF handler to the root logger.

    If graypy is not installed, log a critical message and return without
    doing anything further.
    """
    try:
        import graypy
    except ImportError:
        logging.critical("Please Install 'graypy'")
        # BUG FIX: the original fell through after the except clause and
        # crashed with NameError on the unbound 'graypy' name below.
        return
    graypyhandler = graypy.GELFHandler('logs', 12201)
    logformat = logging.Formatter(
        fmt='%(asctime)s [%(levelname)s] (%(threadName)-10s) %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    graypyhandler.setFormatter(logformat)
    logging.root.addHandler(graypyhandler)
def get_graylogger(host, facility, level='INFO', port=12201, **kwargs):
    """Return a logger named *facility* that ships to Graylog and stderr.

    Extra keyword arguments are forwarded to graypy.GELFHandler; logs a
    "Starting" message before returning.
    """
    import logging
    import graypy
    gray_logger = logging.getLogger(facility)
    gray_logger.setLevel(getattr(logging, level))
    gray_logger.addHandler(graypy.GELFHandler(host, port, **kwargs))

    stream = logging.StreamHandler()
    stream.setLevel(logging.DEBUG)
    stream.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    gray_logger.addHandler(stream)

    gray_logger.info("Starting")
    return gray_logger
def main():
    """Install signal handlers, wire the logger to local Graylog, schedule
    the first probe, and run the event loop until shutdown."""
    for signame in ('SIGINT', 'SIGTERM'):
        loop.add_signal_handler(
            getattr(signal, signame), functools.partial(_exit, signame))

    logger.addHandler(graypy.GELFHandler('localhost', 12201))
    logger.setLevel(logging.INFO)

    # _probe reschedules itself; this just kicks off the first run.
    loop.call_later(RESOLUTION, _probe)
    print("Probing cpu, memory and disk...")
    try:
        loop.run_forever()
    finally:
        loop.close()
def setup_graylog():
    """Return a DEBUG-level 'dda_logger' wired to a local Graylog GELF
    endpoint, or None when graypy is not installed."""
    try:
        import graypy
    except ImportError:
        return
    import logging
    dda_logger = logging.getLogger('dda_logger')
    dda_logger.setLevel(logging.DEBUG)
    dda_logger.addHandler(graypy.GELFHandler('localhost', 12201))
    return dda_logger
def __init__(self, db=None, impdir=None):
    """Set up the downloader: logging (console + GELF), dynamic discovery
    of DownloadManager and DownloadPlugin subclasses, and the download
    directory.

    db     -- database handle, stored as-is (opaque here)
    impdir -- importer directory; falls back to settings.IMPORTER_DIRECTORY
    """
    self.db = db
    self.impdir = impdir
    logging.basicConfig(level=logging.INFO)
    self.logger = logging.getLogger(__name__)
    # Ship log records to the GELF endpoint configured in settings.
    handler = graypy.GELFHandler(settings.LOGSTASH_GELF_HOST,
                                 settings.LOGSTASH_GELF_PORT)
    self.logger.addHandler(handler)
    # ---- Download managers: import every .py module under
    # DownloadManagers/ and index DownloadManager subclasses by class name.
    # NOTE(review): listdir uses a relative path — assumes the process cwd
    # is the project root; confirm with the launcher.
    modo = [
        name.split(".py")[0] for name in listdir("DownloadManagers")
        if name.endswith(".py")
    ]
    modules = {}
    for modu in modo:
        modu = "DownloadManagers." + modu
        modules[modu] = __import__(modu)
    baseclass = modules[
        "DownloadManagers.DownloadManager"].DownloadManager.DownloadManager
    self.managers = {}
    for cls in baseclass.__subclasses__():
        self.managers[cls.__name__] = cls
    # ---- Download plugins: same discovery pattern, but each plugin class
    # is instantiated immediately.
    modo = [
        name.split(".py")[0] for name in listdir("DownloadPlugins")
        if name.endswith(".py")
    ]
    modules = {}
    for modu in modo:
        modu = "DownloadPlugins." + modu
        modules[modu] = __import__(modu)
    baseclass = modules[
        "DownloadPlugins.DownloadPlugin"].DownloadPlugin.DownloadPlugin
    self.downloaders = {}
    for cls in baseclass.__subclasses__():
        self.downloaders[cls.__name__] = cls()
    # Runtime configuration; the impdir argument overrides the settings
    # default when provided (truthy).
    self.config = {
        'dldir': settings.DOWNLOAD_DIRECTORY,
        'impdir': impdir if impdir else settings.IMPORTER_DIRECTORY,
        'downloadmanager': settings.DOWNLOAD_MANAGER
    }
    # Create the download directory on first run.
    if not path.exists(path.abspath(self.config['dldir'])):
        makedirs(path.abspath(self.config['dldir']))
        self.logger.info("Download Directory Created!")
def init_app(self, app, config=None):
    """
    Configure Graylog logger from a Flask application

    Available configuration options:
      GRAYLOG_HOST - the host to send messages to [default: 'localhost']
      GRAYLOG_PORT - the port to send messages to [default: 12201]
      GRAYLOG_FACILITY - the facility to report with [default: 'flask']
      GRAYLOG_EXTRA_FIELDS - whether or not to include `extra` fields from
        the message [default: True]
      GRAYLOG_ADD_DEBUG_FIELDS - whether extra python debug fields should be
        added to each message [default: True]
      GRAYLOG_CONFIGURE_MIDDLEWARE - whether to setup middleware to log each
        response [default: True]

    :param app: Flask application to configure this logger for
    :type app: flask.Flask
    :param config: An override config to use instead of `app.config`
    :type config: `dict` or `None`
    """
    # An explicit config wins; otherwise fall back to the app's config.
    if config is not None:
        self.config = config
    elif app is not None:
        self.config = app.config
        self.app = app

    # Fill in any missing settings with their defaults.
    defaults = {
        'GRAYLOG_HOST': 'localhost',
        'GRAYLOG_PORT': 12201,
        'GRAYLOG_FACILITY': 'flask',
        'GRAYLOG_EXTRA_FIELDS': True,
        'GRAYLOG_ADD_DEBUG_FIELDS': True,
        'GRAYLOG_CONFIGURE_MIDDLEWARE': True,
    }
    for key, value in defaults.items():
        self.config.setdefault(key, value)

    # Configure the logging handler and attach it to this logger.
    self.handler = graypy.GELFHandler(
        host=self.config['GRAYLOG_HOST'],
        port=self.config['GRAYLOG_PORT'],
        facility=self.config['GRAYLOG_FACILITY'],
        extra_fields=self.config['GRAYLOG_EXTRA_FIELDS'],
        debugging_fields=self.config['GRAYLOG_ADD_DEBUG_FIELDS'],
    )
    self.addHandler(self.handler)

    # Per-response logging middleware, if requested.
    if self.config['GRAYLOG_CONFIGURE_MIDDLEWARE']:
        self.setup_middleware()
def setup_logging():
    """Configure file-based logging plus a CRITICAL-only GELF fan-out and
    return the module logger."""
    logging.basicConfig(
        filename=config['log-file'],
        level='WARNING',
        format='%(asctime)s %(levelname)s %(module)s %(process)d %(funcName)s: %(message)s'
    )
    log = logging.getLogger(__name__)
    log.setLevel(config['log-level'])

    # Only critical events are forwarded to the GELF receiver.
    gelf_handler = graypy.GELFHandler(wfconfig['named_gelfreceiver'], 12201)
    gelf_handler.setLevel('CRITICAL')
    log.addHandler(gelf_handler)

    log.info('got my config')
    return log
def configure_logging(app):
    """Wire the Flask app logger to a GELF server outside debug/test mode.

    Reads GELF_SERVER, GELF_LOG_LEVEL and LOG_LEVEL from app.config.
    """
    if app.debug or app.testing:
        # Skip debug and test mode. Just check standard output.
        return

    if "GELF_SERVER" in app.config:
        gelf_handler = graypy.GELFHandler(app.config['GELF_SERVER'], 12201)
        gelf_formatter = logging.Formatter(
            '%(asctime)s [%(process)d] [%(levelname)s] %(name)-16s %(message)s',
            '%Y-%m-%d %H:%M:%S')
        gelf_handler.setFormatter(gelf_formatter)
        gelf_handler.setLevel(app.config.get("GELF_LOG_LEVEL", logging.WARNING))
        app.logger.addHandler(gelf_handler)
        # BUG FIX: Logger.warn is a deprecated alias (removed in Python
        # 3.13); use warning() instead.
        app.logger.warning('EggsNSpam Flask Service Started! PID={}.'.format(
            os.getpid()))

    # Set default level on logger, which might be overwritten by handlers.
    app.logger.setLevel(app.config.get("LOG_LEVEL", logging.WARNING))
def compose_logger(cfg, name=None, extra=None):
    """Build (or reuse) a logger from a config dict.

    cfg['handlers'] may enable 'gray_gelf' (args tuple for GELFHandler),
    'file' (path) and 'stream'; cfg['level'] and cfg['format'] apply to the
    logger and all its handlers. If *extra* is given, a LoggerAdapter
    wrapping the logger is returned instead.
    """
    logger = logging.getLogger(name or __name__.split('.')[0])

    # Only configure once per logger name; reuse afterwards.
    if not logger.handlers:
        handler_cfg = cfg['handlers']
        if 'gray_gelf' in handler_cfg and handler_cfg['gray_gelf']:
            logger.addHandler(graypy.GELFHandler(*handler_cfg['gray_gelf']))
        if 'file' in handler_cfg and handler_cfg['file']:
            logger.addHandler(logging.FileHandler(handler_cfg['file']))
        if 'stream' in handler_cfg and handler_cfg['stream']:
            logger.addHandler(logging.StreamHandler())

        logger.setLevel(getattr(logging, cfg['level'].upper()))
        formatter = logging.Formatter(cfg['format'])
        for handler in logger.handlers:
            handler.setFormatter(formatter)

    return logging.LoggerAdapter(logger, extra) if extra else logger
def config_logger(log_level=logging.DEBUG):
    """Configure and return the module logger.

    Adds a file handler and/or a Graylog handler when the corresponding
    CONFIG options ([general] log_file / graylog_api + graylog_port) are set.
    """
    # BUG FIX: basicConfig has no 'log_level' keyword — in Python 3 it
    # raises ValueError("Unrecognised argument(s)"). The correct keyword
    # is 'level'.
    logging.basicConfig(format='%(levelname)s %(asctime)s: \t%(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=log_level)
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)

    if CONFIG.get('general', 'log_file', '') != '':
        hdlr = logging.FileHandler(CONFIG.get('general', 'log_file', ''))
        formatter = logging.Formatter(
            fmt='%(levelname)s %(asctime)s: %(message)s',
            datefmt='%m/%d/%Y %I:%M:%S %p')
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    if CONFIG.get('general', 'graylog_api', '') != '' and CONFIG.get(
            'general', 'graylog_port', '') != '':
        gray_handler = graypy.GELFHandler(
            CONFIG.get('general', 'graylog_api', ''),
            CONFIG.getint('general', 'graylog_port'))
        logger.addHandler(gray_handler)
    return logger
def configure_logging(app, context='api'):
    """Attach a midnight-rotated file handler (and, when USE_GRAYLOG is set,
    a best-effort Graylog handler) to the app logger."""
    app.logger.setLevel(logging.DEBUG)
    level = app.config['LOG_LEVEL']

    rotating = logging.handlers.TimedRotatingFileHandler(
        app.config['LOG_FILE'], when='midnight', backupCount=1)
    rotating.setLevel(level)
    app.logger.addHandler(rotating)

    if not app.config.get('USE_GRAYLOG'):
        return
    # Best effort: any failure (missing graypy, bad config) is logged and
    # ignored so the app still starts.
    try:
        import graypy
        grayhandler = graypy.GELFHandler(
            app.config['GRAYLOG_HOST'], app.config['GRAYLOG_PORT'],
            facility='comptoir.{0}'.format(context))
        grayhandler.setLevel(level)
        grayhandler.setFormatter(
            logging.Formatter(fmt='%(name)s[%(process)d]: %(message)s'))
        app.logger.addHandler(grayhandler)
    except Exception:
        app.logger.exception('Graylog not configured on this machine')
# NOTE(review): this chunk starts mid-script — `parser` (argparse) and the
# --host/--port/--localname/--vhost options are defined above this view.
parser.add_argument('--facility', dest='facility', default='error_log',
                    help='logging facility (default: error_log)')
args = parser.parse_args()

# Apache error_log line shape: "[date] [level] [client 1.2.3.4] message".
regexp = '^\[[^]]*\] \[([^]]*)\] \[client (?P<ipaddr>[0-9\.]+)\] (.*)'
baserecord = {}
if args.vhost:
    baserecord['vhost'] = args.vhost

logger = logging.getLogger(args.facility)
logger.setLevel(logging.DEBUG)
logger.addHandler(
    graypy.GELFHandler(args.host, int(args.port), debugging_fields=False,
                       localname=args.localname))

# Read stdin line-by-line until EOF (Python 2: note the print statement).
for line in iter(sys.stdin.readline, b''):
    print line.rstrip()
    matches = re.search(regexp, line)
    if matches:
        # NOTE(review): `record` aliases (does not copy) baserecord, so
        # matched fields accumulate across lines — confirm this is intended.
        record = baserecord
        record.update(matches.groupdict())
        # LoggerAdapter attaches the parsed fields as extra GELF attributes.
        adapter = logging.LoggerAdapter(logging.getLogger(args.facility),
                                        record)
        if args.vhost:
            adapter.error('%s %s %s: %s' % (matches.group(2), args.vhost,
                                            matches.group(1),
                                            matches.group(3)))
if __name__ == '__main__':
    # Set up logging: rotating file + stderr (both formatted) + GELF.
    logger = logging.getLogger('tagrecommendation')
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    file_handler = RotatingFileHandler(LOGFILE, maxBytes=2 * 1024 * 1024,
                                       backupCount=5)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)

    logger.addHandler(graypy.GELFHandler(LOGSERVER_IP_ADDRESS, LOGSERVER_PORT))

    # Start the Twisted web service.
    logger.info('Configuring tag recommendation service...')
    root = resource.Resource()
    root.putChild("tagrecommendation", TagRecommendationServer())
    site = server.Site(root)
    reactor.listenTCP(LISTEN_PORT, site)
    logger.info('Started tag recommendation service, listening to port ' +
                str(LISTEN_PORT) + "...")
    reactor.run()
    logger.info('Service stopped.')
parser.add_argument('--vhost', dest='vhost',
                    help='Add additional "vhost" field to all log records. This can be used to differentiate between virtual hosts.')
args = parser.parse_args()

# The list of expected fields is hard-coded. Please feel free to change it.
# As specified above, this requires the following line in apache
# configuration:
#   CustomLog "||/path/to/accesslog2gelf.py"
#             "%V %h %u \"%r\" %>s %b \"%{Referer}i\""
regexp = '^(?P<host>\S+) (?P<ipaddr>\S+) (?P<username>\S+) "(?P<request>[^"]*)" (?P<status>\S+) (?P<size>\S+) "(?P<referer>[^"]*)"$'
baserecord = {}
if args.vhost:
    baserecord['vhost'] = args.vhost

logger = logging.getLogger(args.facility)
logger.setLevel(logging.DEBUG)
logger.addHandler(graypy.GELFHandler(args.host, int(args.port),
                                     debugging_fields=False))

for line in iter(sys.stdin.readline, b''):
    matches = re.search(regexp, line)
    if not matches:
        continue
    # Parsed fields ride along as extra GELF attributes via the adapter.
    record = baserecord
    record.update(matches.groupdict())
    adapter = logging.LoggerAdapter(logging.getLogger(args.facility), record)
    # Default output message format is also hard-coded.
    if args.vhost:
        adapter.info('%s %s (%s) "%s" %s %s "%s"' % tuple(
            record[f] for f in ["ipaddr", "vhost", "host", "request",
                                "status", "size", "referer"]))
    else:
        adapter.info('%s %s "%s" %s %s "%s"' % tuple(
            record[f] for f in ["ipaddr", "host", "request", "status",
                                "size", "referer"]))
def error_handling(error):
    """Flask error handler: log the exception (with extra fields) via
    app.logger and return a JSON body {code, description, message} with a
    matching HTTP status code."""
    if isinstance(error, HTTPException):
        result = {'code': error.code, 'description': error.description}
    else:
        # Non-HTTP exceptions are reported as generic 500s.
        description = abort.mapping[500].description
        result = {'code': 500, 'description': description}
    # logger.exception attaches the active traceback; the `extra` dict
    # becomes additional GELF fields when the handler below is attached.
    app.logger.exception(str(error), extra=result)
    result['message'] = str(error)
    resp = jsonify(result)
    resp.status_code = result['code']
    return resp

# Register the handler for every HTTP error code Flask knows about.
for code in abort.mapping:
    app.register_error_handler(code, error_handling)

@app.route('/api', methods=['GET', 'POST'])
def my_microservice():
    """Demo endpoint: logs a message, then deliberately raises to exercise
    error_handling above (the return is never reached)."""
    app.logger.info("Logged into Graylog")
    resp = jsonify({'result': 'OK', 'Hello': 'World!'})
    # this will also be logged_
    raise Exception('BAHM')
    return resp

if __name__ == '__main__':
    # Ship app.logger output to a local Graylog GELF UDP input.
    handler = graypy.GELFHandler('localhost', 12201)
    app.logger.addHandler(handler)
    app.run()
print " " print "This script will sniff three management frame subtypes in 802.11: 0, 2 and 4 and will send it to Graylog2. Make sure you first set the interface you want to use in monitoring mode, before starting this script: airmon-ng start wlan0. Also, make sure you have a GELF UDP listener ready on " + SentToHost + ":" + SentToPort + ". In the same directory as this script, store the file you get from https://gist.github.com/derlinkshaender/5995776 and name it graylogger.py." print " " print "---- THIS PROGRAM IS \"AS IS\" WITHOUT WARRANTY OF ANY KIND ----" # Define the interface name that we will be sniffing from, you can # change this if needed. interface = "mon0" # Next, declare a Python list to keep track of client MAC addresses # that we have already seen so we only print the address once per client. observedclients = [] my_logger = logging.getLogger(Name) my_logger.setLevel(logging.DEBUG) handler = graypy.GELFHandler(SentToHost, int(SentToPort)) my_logger.addHandler(handler) # The sniffmgmt() function is called each time Scapy receives a packet # (we'll tell Scapy to use this function below with the sniff() function). # The packet that was sniffed is passed as the function argument, "p". def sniffmgmt(p): # Define our tuple (an immutable list) of the 3 management frame # subtypes sent exclusively by clients. I got this list from Wireshark. stamgmtstypes = (0, 2, 4) # Make sure the packet has the Scapy Dot11 layer present if p.haslayer(Dot11):
import logging import graypy import time import subprocess # instantiating a new logger object to send the log message logger = logging.getLogger('logger') # setting the importance of the message logger.setLevel(logging.INFO) # instantiating a new handler with GELF format (specialised JSON) based on address and port handler = graypy.GELFHandler('uta.dyndns.sonah.xyz', 12201) # adding the handler to the logger logger.addHandler(handler) while 1: # run script for collecting metrics subprocess.call("/etc/metrics-collector/update_metrics.sh") # modifing the message: assigning new fields and their values (reading from files filled by shell script) adapter = logging.LoggerAdapter( logger, { 'temperature': float( open('/var/www/html/img-host/temperature', 'r').read().strip()), 'ping': float(open('/var/www/html/img-host/ping', 'r').read().strip()), 'clock': float(open('/var/www/html/img-host/clock', 'r').read().strip()), 'pub_ip': open('/var/www/html/img-host/pub_ip', 'r').read().strip(),