def getlogger(name, log_file, log_level="INFO"):
    """Build a logger that writes both to a file and to Fluentd/Elasticsearch.

    :param str name: Logger name; also used as the Fluentd tag.
    :param str log_file: Path of the log file to append to.
    :param str log_level: Level name applied to the logger and both handlers.
    :returns: The configured ``logging.Logger`` instance.
    """
    importer_logger = logging.getLogger(name)
    importer_logger.setLevel(log_level)

    # Guard against handler duplication: logging.getLogger returns the same
    # object for a given name, so a second call with the same name would
    # otherwise attach a second pair of handlers and emit every record twice.
    if importer_logger.handlers:
        return importer_logger

    # Handler to send logs to a file.
    fh = logging.FileHandler(filename=log_file)
    fh.setLevel(log_level)
    formatter_file = logging.Formatter(
        '%(asctime)s - %(msecs)d - %(funcName)s - %(lineno)d : %(levelname)s : %(message)s'
    )
    fh.setFormatter(formatter_file)
    importer_logger.addHandler(fh)

    # Handler to send logs to Elasticsearch (via a Fluentd forwarder).
    flh = handler.FluentHandler(name,
                                host=cst.FLUENT_CONFIGURATION["SERVER"],
                                port=cst.FLUENT_CONFIGURATION["PORT"],
                                buffer_overflow_handler=overflow_handler)
    # %(hostname)s is supplied by FluentRecordFormatter; the remaining keys
    # map standard LogRecord attributes.
    custom_format = {
        'host': '%(hostname)s',
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
        'lineno': '%(lineno)s',
        'pathname': '%(pathname)s',
    }
    formatter_flh = fluent_handler.FluentRecordFormatter(custom_format)
    flh.setFormatter(formatter_flh)
    importer_logger.addHandler(flh)
    return importer_logger
def configure_logging(service_name):
    """
    Configure logging based on the settings in the settings file.

    One handler is attached to the root logger for each logging mode enabled
    in `settings.logging_modes`; see `microservice.core.settings.LoggingMode`
    for the supported logging types.

    :param str service_name: Name of the service being served by this instance.
    """
    root = logging.getLogger()
    root.setLevel(settings.logging_level)

    formatter_kwargs = {
        'fmt': json.dumps({'extra': {
            'local_service': service_name,
            # Basic way to distinguish logs between instances of the same microservice.
            'instance_id': random.randint(100000, 999999)
        }})
    }
    formatter = LogstashFormatterV1(**formatter_kwargs)

    def attach(new_handler, fmt):
        # Wiring shared by every mode: formatter, request-id filter, register.
        new_handler.setFormatter(fmt)
        new_handler.addFilter(RequestIDLogFilter())
        root.addHandler(new_handler)

    modes = settings.logging_modes

    if settings.LoggingMode.FILE in modes:
        attach(logging.FileHandler('{}.log'.format(service_name)), formatter)

    if settings.LoggingMode.HUMAN in modes:
        attach(logging.StreamHandler(sys.stdout),
               HumanReadableLogstashFormatter(**formatter_kwargs))

    if settings.LoggingMode.STDOUT in modes:
        attach(logging.StreamHandler(sys.stdout), formatter)

    if settings.LoggingMode.LOGSTASH in modes:
        # TODO: test this
        raise Exception("Warning: untested")
        attach(AsynchronousLogstashHandler(**settings.logstash_settings),
               formatter)

    if settings.LoggingMode.FLUENTD in modes:
        # TODO: test this
        raise Exception("Warning: untested")
        attach(handler.FluentHandler('pycroservices.follow',
                                     **settings.fluentd_settings,
                                     buffer_overflow_handler=overflow_handler),
               formatter)
def add_handlers(self, config_path: str = None, **kwargs):
    """Replace this logger's handlers with the ones listed in a YAML config.

    :param config_path: Path of the YAML config file.
    :param kwargs: Values substituted into the ``format``/``output``
        templates of each handler config via ``str.format_map``.
    :raises ValueError: If a configured handler is unsupported or has no
        config entry.
    """
    self.logger.handlers = []

    with open(config_path) as fp:
        # safe_load: the config is plain data, so there is no reason to allow
        # the arbitrary-object construction that a bare yaml.load permits.
        config = yaml.safe_load(fp)

    for h in config['handlers']:
        cfg = config['configs'].get(h, None)
        # Validate before touching cfg: with the formatter lookup done first
        # (as before), a missing config raised AttributeError on None instead
        # of the intended ValueError below.
        if h not in self.supported or not cfg:
            raise ValueError(
                f'can not find configs for {h}, maybe it is not supported')
        fmt = getattr(formatter, cfg.get('formatter', 'PlainFormatter'))

        handler = None
        if h == 'StreamHandler':
            handler = logging.StreamHandler(sys.stdout)
            handler.setFormatter(fmt(cfg['format'].format_map(kwargs)))
        elif h == 'SysLogHandler':
            if cfg['host'] and cfg['port']:
                handler = SysLogHandlerWrapper(address=(cfg['host'], cfg['port']))
            else:
                # A UNIX domain socket is used when no host/port is configured.
                if platform.system() == 'Darwin':
                    handler = SysLogHandlerWrapper(address='/var/run/syslog')
                else:
                    handler = SysLogHandlerWrapper(address='/dev/log')
            if handler:
                handler.ident = cfg.get('ident', '')
                handler.setFormatter(fmt(cfg['format'].format_map(kwargs)))
        elif h == 'FileHandler':
            handler = logging.FileHandler(cfg['output'].format_map(kwargs),
                                          delay=True)
            handler.setFormatter(fmt(cfg['format'].format_map(kwargs)))
        elif h == 'FluentHandler':
            try:
                from fluent import asynchandler as fluentasynchandler
                from fluent.handler import FluentRecordFormatter

                handler = fluentasynchandler.FluentHandler(
                    cfg['tag'], host=cfg['host'], port=cfg['port'],
                    queue_circular=True)
                cfg['format'].update(kwargs)
                fmt = FluentRecordFormatter(cfg['format'])
                handler.setFormatter(fmt)
            except ImportError:
                # ModuleNotFoundError is a subclass of ImportError; fluent
                # support is optional, so a missing package is ignored.
                pass

        if handler:
            self.logger.addHandler(handler)

    verbose_level = LogVerbosity.from_string(config['level'])
    self.logger.setLevel(verbose_level.value)
from io import BytesIO
import logging

# msgpack was used below but never imported, so the overflow handler raised
# NameError the moment the send buffer actually overflowed.
import msgpack
from fluent import asynchandler as handler


def overflow_handler(pendings):
    """Dump records that overflowed the Fluentd send buffer to stdout.

    :param bytes pendings: msgpack-framed records that could not be sent.
    """
    unpacker = msgpack.Unpacker(BytesIO(pendings))
    for unpacked in unpacker:
        print(unpacked)


# Keys attached to every record; %(hostname)s is supplied by
# FluentRecordFormatter, the rest map standard LogRecord attributes.
custom_format = {
    'host': '%(hostname)s',
    'where': '%(module)s.%(funcName)s',
    'type': '%(levelname)s',
    'stack_trace': '%(exc_text)s'
}

logging.basicConfig(level=logging.INFO)
l = logging.getLogger('fluent.test')
h = handler.FluentHandler('fluent-python.asyn-log',
                          host='192.168.181.99',
                          port=30224,
                          buffer_overflow_handler=overflow_handler)
formatter = handler.FluentRecordFormatter(custom_format)
h.setFormatter(formatter)
l.addHandler(h)

# --- sample log entries
l.info({
    'from': 'userA',
    'to': 'userB'
})
l.info('{"from": "userC", "to": "userD"}')
l.info("This log entry will be logged with the additional key: 'message'.")
l.error({'erro': "erro info message"})
def add_handlers(self, config_path: str = None, **kwargs):
    """Add handlers from config file.

    :param config_path: Path of config file.
    :param kwargs: Extra parameters substituted into the ``format``/``output``
        templates of each handler config via ``str.format_map``.
    :raises ValueError: If a configured handler is unsupported or has no
        config entry.
    :returns: None
    """
    self.logger.handlers = []

    with open(config_path) as fp:
        config = JAML.load(fp)

    for h in config['handlers']:
        cfg = config['configs'].get(h, None)
        # Validate before touching cfg: with the formatter lookup done first
        # (as before), a missing config raised AttributeError on None instead
        # of the intended ValueError below.
        if h not in self.supported or not cfg:
            raise ValueError(
                f'can not find configs for {h}, maybe it is not supported')
        fmt = getattr(formatter, cfg.get('formatter', 'PlainFormatter'))

        handler = None
        if h == 'StreamHandler':
            handler = logging.StreamHandler(sys.stdout)
            handler.setFormatter(fmt(cfg['format'].format_map(kwargs)))
        elif h == 'SysLogHandler':
            if cfg['host'] and cfg['port']:
                handler = SysLogHandlerWrapper(address=(cfg['host'], cfg['port']))
            else:
                # A UNIX domain socket is used when no host/port is configured.
                if platform.system() == 'Darwin':
                    handler = SysLogHandlerWrapper(address='/var/run/syslog')
                else:
                    handler = SysLogHandlerWrapper(address='/dev/log')
            if handler:
                handler.ident = cfg.get('ident', '')
                handler.setFormatter(fmt(cfg['format'].format_map(kwargs)))
                try:
                    # Probe the connection eagerly (private API): if syslog is
                    # unreachable, drop the handler rather than fail on the
                    # first emitted record.
                    handler._connect_unixsocket(handler.address)
                except OSError:
                    handler = None
        elif h == 'FileHandler':
            handler = logging.FileHandler(cfg['output'].format_map(kwargs),
                                          delay=True)
            handler.setFormatter(fmt(cfg['format'].format_map(kwargs)))
        elif h == 'FluentHandler':
            from ..importer import ImportExtensions
            # Fluent support is optional: the whole setup is skipped silently
            # when the package is not installed.
            with ImportExtensions(required=False, verbose=False):
                from fluent import asynchandler as fluentasynchandler
                from fluent.handler import FluentRecordFormatter

                handler = fluentasynchandler.FluentHandler(
                    cfg['tag'], host=cfg['host'], port=cfg['port'],
                    queue_circular=True)
                cfg['format'].update(kwargs)
                fmt = FluentRecordFormatter(cfg['format'])
                handler.setFormatter(fmt)

        if handler:
            self.logger.addHandler(handler)

    verbose_level = LogVerbosity.from_string(config['level'])
    # The environment variable overrides the level from the config file.
    if 'JINA_LOG_LEVEL' in os.environ:
        verbose_level = LogVerbosity.from_string(
            os.environ['JINA_LOG_LEVEL'])
    self.logger.setLevel(verbose_level.value)
# NOTE(review): the statements below are the tail of a size-scraping function
# whose `def` line is outside this view; `offer` presumably is a BeautifulSoup
# element for one product offer -- confirm against the full source.
    # Locate the active size-variant container within the offer markup.
    div = offer.find(attrs={"class": "m-productsBox_variantSizes js-variant_size is-active"})
    try:
        spans = div.findAll("span")
    except AttributeError:
        # div is None when the offer has no size block; report no sizes.
        return []
    sizes = []
    for val in (span.text for span in spans):
        # format_size_number presumably normalises the raw size label; the
        # 'eu' prefix marks these as EU sizes -- TODO confirm helper semantics.
        sizes.append('eu' + format_size_number(val))
    return sizes


if __name__ == '__main__':
    # Keys forwarded with every Fluentd record (standard LogRecord fields).
    log_format = {
        'where': '%(module)s.%(funcName)s',
        'type': '%(levelname)s',
        'stack_trace': '%(exc_text)s',
    }
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger('')
    logger.setLevel(level=logging.DEBUG)
    # Ship logs to a local Fluentd daemon, tagged per scraper.
    h = asynchandler.FluentHandler('kicks.scraper.%s' % scraper_name,
                                   host='localhost',
                                   port=24224)
    h.setLevel(level=logging.DEBUG)
    formatter = handler.FluentRecordFormatter(log_format)
    h.setFormatter(formatter)
    logging.getLogger('').addHandler(h)
    # Run the scrape and persist the items -- presumably SessionedWriter
    # groups them under one scraping session; verify against its definition.
    items = sizeer_parse()
    writer = SessionedWriter(scraper_name, items)
    writer.write_items()