Example #1
    def testDefaultFormat(self):
        fr = jsonlogger.JsonFormatter()
        self.logHandler.setFormatter(fr)

        msg = "testing logging format"
        self.logger.info(msg)
        logJson = json.loads(self.buffer.getvalue())

        self.assertEqual(logJson["message"], msg)
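This test relies on a fixture that is not shown; a minimal setUp sketch, assuming the self.logger, self.logHandler and self.buffer names used above, routes log records into an in-memory buffer:

import io
import logging
import unittest


class TestJsonLogger(unittest.TestCase):
    def setUp(self):
        # a dedicated logger whose StreamHandler writes into an in-memory buffer
        self.logger = logging.getLogger("logging-test")
        self.logger.setLevel(logging.DEBUG)
        self.buffer = io.StringIO()
        self.logHandler = logging.StreamHandler(self.buffer)
        self.logger.addHandler(self.logHandler)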
Example #2
def robotlogger():
    logger = logging.getLogger("robot_events")
    logger.setLevel(logging.DEBUG)
    rmq_handler = logstash.AMQPLogstashHandler(host='localhost',
                                               version=1,
                                               durable=True)
    rmq_handler.setFormatter(jsonlogger.JsonFormatter())
    logger.addHandler(rmq_handler)
    return logger
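A brief usage sketch of the returned logger; the extra field names below are illustrative, not part of the original snippet:

logger = robotlogger()
# extra keys are merged into the JSON document shipped to Logstash
logger.info("robot state changed", extra={"robot_id": 42, "state": "charging"})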
Example #3
def filelogger(name, filename, level=INFO, recordfields=None):
    # attach a JSON-formatting FileHandler to the named logger
    handler = FileHandler(filename=filename)
    log = getLogger(name)
    textformatter = jsonlogger.JsonFormatter('%(method)s %(filename)s %(lineno)s')
    handler.setFormatter(textformatter)
    log.addHandler(handler)
    log.setLevel(level)
    return log
Example #4
def setup_logging(level=logging.INFO):
    global logger
    logger = logging.getLogger()
    handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        '%(asctime)s %(levelname)s %(message)s', datefmt="%Y-%m-%dT%H:%M:%S%z")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(level)
Example #5
    def _base_configuration_log_file(self) -> logging.FileHandler:
        try:
            file_handler = logging.FileHandler(filename=self._log_file)
            file_handler.setLevel(self._log_level)
            file_handler.setFormatter(jsonlogger.JsonFormatter(self.formatter))
            return file_handler
        except Exception as error:
            print(f"\nError: exception while creating the base log file configuration - {error}")
Example #6
def setup_logging(log_level):
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)
    json_handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(levelname)s %(name)s %(message)s'
    )
    json_handler.setFormatter(formatter)
    logger.addHandler(json_handler)
Example #7
    def testUnknownFormatKey(self):
        fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s')

        self.logHandler.setFormatter(fr)
        msg = "testing unknown logging format"
        try:
            self.logger.info(msg)
        except Exception:
            self.fail("logging with an unknown format key should not raise")
Example #8
    def testJsonDefaultEncoderWithTimestamp(self, time_mock):
        fr = jsonlogger.JsonFormatter(timestamp=True)
        self.logHandler.setFormatter(fr)

        self.logger.info("Hello")

        self.assertTrue(time_mock.called)
        logJson = json.loads(self.buffer.getvalue())
        self.assertEqual(logJson.get("timestamp"), "2017-07-14T02:40:00+00:00")
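The time_mock parameter implies the test method is decorated with a patch that freezes record creation time; a plausible sketch, assuming time.time is patched to the epoch value 1500000000 (which corresponds to the asserted 2017-07-14T02:40:00+00:00):

import unittest
from unittest import mock


class TestJsonLogger(unittest.TestCase):
    @mock.patch("time.time", return_value=1500000000.0)
    def testJsonDefaultEncoderWithTimestamp(self, time_mock):
        ...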
Example #9
    def runJsonFormatter(self):
        print('jsonlogger.JsonFormatter')
        logger = logging.getLogger(self.LOGGER_NAME)
        logger.setLevel(logging.DEBUG)

        logHandler = logging.StreamHandler()
        logger.addHandler(logHandler)

        formatter = jsonlogger.JsonFormatter()
        logHandler.setFormatter(formatter)

        msg = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}

        logger.info(msg)

        extra = {"text": "testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}

        logger.info("hello", extra=extra)

        SUPPORTED_KEYS = [
            'asctime', 'created', 'filename', 'funcName', 'levelname', 'levelno', 'lineno', 'module', 'msecs',
            'message', 'name', 'pathname', 'process', 'processName', 'relativeCreated', 'thread', 'threadName'
        ]

        def log_format(x):
            return ['%({0:s})'.format(i) for i in x]

        custom_format = ' '.join(log_format(SUPPORTED_KEYS))

        formatter = jsonlogger.JsonFormatter(custom_format)
        logHandler.setFormatter(formatter)

        msg = "A testing logging format"
        logger.info(msg)

        msg = {"text": "B testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}

        logger.info(msg)

        extra = {"text": "C testing logging", "num": 1, 5: "9", "nested": {"more": "data"}}

        logger.info("hello", extra=extra)
Example #10
    def create_actor_system(self):
        logcfg = {
            'version': 1,
            'formatters': {
                'normal': {
                    'format': '%(levelname)-8s %(message)s'
                },
                'actor': {
                    'format': '%(levelname)-8s %(actorAddress)s => %(message)s'
                },
                'json': {
                    '()': jsonlogger.JsonFormatter
                }
            },
            'filters': {
                'isActorLog': {
                    '()': actorLogFilter
                },
                'notActorLog': {
                    '()': notActorLogFilter
                }
            },
            'handlers': {
                'h1': {
                    'class': 'logging.FileHandler',
                    'filename': 'experiment_data.json',
                    'formatter': 'json',
                    'filters': ['notActorLog'],
                    'level': logging.INFO
                },
                'h2': {
                    'class': 'logging.FileHandler',
                    'filename': 'experiment_data.json',
                    'formatter': 'json',
                    'filters': ['isActorLog'],
                    'level': logging.INFO
                },
            },
            'loggers': {
                '': {
                    'handlers': ['h1', 'h2'],
                    'level': logging.DEBUG
                }
            }
        }

        self.actor_system = ActorSystem(None, logDefs=logcfg)
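The actorLogFilter and notActorLogFilter factories referenced by the '()' keys are not shown; in the Thespian examples they split records by whether an actorAddress attribute is present. A sketch under that assumption:

import logging


class actorLogFilter(logging.Filter):
    def filter(self, record):
        # keep records emitted from inside an actor (they carry an actorAddress)
        return "actorAddress" in record.__dict__


class notActorLogFilter(logging.Filter):
    def filter(self, record):
        # keep everything that did not originate from an actor
        return "actorAddress" not in record.__dict__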
Example #11
def init_app(application, interface):
    """Initialize the main app with config information and routes."""
    if os.environ["ENVIRONMENT"] == "production":
        from memote_webservice.settings import Production
        application.config.from_object(Production())
    elif os.environ["ENVIRONMENT"] == "testing":
        from memote_webservice.settings import Testing
        application.config.from_object(Testing())
    else:
        from memote_webservice.settings import Development
        application.config.from_object(Development())

    # Configure logging
    logging.config.dictConfig(application.config['LOGGING'])
    root_logger = logging.getLogger()
    for handler in root_logger.handlers:
        handler.setFormatter(jsonlogger.JsonFormatter())
    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.processors.TimeStamper(fmt="iso"),
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    # Configure Sentry
    if application.config['SENTRY_DSN']:
        sentry = Sentry(dsn=application.config['SENTRY_DSN'],
                        logging=True,
                        level=logging.WARNING)
        sentry.init_app(application)

    # Import resources.
    import memote_webservice.resources

    # Apparently registering a blueprint takes care of routing.
    interface.init_app(application)

    # Add CORS information for all resources.
    CORS(application)

    # Add Redis caching.
    redis_store.init_app(application)

    LOGGER.debug("Successfully initialized the app.")
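Once structlog is configured as above, application code typically obtains a bound logger and emits key/value events that are rendered through the stdlib handlers (and therefore through JsonFormatter); a short illustrative usage, with made-up event and key names:

import structlog

log = structlog.get_logger(__name__)
# the event name and key/value context are forwarded to the stdlib logger configured above
log.info("service_initialized", component="memote_webservice")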
Example #12
    def initialize(self):
        args = self.get_argparser().parse_args()

        if args.debug:
            self.log_level = logging.DEBUG

        self.load_config_file(args.config)

        if os.path.exists(args.repo):
            # Let's treat this as a local directory we are building
            self.repo_type = 'local'
            self.repo = args.repo
            self.ref = None
            self.cleanup_checkout = False
        else:
            self.repo_type = 'remote'
            self.repo = args.repo
            self.ref = args.ref
            self.cleanup_checkout = args.clean

        if args.json_logs:
            # register JSON excepthook to avoid non-JSON output on errors
            sys.excepthook = self.json_excepthook
            # Need to reset existing handlers, or we repeat messages
            logHandler = logging.StreamHandler()
            formatter = jsonlogger.JsonFormatter()
            logHandler.setFormatter(formatter)
            self.log.handlers = []
            self.log.addHandler(logHandler)
            self.log.setLevel(logging.INFO)
        else:
            # due to json logger stuff above,
            # our log messages include carriage returns, newlines, etc.
            # remove the additional newline from the stream handler
            self.log.handlers[0].terminator = ''
            # We don't want a [Repo2Docker] on all messages
            self.log.handlers[0].formatter = logging.Formatter(fmt='%(message)s')

        if args.image_name:
            self.output_image_spec = args.image_name
        else:
            # Attempt to set a sane default!
            # HACK: Provide something more descriptive?
            self.output_image_spec = 'r2d' + escapism.escape(self.repo, escape_char='-').lower() + str(int(time.time()))

        self.push = args.push
        self.run = args.run
        self.json_logs = args.json_logs

        self.build = args.build
        if not self.build:
            # Can't push nor run if we aren't building
            self.run = False
            self.push = False

        self.run_cmd = args.cmd
Example #13
def init_logger(*args, **kwargs):
    logger = logging.getLogger(*args, **kwargs)
    handler = logging.StreamHandler(sys.stdout)
    formatter = jsonlogger.JsonFormatter("%(levelname)s %(name)s %(message)s",
                                         json_default=encode,
                                         timestamp=True)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)
    logging.getLogger().addHandler(handler)
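The encode callable passed as json_default is defined elsewhere; json_default is invoked for values the standard JSON encoder cannot serialize, so a minimal stand-in (purely illustrative) might be:

import datetime


def encode(obj):
    # render dates and datetimes as ISO-8601, fall back to str() for anything else
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    return str(obj)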
Example #14
def configure_log_handler(supported_log_message_keys, log_datetime_format):
    log_format = ' '.join(make_log_format(supported_log_message_keys))
    formatter = jsonlogger.JsonFormatter(fmt=log_format,
                                         datefmt=log_datetime_format)
    # set all datetimes to utc
    formatter.converter = time.gmtime

    log_handler = logging.StreamHandler()
    log_handler.setFormatter(formatter)
    return log_handler
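make_log_format is not shown here; it presumably expands each supported key into a %-style placeholder, much like the log_format helper in Example #9. A sketch under that assumption:

def make_log_format(supported_log_message_keys):
    # e.g. ['asctime', 'levelname'] -> ['%(asctime)', '%(levelname)']
    return ['%({0:s})'.format(key) for key in supported_log_message_keys]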
Example #15
def get_logger():
    # Configure logging.
    log_format = '%(asctime) %(levelname) %(module) %(funcName) %(lineno) %(message)'
    formatter = jsonlogger.JsonFormatter(log_format)
    logger = logging.getLogger()
    logHandler = logging.StreamHandler()
    logHandler.setFormatter(formatter)
    logger.addHandler(logHandler)
    logger.setLevel(logging.DEBUG)
    return logger
Example #16
def log_in_json_format():
    from pythonjsonlogger import jsonlogger

    logger = logging.getLogger()
    logHandler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter()

    logHandler.setFormatter(formatter)
    logger.addHandler(logHandler)
    logger.setLevel(logging.INFO)
Example #17
    def _get_formatter(self, json):
        '''
        Return the proper log formatter

        @param json: Boolean value
        '''
        if json:
            return jsonlogger.JsonFormatter()
        else:
            return logging.Formatter(self.format_string)
Example #18
def start_logger():
    """
    Start logging to stdout.
    """
    root = logging.getLogger()
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        jsonlogger.JsonFormatter("%(asctime) %(name) %(levelname) %(message)"))
    root.addHandler(handler)
    root.setLevel(logging.INFO)
Example #19
def setSensorLogger(config):
    logger = logging.getLogger("sensor")
    logger.setLevel(logging.INFO)

    logHandler = logging.handlers.TimedRotatingFileHandler("log/sensor_log",
                                                           when="midnight",
                                                           backupCount=99)
    formatter = jsonlogger.JsonFormatter()
    logHandler.setFormatter(formatter)
    logger.addHandler(logHandler)
Example #20
    def initialize(self, *args, **kwargs):
        super().initialize(*args, **kwargs)
        logHandler = logging.StreamHandler()
        formatter = jsonlogger.JsonFormatter()
        logHandler.setFormatter(formatter)
        # Need to reset existing handlers, or we repeat messages
        self.log.handlers = []
        self.log.addHandler(logHandler)
        self.log.setLevel(logging.INFO)
        self.load_config_file(self.config_file)
Example #21
    def add_handler(self, logger):

        from pythonjsonlogger import jsonlogger
        from logstash import TCPLogstashHandler
        host, port = self.config.get('LOGSTASH', 'HOST'), self.config.get(
            'LOGSTASH', 'PORT')
        logHandler = TCPLogstashHandler(host, int(port), version=1)
        formatter = jsonlogger.JsonFormatter()
        logHandler.setFormatter(formatter)
        logger.addHandler(logHandler)
Example #22
def json_logger():
    logger = logging.getLogger()
    log_handler = logging.StreamHandler(sys.stdout)
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(name)s %(levelname)s %(message)s')
    log_handler.setFormatter(formatter)
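    # binding flush to sys.stdout.flush makes every emitted record flush stdout immediately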
    log_handler.flush = sys.stdout.flush
    logger.setLevel(logging.INFO)
    logger.addHandler(log_handler)
    return logger
Example #23
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # FIXME: Not sure why this needs to be repeated - shouldn't configuring Application be enough?
        logHandler = logging.StreamHandler()
        formatter = jsonlogger.JsonFormatter()
        logHandler.setFormatter(formatter)
        # Need to reset existing handlers, or we repeat messages
        self.log.handlers = []
        self.log.addHandler(logHandler)
        self.log.setLevel(logging.INFO)
Example #24
    def testAddStaticFields(self):
        fr = jsonlogger.JsonFormatter(static_fields={'log_stream': 'kafka'})

        self.logHandler.setFormatter(fr)

        msg = "testing static fields"
        self.logger.info(msg)
        logJson = json.loads(self.buffer.getvalue())

        self.assertEqual(logJson["log_stream"], "kafka")
        self.assertEqual(logJson["message"], msg)
Example #25
def setup_logging():
    logger = logging.getLogger(NAME)
    if os.getenv('LOG_LEVEL'):
        logger.setLevel(int(os.getenv('LOG_LEVEL')))
    else:
        logger.setLevel(logging.INFO)
    json_handler = logging.StreamHandler()
    formatter = jsonlogger.JsonFormatter()
    json_handler.setFormatter(formatter)
    logger.addHandler(json_handler)
    return logger
Example #26
def get_general_logger():
    logger = logging.getLogger("GeneralLogger")
    logger.setLevel(logging.INFO)
    json_handler = logging.FileHandler('general.log')
    formatter = jsonlogger.JsonFormatter(
        fmt='%(asctime)s %(levelname)s %(name)s %(message)s')
    json_handler.setFormatter(formatter)
    if logger.hasHandlers():
        logger.handlers.clear()
    logger.addHandler(json_handler)
    return logger
Example #27
    def setup(self, cfg):
        self.loglevel = self.LOG_LEVELS.get(cfg.loglevel.lower(), logging.INFO)
        self.error_log.setLevel(self.loglevel)
        self.access_log.setLevel(logging.INFO)

        formatter = jsonlogger.JsonFormatter()
        h = logging.StreamHandler()
        h.setFormatter(formatter)
        h._gunicorn = True
        self.access_log.addHandler(h)
        self.error_log.addHandler(h)
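This appears to be a custom gunicorn logger class; gunicorn is normally pointed at such a class through its logger_class setting, for example in a config file (the module path below is made up for illustration):

# gunicorn.conf.py
logger_class = "myapp.logging.JSONLogger"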
Example #28
def json_log_setup(level=logging.DEBUG):
    logger = logging.getLogger()
    if logger.hasHandlers():
        logger.handlers.clear()
    logger.setLevel(level)
    logHandler = logging.FileHandler('error_json.log', 'a', 'utf-8')
    formatter = jsonlogger.JsonFormatter(
        '%(asctime)s %(levelname)s %(name)s %(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p')
    logHandler.setFormatter(formatter)
    logger.addHandler(logHandler)
Example #29
def initLogger():
    logger.setLevel(logging.DEBUG)
    if options.output == 'json':
        formatter = jsonlogger.JsonFormatter()
    else:
        formatter = logging.Formatter('%(asctime)s - %(message)s')
        formatter.formatTime = loggerTimeStamp

    sh = logging.StreamHandler(sys.stderr)
    sh.setFormatter(formatter)
    logger.addHandler(sh)
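options and loggerTimeStamp come from elsewhere in the module; since formatTime is reassigned on the formatter instance, loggerTimeStamp needs a (record, datefmt) signature. One possible sketch:

import time


def loggerTimeStamp(record, datefmt=None):
    # render the record's creation time as a UTC timestamp string
    return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(record.created))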
Example #30
def init_logger():
    log_level = _get_log_level()
    logger = logging.getLogger()
    logger.setLevel(log_level)

    formatter = jsonlogger.JsonFormatter(LOG_FORMAT)
    logHandler = logging.StreamHandler()
    logHandler.setFormatter(formatter)
    logHandler.setLevel(log_level)
    logger.addHandler(logHandler)
    logger.info("logger configured at level %s", log_level)
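LOG_FORMAT and _get_log_level are module-level names not included in the excerpt; plausible stand-ins (assumptions, not the original definitions) could be:

import os

# assumed format string and level lookup used by init_logger above
LOG_FORMAT = "%(asctime)s %(levelname)s %(name)s %(message)s"


def _get_log_level():
    # accept a level name such as "DEBUG" from the environment, defaulting to INFO
    return os.environ.get("LOG_LEVEL", "INFO").upper()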