Пример #1
0
def init_logger(my_logger):
    """Configure *my_logger* from the module-level ``options``.

    Sets the level to INFO, optionally enables process-wide JSON output via
    json_logging, then attaches exactly one handler: syslog, file, or stdout.

    :param my_logger: a ``logging.Logger`` instance to configure in place.
    """
    my_logger.setLevel(logging.INFO)

    if options.text_type == "randomjson":
        # Switch the whole process to JSON-formatted log records.
        json_logging.ENABLE_JSON_LOGGING = True
        json_logging.init_non_web()

    if options.journal:
        # Route records to the local syslog socket (picked up by the journal).
        jh = logging.handlers.SysLogHandler(address='/dev/log')
        my_logger.addHandler(jh)
    elif options.log_on_file:
        # BUGFIX: this was a Python 2 `print` statement (SyntaxError on
        # Python 3); converted to the print() function, message unchanged.
        print('log_on_file: {}'.format(options.log_on_file))
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh = logging.FileHandler(options.log_on_file)
        # Raw and JSON modes bypass the human-readable text formatter.
        if not (options.raw or options.text_type == "randomjson"):
            fh.setFormatter(formatter)
        my_logger.addHandler(fh)
    else:
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        sh = logging.StreamHandler(sys.stdout)
        if not (options.raw or options.text_type == "randomjson"):
            sh.setFormatter(formatter)
        my_logger.addHandler(sh)
Пример #2
0
 def __new__(cls, context: "core.BolinetteContext"):
     """Build and return a stdlib logger named ``test-logger``.

     If the environment flag ``json_logging`` is truthy, json_logging is
     initialised for JSON output first. The logger emits DEBUG and above
     to stdout. Note this returns a Logger, not an instance of *cls*.
     """
     if context.env["json_logging"]:
         json_logging.init_non_web(enable_json=True)
     log = logging.getLogger("test-logger")
     log.setLevel(logging.DEBUG)
     stdout_handler = logging.StreamHandler(sys.stdout)
     log.addHandler(stdout_handler)
     return log
Пример #3
0
def get_logger():
    """Return the root logger, configured for INFO output on stdout.

    Outside debug runs (global ``DEBUG`` falsy) the whole process is
    switched to JSON-formatted records via json_logging.
    """
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(logging.StreamHandler(sys.stdout))

    if not DEBUG:
        json_logging.ENABLE_JSON_LOGGING = True
        json_logging.init_non_web()

    return root
Пример #4
0
def logging_init(level, output, _format, format_string, syslog_address,
                 syslog_facility):
    """Configure the module-level ``logger``: level, destination, format.

    :param level: symbolic level, resolved through ``logging_level``.
    :param output: either syslog or stream output (see ``config``).
    :param _format: JSON (via json_logging) or plain text.
    :param format_string: ``logging.Formatter`` pattern for text mode.
    :param syslog_address: address passed to ``SysLogHandler``.
    :param syslog_facility: symbolic facility, resolved via ``logging_facility``.
    """
    logger.setLevel(logging_level(level))
    if output == config.LOGGING_OUTPUT_SYSLOG:
        handler = logging.handlers.SysLogHandler(
            address=syslog_address,
            facility=logging_facility(syslog_facility))
    else:
        handler = logging.StreamHandler(sys.stdout)
    if _format == config.LOGGING_FORMAT_JSON:
        # JSON mode: json_logging installs its own formatter globally.
        json_logging.ENABLE_JSON_LOGGING = True
        json_logging.init_non_web()
    else:
        handler.setFormatter(logging.Formatter(format_string))
    logger.addHandler(handler)
Пример #5
0
    def __init__(self, name="logger", level="info"):
        """
        Create a new logger.

        :param name: The name of the logger (printed with each output).
        :param level: Minimum log level to output (see Logger.levels).
        :raises ValueError: if *level* is not a key of ``Logger.levels``.
        """
        if level not in Logger.levels:
            # ValueError is more precise than a bare Exception for a bad
            # argument, and remains caught by any `except Exception` caller.
            raise ValueError("invalid log level (see Logger.levels)")

        # Process-wide switch to JSON output with the custom formatter.
        jl.ENABLE_JSON_LOGGING = True
        jl.init_non_web(custom_formatter=Format)

        self.name = name
        # Underlying stdlib logger that actually emits records to stdout.
        self.lg = BaseLogger(name)
        self.lg.setLevel(Logger.levels[level])
        self.lg.addHandler(logging.StreamHandler(sys.stdout))
Пример #6
0
def get_logger():
    """
    Generic utility function to get logger object with fixed configurations
    :return:
    log object
    """
    # Single environment read; exit loudly when the variable is absent.
    log_file = os.environ.get("LOG_FILE")
    if log_file is None:
        print("LOG_FILE environment variable is not defined. This must be defined!!!")
        sys.exit(999)

    json_logging.init_non_web(enable_json=True)

    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    # Mirror every record to the file and to stdout.
    for handler in (logging.FileHandler(log_file),
                    logging.StreamHandler(sys.stdout)):
        log.addHandler(handler)

    return log
Пример #7
0
    def __create_logger(cls, config_file='logger_settings.yaml', level=logging.INFO):
        """Build the logger from a YAML config file, falling back to basicConfig.

        :param config_file: YAML settings file looked up under the CWD.
        :param level: level used by the fallback configuration.
        :return: a logger named after this module.
        """
        json_logging.init_non_web(enable_json=True)

        def _fallback():
            # Shared default configuration for every failure path.
            logging.basicConfig(level=level)
            coloredlogs.install(level=level)

        settings_path = Path.cwd().joinpath(config_file)
        if not settings_path.exists():
            _fallback()
            print('Failed to load configuration file. Using default configs')
        else:
            with open(str(settings_path), 'rt') as fh:
                try:
                    settings = yaml.safe_load(fh.read())
                    cls._create_logging_directories(cls._get_logging_paths(settings))
                    logging.config.dictConfig(settings)
                    coloredlogs.install()
                except Exception:
                    print('Error in Logging Configuration. Using default configs')
                    _fallback()

        return logging.getLogger(__name__)
Пример #8
0
def get_logger(verbose=False):
    """return a logger object
    :rtype: logging.Logger
    """
    json_logging.init_non_web(enable_json=True)
    # Verbose runs log everything; otherwise stay at INFO.
    log_level = logging.DEBUG if verbose else logging.INFO

    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)

    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(log_level)
    # File handler persists the same records locally.
    file_handler = logging.FileHandler("aws_whoami.log")
    file_handler.setLevel(log_level)

    logger.addHandler(stream_handler)
    logger.addHandler(file_handler)

    return logger
Пример #9
0
import sys, json, logging, json_logging, pickle, os

from binascii import hexlify, unhexlify
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend

# log is initialized without a web framework name
json_logging.ENABLE_JSON_LOGGING = True
json_logging.init_non_web()

# Module-level logger: DEBUG and above, JSON records on stdout.
log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
log.addHandler(logging.StreamHandler(sys.stdout))

# AES key material read once at import time from config.json.
# NOTE(review): import fails if config.json is missing or lacks the key.
with open("config.json") as f:
    shared_key = json.load(f)["encryptionkey"]

def encrypt(objects):
    """Pickle *objects* and return the hex-encoded AES-CTR ciphertext.

    SECURITY (review, do not ship as-is):
    - The CTR nonce is a fixed block of 16 zero bytes, so every call reuses
      the same keystream; XOR of two ciphertexts reveals the XOR of the
      plaintexts. CTR requires a fresh, unique nonce per message (e.g.
      os.urandom(16) prepended to the output) — fixing it here would change
      the ciphertext format that decrypt() expects.
    - pickle is used for serialization; decrypting/unpickling untrusted
      data allows arbitrary code execution.
    """
    blob = pickle.dumps(objects)
    cipher = Cipher(
        algorithms.AES(shared_key.encode()),
        modes.CTR(
            "\x00".encode() * 16),
        backend=default_backend())
    e = cipher.encryptor()

    return hexlify(e.update(blob) + e.finalize())


def decrypt(cryptpickle):
    # NOTE(review): this definition appears truncated by the snippet
    # boundary — only the hex-decode step is visible here.
    ct = unhexlify(cryptpickle)
Пример #10
0
def extra(**kw):
    '''Wrap keyword args in the nested extra/props layer json_logging expects.'''
    return dict(extra=dict(props=kw))


class CustomJSONLog(json_logging.JSONLogFormatter):
    """
    Customized logger
    """

    def format(self, record):
        # Emit a minimal JSON object: one fixed property plus the
        # fully-interpolated log message.
        payload = {
            "customized_prop": "customized value",
            "message": record.getMessage(),
        }
        return json.dumps(payload)


# You would normally import logger_init and setup the logger in your main module - e.g.
# main.py

# Install the custom JSON formatter for non-web (plain process) logging.
json_logging.init_non_web(custom_formatter=CustomJSONLog, enable_json=True)

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Records go to stderr, keeping stdout free for program output.
logger.addHandler(logging.StreamHandler(sys.stderr))

logger.info('sample log message')
Пример #11
0
from time import sleep
import asyncio, json, json_logging, logging
import os, signal, sys, argparse
import re, time

# My custom imports
from instaclustr.instaclustr import getInstaclustrMetrics, getInstaclustrTopics, \
    getInstaclustrConsumerGroups, getInstaclustrConsumerGroupTopics, \
    getInstaclustrConsumerGroupMetrics, getInstaclustrConsumerGroupClientMetrics
from instaclustr.helper import splitMetricsList
from localdatadog.datadog import shipToDataDog

# Logging setup
# APP_NAME doubles as the logger name; LOG_LEVEL must be a standard logging
# level name (e.g. DEBUG, INFO), defaulting to INFO. JSON records on stdout.
app_name = os.getenv('APP_NAME', 'instaclustr-monitor')
log_level = logging.getLevelName(os.getenv('LOG_LEVEL', 'INFO').upper())
json_logging.init_non_web(enable_json=True)
logger = logging.getLogger(app_name)
logger.setLevel(log_level)
logger.addHandler(logging.StreamHandler(sys.stdout))

# Environment variable setup
default_value = ''
ic_cluster_id = os.getenv('IC_CLUSTER_ID', default_value)
# Default metric list. NOTE(review): the backslash line continuations embed
# the leading indentation whitespace inside the string literal itself.
ic_metrics_list = os.getenv(
    'IC_METRICS_LIST',
    'k::slaConsumerRecordsProcessed,n::cpuutilization,n::diskUtilization,\
                            n::osLoad,k::kafkaBrokerState,k::slaProducerErrors,k::slaConsumerLatency,\
                            k::slaProducerLatencyMs,k::underReplicatedPartitions,k::activeControllerCount,\
                            k::offlinePartitions,k::leaderElectionRate,k::uncleanLeaderElections,\
                            k::leaderCount,k::isrExpandRate,k::isrShrinkRate')
## Each metric must be formatted as such 'kt::{0}::metric' as {0} will be replaced.
Пример #12
0
def init():
    # Enable process-wide JSON log output (legacy flag plus non-web init).
    # NOTE(review): the snippet following this function is garbled by the
    # scrape — an orphaned fragment of a formatter method follows it.
    json_logging.ENABLE_JSON_LOGGING = True
    json_logging.init_non_web()
            "python.module": record.module,
            "python.funcName": record.funcName,
            "python.filename": record.filename,
            "python.lineno": record.lineno,
            "python.thread": record.threadName,
            "python.pid": record.process
        }
        if hasattr(record, 'props'):
            json_log_object['data'].update(record.props)

        if record.exc_info or record.exc_text:
            json_log_object['data'].update(self.get_exc_fields(record))

        return json.dumps(json_log_object)


# You would normally import logger_init and setup the logger in your main module - e.g.
# main.py

# Install the custom JSON formatter for non-web (plain process) logging.
json_logging.init_non_web(custom_formatter=CustomJSONLog)

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Records go to stderr, keeping stdout free for program output.
logger.addHandler(logging.StreamHandler(sys.stderr))

logger.info('Starting')
# Deliberately raise ZeroDivisionError to demonstrate exception logging;
# logger.exception attaches the traceback to the JSON record.
try:
    1 / 0
except:  # noqa pylint: disable=bare-except
    logger.exception('You can\'t divide by zero')
Пример #14
0
def getJSONLogger(name):
    """Return a DEBUG-level logger named *name* emitting JSON to stdout."""
    json_logging.init_non_web(custom_formatter=CloudLoggingFormatter, enable_json=True)
    json_logger = logging.getLogger(name)
    json_logger.setLevel(logging.DEBUG)
    stdout_handler = logging.StreamHandler(sys.stdout)
    json_logger.addHandler(stdout_handler)
    return json_logger
Пример #15
0
def _get_logger():
    """Build this module's DEBUG-level logger with JSON output on stdout."""
    json_logging.init_non_web(enable_json=True)
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    log.addHandler(logging.StreamHandler(sys.stdout))
    return log