Example No. 1
def logging_setup(conf):
    logger = logging.getLogger()
    logger.propagate = False

    default = '%(asctime)s - %(process)d - %(levelname)s - %(session)s - %(module)s - %(message)s'
    log_format = conf.get('format', default)
    formatter = logging.Formatter(log_format)

    log_level = conf.get('level', 'INFO')
    logger.setLevel(log_level)

    logfile = conf.get('logfile')
    if logfile:
        logsize = conf.get('logsize', 512 * 1024)
        retain = conf.get('logretain', 5)
        handler = ConcurrentRotatingFileHandler(logfile, 'a', logsize, retain)
    else:
        handler = logging.StreamHandler()

    handler.setFormatter(formatter)
    handler.setLevel(log_level)
    logger.addHandler(handler)

    logger.addFilter(SessionFilter())
    handler.addFilter(SessionFilter())

    logging.getLogger('requests').setLevel('WARN')
    logging.getLogger('urllib3').setLevel('WARN')
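The snippet above depends on a SessionFilter class (not shown) to populate the custom %(session)s field used in the default format string. A minimal sketch of what such a filter might look like; the fallback value and the class body are assumptions, not the original implementation:

import logging

class SessionFilter(logging.Filter):
    """Hypothetical filter: make sure every record carries a `session`
    attribute so the '%(session)s' placeholder always resolves."""
    def filter(self, record):
        # Assumed default; a real implementation would read the current
        # session id from request- or thread-local state.
        if not hasattr(record, 'session'):
            record.session = '-'
        return True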
Example No. 2
    def __init__(self):
        # Create the logger object whose name comes from the config
        self.logger = logging.getLogger(do_config("log", "name"))
        # Set the logger's level
        self.logger.setLevel(do_config("log", "content_level"))
        # Only add handlers once so each record is not written twice
        if not self.logger.handlers:
            # Console output handler
            consle_handle = logging.StreamHandler()
            # File handler with size-based rotation
            file_handle = ConcurrentRotatingFileHandler(
                filename=os.path.join(logDir, do_config("log", "log_name")),
                mode="a",
                maxBytes=do_config("log", "Maxbytes"),
                backupCount=do_config("log", "count"),
                encoding=do_config("log", "encoding"))
            # Handler output levels
            consle_handle.setLevel(do_config("log", "content_level"))
            # The file handler only records ERROR and above
            file_handle.setLevel('ERROR')

            # Log message format
            consle_format = logging.Formatter(do_config("log", "clear"))
            file_format = logging.Formatter(do_config("log", "clear"))

            consle_handle.setFormatter(consle_format)
            file_handle.setFormatter(file_format)
            self.logger.addHandler(consle_handle)
            self.logger.addHandler(file_handle)
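This example (like Examples 7 and 9 below) calls a project-specific do_config helper that is not shown. A minimal sketch of such a helper, assuming an INI-style file named config.ini; numeric options such as Maxbytes would still need an explicit int() conversion before being passed to the handler:

import configparser

_parser = configparser.ConfigParser()
_parser.read("config.ini", encoding="utf-8")  # assumed file name and location

def do_config(section, option):
    """Return the raw string value of `option` from `section` of the assumed config.ini."""
    return _parser.get(section, option)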
Example No. 3
 def _add_handler(self):
     """
     Add output stream for log, including console output and file output
     :return: None
     """
     # add console handler
     console_handler = logging.StreamHandler()
     console_handler.setLevel(CONFIG.LOGGER_LEVEL)
     console_handler.setFormatter(self._log_format)
     self._logger.addHandler(console_handler)
     # create log path
     if not os.path.exists(CONFIG.LOGGER_PATH):
         os.mkdir(CONFIG.LOGGER_PATH, mode=0o755)  # octal mode; 0x644 was a hexadecimal literal, and directories need the execute bit
     log_file = os.path.join(CONFIG.LOGGER_PATH, CONFIG.LOGGER_FILE_NAME)
     # add file handler
     file_handler = ConcurrentRotatingFileHandler(
         filename=log_file,
         mode='a',
         maxBytes=CONFIG.LOGGER_BUFFER,
         backupCount=CONFIG.LOGGER_FILE_COUNT,
         encoding="utf-8",
         use_gzip=True)
     file_handler.setLevel(CONFIG.LOGGER_LEVEL)
     file_handler.setFormatter(self._log_format)
     self._logger.addHandler(file_handler)
Example No. 4
 def __add_file_handler(self):
     """
     Write log records to a log file
     """
     if not os.path.exists(self._log_path):
         os.makedirs(self._log_path)
     log_file = os.path.join(self._log_path, self._log_filename)
     rotate_file_handler = None
     if os_name == 'nt':
         # Windows: ConcurrentRotatingFileHandler is process-safe here as well
         rotate_file_handler = ConcurrentRotatingFileHandler(
             log_file,
             maxBytes=self._log_file_size * 1024 * 1024,
             backupCount=3,
             encoding="utf-8")
     if os_name == 'posix':
         # Linux (posix): ConcurrentRotatingFileHandler provides process-safe rotation
         rotate_file_handler = ConcurrentRotatingFileHandler(
             log_file,
             maxBytes=self._log_file_size * 1024 * 1024,
             backupCount=3,
             encoding="utf-8")
     rotate_file_handler.setLevel(self._logger_level)
     rotate_file_handler.setFormatter(self._formatter)
     self.logger.addHandler(rotate_file_handler)
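Because ConcurrentRotatingFileHandler is process-safe on Windows and POSIX alike, the two identical branches above could be collapsed into a single call; a minimal sketch of that drop-in replacement for the branch bodies:

     # Process-safe on both platforms, so no os_name branching is needed.
     rotate_file_handler = ConcurrentRotatingFileHandler(
         log_file,
         maxBytes=self._log_file_size * 1024 * 1024,
         backupCount=3,
         encoding="utf-8")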
Example No. 5
    def make_hander(self, log_name):
        print('make new debug handler:', log_name)
        logger = logging.getLogger(log_name)
        logger.setLevel(logging.DEBUG)

        # File handler that writes to the log file
        log_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "../loginfo"))
        name = log_path + '/EveryCoinHelps.log'
        if not os.path.isdir(log_path):
            os.makedirs(log_path)

        fh = ConcurrentRotatingFileHandler(name,
                                           "a",
                                           maxBytes=64 * 1024 * 1024,
                                           backupCount=30,
                                           encoding='utf8')
        fh.setLevel(logging.INFO)

        # A second handler for console output
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)  # change to INFO when running on a server

        # Output format shared by both handlers
        formatter = logging.Formatter(
            '%(asctime)-8s %(filename)-8s %(levelname)-8s %(name)-12s [line:%(lineno)d]  %(message)s'
        )
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # Attach the handlers to the logger
        logger.addHandler(fh)
        logger.addHandler(ch)
        return logger
Example No. 6
def init_logger(app):
    """
    Initialise logging (size-based rotation)

    :param app: Flask
    :return: None
    """
    log_maxsize = app.config.get('LOG_MAXSIZE', 100)
    log_backup = app.config.get('LOG_BACKUP', 20)
    log_level = app.config.get('LOG_LEVEL', logging.INFO)
    app_log = app.config.get('LOG_FILE')
    if not app_log:
        app_log = os.path.join(os.path.dirname(app.root_path), 'logs',
                               'app.log')
    fh = ConcurrentRotatingFileHandler(app_log,
                                       maxBytes=log_maxsize * 1024 * 1024,
                                       backupCount=log_backup,
                                       encoding='utf-8',
                                       use_gzip=True)
    fh.setLevel(log_level)
    fh.setFormatter(
        logging.Formatter(
            app.config.get(
                'LOG_FORMAT',
                '%(asctime)s %(levelname)s %(module)s.%(funcName)s: %(message)s'
            )))
    app.logger.addHandler(fh)
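A possible way to wire this into a Flask application; the configuration keys are the ones read above, while the values and the log path are illustrative:

import logging
from flask import Flask

app = Flask(__name__)
app.config['LOG_FILE'] = '/tmp/app.log'   # illustrative path
app.config['LOG_MAXSIZE'] = 50            # rotate after 50 MB
app.config['LOG_BACKUP'] = 10             # keep 10 gzipped backups
app.config['LOG_LEVEL'] = logging.DEBUG
init_logger(app)
app.logger.setLevel(logging.DEBUG)        # init_logger only sets the handler level
app.logger.info("rotating log configured")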
Example No. 7
    def __init__(self):
        self.case_logger = logging.getLogger(do_config('log', 'logger_name'))

        self.case_logger.setLevel(do_config('log', 'logger_level'))

        console_output = logging.StreamHandler()
        # file_output = RotatingFileHandler(filename=os.path.join(LOG_DIR, do_config('log', 'logger_name')),
        #                                   maxBytes=do_config('log', 'maxBytes'),
        #                                   backupCount=do_config('log', 'backupCount'),
        #                                   encoding='utf8')

        file_output = ConcurrentRotatingFileHandler(
            filename=os.path.join(LOG_DIR, do_config('log', 'logger_name')),
            maxBytes=do_config('log', 'maxBytes'),
            backupCount=do_config('log', 'backupCount'),
            encoding='utf8')

        console_output.setLevel(do_config('log', 'console_level'))
        file_output.setLevel(do_config('log', 'file_level'))

        simple_formatter = logging.Formatter(
            do_config('log', 'simple_formatter'))
        verbose_formatter = logging.Formatter(
            do_config('log', 'verbose_formatter'))

        console_output.setFormatter(simple_formatter)
        file_output.setFormatter(verbose_formatter)

        self.case_logger.addHandler(console_output)
        self.case_logger.addHandler(file_output)
Example No. 8
def get_logger(name="main",
               log_file=None,
               log_level=logging.INFO,
               maxBytes=10 * 1024 * 1024,
               backupCount=5):
    logger = logging.getLogger(name)
    if name in logger_initialized:
        return logger
    for logger_name in logger_initialized:
        if name.startswith(logger_name):
            return logger

    formatter = logging.Formatter(
        '[%(asctime)s] %(name)s %(levelname)s: %(message)s',
        datefmt="%Y/%m/%d %H:%M:%S")

    #stream_handler = logging.StreamHandler(stream=sys.stdout)
    #stream_handler.setFormatter(formatter)
    #logger.addHandler(stream_handler)
    if log_file is not None:
        log_file_folder = os.path.split(log_file)
        os.makedirs(log_file_folder[0], exist_ok=True)
        #file_handler = logging.FileHandler(log_file, 'a')
        #file_handler=TimedRotatingFileHandler(filename=log_file,when=when,backupCount=3,interval=interval)
        file_handler = ConcurrentRotatingFileHandler(log_file,
                                                     mode='a',
                                                     maxBytes=maxBytes,
                                                     backupCount=backupCount)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    logger.setLevel(log_level)
    logger_initialized[name] = True
    return logger
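A brief usage sketch, assuming the module-level logger_initialized = {} registry this function expects; the second call hits the startswith check and returns a child logger without attaching duplicate handlers:

logger_initialized = {}   # registry assumed by get_logger

main_log = get_logger("train", log_file="output/train.log")
child_log = get_logger("train.metrics")   # no new handlers; records propagate to "train"
main_log.info("training started")
child_log.info("epoch 1 done")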
Example No. 9
 def __init__(self, isconsole=True):
     # 1. Name the logger
     self.logger = logging.getLogger(do_config('log', 'logger_name'))
     # 2. Set the logger's level
     self.logger.setLevel(logging.DEBUG)
     # Path of the log file
     file_log_dir = os.path.join(LOG_DIR, do_config("log", "log_file_name"))
     # RotatingFileHandler raises PermissionError: [WinError 32]
     # (the log file is being used by another process) when several processes log at once.
     # Workaround 1: instantiate a separate logger object in every module.
     # Workaround 2: install the third-party module: pip install concurrent-log-handler
     # file_handle = RotatingFileHandler(file_log_dir,
     #                                   maxBytes=do_config('log', 'maxBytes'),
     #                                   backupCount=do_config('log', 'backupCount'), encoding='utf8')
     # 3. File handler with size-based rotation
     file_handle = ConcurrentRotatingFileHandler(
         file_log_dir,
         maxBytes=do_config('log', 'maxBytes'),
         backupCount=do_config('log', 'backupCount'),
         encoding='utf8')
     # 4. Set the file handler's level
     file_handle.setLevel(do_config('log', 'file_handle_level'))
     # 5. Formatter
     formatter = logging.Formatter(do_config('log', 'formatter'))
     # 6. Attach the formatter to the file handler
     file_handle.setFormatter(formatter)
     # 7. Attach the handler to the logger
     self.logger.addHandler(file_handle)
     if isinstance(isconsole, bool):
         if isconsole:
             # Console handler: level and format, then attach to the logger
             console_handle = logging.StreamHandler()
             console_handle.setLevel(do_config('log', 'console_handle_level'))
             console_handle.setFormatter(formatter)
             self.logger.addHandler(console_handle)
     else:
         raise ValueError("isconsole must be a boolean")
Example No. 10
 def __init__(self, logame, logfile):
     """
     Logger
     :param logame: name of the logger object
     :param logfile: path of the generated log file
     """
     self.logger = logging.getLogger(logame)
     # A time-based handler would split the log file once a day.
     # backupCount is the number of rotated files to keep; the default 0 never deletes
     # old logs, while e.g. 10 removes the oldest file once more than 10 exist.
     # file_handler = TimedRotatingFileHandler(logfile, when='D', interval=1, backupCount=30)
     # Size-based rotation: rotated files sit next to logfile, 10 KB each, keep only 5
     # file_handler = RotatingFileHandler(logfile, maxBytes=1024 * 10, backupCount=5)
     file_handler = ConcurrentRotatingFileHandler(logfile,
                                                  maxBytes=1024 * 1024 * 10,
                                                  backupCount=30)
     # file_handler = logging.FileHandler(logfile, mode='a')
     # A second handler for console output
     console_handler = logging.StreamHandler()
     # Output format shared by both handlers
     formatter = logging.Formatter(
         '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
         datefmt='%Y-%m-%d %H:%M:%S')
     file_handler.setFormatter(formatter)
     console_handler.setFormatter(formatter)
     # Attach the handlers
     self.logger.addHandler(file_handler)
     # self.logger.addHandler(console_handler)
     # Set the log level
     self.logger.setLevel(logging.INFO)
Example No. 11
def init_logger(app):
    # 128MB "multi-processable" log file(info)
    access = ConcurrentRotatingFileHandler(log_path + "/access.log",
                                           "a",
                                           128 * 1024 * 1024,
                                           7,
                                           encoding="utf-8")
    access.setLevel(logging.INFO)
    access.suffix = "%Y-%m-%d.log"  # suffix only applies to TimedRotatingFileHandler; it has no effect on size-based rotation
    access.setFormatter(
        logging.Formatter(
            '[%(asctime)s] %(levelname)s in %(funcName)s: %(message)s'))

    # 128MB "multi-processable" log file(error)
    error = ConcurrentRotatingFileHandler(log_path + "/error.log",
                                          "a",
                                          128 * 1024 * 1024,
                                          7,
                                          encoding="utf-8")
    error.setLevel(logging.ERROR)
    error.suffix = "%Y-%m-%d.log"  # suffix only applies to TimedRotatingFileHandler; it has no effect on size-based rotation
    error.setFormatter(
        logging.Formatter(
            '[%(asctime)s] %(levelname)s in %(funcName)s: %(message)s'))

    app.logger.addHandler(access)
    app.logger.addHandler(error)
    app.logger.setLevel(logging.INFO)
Example No. 12
def setup_logging(name_, level=None, proj_home=None, attach_stdout=False):
    """
    Sets up generic logging to file with rotating files on disk

    :param name_: the name of the logfile (not the destination!)
    :param level: the logging level: DEBUG, INFO, WARN
    :param proj_home: optional, starting dir in which we'll
            check for (and create) the 'logs' folder and place the
            logger there
    :return: logging instance
    """

    if level is None:
        config = load_config(extra_frames=1,
                             proj_home=proj_home,
                             app_name=name_)
        level = config.get('LOGGING_LEVEL', 'INFO')

    level = getattr(logging, level)

    # formatter = logging.Formatter(fmt=logfmt, datefmt=datefmt)
    # formatter = MultilineMessagesFormatter(fmt=logfmt, datefmt=datefmt)
    formatter = get_json_formatter()

    formatter.multiline_marker = ''
    formatter.multiline_fmt = '     %(message)s'

    formatter.converter = time.gmtime
    logging_instance = logging.getLogger(name_)

    if proj_home:
        proj_home = os.path.abspath(proj_home)
        fn_path = os.path.join(proj_home, 'logs')
    else:
        fn_path = os.path.join(_get_proj_home(), 'logs')

    if not os.path.exists(fn_path):
        os.makedirs(fn_path)

    fn = os.path.join(fn_path, '{0}.log'.format(name_.split('.log')[0]))
    rfh = ConcurrentRotatingFileHandler(filename=fn,
                                        maxBytes=10485760,
                                        backupCount=10,
                                        mode='a',
                                        encoding='UTF-8')  # 10MB file
    rfh.setFormatter(formatter)
    logging_instance.handlers = []
    logging_instance.addHandler(rfh)
    logging_instance.setLevel(level)

    if attach_stdout:
        stdout = logging.StreamHandler(sys.stdout)
        stdout.formatter = get_json_formatter()
        logging_instance.addHandler(stdout)

    # Do not propagate to the parent logger to avoid double logging with different formatters
    logging_instance.propagate = False

    return logging_instance
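A typical call, assuming the surrounding module provides load_config, get_json_formatter and _get_proj_home as referenced above; the service name and level here are illustrative:

logger = setup_logging('my_service', level='DEBUG', attach_stdout=True)
logger.info('JSON-formatted logs rotate at 10 MB in logs/my_service.log')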
Example No. 13
def init_log():
    global logger
    log_handler = ConcurrentRotatingFileHandler('attack.log', maxBytes=10000, backupCount=3)
    log_format = logging.Formatter('%(asctime)s %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
    log_handler.setFormatter(log_format)
    logger = logging.getLogger("Attack_log")
    logger.setLevel(logging.INFO)
    logger.addHandler(log_handler)
Example No. 14
def init_logging(logFilePath="test.log", level=logging.DEBUG):
    logging.basicConfig(level=level)
    # Create a rotating file handler at the requested level and attach it to the root logger
    Rthandler = LogHandler(logFilePath, maxBytes=10 * 1024 * 1024, backupCount=5)
    Rthandler.setLevel(level)
    formatter = logging.Formatter('%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s')
    Rthandler.setFormatter(formatter)
    logging.root.addHandler(Rthandler)
Example No. 15
def set_log(app, level):
    Path.cwd().joinpath("logs").mkdir(parents=True, exist_ok=True)  # "logs" under the working directory, matching the relative path below
    file_log_handler = ConcurrentRotatingFileHandler("logs/log",
                                                     maxBytes=5 * 1024 * 1024,
                                                     backupCount=5,
                                                     encoding="UTF-8")
    formatter = logging.Formatter(
        "[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s]%(message)s")
    file_log_handler.setFormatter(formatter)
    file_log_handler.setLevel(level)
    app.logger.addHandler(file_log_handler)
Example No. 16
def initialize_logging(service_name: str, logging_config: Dict,
                       root_path: Path):
    log_path = path_from_root(
        root_path, logging_config.get("log_filename", "log/debug.log"))
    log_date_format = "%Y-%m-%dT%H:%M:%S"

    mkdir(str(log_path.parent))
    file_name_length = 33 - len(service_name)
    if logging_config["log_stdout"]:
        handler = colorlog.StreamHandler()
        handler.setFormatter(
            colorlog.ColoredFormatter(
                f"%(asctime)s.%(msecs)03d {service_name} %(name)-{file_name_length}s: "
                f"%(log_color)s%(levelname)-8s%(reset)s %(message)s",
                datefmt=log_date_format,
                reset=True,
            ))

        logger = colorlog.getLogger()
        logger.addHandler(handler)
    else:
        logger = logging.getLogger()
        maxrotation = logging_config.get("log_maxfilesrotation", 7)
        handler = ConcurrentRotatingFileHandler(log_path,
                                                "a",
                                                maxBytes=20 * 1024 * 1024,
                                                backupCount=maxrotation)
        handler.setFormatter(
            logging.Formatter(
                fmt=
                f"%(asctime)s.%(msecs)03d {service_name} %(name)-{file_name_length}s: %(levelname)-8s %(message)s",
                datefmt=log_date_format,
            ))
        logger.addHandler(handler)

    if "log_level" in logging_config:
        if logging_config["log_level"] == "CRITICAL":
            logger.setLevel(logging.CRITICAL)
        elif logging_config["log_level"] == "ERROR":
            logger.setLevel(logging.ERROR)
        elif logging_config["log_level"] == "WARNING":
            logger.setLevel(logging.WARNING)
        elif logging_config["log_level"] == "INFO":
            logger.setLevel(logging.INFO)
        elif logging_config["log_level"] == "DEBUG":
            logger.setLevel(logging.DEBUG)
            logging.getLogger("aiosqlite").setLevel(
                logging.INFO)  # Too much logging on debug level
            logging.getLogger("websockets").setLevel(
                logging.INFO)  # Too much logging on debug level
        else:
            logger.setLevel(logging.INFO)
    else:
        logger.setLevel(logging.INFO)
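The level-selection chain above can also be expressed with getattr, since logging exposes CRITICAL, ERROR, WARNING, INFO and DEBUG as module attributes; a roughly equivalent sketch of a drop-in for that block (unknown strings fall back to INFO, as in the original):

    level_name = logging_config.get("log_level", "INFO")
    logger.setLevel(getattr(logging, level_name, logging.INFO))
    if level_name == "DEBUG":
        # aiosqlite and websockets are too chatty at DEBUG
        logging.getLogger("aiosqlite").setLevel(logging.INFO)
        logging.getLogger("websockets").setLevel(logging.INFO)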
Example No. 17
def setup_config(loglevel):
    if not os.path.exists('logs'):
        os.makedirs('logs')
    # handler = RotatingFileHandler('logs/flask.log',mode='a',maxBytes=1024*1024,backupCount=10,)
    handler = ConcurrentRotatingFileHandler('logs/flask.log', mode='a', maxBytes=1024 * 1024, backupCount=5)
    handler.setLevel(loglevel)

    logging_format = logging.Formatter(
        '%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)d - %(message)s')

    handler.setFormatter(logging_format)
    return handler
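setup_config returns the handler rather than attaching it, leaving the choice of logger to the caller; for example, with a Flask application (the app shown here is illustrative):

import logging
from flask import Flask

app = Flask(__name__)
handler = setup_config(logging.INFO)
app.logger.addHandler(handler)    # or: logging.getLogger().addHandler(handler)
app.logger.setLevel(logging.INFO)
app.logger.info("requests are now logged to logs/flask.log")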
Example No. 18
def setup_logger(logger_name, fn=None, level="debug"):
    """
    Create a logger, can write to a separate file.  This will write to
    the logs folder in the mt_metadata directory.

    :param logger_name: name of the logger, typically __name__
    :type logger_name: string
    :param fn: file name to write to, defaults to None
    :type fn: string, optional
    :param level: logging level name ("debug", "info", ...), defaults to "debug"
    :type level: string, optional
    :return: the configured logger
    :rtype: logging.Logger

    """

    logger = logging.getLogger(logger_name)

    # if there is a file name create file in logs directory
    if fn is not None:
        # need to clear the handlers to make sure there is only
        # one call per logger plus stdout
        if logger.hasHandlers():
            logger.handlers.clear()

        logger.propagate = False
        # want to add a stream handler for any Info print statements as stdOut
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(LOG_FORMAT)
        stream_handler.setLevel(LEVEL_DICT["info"])
        logger.addHandler(stream_handler)

        fn = LOG_PATH.joinpath(fn)
        exists = fn.exists()

        if fn.suffix not in [".log"]:
            fn = Path(fn.parent, f"{fn.stem}.log")

        # fn_handler = logging.FileHandler(fn)
        fn_handler = ConcurrentRotatingFileHandler(fn,
                                                   maxBytes=2**21,
                                                   backupCount=2)
        fn_handler.setFormatter(LOG_FORMAT)
        fn_handler.setLevel(LEVEL_DICT[level.lower()])
        logger.addHandler(fn_handler)
        if not exists:
            logger.info(
                f"Logging file can be found {logger.handlers[-1].baseFilename}"
            )

    return logger
Example No. 19
def initLogging(file):

    logger = logging.getLogger(file)
    logger.setLevel(logging.DEBUG)

    rht = ConcurrentRotatingFileHandler(file, 'a', encoding='utf-8')  # maxBytes defaults to 0, so the file never rotates
    fmt = logging.Formatter(
        "%(asctime)s - %(pathname)s - %(funcName)s - %(lineno)s - %(levelname)s : %(message)s",
        "%Y-%m-%d %H:%M:%S")
    rht.setFormatter(fmt)
    logger.addHandler(rht)
    return logger
Example No. 20
def setup_logger(log_path, is_debug, logger):
    """
    """
    try:
        from concurrent_log_handler import ConcurrentRotatingFileHandler as HandlerClass
    except ImportError:
        from logging.handlers import RotatingFileHandler as HandlerClass
    handler = HandlerClass(log_path.format(pid=os.getpid()), maxBytes=8000000, backupCount=10)
    handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s [%(name)s] %(levelname)s: %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG if is_debug else logging.INFO)
Example No. 21
def get_logger(name: str = "general") -> logging.Logger:
    # ensure log directory exists. if not, create it
    if not os.path.exists("./data/logs"):
        os.mkdir("./data/logs")
    log = logging.getLogger(name)
    log.setLevel(logging.INFO)
    h = ConcurrentRotatingFileHandler(os.path.abspath("data/logs/nparse.log"),
                                      "a", 512 * 1000, 3)
    f = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    h.setFormatter(f)
    log.addHandler(h)
    return log
Example No. 22
 def get_logger(self):
     """在logger中添加日志句柄并返回,如果logger已有句柄,则直接返回"""
     if not self.logger.handlers:
         console_handler = logging.StreamHandler()
         console_handler.setFormatter(self.formatter)
         console_handler.setLevel(self.console_output_level)
         self.logger.addHandler(console_handler)
         file_handler = ConcurrentRotatingFileHandler(filename=self.log_file_name, maxBytes=self.maxBytes,
                                                      backupCount=self.backup_count, encoding="utf-8")
         file_handler.setFormatter(self.formatter)
         file_handler.setLevel(self.file_output_level)
         self.logger.addHandler(file_handler)
     return self.logger
Example No. 23
    def init_config(self, name=None):
        """
        initiaize config of each log level

        :param name:        prefix name of log files

        For example:
            if name = 'web', the log files will look like 'z_web_xxx.log', xxx are names of each level

        Please notice that i've put a "z" if no name passed into Log, for making log files listed at the end of
        all the codes in project.
        """

        logging.Formatter.converter = self.opti_time
        base_format = logging.Formatter(
            '【%(levelname)s】 %(asctime)s [%(process)d] \n%(message)s',
            datefmt='%Y-%m-%d %H:%M:%S')

        # logging.Formatter.converter = customTime

        if name not in self.logs:

            # create logger
            logger = logging.getLogger(str(self.log_mapping[name]))
            logger.setLevel(self.log_mapping[name])

            # create handler
            log_path = self.log_root + '/' + self.public_name + '_' + name + '.log'
            base_handler = RotatingFileHandler(
                log_path,
                maxBytes=self.log_config[name]['maxBytes'] * 1024 * 1024,
                backupCount=self.log_config[name]['backupCount'])

            # define output format
            base_handler.setFormatter(base_format)
            base_handler.setLevel(self.log_mapping[name])

            # add handler
            logger.addHandler(base_handler)

            # critical level add console handler
            if name == 'critical':
                console_handler = logging.StreamHandler()
                console_handler.setLevel(self.log_mapping[name])
                console_format = logging.Formatter(
                    '【%(levelname)s】 %(asctime)s [%(process)d] \n%(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')
                console_handler.setFormatter(console_format)
                logger.addHandler(console_handler)
            self.logs.update({name: logger})
Example No. 24
    def set_cur_logger(self, name=None, level=None, save_path='logs/'):
        """
        :param name: logger name in logging's loggerDict
        :param level: if level > DEBUG and save_path is set, logs are written to two files: one at the given level and one full log at DEBUG.
        :param save_path: folder path to save log. If set to None, logs will not be saved.
        :return:
        """
        if name is None:
            name = self.name
        if level is None:
            level = self._level
        # noinspection PyUnresolvedReferences
        if name in logging.Logger.manager.loggerDict:
            _logger = logging.getLogger(name)
        else:
            _logger = logging.getLogger(name)
            _logger.setLevel(logging.DEBUG)

            console = logging.StreamHandler()
            console.setFormatter(COLOR_FORMATTER)
            console.setLevel(logging.DEBUG)
            _logger.addHandler(console)

            if save_path is not None:
                if not os.path.exists(save_path):
                    os.makedirs(save_path)
                fp = os.path.join(save_path, f'{name}.log')
                fh = ConcurrentRotatingFileHandler(os.path.abspath(fp),
                                                   encoding='utf8',
                                                   maxBytes=1024 * 1024,
                                                   backupCount=3)
                # fh = RotatingFileHandler(fp, encoding='utf8', maxBytes=1024 * 1024, backupCount=3)
                fh.setFormatter(LOG_FORMATTER)
                fh.setLevel(level)
                _logger.addHandler(fh)
                self._log_filepath = fp

                if level > logging.DEBUG:
                    fp = os.path.join(save_path, f'{name}.full.log')
                    full_fh = ConcurrentRotatingFileHandler(
                        os.path.abspath(fp),
                        encoding='utf8',
                        maxBytes=1024 * 1024,
                        backupCount=3)
                    full_fh.setFormatter(LOG_FORMATTER)
                    full_fh.setLevel(logging.DEBUG)
                    _logger.addHandler(full_fh)
                    # store the path of the full log when it exists
                    self._log_filepath = fp
        self._logger = _logger
Example No. 25
    def set_log(self, log_name):
        logger = logging.getLogger("{}".format(log_name))
        logger.setLevel(level=logging.DEBUG)
        log_handler = ConcurrentRotatingFileHandler('{}'.format(self.log_file),
                                                    'a',
                                                    maxBytes=0,  # 0 disables size-based rotation; the file never rolls over
                                                    backupCount=10)
        log_handler.setLevel(logging.DEBUG)
        log_format = logging.Formatter(
            '%(asctime)s.%(msecs)03d %(name)s %(process)d %(levelname)s %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S')
        log_handler.setFormatter(log_format)
        logger.addHandler(log_handler)

        return logger
Example No. 26
def init_log_system():
    root = logging.getLogger()
    handler = logging.StreamHandler(sys.stdout)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    root.addHandler(handler)

    path = Path('log/camera.log').absolute()
    path.parent.mkdir(exist_ok=True)

    # Rotate log after reaching 512K, keep 5 old copies.
    rotate_handler = ConcurrentRotatingFileHandler(str(path), "a", 512 * 1024, 5)
    rotate_handler.setFormatter(formatter)
    root.addHandler(rotate_handler)
    root.setLevel(logging.INFO)
    root.info("Logging system initialized, kept in file {}...".format(str(path)))
Example No. 27
def setup_log(environment):
    """Configure logging according to the environment."""
    # Set the logging level (e.g. DEBUG while developing)
    logging.basicConfig(level=config[environment].LOG_LEVEL)
    # File handler: log path, maximum size per file, number of backup files to keep
    file_log_handler = ConcurrentRotatingFileHandler("logs/log",
                                                     maxBytes=1024 * 1024,
                                                     backupCount=10)
    # Record format: time - file name[line] - level: message
    formatter = logging.Formatter(
        '%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s'
    )
    # Apply the format to the handler
    file_log_handler.setFormatter(formatter)
    # Attach the handler to the root logger
    logging.getLogger().addHandler(file_log_handler)
Example No. 28
    def __initialize(self):
        if config.level == "debug":
            level = logging.DEBUG
        elif config.level == "info":
            level = logging.INFO
        elif config.level == "warning":
            level = logging.WARNING
        elif config.level == "error":
            level = logging.ERROR
        elif config.level == "critical":
            level = logging.CRITICAL
        else:
            level = logging.DEBUG
        self.__logger.setLevel(level=level)
        formatter = logging.Formatter(
            fmt='[%(asctime)s] -> [%(levelname)s] : %(message)s')
        # File output, split by time (midnight rollover)
        time_rotating_file_handler = handlers.TimedRotatingFileHandler(
            filename=self.__path,
            when='MIDNIGHT',
            interval=1,
            backupCount=1000)
        time_rotating_file_handler.setFormatter(formatter)
        time_rotating_file_handler.suffix = "%Y%m%d-%H%M%S.log"

        # Console output (colored)
        console_formatter = colorlog.ColoredFormatter(
            fmt='%(log_color)s[%(asctime)s] -> [%(levelname)s] : %(message)s',
            datefmt='%Y-%m-%d  %H:%M:%S',
            log_colors=log_colors_config)
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(console_formatter)

        # File output, split by size
        rotatingHandler = ConcurrentRotatingFileHandler(
            self.__path, "a", 1024 * 1024, 1000)  # append mode, rotate at 1 MB, keep the most recent 1000 files
        rotatingHandler.setFormatter(formatter)

        if config.handler == "time":
            if not self.__logger.handlers:
                self.__logger.addHandler(time_rotating_file_handler)
        elif config.handler == "file":
            if not self.__logger.handlers:
                self.__logger.addHandler(rotatingHandler)
        else:
            if not self.__logger.handlers:
                self.__logger.addHandler(stream_handler)
Example No. 29
def init_logger(config):
    """
    Initialise the log handlers (file output).
    Requires the multiprocess-safe ConcurrentRotatingFileHandler.
    When the level is WARNING or above, messages are both printed to the screen and written to the file.
    :param config: configuration parameters
    :return: (logger, logfile)
    """
    # console_handler = logging.StreamHandler(sys.__stdout__)
    console_handler = logging.StreamHandler(sys.__stderr__)
    console_handler.level = logging.ERROR
    # console_handler.level = logging.DEBUG
    console_logger = logging.getLogger('obscmd')
    console_logger.addHandler(console_handler)

    format = '%(asctime)s - %(levelname)s - %(process)d - %(thread)d - %(filename)s[line:%(lineno)d] - %(message)s'
    logfile = make_log_filename(config)
    handler = ConcurrentRotatingFileHandler(
        logfile,
        mode='a',
        maxBytes=unitstr_to_bytes(config.log.maxbytes),
        backupCount=int(config.log.backupcount),
        encoding=None,
        delay=0)
    handler.setFormatter(logging.Formatter(format))
    logger = logging.getLogger("obscmd.file")
    logger.propagate = False
    logger.addHandler(handler)
    # logger.setLevel(logging.DEBUG)

    format = '%(message)s'
    handler1 = ConcurrentRotatingFileHandler(
        logfile,
        mode='a',
        maxBytes=unitstr_to_bytes(config.log.maxbytes),
        backupCount=int(config.log.backupcount),
        encoding=None,
        delay=0)
    handler1.setFormatter(logging.Formatter(format))
    logger1 = logging.getLogger("print")
    logger1.propagate = True
    logger1.addHandler(handler1)

    logger.setLevel(LOG_LEVEL[config.log.level])
    return logger, logfile
Example No. 30
    def __init__(self,
                 file_dir,
                 open_file_log=1,
                 file_log_level="DEBUG",
                 open_stream_log=1,
                 stream_log_level=1,
                 simple_mode=True):
        super(MyLogger, self).__init__(self.__class__.__name__)  # Logger.__init__ expects a name string, not the instance
        self.level_list = ["DEBUG", "INFO", "WARN", "ERROR"]
        self.sep = "-"
        self.max_bytes = 1024 * 1024 * 30
        self.max_count = 10
        self.simple_mode = simple_mode
        self.open_file_log = open_file_log
        self.open_stream_log = open_stream_log
        self.file_log_level = file_log_level if file_log_level in self.level_list else "DEBUG"
        self.stream_log_level = stream_log_level if stream_log_level in self.level_list else "DEBUG"
        os.makedirs(file_dir, exist_ok=True)
        self.file_dir = file_dir
        format_str = self.get_simple_format(
        ) if simple_mode else self.get_detail_format()
        formatter = logging.Formatter(format_str)
        if self.open_file_log:
            debug_handler = ConcurrentRotatingFileHandler(
                os.path.join(self.file_dir, "run.log"),
                maxBytes=self.max_bytes,
                backupCount=self.max_count,
                encoding="utf8")
            debug_handler.setLevel(self.file_log_level)
            debug_handler.setFormatter(formatter)
            self.addHandler(debug_handler)

            error_handler = ConcurrentRotatingFileHandler(
                os.path.join(self.file_dir, "error.log"),
                maxBytes=self.max_bytes,
                backupCount=self.max_count,
                encoding="utf8")
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(formatter)
            self.addHandler(error_handler)
        if self.open_stream_log:
            ch = logging.StreamHandler(sys.stdout)
            ch.setLevel(self.stream_log_level)
            ch.setFormatter(formatter)
            self.addHandler(ch)
Example No. 31
 def setup_logger(conf):
     """
     Sets up file-based rotating logger. All the parameters are extracted
     from conf argument:
     path: /kontext/global/log_path
     maximum file size (optional, default is 8MB): /kontext/global/log_file_size
     number of backed-up files (optional, default is 10): /kontext/global/log_num_files
     """
     try:
         from concurrent_log_handler import ConcurrentRotatingFileHandler as HandlerClass
     except ImportError:
         from logging.handlers import RotatingFileHandler as HandlerClass
     handler = HandlerClass(conf.get('logging', 'path').format(pid=os.getpid()),
                            maxBytes=conf.get_int(
                                'logging', 'file_size', 8000000),
                            backupCount=conf.get_int('logging', 'num_files', 10))
     handler.setFormatter(logging.Formatter(
         fmt='%(asctime)s [%(name)s] %(levelname)s: %(message)s'))
     logger.addHandler(handler)
     logger.setLevel(logging.INFO if not settings.is_debug_mode() else logging.DEBUG)