Ejemplo n.º 1
0
    def __init__(self):
        """Build the case logger: a console handler plus a multiprocess-safe
        size-rotating file handler, all parameters read from the 'log' config."""
        # Logger named from the config file.
        self.logger = logging.getLogger(do_config("log", "name"))
        # Collector-level threshold.
        self.logger.setLevel(do_config("log", "content_level"))
        # Guard: only attach handlers once so records are not duplicated.
        if not self.logger.handlers:
            console_handle = logging.StreamHandler()
            file_handle = ConcurrentRotatingFileHandler(
                filename=os.path.join(logDir, do_config("log", "log_name")),
                mode="a",
                maxBytes=do_config("log", "Maxbytes"),
                backupCount=do_config("log", "count"),
                encoding=do_config("log", "encoding"))
            console_handle.setLevel(do_config("log", "content_level"))
            # BUG FIX: the original called file_handle.setLevel twice; the
            # first, config-driven call was dead code because the hard-coded
            # 'ERROR' immediately overwrote it. Keep only the effective call.
            file_handle.setLevel('ERROR')

            # Same format string for both outputs.
            console_handle.setFormatter(logging.Formatter(do_config("log", "clear")))
            file_handle.setFormatter(logging.Formatter(do_config("log", "clear")))
            self.logger.addHandler(console_handle)
            self.logger.addHandler(file_handle)
Ejemplo n.º 2
0
def logging_setup(conf):
    """Configure the root logger from *conf*.

    Recognised keys: 'format', 'level', 'logfile', 'logsize', 'logretain'.
    With 'logfile' set, records go to a rotating file; otherwise to a stream.
    """
    root = logging.getLogger()
    root.propagate = False

    fmt = conf.get(
        'format',
        '%(asctime)s - %(process)d - %(levelname)s - %(session)s - %(module)s - %(message)s')
    level = conf.get('level', 'INFO')
    root.setLevel(level)

    logfile = conf.get('logfile')
    if logfile:
        handler = ConcurrentRotatingFileHandler(
            logfile, 'a', conf.get('logsize', 512 * 1024), conf.get('logretain', 5))
    else:
        handler = logging.StreamHandler()

    handler.setFormatter(logging.Formatter(fmt))
    handler.setLevel(level)
    root.addHandler(handler)

    # Supplies %(session)s on records; attached to logger and handler alike.
    root.addFilter(SessionFilter())
    handler.addFilter(SessionFilter())

    # Quieten chatty HTTP client libraries.
    logging.getLogger('requests').setLevel('WARN')
    logging.getLogger('urllib3').setLevel('WARN')
Ejemplo n.º 3
0
    def make_hander(self, log_name):
        """Return a logger named *log_name* with a rotating file handler
        (INFO+, ../loginfo/EveryCoinHelps.log) and a console handler (DEBUG+).

        Idempotent: handlers are attached at most once per logger name.
        """
        print('make new debug hander: ', log_name)
        logger = logging.getLogger(log_name)
        logger.setLevel(logging.DEBUG)

        # BUG FIX: guard against attaching duplicate handlers when this is
        # called more than once for the same name (records were emitted
        # once per extra call in the original).
        if logger.handlers:
            return logger

        # File handler target directory, created on demand.
        log_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "../loginfo"))
        name = log_path + '/EveryCoinHelps.log'
        if not os.path.isdir(log_path):
            os.makedirs(log_path)

        fh = ConcurrentRotatingFileHandler(name,
                                           "a",
                                           maxBytes=64 * 1024 * 1024,
                                           backupCount=30,
                                           encoding='utf8')
        fh.setLevel(logging.INFO)

        # Console handler; switch to INFO when running on a server.
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)

        # Shared record format for both handlers.
        formatter = logging.Formatter(
            '%(asctime)-8s %(filename)-8s %(levelname)-8s %(name)-12s [line:%(lineno)d]  %(message)s'
        )
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        logger.addHandler(fh)
        logger.addHandler(ch)
        return logger
Ejemplo n.º 4
0
 def __add_file_handler(self):
     """
     Attach a size-rotating file handler writing to
     self._log_path/self._log_filename at self._logger_level.
     """
     if not os.path.exists(self._log_path):
         os.makedirs(self._log_path)
     log_file = os.path.join(self._log_path, self._log_filename)
     # BUG FIX: the original built the *identical* handler in two separate
     # `if os_name == 'nt'` / `if os_name == 'posix'` branches and left
     # rotate_file_handler = None on any other platform, crashing on the
     # setLevel call below. ConcurrentRotatingFileHandler is process-safe on
     # both Windows and POSIX, so construct it unconditionally.
     rotate_file_handler = ConcurrentRotatingFileHandler(
         log_file,
         maxBytes=self._log_file_size * 1024 * 1024,
         backupCount=3,
         encoding="utf-8")
     rotate_file_handler.setLevel(self._logger_level)
     rotate_file_handler.setFormatter(self._formatter)
     self.logger.addHandler(rotate_file_handler)
Ejemplo n.º 5
0
def init_logger(app):
    """Attach two 128 MB multiprocess-safe rotating log files to app.logger:
    access.log (INFO and above) and error.log (ERROR and above)."""
    # One shared formatter instead of two identical copies.
    fmt = logging.Formatter(
        '[%(asctime)s] %(levelname)s in %(funcName)s: %(message)s')

    # 128MB "multi-processable" log file (info)
    access = ConcurrentRotatingFileHandler(log_path + "/access.log",
                                           "a",
                                           128 * 1024 * 1024,
                                           7,
                                           encoding="utf-8")
    access.setLevel(logging.INFO)
    access.setFormatter(fmt)

    # 128MB "multi-processable" log file (error)
    error = ConcurrentRotatingFileHandler(log_path + "/error.log",
                                          "a",
                                          128 * 1024 * 1024,
                                          7,
                                          encoding="utf-8")
    error.setLevel(logging.ERROR)
    error.setFormatter(fmt)

    # NOTE: removed the dead `handler.suffix = "%Y-%m-%d.log"` assignments —
    # that attribute is only honoured by TimedRotatingFileHandler and had no
    # effect on these size-based handlers.

    app.logger.addHandler(access)
    app.logger.addHandler(error)
    app.logger.setLevel(logging.INFO)
Ejemplo n.º 6
0
def init_logger(app):
    """
    Initialise size-based rolling logging for a Flask app.

    Reads LOG_MAXSIZE (MB), LOG_BACKUP, LOG_LEVEL, LOG_FILE and LOG_FORMAT
    from app.config, falling back to sensible defaults.

    :param app: Flask
    :return: None
    """
    max_mb = app.config.get('LOG_MAXSIZE', 100)
    backups = app.config.get('LOG_BACKUP', 20)
    level = app.config.get('LOG_LEVEL', logging.INFO)
    logfile = app.config.get('LOG_FILE')
    if not logfile:
        # Default: <project root>/logs/app.log
        logfile = os.path.join(os.path.dirname(app.root_path), 'logs',
                               'app.log')
    handler = ConcurrentRotatingFileHandler(logfile,
                                            maxBytes=max_mb * 1024 * 1024,
                                            backupCount=backups,
                                            encoding='utf-8',
                                            use_gzip=True)
    handler.setLevel(level)
    fmt = app.config.get(
        'LOG_FORMAT',
        '%(asctime)s %(levelname)s %(module)s.%(funcName)s: %(message)s')
    handler.setFormatter(logging.Formatter(fmt))
    app.logger.addHandler(handler)
Ejemplo n.º 7
0
 def __init__(self, isconsole=True):
     """Create a logger with a rotating file handler and, optionally, a
     console handler; all settings come from the 'log' config section.

     :param isconsole: must be a bool; when True a StreamHandler is attached too.
     :raises ValueError: if isconsole is not a bool.
     """
     self.logger = logging.getLogger(do_config('log',
                                               'logger_name'))  # 1. name the logger
     self.logger.setLevel(logging.DEBUG)  # 2. collector-level threshold
     file_log_dir = os.path.join(LOG_DIR,
                                 do_config("log",
                                           "log_file_name"))  # log file path
     # Known bug with the stdlib RotatingFileHandler under multi-process use:
     # PermissionError: [WinError 32] another process is using the log file.
     # Option 1: give every module its own logger instance.
     # Option 2: use the third-party module: pip install concurrent-log-handler
     # file_handle = RotatingFileHandler(file_log_dir,
     #                                   maxBytes=do_config('log', 'maxBytes'),
     #                                   backupCount=do_config('log', 'backupCount'), encoding='utf8')  # 3. rotating file handler
     file_handle = ConcurrentRotatingFileHandler(
         file_log_dir,
         maxBytes=do_config('log', 'maxBytes'),
         backupCount=do_config('log', 'backupCount'),
         encoding='utf8')  # 3. process-safe rotating file handler
     file_handle.setLevel(do_config(
         'log', 'file_handle_level'))  # 4. file-handler threshold
     formatter = logging.Formatter(do_config('log',
                                             'formatter'))  # 5. record format
     file_handle.setFormatter(formatter)  # 6. apply format to the file handler
     self.logger.addHandler(file_handle)  # 7. attach handler to the collector
     if isinstance(isconsole, bool):
         if isconsole:
             console_handle = logging.StreamHandler()  # console handler
             console_handle.setLevel(
                 do_config('log',
                           'console_handle_level'))  # console threshold
             console_handle.setFormatter(formatter)  # same format as the file
             self.logger.addHandler(console_handle)  # attach console handler
     else:
         raise ValueError("isconsole为布尔类型")
Ejemplo n.º 8
0
 def _add_handler(self):
     """
     Add output streams for the log: console output plus a gzip-compressing
     rotating file handler, both at CONFIG.LOGGER_LEVEL.
     :return: None
     """
     # add console handler
     console_handler = logging.StreamHandler()
     console_handler.setLevel(CONFIG.LOGGER_LEVEL)
     console_handler.setFormatter(self._log_format)
     self._logger.addHandler(console_handler)
     # create log path
     # BUG FIX: the original called os.mkdir(..., mode=0x644). 0x644 is a HEX
     # literal (decimal 1604), not octal permissions, and a directory without
     # the execute bit cannot be traversed; os.mkdir also fails when parent
     # directories are missing. Use makedirs with a proper octal mode.
     if not os.path.exists(CONFIG.LOGGER_PATH):
         os.makedirs(CONFIG.LOGGER_PATH, mode=0o755)
     log_file = os.path.join(CONFIG.LOGGER_PATH, CONFIG.LOGGER_FILE_NAME)
     # add file handler
     file_handler = ConcurrentRotatingFileHandler(
         filename=log_file,
         mode='a',
         maxBytes=CONFIG.LOGGER_BUFFER,
         backupCount=CONFIG.LOGGER_FILE_COUNT,
         encoding="utf-8",
         use_gzip=True)
     file_handler.setLevel(CONFIG.LOGGER_LEVEL)
     file_handler.setFormatter(self._log_format)
     self._logger.addHandler(file_handler)
Ejemplo n.º 9
0
    def __init__(self):
        """Wire up the case logger: simple-format console output plus a
        verbose-format rotating log file, all driven by the 'log' config."""
        self.case_logger = logging.getLogger(do_config('log', 'logger_name'))
        self.case_logger.setLevel(do_config('log', 'logger_level'))

        to_console = logging.StreamHandler()
        # Process-safe rotating file handler (replaces RotatingFileHandler).
        to_file = ConcurrentRotatingFileHandler(
            filename=os.path.join(LOG_DIR, do_config('log', 'logger_name')),
            maxBytes=do_config('log', 'maxBytes'),
            backupCount=do_config('log', 'backupCount'),
            encoding='utf8')

        to_console.setLevel(do_config('log', 'console_level'))
        to_file.setLevel(do_config('log', 'file_level'))

        # Console gets the terse format, the file gets the verbose one.
        to_console.setFormatter(
            logging.Formatter(do_config('log', 'simple_formatter')))
        to_file.setFormatter(
            logging.Formatter(do_config('log', 'verbose_formatter')))

        self.case_logger.addHandler(to_console)
        self.case_logger.addHandler(to_file)
Ejemplo n.º 10
0
def init_logging(logFilePath="test.log", level=logging.DEBUG):
    """Configure root logging at *level* and attach a 10 MB rotating file
    handler writing to *logFilePath* (5 backups kept)."""
    logging.basicConfig(level=level)
    file_handler = LogHandler(logFilePath, maxBytes=10 * 1024 * 1024,
                              backupCount=5)
    file_handler.setLevel(level)
    file_handler.setFormatter(logging.Formatter(
        '%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s'))
    logging.root.addHandler(file_handler)
Ejemplo n.º 11
0
def set_log(app, level):
    """Attach a 5 MB rotating file handler (logs/log, relative to the current
    working directory) to app.logger at the given level."""
    # BUG FIX: the original used Path(__file__).cwd() — Path.cwd() is a
    # classmethod that ignores the instance it is called on, so the expression
    # always meant the current working directory. Say so directly; the result
    # matches the relative "logs/log" path handed to the handler below.
    Path("logs").mkdir(parents=True, exist_ok=True)
    handler = ConcurrentRotatingFileHandler("logs/log",
                                            maxBytes=5 * 1024 * 1024,
                                            backupCount=5,
                                            encoding="UTF-8")
    handler.setFormatter(logging.Formatter(
        "[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s]%(message)s"))
    handler.setLevel(level)
    app.logger.addHandler(handler)
Ejemplo n.º 12
0
def setup_config(loglevel):
    """Build and return a 1 MB rotating file handler for logs/flask.log
    at *loglevel*, creating the logs directory if needed."""
    if not os.path.exists('logs'):
        os.makedirs('logs')
    # Process-safe replacement for RotatingFileHandler.
    handler = ConcurrentRotatingFileHandler(
        'logs/flask.log', mode='a', maxBytes=1024 * 1024, backupCount=5)
    handler.setLevel(loglevel)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)d - %(message)s'))
    return handler
Ejemplo n.º 13
0
def setup_logger(logger_name, fn=None, level="debug"):
    """
    Create a logger, optionally writing to a rotating file in the logs folder
    of the mt_metadata directory.

    :param logger_name: name of the logger, typically __name__
    :type logger_name: string
    :param fn: file name to write to, defaults to None
    :type fn: str, optional
    :param level: file-handler level key into LEVEL_DICT, defaults to "debug"
    :type level: str, optional
    :return: the configured logger
    :rtype: logging.Logger
    """

    logger = logging.getLogger(logger_name)

    # if there is a file name create file in logs directory
    if fn is not None:
        # clear handlers so repeated calls attach exactly one file handler
        # plus one stdout handler
        if logger.hasHandlers():
            logger.handlers.clear()

        logger.propagate = False
        # stream handler so INFO print statements reach stdout
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(LOG_FORMAT)
        stream_handler.setLevel(LEVEL_DICT["info"])
        logger.addHandler(stream_handler)

        fn = LOG_PATH.joinpath(fn)
        # BUG FIX: normalize the suffix BEFORE checking for existence. The
        # original tested fn.exists() on the un-normalized name, so for inputs
        # without a .log suffix the "new file" message below keyed off the
        # wrong path.
        if fn.suffix not in [".log"]:
            fn = Path(fn.parent, f"{fn.stem}.log")
        exists = fn.exists()

        # 2 MB rotation, 2 backups, process-safe.
        fn_handler = ConcurrentRotatingFileHandler(fn,
                                                   maxBytes=2**21,
                                                   backupCount=2)
        fn_handler.setFormatter(LOG_FORMAT)
        fn_handler.setLevel(LEVEL_DICT[level.lower()])
        logger.addHandler(fn_handler)
        if not exists:
            logger.info(
                f"Logging file can be found {logger.handlers[-1].baseFilename}"
            )

    return logger
Ejemplo n.º 14
0
 def get_logger(self):
     """Return self.logger, attaching console and rotating-file handlers on
     first use; if handlers already exist the logger is returned unchanged."""
     if self.logger.handlers:
         return self.logger
     stream = logging.StreamHandler()
     stream.setFormatter(self.formatter)
     stream.setLevel(self.console_output_level)
     self.logger.addHandler(stream)
     rotating = ConcurrentRotatingFileHandler(filename=self.log_file_name,
                                              maxBytes=self.maxBytes,
                                              backupCount=self.backup_count,
                                              encoding="utf-8")
     rotating.setFormatter(self.formatter)
     rotating.setLevel(self.file_output_level)
     self.logger.addHandler(rotating)
     return self.logger
Ejemplo n.º 15
0
    def set_cur_logger(self, name=None, level=None, save_path='logs/'):
        """
        Bind self._logger to the logger *name*, creating and configuring it on
        first use (console output plus rotating file handler(s)).

        :param name: logger name in logging's loggerDict; defaults to self.name
        :param level: file-handler level; defaults to self._level. If
            level>DEBUG and save_path is set, logs go to two files: one only
            for >=level, one for all records.
        :param save_path: folder path to save log. If set to None, logs will
            not be saved.
        :return: None (result is stored on self._logger / self._log_filepath)
        """
        if name is None:
            name = self.name
        if level is None:
            level = self._level
        # noinspection PyUnresolvedReferences
        if name in logging.Logger.manager.loggerDict:
            # Logger already exists: reuse it as-is to avoid duplicate handlers.
            _logger = logging.getLogger(name)
        else:
            _logger = logging.getLogger(name)
            _logger.setLevel(logging.DEBUG)

            # Colored console output at DEBUG regardless of the file level.
            console = logging.StreamHandler()
            console.setFormatter(COLOR_FORMATTER)
            console.setLevel(logging.DEBUG)
            _logger.addHandler(console)

            if save_path is not None:
                if not os.path.exists(save_path):
                    os.makedirs(save_path)
                fp = os.path.join(save_path, f'{name}.log')
                # Process-safe rotation: 1 MB per file, 3 backups.
                fh = ConcurrentRotatingFileHandler(os.path.abspath(fp),
                                                   encoding='utf8',
                                                   maxBytes=1024 * 1024,
                                                   backupCount=3)
                # fh = RotatingFileHandler(fp, encoding='utf8', maxBytes=1024 * 1024, backupCount=3)
                fh.setFormatter(LOG_FORMATTER)
                fh.setLevel(level)
                _logger.addHandler(fh)
                self._log_filepath = fp

                if level > logging.DEBUG:
                    # Second file capturing every record, not just >= level.
                    fp = os.path.join(save_path, f'{name}.full.log')
                    full_fh = ConcurrentRotatingFileHandler(
                        os.path.abspath(fp),
                        encoding='utf8',
                        maxBytes=1024 * 1024,
                        backupCount=3)
                    full_fh.setFormatter(LOG_FORMATTER)
                    full_fh.setLevel(logging.DEBUG)
                    _logger.addHandler(full_fh)
                    # only store the path of the full log when it exists
                    self._log_filepath = fp
        self._logger = _logger
Ejemplo n.º 16
0
    def init_config(self, name=None):
        """
        Initialize the logger and file handler for one log level.

        :param name: prefix name of log files (also the key into
            self.log_mapping and self.log_config)

        For example:
            if name = 'web', the log files will look like 'z_web_xxx.log', xxx are names of each level

        Please notice that i've put a "z" if no name passed into Log, for making log files listed at the end of
        all the codes in project.
        """

        # Use the custom timestamp converter for every formatter.
        logging.Formatter.converter = self.opti_time
        base_format = logging.Formatter(
            '【%(levelname)s】 %(asctime)s [%(process)d] \n%(message)s',
            datefmt='%Y-%m-%d %H:%M:%S')

        # logging.Formatter.converter = customTime

        if name not in self.logs:

            # create logger — named after this level's value in log_mapping
            logger = logging.getLogger(str(self.log_mapping[name]))
            logger.setLevel(self.log_mapping[name])

            # create handler — size-based rotation, sized per this level's config (MB)
            log_path = self.log_root + '/' + self.public_name + '_' + name + '.log'
            base_handler = RotatingFileHandler(
                log_path,
                maxBytes=self.log_config[name]['maxBytes'] * 1024 * 1024,
                backupCount=self.log_config[name]['backupCount'])

            # define output format
            base_handler.setFormatter(base_format)
            base_handler.setLevel(self.log_mapping[name])

            # add handler
            logger.addHandler(base_handler)

            # critical level additionally echoes to the console
            if name == 'critical':
                console_handler = logging.StreamHandler()
                console_handler.setLevel(self.log_mapping[name])
                console_format = logging.Formatter(
                    '【%(levelname)s】 %(asctime)s [%(process)d] \n%(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')
                console_handler.setFormatter(console_format)
                logger.addHandler(console_handler)
            # cache so repeated calls do not re-attach handlers
            self.logs.update({name: logger})
Ejemplo n.º 17
0
    def set_log(self, log_name):
        """Create and return a DEBUG-level logger writing to self.log_file.

        NOTE(review): maxBytes=0 disables size-based rollover even though
        backupCount=10 is set — presumably intentional; confirm.
        """
        logger = logging.getLogger("{}".format(log_name))
        logger.setLevel(level=logging.DEBUG)
        handler = ConcurrentRotatingFileHandler('{}'.format(self.log_file),
                                                'a',
                                                maxBytes=0,
                                                backupCount=10)
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(logging.Formatter(
            '%(asctime)s.%(msecs)03d %(name)s %(process)d %(levelname)s %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'))
        logger.addHandler(handler)
        return logger
Ejemplo n.º 18
0
    def __init__(self,
                 file_dir,
                 open_file_log=1,
                 file_log_level="DEBUG",
                 open_stream_log=1,
                 stream_log_level=1,
                 simple_mode=True):
        """Logger subclass writing run.log (file_log_level+) and error.log
        (ERROR+) under *file_dir*, with optional stdout output.

        NOTE(review): super().__init__ is passed *self* where a logger *name*
        string is expected — looks unintended; confirm.
        NOTE(review): stream_log_level defaults to 1 (an int), which is never
        in level_list, so the stream level silently falls back to "DEBUG".
        """
        super(MyLogger, self).__init__(self)
        self.level_list = ["DEBUG", "INFO", "WARN", "ERROR"]
        self.sep = "-"
        self.max_bytes = 1024 * 1024 * 30  # 30 MB per log file
        self.max_count = 10  # rotated backups kept
        self.simple_mode = simple_mode
        self.open_file_log = open_file_log
        self.open_stream_log = open_stream_log
        # Levels outside level_list fall back to "DEBUG".
        self.file_log_level = file_log_level if file_log_level in self.level_list else "DEBUG"
        self.stream_log_level = stream_log_level if stream_log_level in self.level_list else "DEBUG"
        os.makedirs(file_dir, exist_ok=True)
        self.file_dir = file_dir
        # simple_mode picks the terse format; otherwise the detailed one.
        format_str = self.get_simple_format(
        ) if simple_mode else self.get_detail_format()
        formatter = logging.Formatter(format_str)
        if self.open_file_log:
            # run.log: every record at/above file_log_level.
            debug_handler = ConcurrentRotatingFileHandler(
                os.path.join(self.file_dir, "run.log"),
                maxBytes=self.max_bytes,
                backupCount=self.max_count,
                encoding="utf8")
            debug_handler.setLevel(self.file_log_level)
            debug_handler.setFormatter(formatter)
            self.addHandler(debug_handler)

            # error.log: ERROR and above only.
            error_handler = ConcurrentRotatingFileHandler(
                os.path.join(self.file_dir, "error.log"),
                maxBytes=self.max_bytes,
                backupCount=self.max_count,
                encoding="utf8")
            error_handler.setLevel(logging.ERROR)
            error_handler.setFormatter(formatter)
            self.addHandler(error_handler)
        if self.open_stream_log:
            ch = logging.StreamHandler(sys.stdout)
            ch.setLevel(self.stream_log_level)
            ch.setFormatter(formatter)
            self.addHandler(ch)
Ejemplo n.º 19
0
def create_logger_t3(file_path='F:/log/log.json'):
    """Return a module logger emitting JSON-shaped lines to *file_path*
    (5 MB rotation, 3 backups) and to the console, both at INFO."""
    file_util.prepare_path(file_path)
    json_formatter = logging.Formatter(
        '{"time":"%(asctime)s","levelname":"%(levelname)s","path":"%(filename)s %(lineno)d","message":%(message)s,"type":"log"}')
    logger = logging.getLogger(__name__)
    logger.setLevel(level=logging.INFO)

    # Same level and format for the file handler and the console handler;
    # the file handler is attached first.
    for handler in (
            ConcurrentRotatingFileHandler(file_path, maxBytes=5 * 1024 * 1024,
                                          backupCount=3, encoding="utf_8"),
            logging.StreamHandler()):
        handler.setLevel(logging.INFO)
        handler.setFormatter(json_formatter)
        logger.addHandler(handler)

    return logger
Ejemplo n.º 20
0
def create_logger(file_path='F:/log/log.json'):
    """Return a DEBUG logger that writes JSON-formatted records to
    *file_path* (5 MB rotation, 3 backups) and echoes them to the console;
    both handlers filter at INFO."""
    file_util.prepare_path(file_path)

    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)

    # File output: rotating, multiprocess-safe.
    file_handler = ConcurrentRotatingFileHandler(
        file_path, maxBytes=5 * 1024 * 1024, backupCount=3, encoding="utf_8")
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(JSONFormatter())

    # Console output.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(JSONFormatter())

    logger.addHandler(file_handler)
    logger.addHandler(console_handler)
    return logger
Ejemplo n.º 21
0
def log_handler(log_level=logging.INFO, console_out=False):
    """Return the 'logistic_log' logger writing error.log (ERROR+), info.log
    and all.log (both at log_level), with optional console echo.

    :param log_level: level for the info/all handlers and the logger itself.
    :param console_out: when True, also echo records to the console.
    :return: the configured logger.
    """
    if not os.path.exists(LOG_DIRECTORY):
        os.makedirs(LOG_DIRECTORY)

    error_logfile = LOG_DIRECTORY + 'error.log'
    info_logfile = LOG_DIRECTORY + 'info.log'
    all_logfile = LOG_DIRECTORY + 'all.log'

    fmt = '%(asctime)s - [pid:%(process)d] %(processName)s - %(filename)s[line:%(lineno)d] %(levelname)s - %(message)s'
    formatter = logging.Formatter(fmt)
    # 40 MB rotation, 5 backups, process-safe.
    error_handler = ConcurrentRotatingFileHandler(error_logfile, "a", 1024 * 1024 * 40, 5, encoding='utf-8')
    error_handler.setFormatter(formatter)
    error_handler.setLevel(logging.ERROR)

    info_handler = ConcurrentRotatingFileHandler(info_logfile, "a", 1024 * 1024 * 40, 5, encoding='utf-8')
    info_handler.setFormatter(formatter)
    info_handler.setLevel(log_level)

    all_handler = ConcurrentRotatingFileHandler(all_logfile, "a", 1024 * 1024 * 40, 5, encoding='utf-8')
    all_handler.setFormatter(formatter)
    all_handler.setLevel(log_level)
    logger = logging.getLogger('logistic_log')

    logger.addHandler(error_handler)
    logger.addHandler(info_handler)
    # BUG FIX: all_handler was built and fully configured but never attached,
    # so all.log stayed empty.
    logger.addHandler(all_handler)

    if console_out is True:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)

    logger.setLevel(log_level)
    return logger
Ejemplo n.º 22
0
    def __init__(self):
        """Case logger: simple-format console output plus verbose-format
        rotating file output, all parameters read from the 'log' config."""
        self.case_logger = logging.getLogger(do_config("log", "logger_name"))
        self.case_logger.setLevel(do_config("log", "logger_level"))

        console_handle = logging.StreamHandler()
        # Process-safe rotating file handler; the plain RotatingFileHandler
        # variants it replaced were not safe across processes.
        file_handle = ConcurrentRotatingFileHandler(
            os.path.join(LOGS_DIR, do_config("log", "log_filename")),
            maxBytes=do_config("log", "max_byte"),
            backupCount=do_config("log", "backcount"),
            encoding="utf-8")

        console_handle.setLevel(do_config("log", "console_level"))
        file_handle.setLevel(do_config("log", "file_level"))

        # Terse format on the console, verbose format in the file.
        console_handle.setFormatter(
            logging.Formatter(do_config("log", "simple_log")))
        file_handle.setFormatter(
            logging.Formatter(do_config("log", "verbose_log")))

        self.case_logger.addHandler(console_handle)
        self.case_logger.addHandler(file_handle)
Ejemplo n.º 23
0
def main():
    """Parse CLI options with docopt, wire three rotating log files
    (ERROR/DEBUG/INFO) onto the 'anha' logger, and start the Dispatcher."""
    _USAGE = '''
    Usage:
      mlagents-learn <trainer-config-path> <main-config-path> [options]
      mlagents-learn --help

    Options:
      --env=<file>               Name of the Unity executable [default: None].
      --curriculum=<directory>   Curriculum json directory for environment [default: None].
      --keep-checkpoints=<n>     How many model checkpoints to keep [default: 5].
      --lesson=<n>               Start learning from this lesson [default: 0].
      --load                     Whether to load the model or randomly initialize [default: False].
      --run-id=<path>            The directory name for model and summary statistics [default: ppo].
      --save-freq=<n>            Frequency at which to save model [default: 50000].
      --train                    Whether to train model, or only run inference [default: False].
      --worker-id=<n>            Number to add to communication port (5005) [default: 0].
      --no-graphics              Whether to run the environment in no-graphics mode [default: False].
      --config=<file>            Configuration of the upper level Learning       
    '''

    logger = logging.getLogger("anha")

    # BUG FIX: handler creation raised FileNotFoundError when ./logs did not
    # exist; create it up front. Imported locally so this block stays
    # self-contained.
    import os
    os.makedirs("logs", exist_ok=True)

    # NOTE(review): no maxBytes is passed, so these handlers use the handler
    # class default; confirm rotation behaviour is the one intended.
    error_handler = ConcurrentRotatingFileHandler("logs/" +
                                                  time.strftime("%Y%m%d") +
                                                  "-ERROR.log")
    error_handler.setLevel(logging.ERROR)
    error_handler.setFormatter(
        logging.Formatter(
            '%(asctime)s - %(filename)s - %(funcName)s - %(message)s'))

    debug_handler = ConcurrentRotatingFileHandler("logs/" +
                                                  time.strftime("%Y%m%d") +
                                                  "-DEBUG.log")
    debug_handler.setLevel(logging.DEBUG)
    debug_handler.setFormatter(
        logging.Formatter(
            '%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(message)s'
        ))

    info_handler = ConcurrentRotatingFileHandler("logs/" +
                                                 time.strftime("%Y%m%d") +
                                                 "-INFO.log")
    info_handler.setLevel(logging.INFO)
    info_handler.setFormatter(
        logging.Formatter(
            '%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(message)s'
        ))

    logger.addHandler(error_handler)
    logger.addHandler(debug_handler)
    logger.addHandler(info_handler)

    options = docopt(_USAGE)
    dispatcher = Dispatcher(options)
    dispatcher.start()
Ejemplo n.º 24
0
import urllib
import re
import json
from django.conf import settings

#os.environ['http_proxy'] = 'http://10.6.209.203:8080'
#os.environ['https_proxy'] = 'https://10.6.209.203:8080'

# Module-level logger for the debugtalk helpers: DEBUG records are written to
# logs/debugtalk.log under the Django project root (settings.BASE_DIR),
# rotating at 5 MB with 5 backups, in a multiprocess-safe way.
log_file_path = os.path.join(settings.BASE_DIR, "logs", "debugtalk.log")
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.DEBUG)
rHandler = ConcurrentRotatingFileHandler(log_file_path,
                                         maxBytes=5 * 1024 * 1024,
                                         backupCount=5,
                                         encoding='utf-8')
rHandler.setLevel(logging.DEBUG)
formatter = logging.Formatter(
    '[%(asctime)s] [%(name)s:%(lineno)d] [%(module)s:%(funcName)s] [%(levelname)s]- %(message)s'
)
rHandler.setFormatter(formatter)
# NOTE(review): runs at import time; importing this module twice in the same
# process would attach a second handler — confirm single-import usage.
logger.addHandler(rHandler)


def executeES(es, *eslines):
    logger.info("execute es")
    _es = []
    _es.append(es)
    es = Elasticsearch(_es)
    _params = {"refresh": "true"}
    for esline in eslines:
        logger.info("1==================")
Ejemplo n.º 25
0
def createApp(theServers=None, theWorkers=None, localEngine=None):
    """Build and configure the DAQBroker Flask application.

    Wires up rotating file logging, registers all blueprints, stores the
    shared server/worker registries in ``app.config`` and installs the
    Flask-Login machinery (user loader, unauthorized handler, login/logout
    routes).

    :param theServers: shared servers registry, exposed as ``app.config['servers']``
    :param theWorkers: shared workers registry, exposed as ``app.config['workers']``
    :param localEngine: local database engine, exposed as ``app.config['localEngine']``
    :returns: the fully configured :class:`flask.Flask` application
    """
    # Process-safe rotating log so several server processes can share one file.
    logHandler = ConcurrentRotatingFileHandler(os.path.join(
        base_dir, 'server.log'),
                                               "a",
                                               maxBytes=10000,
                                               backupCount=1)
    logHandler.setLevel(logging.DEBUG)
    app = Flask(__name__,
                static_folder=os.path.join(base_dir, 'static'),
                template_folder=os.path.join(base_dir, 'templates'))
    app.register_blueprint(daqbroker, url_prefix='/daqbroker')
    app.register_blueprint(instrumentsBP, url_prefix='/instruments')
    app.register_blueprint(monitoringBP, url_prefix='/monitoring')
    app.register_blueprint(dataBP, url_prefix='/data')
    app.register_blueprint(runsBP, url_prefix='/runs')
    app.register_blueprint(adminBP, url_prefix='/admin')
    # app.register_blueprint(daqbroker,url_prefix='/daqbroker')
    app.logger.setLevel(logging.DEBUG)
    app.logger.addHandler(logHandler)
    # Random key per process: sessions do not survive a restart and are not
    # shared between processes.
    app.secret_key = os.urandom(24)
    app.config['servers'] = theServers
    app.config['workers'] = theWorkers
    app.config['localEngine'] = localEngine

    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = "login"

    @login_manager.user_loader
    def load_user(user_id):
        """Resolve ``user_id`` against the module-level ``usersArray``.

        For a matching user, refresh its credentials/server/engine from the
        current request, rebuild its database URIs when they changed, make
        sure the per-user settings database exists, and return the user.
        Returns ``None`` when no known user matches the id.
        """
        found = False  # NOTE(review): never set True anywhere; kept for parity
        for user in usersArray:
            # NOTE(review): return value discarded — presumably called for a
            # side effect on the custom User class; confirm it is needed.
            user.is_active()
            if user_id == user.id:
                if request.method == 'POST':
                    requestCheck = request.form
                else:
                    requestCheck = request.args
                # Accept several aliases for each connection parameter and
                # fall back to the values already stored on the user.
                if 'dbname' in requestCheck:
                    database = requestCheck["dbname"]
                elif 'database' in requestCheck:
                    database = requestCheck["database"]
                elif user.database:
                    database = user.database
                else:
                    database = None
                if 'username' in requestCheck:
                    user.username = requestCheck['username']
                elif 'user' in requestCheck:
                    user.username = requestCheck['user']
                if 'password' in requestCheck:
                    user.password = requestCheck['password']
                elif 'pass' in requestCheck:
                    user.password = requestCheck['pass']
                if 'serverName' in requestCheck:
                    user.server = requestCheck['serverName']
                elif 'server' in requestCheck:
                    user.server = requestCheck['server']
                elif 'newServerName' in requestCheck:
                    user.server = requestCheck['newServerName']
                if 'serverEngine' in requestCheck:
                    user.engine = requestCheck['serverEngine']
                elif 'engine' in requestCheck:
                    user.engine = requestCheck['engine']
                elif 'newServerEngine' in requestCheck:
                    user.engine = requestCheck['newServerEngine']
                # Rebuild the connection URI and persist it when it changed.
                newURI = user.engine + "://" + user.username + ":" + user.password + "@" + user.server
                if database:
                    newURI = user.engine + "://" + user.username + ":" + user.password + "@" + user.server + '/daqbro_' + database
                if not (newURI == user.uri):
                    if database and (not database == user.database):
                        user.database = database
                        user.uri = user.engine + "://" + user.username + ":" + user.password + "@" + user.server + '/daqbro_' + user.database
                        user.uriHome = user.engine + "://" + user.username + ":" + user.password + "@" + user.server
                    user.updateDB()
                if user.database:
                    daqbrokerDatabase.daqbroker_database.metadata.reflect(
                        user.engineObj, extend_existing=True)
                Session = sessionmaker(bind=user.engineObjSettings)
                session = Session()
                # Create the per-user settings database on first contact and
                # seed it with this user; otherwise look the user record up.
                if not database_exists(user.uriSettings):
                    create_database(user.uriSettings)
                    daqbrokerSettings.daqbroker_settings.metadata.create_all(
                        user.engineObjSettings)
                    Session = sessionmaker(bind=user.engineObjSettings)
                    session = Session()
                    thisUser = daqbrokerSettings.users(username=user.username,
                                                       type=1)
                    session.add(thisUser)
                    session.commit()
                else:
                    thisUser = session.query(
                        daqbrokerSettings.users).filter_by(
                            username=user.username).first()
                if thisUser:
                    if thisUser.type:
                        user.type = thisUser.type
                    else:
                        user.type = 1
                session.close()
                return user
        # Fix: the original ended with a bare ``None`` expression (a no-op);
        # return None explicitly so Flask-Login treats the id as unknown.
        if not found:
            return None

    @login_manager.unauthorized_handler
    def unauthorized():
        # Remember where the user wanted to go, then bounce to /login
        # preserving the request method (307).
        session["CURRENT_URL"] = request.path
        return redirect(url_for('login'), code=307)

    # @app.after_request
    # def apply_caching(response):
    #	session.pop('_csrf_token', None)
    #	response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
    #	return response

    @app.route("/login", methods=["GET", "POST"])
    def login():
        """ Start a session or update an existing session with a login with a given user to a specific database. A database server is defined by an address and a database engine. Only one user is allowed to be connected to a single database server at any time. This request is always run for session start automatically for any request. For single command line requests, these parameters MUST be supplied apart from the target request parameters unless the user uses already existing sessions, which is unusual. It is also required to run to connect to a new server, and the user does not have credentials for said server

        .. :quickref: Log to server; Login to a specified server and engine

        :param: username: (String) username
        :param: user: (String) alias of  **username**
        :param: password: (String) user password
        :param: pass: (String) alias of  **password**
        :param: serverName: (String) server address to connect to. Defaults to 'localhost'
        :param: server: (String) alias of  **serverName**
        :param: serverEngine: (String) server address to connect to
        :param: engine: (String) alias of  **serverEngine**
        :param: database: (String - optional) DAQBroker database to point server session
        :param: engine: (String - optional) alias of  **database**


        """
        # Known browser user agents: these get HTML responses, anything else
        # (CLI clients) gets JSON-style errors / redirects with code 307.
        browserList = [
            'aol', 'ask', 'baidu', 'bing', 'camino', 'chrome', 'firefox',
            'galeon', 'google', 'kmeleon', 'konqueror', 'links', 'lynx',
            'mozilla', 'msie', 'msn', 'netscape', 'opera', 'safari',
            'seamonkey', 'webkit', 'yahoo'
        ]
        if request.method == 'POST':
            if request.is_json:
                requestCheck = request.get_json()
            else:
                requestCheck = request.form
            # Collect credentials, accepting the same aliases as load_user;
            # username and password are mandatory, server/engine default.
            allGood = True
            loginCreds = {}
            if 'username' in requestCheck:
                loginCreds['username'] = requestCheck['username']
            elif 'user' in requestCheck:
                loginCreds['username'] = requestCheck['user']
            else:
                allGood = False
            if 'password' in requestCheck:
                loginCreds['password'] = requestCheck['password']
            elif 'pass' in requestCheck:
                loginCreds['password'] = requestCheck['pass']
            else:
                allGood = False
            if 'serverName' in requestCheck:
                loginCreds['server'] = requestCheck['serverName']
            elif 'server' in requestCheck:
                loginCreds['server'] = requestCheck['server']
            elif 'newServerName' in requestCheck:
                loginCreds['server'] = requestCheck['newServerName']
            else:
                # allGood=False
                loginCreds['server'] = "localhost"
            if 'serverEngine' in requestCheck:
                loginCreds['engine'] = requestCheck['serverEngine']
            elif 'engine' in requestCheck:
                loginCreds['engine'] = requestCheck['engine']
            elif 'newServerEngine' in requestCheck:
                loginCreds['engine'] = requestCheck['newServerEngine']
            else:
                # allGood=False
                loginCreds['engine'] = 'mysql'
            if 'database' in requestCheck:
                loginCreds['database'] = requestCheck['database']
            if 'dbname' in requestCheck:
                loginCreds['database'] = requestCheck['dbname']
            if allGood:
                if 'database' in loginCreds:
                    user = User(loginCreds['username'], loginCreds['password'],
                                loginCreds['server'], loginCreds['engine'],
                                loginCreds['database'])
                else:
                    user = User(loginCreds['username'], loginCreds['password'],
                                loginCreds['server'], loginCreds['engine'])
                if user.id:
                    # Valid connection: register the session user and record
                    # the server in the local settings database.
                    usersArray.append(user)
                    login_user(user)
                    scoped = daqbrokerSettings.getScoped()
                    localSession = scoped()
                    newServer = daqbrokerSettings.servers(
                        server=loginCreds["server"],
                        engine=loginCreds["engine"])
                    localSession.merge(newServer)
                    localSession.commit()
                else:
                    if request.user_agent.browser not in browserList:
                        raise InvalidUsage('Error connecting to database',
                                           status_code=500)
                    else:
                        return render_template('login.html')
                # Send the user back to wherever the unauthorized handler
                # stored, or to the main page. (renamed from ``next`` to
                # avoid shadowing the builtin)
                if "CURRENT_URL" in session:
                    next_url = session["CURRENT_URL"]
                    session.pop("CURRENT_URL", None)
                else:
                    next_url = None
                if not request.user_agent.browser:
                    return redirect(next_url or url_for('daqbroker.main'),
                                    code=307)
                else:
                    return redirect(next_url or url_for('daqbroker.main'))
            else:
                if request.user_agent.browser not in browserList:
                    raise InvalidUsage('Error connecting to database',
                                       status_code=500)
                else:
                    return render_template('login.html')
        else:
            return render_template('login.html')

    @app.route("/logout")
    def logout():
        """ End the user login session by loging out of all currnetly connected database servers

        .. :quickref: End session; End previously logged in session

        """
        message = Markup(
            '<p class="loggedOut"><span style="color:#28a745">Successfully logged out</span></p>'
        )
        flash(message)
        logout_user()
        return redirect(url_for('login'))

    # Jinja globals used by the templates for CSRF tokens and session info.
    app.jinja_env.globals['csrf_token'] = generate_csrf_token
    app.jinja_env.globals['daqbroker_server'] = getServer
    app.jinja_env.globals['daqbroker_engine'] = getEngine
    app.jinja_env.globals['daqbroker_usertype'] = getusertype

    return app
Ejemplo n.º 26
0
import sys
from utils.LayerDialog import LayerDialog
from utils.OptimizerDialog import Ops
import copy
from utils.optimizerInit import ops, loss
import numpy as np

# Logging: offline-mode log written under ./static/logs.
LOG_PATH = './static/logs/log_offline.txt'
# logging.basicConfig(filename=LOG_PATH,level=logging.INFO,filemode='a',format='%(asctime)s %(message)s',datefmt='%m-%d-%Y %I:%M:%S %p')
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
# Process-safe rotation: roll over at 5 MB, keep 5 backup files.
# NOTE(review): no explicit encoding is passed, so the platform default is
# used — confirm that is acceptable for non-ASCII log messages.
rHandler = ConcurrentRotatingFileHandler(filename=LOG_PATH,
                                         maxBytes=5 * 1024 * 1024,
                                         backupCount=5)
rHandler.setLevel(logging.INFO)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
rHandler.setFormatter(formatter)

logger.addHandler(rHandler)


class MainWindow(QMainWindow):
    def __init__(self):
        super(MainWindow, self).__init__()
        self.setWindowTitle('Visual Machine Learning')
        self.resize(640, 480)
        self.codeReview = QTextEdit(self)
        self.codeReview.setPlaceholderText("# this shows the code")
Ejemplo n.º 27
0
# 日志记录
def make_dir(make_dir_path):
    """Ensure the directory *make_dir_path* exists and return its path.

    The path is whitespace-stripped first; intermediate directories are
    created as needed. Uses ``os.makedirs(..., exist_ok=True)`` instead of
    the original check-then-create, which had a TOCTOU race: another process
    creating the directory between ``os.path.exists`` and ``os.makedirs``
    made the call raise ``FileExistsError``.

    :param make_dir_path: directory path (surrounding whitespace ignored)
    :returns: the stripped path, now guaranteed to exist as a directory
    """
    path = make_dir_path.strip()
    os.makedirs(path, exist_ok=True)
    return path


log_dir_name = "logs"
log_file_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) + os.sep + log_dir_name
make_dir(log_file_folder)
logging.basicConfig(level=logging.DEBUG)
# 安照日志文件大小切割,超过1M时切割,最多保留10个日志文件
fileHandler = ConcurrentRotatingFileHandler("logs/leo-api-auto.log", maxBytes=1024 * 1024, backupCount=10)
fileHandler.setLevel('DEBUG')
logging_format = logging.Formatter(
    "[%(asctime)s][%(levelname)s][%(filename)s:%(funcName)s:%(lineno)s] - %(message)s")
fileHandler.setFormatter(logging_format)
app.logger.addHandler(fileHandler)

app_config = Config()

app.config['SECRET_KEY'] = app_config.get_secret_key()
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)  # keep sessions for 7 days
# Emit non-ASCII characters directly in JSON responses instead of \u escapes.
app.config['JSON_AS_ASCII'] = False
# Keep jsonify output in insertion order; do not sort keys alphabetically.
app.config['JSON_SORT_KEYS'] = False

login_manager = LoginManager()
Ejemplo n.º 28
0
class Log(object):
    """
        operation log of the system

        Lazily attaches a single ConcurrentRotatingFileHandler (gzip-compressed
        backups) to a named logger the first time ``logger`` or
        ``file_handler`` is accessed. Size limits, backup count and level come
        from the project-wide ``configuration`` module.
    """

    def __init__(self, name=__name__, path=None):
        # Handler is created lazily in __init_handler; None until first use.
        self.__current_rotating_file_handler = None

        # Default log file under configuration.LOG_PATH unless an explicit
        # path is supplied.
        self.__path = os.path.join(
            configuration.LOG_PATH, "log_info.log")
        if path:
            self.__path = path

        # Ensure the log file's parent directory exists. If makedirs fails
        # because the directory is already there, touch the file instead.
        # NOTE(review): when the directory is freshly created the file is NOT
        # touched here — the handler creates it on first write; confirm the
        # 0o644 mode is only needed in the pre-existing-directory case.
        if not os.path.exists(self.__path):
            try:
                os.makedirs(os.path.split(self.__path)[0])
            except FileExistsError:
                pathlib.Path(self.__path).touch(mode=0o644)
        self.__max_bytes = configuration.MAX_BYTES
        self.__backup_count = configuration.BACKUP_COUNT
        self.__level = configuration.LOG_LEVEL
        self.__logger = logging.getLogger(name)
        self.__logger.setLevel(self.__level)

    def __init_handler(self):
        # Build the rotating handler once; gzip old log files on rotation.
        self.__current_rotating_file_handler = ConcurrentRotatingFileHandler(filename=self.__path,
                                                                             mode='a',
                                                                             maxBytes=self.__max_bytes,
                                                                             backupCount=self.__backup_count,
                                                                             encoding="utf-8",
                                                                             use_gzip=True)
        self.__set_formatter()
        self.__set_handler()

    def __set_formatter(self):
        # One shared line format for every record written by this handler.
        formatter = logging.Formatter('%(asctime)s-%(name)s-%(filename)s-[line:%(lineno)d]'
                                      '-%(levelname)s-[ log details ]: %(message)s',
                                      datefmt='%a, %d %b %Y %H:%M:%S')
        self.__current_rotating_file_handler.setFormatter(formatter)

    def __set_handler(self):
        # Apply the configured level and attach the handler to the logger.
        self.__current_rotating_file_handler.setLevel(self.__level)
        self.__logger.addHandler(self.__current_rotating_file_handler)

    @property
    def logger(self):
        """
            Gets the logger property

            First access triggers handler creation/attachment.
        """
        if not self.__current_rotating_file_handler:
            self.__init_handler()
        return self.__logger

    @property
    def file_handler(self):
        """
        The file handle to the log

        First access triggers handler creation/attachment.
        """
        if not self.__current_rotating_file_handler:
            self.__init_handler()
        return self.__current_rotating_file_handler