import os
import zipfile

# LOG_DIR, ZIP_COMPRESSION_MODE, make_log_filename, get_dir_total_size and
# unitstr_to_bytes are project-level names assumed to be importable here.

def process_logs(config):
    """
    扫描日志目录下所有日志文件,将非今日产生的文件非压缩文件进行压缩
    计算日志目录中所有日志文件大小,所超过设置的最大日志量,依次删除老日志文件
    直到总日志量小于等于设置的最大日志量
    :param config: 配置参数
    :return: 
    """
    for filename in os.listdir(LOG_DIR):
        filepath = os.path.join(LOG_DIR, filename)
        if os.path.isfile(filepath) and not filename.endswith(
                '.zip') and not filename.endswith(
                    '.lock') and filepath != make_log_filename(config):
            zipfilepath = filepath + '.zip'
            # Use a context manager so the archive is closed even if
            # write() raises.
            with zipfile.ZipFile(zipfilepath, 'w') as azip:
                azip.write(filepath, compress_type=ZIP_COMPRESSION_MODE)

            os.remove(filepath)

    total_size = get_dir_total_size(LOG_DIR)
    max_size = unitstr_to_bytes(config.log.maxsize)
    if total_size > max_size:
        # Delete the oldest files first (sorted by modification time).
        files = sorted(
            os.listdir(LOG_DIR),
            key=lambda x: os.path.getmtime(os.path.join(LOG_DIR, x)))
        for file in files:
            filepath = os.path.join(LOG_DIR, file)
            size = os.path.getsize(filepath)
            total_size -= size
            os.remove(filepath)
            if total_size <= max_size:
                break
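
get_dir_total_size is referenced above but not shown on this page; a minimal sketch, assuming it sums only the regular files directly inside the directory (the body is illustrative, not the project's actual code; unitstr_to_bytes is sketched after the tests at the end of this page):

import os

def get_dir_total_size(dirpath):
    # Sum the sizes of all regular files directly inside dirpath.
    total = 0
    for name in os.listdir(dirpath):
        path = os.path.join(dirpath, name)
        if os.path.isfile(path):
            total += os.path.getsize(path)
    return total
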
 def create_size_file(self, filename, size):
     # Create a file of the requested size by seeking to the last byte and
     # writing a single character (a sparse file on most filesystems). The
     # with-block closes the file, so no explicit close() is needed.
     size = unitstr_to_bytes(size)
     full_path = os.path.join(self.rootdir, filename)
     with open(full_path, 'w') as fp:
         fp.seek(size - 1)
         fp.write('a')
     return full_path
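
A hypothetical usage of this test helper (the instance and the rootdir value are assumptions for illustration):

# helper.rootdir = '/tmp/logs'
# path = helper.create_size_file('old.log', '1.5M')   # ~1.5 MB sparse file
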
import logging
import sys

# ConcurrentRotatingFileHandler ships with the concurrent-log-handler
# package (older releases exposed it via the cloghandler module); the
# import below assumes the current package name.
from concurrent_log_handler import ConcurrentRotatingFileHandler

def init_logger(config):
    """
    初始化日志句柄(写文件)
    需要多进程安全的ConcurrentRotatingFileHandler
    当日志级别大于等于Warning时会同时打印到屏幕和写文件
    :param config: 配置参数
    :return: 
    """
    # console_handler = logging.StreamHandler(sys.__stdout__)
    console_handler = logging.StreamHandler(sys.__stderr__)
    console_handler.setLevel(logging.ERROR)
    # console_handler.setLevel(logging.DEBUG)
    console_logger = logging.getLogger('obscmd')
    console_logger.addHandler(console_handler)

    log_format = '%(asctime)s - %(levelname)s - %(process)d - %(thread)d - %(filename)s[line:%(lineno)d] - %(message)s'
    logfile = make_log_filename(config)
    handler = ConcurrentRotatingFileHandler(
        logfile,
        mode='a',
        maxBytes=unitstr_to_bytes(config.log.maxbytes),
        backupCount=int(config.log.backupcount),
        encoding=None,
        delay=0)
    handler.setFormatter(logging.Formatter(log_format))
    logger = logging.getLogger("obscmd.file")
    logger.propagate = False
    logger.addHandler(handler)
    # logger.setLevel(logging.DEBUG)

    log_format = '%(message)s'
    handler1 = ConcurrentRotatingFileHandler(
        logfile,
        mode='a',
        maxBytes=unitstr_to_bytes(config.log.maxbytes),
        backupCount=int(config.log.backupcount),
        encoding=None,
        delay=0)
    handler1.setFormatter(logging.Formatter(log_format))
    logger1 = logging.getLogger("print")
    logger1.propagate = True
    logger1.addHandler(handler1)

    logger.setLevel(LOG_LEVEL[config.log.level])
    return logger, logfile
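
A minimal call-site sketch, assuming config.log.level maps to WARNING or lower (the message strings are illustrative):

# logger, logfile = init_logger(config)
# logger.warning('low disk space')                 # formatted -> log file
# logging.getLogger('print').warning('50% done')   # bare message -> log file
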
 def print_run_time(self, size, seconds, tasknum, cmd):
     # Report the average throughput of a finished command.
     total = unitstr_to_bytes(size)
     flowspeed = bytes_to_unitstr(total / seconds)
     msg = '%s/s\t%d tasks\t%s' % (flowspeed, tasknum, cmd)
     print(msg)
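
bytes_to_unitstr is not shown on this page; a plausible sketch that inverts unitstr_to_bytes (an assumption, not the project's code; GB, MB and KB are the 1024-based constants used by the tests below):

def bytes_to_unitstr(num):
    # Render a byte count with the largest unit that keeps the value >= 1.
    for unit, factor in (('G', GB), ('M', MB), ('K', KB)):
        if num >= factor:
            return '%.1f%s' % (num / factor, unit)
    return '%dB' % int(num)
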
 def test_bytes_more_gb(self):
     self.assertEqual(unitstr_to_bytes('3.6G'), int(3.6 * GB))
 def test_bytes_mb(self):
     self.assertEqual(unitstr_to_bytes('100M'), int(100 * MB))
 def test_bytes_mb_lowercase(self):
     self.assertEqual(unitstr_to_bytes('23.7m'), int(23.7 * MB))
 def test_bytes_kb_lowercase(self):
     self.assertEqual(unitstr_to_bytes('1.1k'), int(1.1 * KB))
 def test_bytes_kb(self):
     self.assertEqual(unitstr_to_bytes('1K'), int(1024 * 1.0))
 def test_bytes_less_kb(self):
     self.assertEqual(unitstr_to_bytes('1023B'), 1023)
 def test_1b(self):
     self.assertEqual(unitstr_to_bytes('1B'), 1)
 def test_no_unit(self):
     self.assertEqual(unitstr_to_bytes('123133.3'), 123133)
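
The tests above pin down the behaviour of unitstr_to_bytes: case-insensitive K/M/G suffixes on a 1024 base, an optional B suffix, and bare numbers truncated to int. A minimal sketch that satisfies them (illustrative, not the project's actual implementation; the KB/MB/GB constants are redefined here for self-containment):

KB = 1024
MB = 1024 * KB
GB = 1024 * MB

def unitstr_to_bytes(unitstr):
    # '3.6G' -> int(3.6 * GB); '1023B' -> 1023; '123133.3' -> 123133
    s = unitstr.strip().upper()
    factors = {'K': KB, 'M': MB, 'G': GB, 'B': 1}
    if s and s[-1] in factors:
        return int(float(s[:-1]) * factors[s[-1]])
    return int(float(s))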