def setUp(self):
    """Remove any stale log file, then (re)configure the root logger for the test."""
    rm_file('/tmp/t.out')

    # root logger
    logutil.make_logger(base_dir='/tmp', log_fn='t.out',
                        level=logging.DEBUG, fmt='message')
def setUp(self):
    """Remove any stale log file, then (re)configure the root logger for the test."""
    rm_file(this_base + '/t.out')

    # root logger
    logutil.make_logger(base_dir=this_base, log_fn='t.out',
                        level=logging.DEBUG, fmt='message')
def log(self):
    """Emit 100 rounds of info/warn/error records, sleeping 1ms between rounds."""
    logger = logutil.make_logger(base_dir=this_base, log_name='test_log')

    # the original used a manual `while True` counter (cnt 1..100) that ran
    # the body exactly 100 times; a bounded for-loop is equivalent and clearer
    for _ in range(100):
        logger.info('info')
        logger.warn('warn')
        logger.error('error')
        time.sleep(0.001)
def test_set_logger_level(self):
    """set_logger_level raises the level of loggers matched by name prefix.

    Each case is (name_prefixes argument, expected file content): after the
    level is raised to INFO, debug records from matched loggers stop showing
    up in the shared log file '/tmp/ss'.
    """
    cases = (
        (None, 'debug1\ndebug2'),
        ('1_prefix', 'debug1\ndebug2\ndebug2'),
        (('1_prefix', '2_prefix'), 'debug1\ndebug2'),
        (('not_exist', ), 'debug1\ndebug2\ndebug1\ndebug2'),
        (('not_exist', '1_prefix'), 'debug1\ndebug2\ndebug2'),
    )

    for inp, expected in cases:
        rm_file('/tmp/ss')

        # two loggers with distinct name prefixes, writing to the same file
        logger1 = logutil.make_logger(base_dir='/tmp', log_name='1_prefix_1',
                                      log_fn='ss', level='DEBUG',
                                      fmt='%(message)s', datefmt='%H%M%S')
        logger2 = logutil.make_logger(base_dir='/tmp', log_name='2_prefix_1',
                                      log_fn='ss', level='DEBUG',
                                      fmt='%(message)s', datefmt='%H%M%S')

        logger1.debug('debug1')
        logger2.debug('debug2')

        logutil.set_logger_level(level='INFO', name_prefixes=inp)

        # only loggers NOT matched by the prefixes still log at DEBUG
        logger1.debug('debug1')
        logger2.debug('debug2')

        content = read_file('/tmp/ss')
        self.assertEqual(expected, content.strip())
def test_set_logger_level(self):
    """set_logger_level raises the level of loggers matched by name prefix.

    Each case is (name_prefixes argument, expected file content): after the
    level is raised to INFO, debug records from matched loggers stop showing
    up in the shared log file under this_base.
    """
    cases = (
        (None, 'debug1\ndebug2'),
        ('1_prefix', 'debug1\ndebug2\ndebug2'),
        (('1_prefix', '2_prefix'), 'debug1\ndebug2'),
        (('not_exist',), 'debug1\ndebug2\ndebug1\ndebug2'),
        (('not_exist', '1_prefix'), 'debug1\ndebug2\ndebug2'),
    )

    for inp, expected in cases:
        rm_file(this_base + '/ss')

        # two loggers with distinct name prefixes, writing to the same file
        logger1 = logutil.make_logger(base_dir=this_base, log_name='1_prefix_1',
                                      log_fn='ss', level='DEBUG',
                                      fmt='%(message)s', datefmt='%H%M%S')
        logger2 = logutil.make_logger(base_dir=this_base, log_name='2_prefix_1',
                                      log_fn='ss', level='DEBUG',
                                      fmt='%(message)s', datefmt='%H%M%S')

        logger1.debug('debug1')
        logger2.debug('debug2')

        logutil.set_logger_level(level='INFO', name_prefixes=inp)

        # only loggers NOT matched by the prefixes still log at DEBUG
        logger1.debug('debug1')
        logger2.debug('debug2')

        content = read_file(this_base + '/ss')
        self.assertEqual(expected, content.strip())
def test_make_logger(self):
    """make_logger honors `level`: a DEBUG record is dropped at INFO level."""
    rm_file('/tmp/tt')

    logger = logutil.make_logger(base_dir='/tmp', log_name='m', log_fn='tt',
                                 level='INFO', fmt='%(message)s',
                                 datefmt='%H%M%S')
    logger.debug('debug')
    logger.info('info')

    # only the INFO record must reach the file
    cont = read_file('/tmp/tt').strip()
    self.assertEqual(cont, 'info')
def test_make_logger(self):
    """make_logger honors `level`: a DEBUG record is dropped at INFO level."""
    rm_file(this_base + '/tt')

    logger = logutil.make_logger(base_dir=this_base, log_name='m', log_fn='tt',
                                 level='INFO', fmt='%(message)s',
                                 datefmt='%H%M%S')
    logger.debug('debug')
    logger.info('info')

    # only the INFO record must reach the file
    cont = read_file(this_base + '/tt').strip()
    self.assertEqual(cont, 'info')
def _init(): if _glb['unittest_logger'] is not None: return # test_logutil might require this module and logutil is still under test! try: from pykit import logutil logger = logutil.make_logger('/tmp', log_name='unittest', level='DEBUG', fmt=('[%(asctime)s' ' %(_fn)s:%(_ln)d' ' %(levelname)s]' ' %(message)s')) logger.addFilter(ContextFilter()) _glb['unittest_logger'] = logger except Exception as e: print repr(e) + ' while init root logger'
def test_make_file_handler(self):
    """A handler from make_file_handler redirects records to its own file."""
    rm_file('/tmp/handler_change')

    logger = logutil.make_logger(base_dir='/tmp', log_name='h', log_fn='dd',
                                 level='INFO', fmt='%(message)s',
                                 datefmt='%H%M%S')
    # drop the handler make_logger installed, attach a fresh file handler
    logger.handlers = []
    handler = logutil.make_file_handler(base_dir='/tmp',
                                        log_fn='handler_change',
                                        fmt='%(message)s', datefmt='%H%M%S')
    logger.addHandler(handler)

    logger.debug('debug')
    logger.info('info')

    # logger level is still INFO, so only 'info' lands in the new file
    cont = read_file('/tmp/handler_change').strip()
    self.assertEqual(cont, 'info')
def test_concurrent_write_and_remove(self):
    """Writing must survive the log file being removed concurrently."""
    logger = logutil.make_logger(base_dir=this_base, log_name='rolling',
                                 log_fn='rolling.out', level=logging.DEBUG,
                                 fmt='message')
    n = 10240
    sess = {'running': True}

    def _remove():
        # keep deleting the log file while the main thread writes
        while sess['running']:
            rm_file(this_base + '/rolling.out')

    th = threading.Thread(target=_remove)
    th.daemon = True
    th.start()

    for _ in range(n):
        logger.debug('123')

    sess['running'] = False
    th.join()
def test_make_file_handler(self):
    """A handler from make_file_handler redirects records to its own file."""
    rm_file(this_base + '/handler_change')

    logger = logutil.make_logger(base_dir=this_base, log_name='h', log_fn='dd',
                                 level='INFO', fmt='%(message)s',
                                 datefmt='%H%M%S')
    # drop the handler make_logger installed, attach a fresh file handler
    logger.handlers = []
    handler = logutil.make_file_handler(base_dir=this_base,
                                        log_fn='handler_change',
                                        fmt='%(message)s', datefmt='%H%M%S')
    logger.addHandler(handler)

    logger.debug('debug')
    logger.info('info')

    # logger level is still INFO, so only 'info' lands in the new file
    cont = read_file(this_base + '/handler_change').strip()
    self.assertEqual(cont, 'info')
def _init(): if _glb['unittest_logger'] is not None: return # test_logutil might require this module and logutil is still under test! try: from pykit import logutil logger = logutil.make_logger( '/tmp', log_name='unittest', level='DEBUG', fmt=('[%(asctime)s' ' %(_fn)s:%(_ln)d' ' %(levelname)s]' ' %(message)s' ) ) logger.addFilter(ContextFilter()) _glb['unittest_logger'] = logger except Exception as e: print repr(e) + ' while init root logger'
def test_concurrent_write_and_remove(self):
    """Writing must survive the log file being removed concurrently."""
    logger = logutil.make_logger(base_dir='/tmp', log_name='rolling',
                                 log_fn='rolling.out', level=logging.DEBUG,
                                 fmt='message')
    n = 10240
    sess = {'running': True}

    def _remove():
        # keep deleting the log file while the main thread writes
        while sess['running']:
            rm_file('/tmp/rolling.out')

    th = threading.Thread(target=_remove)
    th.daemon = True
    th.start()

    for _ in range(n):
        logger.debug('123')

    sess['running'] = False
    th.join()
# NOTE(review): this chunk starts mid-function -- the leading statements are the
# tail of a try-block (its `try:` and enclosing `def` are outside this view),
# followed by a `__main__` section. Left byte-identical rather than guessed at;
# reconstruct only with the full file in view.
report_sess['stop'] = True report_th.join() except KeyboardInterrupt: logger.exception('get KeyboardInterrupt') sys.exit(0) finally: report_state() dump_state() if __name__ == "__main__": logutil.make_logger(base_dir='/var/log/opstool', level='INFO') opts, args = getopt.getopt(sys.argv[1:], '', [ 'conf=', ]) opts = dict(opts) if opts.get('--conf') is None: conf_path = '../conf/sync_cross_cluster.yaml' else: conf_path = opts['--conf'] cnf = get_conf(conf_path) src_client = get_boto_client(cnf['SRC_ENDPOINT'], cnf['SRC_ACCESS_KEY'], cnf['SRC_SECRET_KEY'])
def rm_file(fn): try: os.unlink(fn) except OSError as e: if e.errno == errno.ENOENT: pass else: raise rm_file('/tmp/handler_change') l = logutil.make_logger(base_dir='/tmp', log_name='h', log_fn='dd', level='INFO', fmt='%(message)s', datefmt='%H%M%S' ) l.handlers = [] handler = logutil.make_file_handler(log_fn='handler_change', fmt='%(message)s', datefmt='%H%M%S') l.addHandler(handler) l.debug('debug') l.info('info') cont = read_file('/tmp/handler_change').strip() print cont
def rm_file(fn): try: os.unlink(fn) except OSError as e: if e.errno == errno.ENOENT: pass else: raise rm_file('/tmp/handler_change') l = logutil.make_logger(base_dir='/tmp', log_name='h', log_fn='dd', level='INFO', fmt='%(message)s', datefmt='%H%M%S') l.handlers = [] handler = logutil.make_file_handler(log_fn='handler_change', fmt='%(message)s', datefmt='%H%M%S') l.addHandler(handler) l.debug('debug') l.info('info') cont = read_file('/tmp/handler_change').strip() print cont
def test_basic(self):
    """The collector tails a log file and reports only 'error'-level entries."""
    logger = logutil.make_logger(base_dir=this_base, log_name='test_log')

    def log():
        # emit info/warn/error rounds for roughly 2.5 seconds
        start_time = time.time()
        while True:
            logger.info('info')
            logger.warn('warn')
            logger.error('error')
            if time.time() > start_time + 2.5:
                break
            time.sleep(0.01)

    log_th = threadutil.start_daemon_thread(log)

    log_entries = []

    def get_level(log_str):
        # map a raw log line to its lowercase level name; 'unknown' otherwise
        # (original used for-else; a post-loop return is equivalent)
        for k in ('INFO', 'WARNING', 'ERROR'):
            if k in log_str:
                return k.lower()
        return 'unknown'

    def send_log(log_entry):
        log_entries.append(log_entry)

    kwargs = {
        'node_id': '123abc',
        'node_ip': '1.2.3.4',
        'send_log': send_log,
        'conf': {
            'my_test_log': {
                'file_path': os.path.join(this_base, 'test_log.out'),
                'level': ['error'],
                'get_level': get_level,
                'is_first_line': is_first_line,
                'parse': parse,
            },
        },
    }

    threadutil.start_daemon_thread(collector.run, kwargs=kwargs)

    log_th.join()
    # give the collector time to flush its reports
    time.sleep(2)

    self.assertEqual(3, len(log_entries))

    dd(log_entries)
    dd(log_entries[0]['count'])
    dd(log_entries[1]['count'])
    dd(log_entries[2]['count'])

    self.assertAlmostEqual(100, log_entries[1]['count'], delta=30)
    self.assertEqual('error', log_entries[0]['level'])
    self.assertEqual('my_test_log', log_entries[0]['log_name'])
    self.assertEqual('test_log.out', log_entries[0]['log_file'])
import logging

from pykit import logutil

# file logger at INFO, plus a stdout handler that only passes ERROR and above
logger = logutil.make_logger('/tmp', log_fn='stdlog', level='INFO', fmt='message')
logutil.add_std_handler(logger, 'stdout', fmt='message', level=logging.ERROR)

logger.debug('debug')   # below INFO: dropped everywhere
logger.info('stdlog')   # file only
logger.error('error')   # file and stdout
# NOTE(review): this chunk starts mid-function -- the leading statements belong to
# the body of an unseen function (visit_url, judging by the later call -- TODO
# confirm) whose `try:` and `def` are outside this view, followed by a `__main__`
# section. Left byte-identical rather than guessed at. Also note the apparent
# typo 'reponse_msg' -- renaming would require the full function in view.
cli.request('/') reponse_msg = cli.headers except (socket.timeout, ssl.SSLError) as e: logger.warn('Socket timeout, ' + 'While access the {url}'.format(url=url)) return -1 except Exception as e: logger.exception(repr(e) + ' While access the {url}'.format(url=url)) return -1 return response_code, reponse_msg if __name__ == '__main__': logutil.make_logger(base_dir='/var/log/zabbix', level='INFO') url = sys.argv[1] method = sys.argv[2] expect_response_code = sys.argv[3] response = visit_url(url, method) if isinstance(response, tuple): response_code, msg = response if response_code == int(expect_response_code): print 0 else: print -1 else: print -1
# NOTE(review): this chunk is a file import header plus the start of a `__main__`
# section that is cut off mid-argparse setup (the script evidently continues past
# this view). It also references `sys` and `argparse` with no visible import --
# presumably imported in an unseen part of the file; TODO confirm. Left
# byte-identical rather than reconstructed from a partial view.
import logging import json from pykit import humannum from pykit import jobq from pykit import logutil from pykit import utfjson import mysqlbackup logger = logging.getLogger(__name__) if __name__ == "__main__": rootlogger = logutil.make_logger(base_dir='/tmp', log_fn=logutil.get_root_log_fn(), level=logging.DEBUG) logutil.add_std_handler(rootlogger, stream=sys.stdout) rootlogger.handlers[1].setLevel(logging.WARN) parser = argparse.ArgumentParser( description='run commands for one or more ports concurrenty') parser.add_argument('--conf-base', type=str, required=False, help='base path to config file') parser.add_argument('--conf-fn', type=str, required=False, help='conf file name for each port')
def main(): # config root logger logging.basicConfig(stream=sys.stdout, level=logging.WARNING) # config logger for this module logutil.make_logger(base_dir='/tmp', log_name=__name__, log_fn=logutil.get_root_log_fn(), level=logging.DEBUG) args = load_cli_args() conf = load_conf(args) if args.verbose is not None: lvls = { 1: logging.DEBUG, } level = lvls.get(args.verbose, logging.DEBUG) logging.basicConfig(stream=sys.stdout, level=level) mb = MysqlBackup(conf) # silently quit if condition 'when' is not satisfied when = args.when if when is not None: if when == 'no-data-dir': if os.path.exists(mb.render("{mysql_data_dir}")): return elif when == 'stopped': if mb.is_instance_alive(): return else: raise ValueError('invalid argument "when": {w}'.format(w=when)) # run command cmd = args.cmd[0] try: if cmd == 'backup': mb.backup() elif cmd == 'restore': mb.restore_from_backup() elif cmd == 'catchup_binlog': mb.assert_instance_is_down() mb.apply_remote_binlog() elif cmd == 'setup_replication': mb.setup_replication() else: raise ValueError('invalid command: ' + str(cmd)) except (MysqlBackupError, MysqlRestoreError) as e: print e.__class__.__name__ for i in range(4): print e[i] sys.exit(1)
def read_file(fn): with open(fn, 'r') as f: return f.read() def rm_file(fn): try: os.unlink(fn) except OSError as e: if e.errno == errno.ENOENT: pass else: raise rm_file('/tmp/tt') l = logutil.make_logger(base_dir=None, log_name='m', log_fn='tt', level='INFO', fmt='%(message)s', datefmt='%H%M%S' ) l.debug('debug') l.info('info') cont = read_file('/tmp/tt').strip() print cont