def test_logger_caching():
    '''Loggers of the same name share one cached instance until collected.'''
    logwood.basic_config(handlers=[], level=logwood.DEBUG)
    first = logwood.get_logger('A')
    second = logwood.get_logger('A')
    # Identity, not mere equality: the cache must return the same object.
    assert first is second
    # Remember the identity so a fresh instance can be detected later.
    cached_id = id(first)
    # Exercise both references before dropping them.
    first.info('Test message')
    second.info('Test message')
    # Drop every reference and force collection of the cached logger.
    del first
    del second
    gc.collect()
    replacement = logwood.get_logger('A')
    assert cached_id != id(replacement)
def no_handler():
    '''Fixture: logwood configured with an empty handler list.'''
    logwood.basic_config(handlers=[])
    return logwood.get_logger(__name__)
def main(self, args=None):
    """
    Launch the commandline interface.

    :param args: Optionally pass arguments. If not given, the arguments
        passed to the program will be parsed.
    """
    self._configure_parser()
    argcomplete.autocomplete(self.parser)
    parsed = self.parser.parse_args(args)
    logwood.basic_config(
        format='%(timestamp).6f %(level)-5s %(name)s: %(message)s',
        level=self._get_loglevel(parsed),
    )
    # Guard clause: without a subcommand there is nothing to run.
    if not parsed.command:
        self.parser.print_usage()
        sys.exit(1)
    command = self._commands[parsed.command]
    if command.needs_app:
        # Commands that need an application get a fully wired instance.
        app = Application()
        self.register_application_components(parsed, app)
        command(parsed, app)
    else:
        command(parsed)
def syslog_handler():
    '''Fixture: logwood configured with a plain syslog handler.'''
    logwood.testing.reset_state()
    logwood.basic_config(handlers=[SysLogHandler()])
    return logwood.get_logger(__name__)
def std_err_handler():
    '''Fixture: logwood configured with a DEBUG-level colored stderr handler.'''
    logwood.testing.reset_state()
    logwood.basic_config(handlers=[ColoredStderrHandler(logwood.DEBUG)])
    return logwood.get_logger(__name__)
def file_handler():
    '''Fixture: logwood configured with a FileHandler writing example.log.'''
    logwood.testing.reset_state()
    logwood.basic_config(handlers=[FileHandler(filename='example.log')])
    return logwood.get_logger(__name__)
def syslog_threaded_handler():
    '''Fixture: logwood with a syslog handler wrapped in a ThreadedHandler.'''
    logwood.testing.reset_state()
    threaded = ThreadedHandler(underlying_handler=SysLogHandler())
    logwood.basic_config(handlers=[threaded])
    return logwood.get_logger(__name__)
def test_emit(syslog_mock):
    '''emit on libsyslog will call syslog.syslog'''
    handler = logwood.handlers.syslog.SysLogLibHandler()
    logwood.basic_config(handlers=[handler])
    log = logwood.get_logger('Test')
    log.warning('Warning')
    # Shutdown flushes pending records before we inspect the mock.
    logwood.shutdown()
    assert syslog_mock.called
def test_SysLogHandler_emit():
    '''The refactored syslog handler's emit writes to its socket.'''
    with unittest.mock.patch('socket.socket'):
        handler = logwood.handlers.logging.SysLogHandler()
        logwood.basic_config(handlers=[handler])
        log = logwood.get_logger('Test')
        log.warning('Warning')
        # The mocked socket records the sendto triggered by the warning.
        assert handler.socket.sendto.called
def test_chunk_long_message(socket):
    '''A 1000-character message with chunk size 10 is sent in 100 chunks.'''
    payload = '1234567890' * 100
    handler = ChunkedSysLogHandler(address='/not/existing', chunk_size=10)
    logwood.basic_config(format='%(message)s', handlers=[handler])
    log = logwood.get_logger('Test')
    log.error(payload)
    # 1000 characters / chunk_size 10 -> exactly 100 socket sends.
    assert handler.socket.send.call_count == 100
def configure_and_reset_logwood(logwood_handler_mock):
    '''
    Keep Logwood configured for every test, since loggers cannot be
    instantiated otherwise. Runs automatically around each testcase in
    pytests that import logwood. Messages go both to stderr and into
    `logwood_handler_mock`.
    '''
    handlers = [
        logwood.handlers.stderr.ColoredStderrHandler(),
        logwood_handler_mock,
    ]
    logwood.basic_config(handlers=handlers)
    yield
    # Tear down global logging state so tests stay independent.
    reset_state()
def test_chunk_short_message(socket):
    '''A message exactly one chunk long is sent with a single send() call.'''
    payload = 'Short message'
    handler = ChunkedSysLogHandler(address='/not/existing',
                                   chunk_size=len(payload))
    logwood.basic_config(format='%(message)s', handlers=[handler])
    log = logwood.get_logger('Test')
    log.error(payload)
    # The parent emit must not run more than once for a short message.
    assert handler.socket.send.call_count == 1
    # First positional argument of the single call carries the record text.
    assert payload in str(handler.socket.send.call_args[0][0])
def test_basic_config():
    '''basic_config stores handlers, format and level in global_config.'''
    first = unittest.mock.Mock()
    second = unittest.mock.Mock()
    fmt = 'MSG: %(message)s'
    logwood.basic_config(format=fmt, level=logwood.DEBUG,
                         handlers=[first, second])
    assert first in logwood.global_config.default_handlers
    assert second in logwood.global_config.default_handlers
    assert logwood.global_config.default_format == fmt
    assert logwood.global_config.default_log_level == logwood.DEBUG
def test_underlying_handler_is_called():
    '''ThreadedHandler forwards records to its underlying handler in a thread.'''
    inner = unittest.mock.Mock()
    handler = ThreadedHandler(underlying_handler=inner)
    logwood.basic_config(level=logwood.DEBUG, handlers=[handler])
    log = logwood.get_logger('Test')
    log.error('Error message')
    # After the message we can safely close all handlers.
    handler.close()
    # Give the scheduler a moment to run the worker thread.
    time.sleep(0.1)
    assert inner.emit.called
    record = inner.emit.call_args[0][0]
    assert record['message'] == 'Error message'
    assert inner.close.called
def __init__(self):
    '''Wire up the pub/sub hub, a publisher, and three topic subscribers.'''
    logwood.basic_config()
    self.hub = aiopubsub.Hub()
    self.publisher = aiopubsub.Publisher(self.hub, prefix=aiopubsub.Key('peer'))
    # One subscriber per topic of interest under the 'peer' prefix.
    self.subscriber_epoch = aiopubsub.Subscriber(self.hub, 'epoch_subscr')
    self.subscriber_connection = aiopubsub.Subscriber(self.hub, 'conn_subscr')
    self.subscriber_pom = aiopubsub.Subscriber(self.hub, 'pom_subsrc')
    self.subscriber_epoch.subscribe(aiopubsub.Key('peer', 'epoch'))
    self.subscriber_connection.subscribe(aiopubsub.Key('peer', 'connection'))
    self.subscriber_pom.subscribe(aiopubsub.Key('peer', 'pom'))
def test_get_logger():
    ''' get_logger returns a logger with configured handlers. '''
    default_one = unittest.mock.Mock()
    default_two = unittest.mock.Mock()
    extra = unittest.mock.Mock()
    logwood.basic_config(level=logwood.DEBUG,
                         handlers=[default_one, default_two])
    log = logwood.get_logger('Test')
    log.add_handler(extra)
    # Default handlers are not listed on the logger itself...
    assert default_one not in log.handlers
    assert default_two not in log.handlers
    assert extra in log.handlers
    log.error('Error')
    # ...yet every handler, default or explicit, receives the record once.
    assert default_one.handle.call_count == 1
    assert default_two.handle.call_count == 1
    assert extra.handle.call_count == 1
def test_calling_basic_config_multiple_times():
    '''basic_config may be repeated only until the first logger is created.'''
    primary = unittest.mock.Mock()
    secondary = unittest.mock.Mock()
    logwood.basic_config(level=logwood.DEBUG, handlers=[primary, secondary])
    assert logwood.state.config_called
    # Until the first logger exists, reconfiguration is still allowed.
    logwood.basic_config(handlers=[])
    logwood.basic_config(handlers=[])
    # Now create some loggers.
    for name in ('A', 'B', 'C'):
        logwood.get_logger(name)
    # Each created logger is tracked in the global state.
    assert len(logwood.state.defined_loggers) == 3
    # Once loggers exist, changing the logging settings must be rejected.
    with pytest.raises(AssertionError):
        logwood.basic_config(handlers=[])
import logwood
from logwood.handlers.stderr import ColoredStderrHandler

# Route INFO-and-above records to a colored stderr stream.
logwood.basic_config(level=logwood.INFO, handlers=[ColoredStderrHandler()])

# Every policy category mapped to the implementation names accepted for it.
SUPPORTED_POLICY_TYPES = {
    'action': ['most-visited', 'proportional-to-visit-count'],
    'selection': ['ucb1', 'puct'],
    'expansion': ['vanilla', 'neural'],
    'simulation': ['to-end'],
    'update': ['vanilla', 'value'],
    'expansion_rollout': ['random-unvisited', 'random'],
}
def logger(handler):
    '''Fixture: a Logger wired directly to the given handler only.'''
    logwood.basic_config(handlers=[])
    return Logger('TestLogger', [handler])
def logging_logger(logwood_handler):
    '''Fixture: stdlib logging redirected into logwood, uniquely named.'''
    logwood.basic_config(handlers=[logwood_handler])
    logwood.compat.redirect_standard_logging()
    # A random UUID name isolates each test from previously created loggers.
    return logging.getLogger(str(uuid.uuid4()))
help='name of the symbol/s you want to collect (btcusd, etheur, etc)') cli.add_argument('--since', type=str, default='2019-01-01T00:00.00Z', help='date since (e.g. 2019-04-29 or 2019-01-02T23:59.59Z)') cli.add_argument('--until', type=str, default='2019-01-02T23:59.59Z', help='date until (e.g. 2019-04-29 or 2019-01-02T23:59.59Z)') cli.add_argument('--split', action='store_const', const=True, default=False, help='include when you want to split data info into multiples CSVs') cli.add_argument('--debug', action='store_const', const=True, default=False, help='include when you want to see debug-level logs') sysargv = cli.parse_args() LOG_FORMAT = '{timestamp:.3f} [{level}] {message}' logwood.basic_config( level=logwood.DEBUG if sysargv.debug else logwood.INFO, handlers=[StderrHandler(format=LOG_FORMAT)]) L = logwood.get_logger('GLOBAL') # ------------------------------------------------------------------------------ import asyncio import aiohttp import certifi import csv import os import ssl import time import ujson from datetime import datetime from dateutil import parser from typing import List, Callable
def configure_logging():
    '''Configure logwood: DEBUG level, colored stderr, timestamped format.'''
    message_format = '%(timestamp).6f %(level)-5s %(name)s: %(message)s'
    logwood.basic_config(
        level=logwood.DEBUG,
        handlers=[ColoredStderrHandler()],
        format=message_format,
    )
import logwood
import multiprocessing
import os
import random

import pandas as pd

import modules.database
import modules.god
import modules.settings as settings

logwood.basic_config(level=logwood.INFO)


def set_parameters_value(parameters: pd.DataFrame) -> None:
    '''Copy the first row of *parameters* into the module-level settings.'''
    row = parameters.to_dict('records')[0]
    settings.ZERO_INTELLIGENCE_COUNT = int(row['zero_intelligence_count'])
    settings.INTENSITY_ZERO_INTELLIGENCE = row['zero_intelligence_intensity']
    settings.SHADING_MIN = int(row['zero_intelligence_shading_min'])
    settings.SHADING_MAX = int(row['zero_intelligence_shading_max'])
    settings.MARKET_MAKERS_COUNT = int(row['market_maker_count'])
    settings.INTENSITY_MARKET_MAKER = row['market_maker_intensity']
    settings.MARKET_MAKER_NUMBER_ORDERS = int(row['market_maker_number_orders'])
    settings.MARKET_MAKER_NUMBER_OF_TICKS_BETWEEN_ORDERS = int(
        row['market_maker_number_of_ticks_between_orders'])
    settings.MARKET_MAKER_SPREAD_AROUND_ASSET = int(
        row['market_maker_spread_around_asset'])
    settings.NATIONAL_BEST_BID_AND_OFFER_DELAY = int(
        row['national_best_bid_and_offer_delay'])
def logger(handler):
    '''Fixture: a logwood logger at WARNING level with a single handler.'''
    logwood.basic_config(level=logwood.WARNING, handlers=[handler])
    return logwood.get_logger('TestLogger')