def initialize_logger(debug_log_path='immunotyper-debug'):
    LOG_FORMAT = '{record.message}'
    if debug_log_path:
        debug_log_path = debug_log_path + '.log'
        if os.path.exists(debug_log_path):
            os.remove(debug_log_path)
        handler = logbook.NestedSetup([
            logbook.NullHandler(),
            logbook.FileHandler(debug_log_path, level='DEBUG', format_string=LOG_FORMAT),
            logbook.more.ColorizedStderrHandler(format_string=LOG_FORMAT, level='INFO',
                                                bubble=True)
        ])
    else:
        handler = logbook.NestedSetup([
            logbook.NullHandler(),
            logbook.more.ColorizedStderrHandler(format_string=LOG_FORMAT, level='INFO',
                                                bubble=True)
        ])
    handler.push_application()

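# Usage sketch (an assumption for illustration, not part of the original
# module): once initialize_logger() has pushed the NestedSetup, any
# logbook.Logger sends DEBUG records to the file and INFO+ to colorized stderr.
import logbook

initialize_logger('immunotyper-debug')
log = logbook.Logger('demo')
log.debug('written to immunotyper-debug.log only')
log.info('written to the log file and shown on stderr')
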
def _setup(self, app_obj):
    super(PmLogHandler, self)._setup(app_obj)
    if self._meta.namespace is None:
        self._meta.namespace = self.app._meta.label

    self.backend = Logger(self._meta.namespace)

    # hack for application debugging
    if is_true(self.app._meta.debug):
        self.app.config.set('log', 'level', 'DEBUG')

    # Mainly for backwards compatibility since Logger level should
    # be NOTSET (level 0). Output level is controlled by handlers
    self.set_level(self.app.config.get('log', 'level'))

    # clear loggers?
    if is_true(self._meta.clear_loggers):
        self.clear_loggers()

    # console
    if is_true(self.app.config.get('log', 'to_console')):
        self._setup_console_log()

    # file
    if self.app.config.get('log', 'file'):
        self._setup_file_log()

    # nested setup
    self.backend.handlers.append(logbook.NullHandler(bubble=False))
    self.log_setup = logbook.NestedSetup(self.backend.handlers)
    with self._console_handler.applicationbound():
        self.debug("logging initialized for '%s' using PmLogHandler" %
                   self._meta.namespace)

def test_nested_setups(activation_strategy):
    with capturing_stderr_context() as captured:
        logger = logbook.Logger('App')
        test_handler = logbook.TestHandler(level='WARNING')
        mail_handler = make_fake_mail_handler(bubble=True)

        handlers = logbook.NestedSetup([
            logbook.NullHandler(),
            test_handler,
            mail_handler
        ])

        with activation_strategy(handlers):
            logger.warn('This is a warning')
            logger.error('This is also a mail')
            try:
                1 / 0
            except Exception:
                logger.exception()
            logger.warn('And here we go straight back to stderr')

        assert test_handler.has_warning('This is a warning')
        assert test_handler.has_error('This is also a mail')
        assert len(mail_handler.mails) == 2
        assert 'This is also a mail' in mail_handler.mails[0][2]
        assert '1 / 0' in mail_handler.mails[1][2]
        assert 'And here we go straight back to stderr' in captured.getvalue()

        with activation_strategy(handlers):
            logger.warn('threadbound warning')

        handlers.push_application()
        try:
            logger.warn('applicationbound warning')
        finally:
            handlers.pop_application()

def setup_logger(self):
    format_string = ('[{record.time:%Y-%m-%d %H:%M:%S.%f%z}] {record.level_name} '
                     '{record.channel}: {record.message} '
                     '(in {record.filename}:{record.lineno}), args: {record.kwargs}')
    handler = logbook.FileHandler(self.config.log_filename,
                                  format_string=format_string, bubble=True)
    return logbook.NestedSetup([
        handler,
    ])

def test_nested_setups(self):
    with capture_stderr() as captured:
        logger = logbook.Logger('App')
        test_handler = logbook.TestHandler(level='WARNING')
        mail_handler = make_fake_mail_handler(bubble=True)

        handlers = logbook.NestedSetup([
            logbook.NullHandler(),
            test_handler,
            mail_handler
        ])

        with handlers:
            logger.warn('This is a warning')
            logger.error('This is also a mail')
            with logger.catch_exceptions():
                1 / 0
            logger.warn('And here we go straight back to stderr')

        self.assert_(test_handler.has_warning('This is a warning'))
        self.assert_(test_handler.has_error('This is also a mail'))
        self.assertEqual(len(mail_handler.mails), 2)
        self.assert_('This is also a mail' in mail_handler.mails[0][2])
        self.assert_('1 / 0' in mail_handler.mails[1][2])
        self.assert_('And here we go straight back to stderr' in
                     captured.getvalue())

        with handlers.threadbound():
            logger.warn('threadbound warning')

        with handlers.applicationbound():
            logger.warn('applicationbound warning')

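# Minimal sketch of the scoping the two tests above exercise (a hypothetical
# standalone example, not taken from the test suite): a NestedSetup can be
# bound to the current thread only, or to the whole application.
import sys
import logbook

setup = logbook.NestedSetup([
    logbook.NullHandler(),                               # swallow anything not handled below
    logbook.StreamHandler(sys.stderr, level='WARNING'),  # WARNING and up go to stderr
])

with setup.threadbound():       # active only in this thread
    logbook.Logger('demo').warn('handled here')

with setup.applicationbound():  # active for every thread
    logbook.Logger('demo').warn('handled here too')
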
def main():
    """
    Scans TVDB and downloads new episodes from Torrentleech.
    """
    with logbook.NestedSetup(_get_log_handlers()).applicationbound():
        file_path = JSON_FILE_PATH or os.path.join(
            os.path.dirname(os.path.realpath(__file__)), 'last_state.json')
        last_state = load_last_state(file_path)
        # Login to TorrentLeech.
        with requests.session() as session:
            session.post(TORRENTLEECH_BASE_URL + '/user/account/login/', data={
                'username': TORRENTLEECH_USERNAME,
                'password': TORRENTLEECH_PASSWORD,
                'remember_me': 'on',
                'login': '******'
            })
            last_episodes_map = check_shows(last_state, session)
            if SHOULD_SEND_REPORT:
                report(last_episodes_map)
            if SHOULD_DOWNLOAD_720_TORRENTS or SHOULD_DOWNLOAD_1080_TORRENTS:
                download(last_episodes_map, session)
        # Update state file.
        ujson.dump(last_episodes_map, open(file_path, 'w', encoding='UTF-8'))
        logger.info('All done!')

def main():
    """
    Scans mma-torrents and downloads new episodes.
    """
    with logbook.NestedSetup(_get_log_handlers()).applicationbound():
        file_path = config.JSON_FILE_PATH or \
            os.path.join(os.path.dirname(os.path.realpath(__file__)), 'last_state.json')
        last_state = _load_last_state(file_path)
        with requests_html.HTMLSession() as session:
            # Login to mma-torrents.
            r = session.post(MMA_TORRENTS_BASE_URL + '/account-login.php', data={
                'username': config.MMA_TORRENTS_USERNAME,
                'password': config.MMA_TORRENTS_PASSWORD
            })
            r.raise_for_status()
            new_state = check_today_torrents(last_state, session)
            # Create a diff state, for downloads and reporting.
            diff_state = {
                k: v for k, v in new_state.items()
                if v['episode'] > last_state[k]['episode']
            }
            if config.SHOULD_DOWNLOAD_TORRENTS and diff_state:
                download(diff_state, session)
            if config.SHOULD_SEND_REPORT and diff_state:
                report(diff_state)
            else:
                logger.info('Nothing to report - No mail was sent.')
        # Update state file.
        ujson.dump(new_state, open(file_path, 'w'), indent=4)
        logger.info('All done!')

def main():
    parser = argparse.ArgumentParser(
        description="""Pull records for a batch of users and submit to external services.""")
    parser.add_argument('--timeout', dest="timeout", type=int, default=10)
    parser.add_argument('--limit', dest="limit", type=int, default=10,
                        help="""Retrieve data for at most this many users simultaneously.""")
    parser.add_argument('--log-file', dest='logfile', default='batch.log')
    parser.add_argument('input', nargs='?', type=argparse.FileType('r'), default=None)
    parser.add_argument('--collect-only', dest="collect_only", action="store_true")
    parser.add_argument('--debug', dest="debug", action="store_true", default=False)

    config = stethoscope.api.factory.get_config()
    args = parser.parse_args()

    for plugin in ['BITFIT', 'JAMF']:
        config[plugin + '_TIMEOUT'] = args.timeout

    config['LOGBOOK'] = logbook.NestedSetup([
        logbook.NullHandler(),
        logbook.more.ColorizedStderrHandler(
            level='INFO', bubble=False,
            format_string='[{record.level_name:<8s} {record.channel:s}] {record.message:s}'),
        logbook.MonitoringFileHandler(
            args.logfile, mode='w', level='DEBUG', bubble=True,
            format_string=(
                '--------------------------------------------------------------------------\n'
                '[{record.time} {record.level_name:<8s} {record.channel:>10s}]'
                ' {record.filename:s}:{record.lineno:d}\n{record.message:s}')),
    ])
    config['LOGBOOK'].push_application()

    config['DEBUG'] = args.debug
    config['TESTING'] = args.debug

    yaml.add_representer(arrow.arrow.Arrow, arrow_representer)
    yaml.SafeDumper.add_representer(arrow.arrow.Arrow, arrow_representer)

    task.react(_main, (args, config))

def reg_diff(first_hive_path, second_hive_path, output_path, verbose):
    with logbook.NestedSetup(_get_log_handlers(verbose=verbose)).applicationbound():
        REGDIFF_HEADERS = ['difference', 'first_hive', 'second_hive', 'description']
        found_differences = compare_hives(first_hive_path, second_hive_path, verbose=verbose)
        click.secho('Comparing {} vs {}'.format(os.path.basename(first_hive_path),
                                                os.path.basename(second_hive_path)))
        if output_path:
            with open(output_path, 'w') as csvfile:
                csvwriter = csv.writer(csvfile, delimiter='|', quoting=csv.QUOTE_MINIMAL)
                csvwriter.writerow(REGDIFF_HEADERS)
                for difference in found_differences:
                    csvwriter.writerow(difference)
        else:
            click.secho(tabulate(found_differences, headers=REGDIFF_HEADERS,
                                 tablefmt='fancy_grid'))
        click.secho(f'Detected {len(found_differences)} differences', fg='green')

def main():
    """
    Organizes the MP3 album in the given path.
    Should be called with the album's path as an argument.
    Path must be in the format '...\<Artist Name>\<Album Name>'
    """
    # Get arguments from the user.
    args = get_arguments()
    # Print the clients menu, if asked by the user.
    if args.clients_menu:
        print('Available clients are (sorted by order of quality):')
        for client_class in CLIENTS_LIST:
            print(client_class.get_name())
        print('Please run the program again with your choice, '
              'or without one to use default order.')
        return
    # Print the lyrics menu, if asked by the user.
    if args.lyrics_menu:
        print('Available lyrics websites are (sorted by order of quality):')
        for grabber_class in GRABBERS_LIST:
            print(grabber_class.get_name())
        print('Please run the program again with your choice, '
              'or without one to use default order.')
        return
    with logbook.NestedSetup(_get_log_handlers(args.logs_directory)).applicationbound():
        return organize(args)

def log_init(quiet=False, verbose=False):
    log_level = logbook.INFO
    if quiet:
        log_level = logbook.NOTICE
    if verbose:
        log_level = logbook.DEBUG

    # TODO: get rid of global_state
    setup = global_state[1]
    if setup is not None:
        setup.pop_application()

    handler_null = LogNullHandler(
        level=log_level,
        bubble=False,
    )
    handler_stderr = logbook.StreamHandler(
        stream=sys.stderr,
        level=log_level,
    )
    setup = global_state[1] = logbook.NestedSetup([
        handler_stderr,
        handler_null,
    ])
    setup.push_application()
    return True

def run_plugins(hive_path, output_path, plugins, verbose):
    with logbook.NestedSetup(_get_log_handlers(verbose=verbose)).applicationbound():
        registry_hive = RegistryHive(hive_path)
        click.secho('Loaded {} plugins'.format(len(PLUGINS)), fg='white')
        if plugins:
            plugin_names = {x.NAME for x in PLUGINS}
            plugins = set(plugins.split(','))
            if not plugins.issubset(plugin_names):
                click.secho('Invalid plugin names given: {}'.format(
                    ','.join(set(plugins) - plugin_names)), fg='red')
                click.secho('Use --help or -h to get list of plugins and their descriptions',
                            fg='red')
                return

        # Run relevant plugins
        plugin_results = run_relevant_plugins(registry_hive, as_json=True, plugins=plugins)

        # If output path was set, dump results to disk
        if output_path:
            with open(output_path, 'w') as f:
                f.write(json.dumps(plugin_results, indent=4))
        else:
            print(json.dumps(plugin_results, indent=4))

        click.secho('Finished: {}/{} plugins matched the hive type'.format(
            len(plugin_results), len(PLUGINS)), fg='green')

def _run_organizer(self):
    """
    Calls the MP3 organizer with the proper parameters.
    """
    self.log_text.clear()
    print('Running organizer on path: "{}".'.format(self.dir_path.text()))
    args = Arguments(path=str(self.dir_path.text()),
                     album=str(self.album.text()),
                     artist=str(self.artist.text()),
                     genre=str(self.genre.text()),
                     image=str(self.image_path.text()),
                     client=str(self.client.currentText()),
                     grabber=str(self.lyrics.currentText()))
    handlers_list = [
        logbook.NullHandler(),
        logbook.StreamHandler(sys.stdout, level='DEBUG', bubble=True),
        logbook.StreamHandler(sys.stderr, level='ERROR', bubble=True),
        logbook.StreamHandler(stream=ConsoleLogStream(self.log_text),
                              bubble=True, level=logbook.INFO),
    ]
    with logbook.NestedSetup(handlers_list).applicationbound():
        organize(args)

def main():
    """
    This function is designed to be called from the command line.
    If an argument (either the full path, or a base dir and a file) is provided,
    the script will try to expand it. Otherwise, we assume transmission is
    calling the script.
    """
    with logbook.NestedSetup(_get_log_handlers()).applicationbound():
        logger.info('Py-expander started!')
        try:
            # Set subliminal cache first.
            if config.SHOULD_FIND_SUBTITLES:
                logger.debug('Setting subtitles cache...')
                configure_subtitles_cache()
            # Parse input arguments.
            if len(sys.argv) == 3:
                directory = sys.argv[1]
                filename = sys.argv[2]
                if directory == config.DEFAULT_PATH:
                    torrent_path = os.path.join(directory, filename)
                    logger.info('Input is a file: {}'.format(torrent_path))
                else:
                    torrent_path = directory
                    logger.info('Input is a dir: {}'.format(torrent_path))
                expand_torrent(torrent_path)
            elif len(sys.argv) == 2:
                expand_torrent(sys.argv[1])
            else:
                expand_torrent_from_transmission()
        except Exception:
            logger.exception('Critical exception occurred!')
            raise

def setup_logbook(logfile, logfile_kwargs=None):
    """Return a basic `logbook` setup which logs to `stderr` and to file."""
    if logfile_kwargs is None:
        logfile_kwargs = {}
    logfile_kwargs.setdefault('level', 'DEBUG')
    logfile_kwargs.setdefault('mode', 'w')
    logfile_kwargs.setdefault('bubble', True)
    logfile_kwargs.setdefault('format_string', (
        '--------------------------------------------------------------------------\n'
        '[{record.time} {record.level_name:<8s} {record.channel:>10s}]'
        ' {record.filename:s}:{record.lineno:d}\n{record.message:s}'))

    logbook_setup = logbook.NestedSetup([
        logbook.NullHandler(),
        logbook.more.ColorizedStderrHandler(
            level='INFO', bubble=False,
            format_string='[{record.level_name:<8s} {record.channel:s}] {record.message:s}'),
        logbook.FileHandler(logfile, **logfile_kwargs),
    ])
    return logbook_setup

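# Possible call site (illustrative; the surrounding project wires this up
# differently): bind the returned setup for the duration of a job.
with setup_logbook('job.log').applicationbound():
    logbook.Logger('job').info('shown on stderr and written to job.log')
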
def create_logger(app):
    if app.config.get('ENVIRONMENT') == 'production':
        server_addr = ('localhost', 25)
    else:
        server_addr = ('localhost', 2525)

    mail_handler = logbook.MailHandler(
        '*****@*****.**', ['*****@*****.**'],
        server_addr=server_addr,
        level='DEBUG',
        format_string=u'''Subject: ERROR at gitorama.com

[{record.time:%Y-%m-%d %H:%M}] {record.extra[request_id]}: {record.level_name}: {record.channel}: {record.message}''',
        related_format_string=u'[{record.time:%Y-%m-%d %H:%M}] {record.extra[request_id]}: {record.level_name}: {record.channel}: {record.message}',
    )
    file_handler = logbook.FileHandler(
        app.config['LOG_FILE'],
        level='DEBUG',
        format_string=u'[{record.time:%Y-%m-%d %H:%M}] {record.extra[request_id]}: {record.level_name}: {record.channel}: {record.message}',
    )

    def inject_id(record):
        record.extra['request_id'] = getattr(_request_ctx_stack.top,
                                             'logbook_request_id', None)

    logger = logbook.NestedSetup([
        logbook.NullHandler(),
        logbook.FingersCrossedHandler(mail_handler, reset=True),
        logbook.FingersCrossedHandler(file_handler, reset=True, bubble=True),
        logbook.Processor(inject_id),
    ])
    return logger

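# The FingersCrossedHandler wrappers above buffer records and only flush them
# to the inner handler once a record at the action level (ERROR by default)
# arrives. A minimal sketch of that behaviour (assumed demo setup, not the
# gitorama code):
import sys
import logbook

inner = logbook.StreamHandler(sys.stderr)
with logbook.FingersCrossedHandler(inner, reset=True).applicationbound():
    log = logbook.Logger('demo')
    log.info('buffered, nothing written yet')
    log.error('triggers the flush: both records now appear on stderr')
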
def __call__(self):
    # Find an open port for the logs
    # (that's a race condition, deal with it)
    tmpsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    tmpsock.bind(('localhost', 0))
    log_uri = 'tcp://{}:{}'.format(*tmpsock.getsockname())
    tmpsock.close()

    setup = logbook.NestedSetup([
        logbook.NullHandler(),
        logbook.StderrHandler(level=logbook.INFO),
        logbook.Processor(self._process_record),
    ])
    self.subscriber = ZeroMQSubscriber(log_uri, multi=True)
    self.subscriber.dispatch_in_background(setup=setup)

    self.process = sh.python(
        '-m', 'onitu',
        '--entries', self.entries,
        '--log-uri', log_uri,
        _bg=self.bg,
    )
    return self.process

def script_logging_context(syslog=_has_syslog_handler,
                           syslog_facility=logbook.SyslogHandler.LOG_LOCAL1,
                           syslog_buffer_size=1024,
                           syslog_message_size=32768,
                           syslog_address=("127.0.0.1", 514),
                           syslog_level=logbook.DEBUG,
                           logfile=True,
                           logfile_path="logfile",
                           logfile_mode='a',
                           logfile_encoding='utf-8',
                           logfile_level=logbook.DEBUG,
                           logfile_delay=False,
                           logfile_max_size=1024 * 1024,
                           logfile_backup_count=32,
                           stderr=True,
                           stderr_level=logbook.INFO):
    """
    Context manager that creates a setup of logbook handlers based on the
    parameters received and sensible defaults.
    """
    from logbook.concurrency import enable_gevent
    enable_gevent()
    redirect_python_logging_to_logbook()
    processor = create_processor()
    flags = logbook.Flags(errors='silent')
    handlers = [logbook.NullHandler()]
    if syslog:
        handlers.append(create_syslog_handler(facility=syslog_facility,
                                              buffer_size=syslog_buffer_size,
                                              message_size=syslog_message_size,
                                              address=syslog_address,
                                              level=syslog_level))
    if logfile:
        handlers.append(create_rotating_file_handler(path=logfile_path,
                                                     mode=logfile_mode,
                                                     encoding=logfile_encoding,
                                                     level=logfile_level,
                                                     delay=logfile_delay,
                                                     max_size=logfile_max_size,
                                                     backup_count=logfile_backup_count))
    if stderr:
        handlers.append(create_stderr_handler(level=stderr_level))
    with logbook.NestedSetup([processor, flags] + handlers).applicationbound():
        yield

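# Presumed usage (the helper functions live elsewhere in the project, and the
# yield implies the original wraps this function with
# contextlib.contextmanager; both are assumptions here):
with script_logging_context(logfile_path='/var/log/myscript.log',
                            stderr_level=logbook.WARNING):
    logbook.Logger('script').info('goes to syslog and the file, not to stderr')
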
def parse_args(self):
    """This method lets any alpha file be turned into a script."""
    today = datetime.now().strftime('%Y%m%d')
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--start', help='start date(included)', type=str)
    parser.add_argument('-e', '--end', help='end date(included); default: today',
                        default=today, nargs='?')
    parser.add_argument('date', help='the date to be updated', default=today, nargs='?')
    parser.add_argument('--source', choices=('mssql', 'oracle'),
                        help='type of source database', default='oracle')
    parser.add_argument('--debug_on', action='store_true')
    parser.add_argument('-f', '--logfile', type=str)
    parser.add_argument('-o', '--logoff', action='store_true')
    args = parser.parse_args()

    if args.start and args.end:
        _dates = [dt.strftime('%Y%m%d') for dt in pd.date_range(args.start, args.end)]
    else:
        _dates = [args.date]
    self._dates = _dates

    if args.source:
        self.source = args.source

    if args.logfile:
        args.logoff = False
    if not args.logoff:
        if not args.logfile:
            self.logger.debug('@logfile not explicitly provided')
            logdir = os.path.join('logs', today[:4], today[4:6])
            if not os.path.exists(logdir):
                os.makedirs(logdir)
                self.logger.debug('Created directory {}', logdir)
            args.logfile = os.path.join(logdir, 'log.' + today)
            self.logger.debug('@logfile set to: {}', args.logfile)
        self.setup = logbook.NestedSetup([
            logbook.NullHandler(),
            logbook.FileHandler(args.logfile, level='INFO'),
            logbook.StreamHandler(sys.stdout, level='DEBUG', bubble=True)])
    else:
        self.setup = logbook.NestedSetup([
            logbook.NullHandler(),
            logbook.StreamHandler(sys.stdout, level='DEBUG', bubble=True)])

def run(self):
    with logbook.NestedSetup([logbook.NullHandler(),
                              logbook.StreamHandler(sys.stdout, bubble=True)]):
        self.bef_work()
        for date in self.trdday:
            if int(date) < int(self.startdate):
                continue
            self.logger.info('--------------------------')
            self.logger.info('START Working on ' + date)
            self.work(date)
            self.logger.info('FINISH Working on ' + date)
        self.aft_work()

def parse_header(hive_path, verbose):
    with logbook.NestedSetup(_get_log_handlers(verbose=verbose)).applicationbound():
        registry_hive = RegistryHive(hive_path)
        click.secho(tabulate(registry_hive.header.items(), tablefmt='fancy_grid'))

        if registry_hive.header.primary_sequence_num != registry_hive.header.secondary_sequence_num:
            click.secho('Hive is not clean! You should apply transaction logs', fg='red')

        calculated_checksum = calculate_xor32_checksum(registry_hive._stream.read(4096))
        if registry_hive.header.checksum != calculated_checksum:
            click.secho('Hive is not clean! Header checksum does not match', fg='red')

def hive_to_json(hive_path, output_path, registry_path, timeline, hive_type,
                 partial_hive_path, verbose):
    with logbook.NestedSetup(_get_log_handlers(verbose=verbose)).applicationbound():
        registry_hive = RegistryHive(hive_path, hive_type=hive_type,
                                     partial_hive_path=partial_hive_path)
        if registry_path:
            try:
                name_key_entry = registry_hive.get_key(registry_path)
            except RegistryKeyNotFoundException as ex:
                logger.debug('Did not find the key: {}'.format(ex))
                return
        else:
            name_key_entry = registry_hive.root

        if timeline and not output_path:
            click.secho('You must provide an output path if choosing timeline output!',
                        fg='red')
            return

        if output_path:
            if timeline:
                with open(output_path, 'w') as csvfile:
                    csvwriter = csv.DictWriter(csvfile, delimiter=',', quotechar='"',
                                               quoting=csv.QUOTE_MINIMAL,
                                               fieldnames=['timestamp', 'subkey_name',
                                                           'values_count'])
                    csvwriter.writeheader()
                    for entry in tqdm(registry_hive.recurse_subkeys(name_key_entry,
                                                                    as_json=True)):
                        subkey_name = entry.pop('subkey_name')
                        path = entry.pop('path')
                        entry['subkey_name'] = r'{}\{}'.format(path, subkey_name)
                        entry.pop('values')
                        csvwriter.writerow(entry)
            else:
                dump_hive_to_json(registry_hive, output_path, name_key_entry, verbose)
        else:
            for entry in registry_hive.recurse_subkeys(name_key_entry, as_json=True):
                click.secho(json.dumps(attr.asdict(entry), indent=4))

def parse_transaction_log(hive_path, primary_log_path, secondary_log_path, output_path, verbose):
    with logbook.NestedSetup(_get_log_handlers(verbose=verbose)).applicationbound():
        logger.info(f'Processing hive {hive_path} with transaction log {primary_log_path}')
        if secondary_log_path:
            logger.info(f'Processing hive {hive_path} with secondary transaction log '
                        f'{secondary_log_path}')

        restored_hive_path, recovered_dirty_pages_count = apply_transaction_logs(
            hive_path, primary_log_path,
            secondary_log_path=secondary_log_path,
            restored_hive_path=output_path,
            verbose=verbose)
        if recovered_dirty_pages_count:
            click.secho(
                f'Recovered {recovered_dirty_pages_count} dirty pages. '
                f'Restored hive is at {restored_hive_path}',
                fg='green')

def setup_logging():
    os.makedirs(LOG_DIR, exist_ok=True)
    format_string = ('[{record.time:%H:%M:%S}] {record.level_name}: '
                     '{record.channel}:{record.extra[strat_id]} {record.message}')
    handlers = [logbook.NullHandler()]

    if CLOUD_LOGGING:
        cloud_handler = GoogleCloudHandler(level="DEBUG", bubble=True,
                                           format_string=format_string)
        handlers.append(cloud_handler)

    file_handler = logbook.RotatingFileHandler(APP_LOG, level="DEBUG", bubble=True,
                                               format_string=format_string)
    stream_handler = logbook.StreamHandler(sys.stdout, level="INFO", bubble=True)
    stream_handler.format_string = format_string

    error_file_handler = logbook.RotatingFileHandler(ERROR_LOG, level="ERROR", bubble=True)
    error_file_handler.format_string = """
----------------------------------------------------------------------------------
{record.time:%H:%M:%S} KRYPTOS:{record.channel}:{record.level_name}: {record.message}
Module: {record.module}:{record.lineno}
Function: {record.func_name}
Channel: {record.channel}
Trade Date: {record.extra[strat_date]}
Exception: {record.formatted_exception}
----------------------------------------------------------------------------------
"""

    handlers.extend([file_handler, stream_handler, error_file_handler])
    setup = logbook.NestedSetup(handlers)
    setup.push_thread()

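# Note on scope (general logbook behaviour, not project-specific code):
# push_thread() binds the setup to the calling thread only, so a worker
# thread started afterwards needs its own push; push_application() would
# bind it globally instead.
import threading

setup_logging()  # handlers active in the main thread only

def worker():
    setup_logging()  # each worker thread pushes its own setup

threading.Thread(target=worker).start()
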
def setup(level='debug', show_log=False, filename=settings.LOG['file']):
    ''' Hivy formatted logger '''
    level = level.upper()
    handlers = [logbook.NullHandler()]
    if show_log:
        handlers.append(
            logbook.StreamHandler(sys.stdout,
                                  format_string=settings.LOG['format'],
                                  level=level))
    else:
        handlers.append(
            logbook.FileHandler(filename,
                                format_string=settings.LOG['format'],
                                level=level))
    return logbook.NestedSetup(handlers)

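# Possible usage (illustrative; 'settings' comes from the surrounding
# project): the caller decides how long the setup stays active.
with setup(level='info', show_log=True).applicationbound():
    logbook.Logger('hivy').info('printed to stdout with the configured format')
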
def setup_logbook(app_name, config, min_level=None):
    if not config.syslog:
        try:
            os.makedirs(config.log_dir)
        except OSError:
            pass

    app_config = config.applications[app_name] or {}
    handlers = app_config.get("handlers") or config.default.handler_list
    logbook_handlers = []
    finger_cross_config = config.finger_cross.copy()
    top_handler = True
    if min_level:
        min_level = logbook.lookup_level(min_level)

    for handler_name in handlers:
        handler_config = config.handlers[handler_name].copy()
        level = handler_config.get("level")
        if min_level and level:
            level = logbook.lookup_level(level)
            handler_config["level"] = max(min_level, level)

        handler_class = getattr(logbook, handler_config.pop("type"))
        finger_cross = handler_config.pop("finger_cross", False)
        _replace_config(handler_config, "__APP__", app_name)

        if "format_string" not in handler_config and handler_class is not logbook.NullHandler:
            handler_config["format_string"] = config.default.format_string
        if top_handler:
            handler_config["bubble"] = False
        if "filter" in handler_config:
            handler_config["filter"] = globals()[handler_config["filter"]]

        handler = handler_class(**handler_config)
        if finger_cross:
            finger_cross_level = logbook.lookup_level(
                finger_cross_config.pop("action_level"))
            handler = logbook.FingersCrossedHandler(handler,
                                                    action_level=finger_cross_level,
                                                    **finger_cross_config)
        logbook_handlers.append(handler)
        top_handler = False

    setup = logbook.NestedSetup(logbook_handlers)
    return setup

def post_fork(server, worker):
    server.log.info('Worker spawned (pid: %s)', worker.pid)
    logging_rotating_file_handler = logging.handlers.RotatingFileHandler(
        config.LOG_FILE_PATH.replace('.log', f'.{worker.pid}.flask.log'),
        maxBytes=5 * 1024 * 1024, backupCount=5)
    root_logger = logging.getLogger()
    root_logger.addHandler(logging_rotating_file_handler)
    root_logger.setLevel(logging.CRITICAL)

    logger_setup = logbook.NestedSetup([
        logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True),
        logbook.RotatingFileHandler(
            config.LOG_FILE_PATH.replace('.log', f'.{worker.pid}.log'),
            level=logbook.INFO, max_size=5 * 1024 * 1024, bubble=True)
    ])
    logger_setup.push_application()

# -*- coding:utf-8 -*-
import sys

import logbook
import numpy as np
import pandas as pd
import talib
import zipline
from DRL_PairsTrading import DRL_PairsTrading
from zipline.api import record, symbol, order_target_percent
from zipline.finance import commission, slippage

zipline_logging = logbook.NestedSetup([
    logbook.NullHandler(level=logbook.DEBUG),
    logbook.StreamHandler(sys.stdout, level=logbook.INFO),
    logbook.StreamHandler(sys.stderr, level=logbook.ERROR),
])
zipline_logging.push_application()

from history.ZiplineTensorboard import TensorBoard


def generate_tech_data(p1_df, p2_df):
    sample = pd.DataFrame({'p1': p1_df.values.ravel(),
                           'p2': p2_df.values.ravel()},
                          index=p1_df.index)
    p1 = p1_df.values.ravel()
    p2 = p2_df.values.ravel()
    sample['p1_mom'] = talib.MOM(p1)
    sample['p1_macd'], sample['p1_macd_sig'], sample['p1_macd_hist'] = talib.MACD(p1)
    sample['p1_rsi'] = talib.RSI(p1, timeperiod=10)
    sample['p1_cmo'] = talib.CMO(p1)
    sample['p2_mom'] = talib.MOM(p2)
    sample['p2_macd'], sample['p2_macd_sig'], sample['p2_macd_hist'] = talib.MACD(p2)
    sample['p2_rsi'] = talib.RSI(p2, timeperiod=10)

from __future__ import absolute_import, print_function, unicode_literals

import os

import logbook
import logbook.more

LOGFILE = os.environ.get('LOGFILE', 'api.log')

LOGBOOK = logbook.NestedSetup([
    logbook.NullHandler(),
    logbook.more.ColorizedStderrHandler(level='INFO'),
    logbook.FileHandler(
        LOGFILE, mode='w', level='DEBUG', delay=True, bubble=True,
        format_string=(
            '--------------------------------------------------------------------------\n'
            '[{record.time} {record.level_name:<8s} {record.channel:>10s}]'
            ' {record.filename:s}:{record.lineno:d}\n{record.message:s}')),
])

DEBUG = True
TESTING = True

JWT_ALGORITHM = 'HS256'
JWT_EXPIRATION_DELTA = 60 * 60 * 24


def IS_PRIVILEGED_USER(userinfo):

def get_nested_setup(self):
    nested_log_setup = logbook.NestedSetup(self.handlers)
    return nested_log_setup

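# Presumed call pattern (hypothetical names; the class holding this method is
# not shown): build the setup once, then bind it around the work.
nested_setup = configured_logger.get_nested_setup()
with nested_setup.applicationbound():
    run_application()
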