Example No. 1
    def test_null_handler(self):
        null_handler = logbook.NullHandler()
        handler = logbook.TestHandler(level='ERROR')
        with capture_stderr() as captured:
            with null_handler:
                with handler:
                    self.log.error('An error')
                    self.log.warn('A warning')
            self.assertEqual(captured.getvalue(), '')
            self.assertFalse(handler.has_warning('A warning'))
            self.assert_(handler.has_error('An error'))
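
The pattern in this first example recurs throughout this page: a NullHandler sits below the handler that does the real work, so any record the inner handler does not take (here, the warning filtered out by level='ERROR') is swallowed instead of falling through to logbook's default stderr handler. A minimal standalone sketch of the same stacking, outside a test class (handler levels and messages here are illustrative, not taken from any of the examples):

import sys

import logbook

log = logbook.Logger('example')
null_handler = logbook.NullHandler()
stream_handler = logbook.StreamHandler(sys.stdout, level='INFO', bubble=True)

with null_handler.applicationbound():
    with stream_handler.applicationbound():
        # Handled by the stream handler and printed to stdout.
        log.info('shown on stdout')
        # Below INFO, so the stream handler skips it and the NullHandler drops it.
        log.debug('never shown anywhere')
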
Example No. 2
    def run(self):
        with logbook.NestedSetup([logbook.NullHandler(), logbook.StreamHandler(sys.stdout, bubble=True)]):
            self.bef_work()
            for date in self.trdday:
                if int(date) < int(self.startdate):
                    continue
                self.logger.info('--------------------------')
                self.logger.info('START  Working on ' + date)
                self.work(date)
                self.logger.info('FINISH Working on ' + date)
            self.aft_work()
Example No. 3
def rhs_func(initial_conditions):
    with logbook.NullHandler().applicationbound():
        rhs, internal_data = ode_system(
            γ=initial_conditions.γ,
            a_0=initial_conditions.a_0,
            norm_kepler_sq=initial_conditions.norm_kepler_sq,
            init_con=initial_conditions.params,
            with_taylor=False,
            η_derivs=True,
        )
    return rhs
Example No. 4
def _get_log_handlers():
    """
    Initializes all relevant log handlers.

    :return: A list of log handlers.
    """
    return [
        logbook.NullHandler(),
        logbook.StreamHandler(sys.stdout, level=logbook.DEBUG, bubble=True),
        logbook.RotatingFileHandler(config.LOGFILE, level=logbook.DEBUG, max_size=5 * 1024 * 1024, bubble=True)
    ]
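
Note that both real handlers are created with bubble=True: the handler on top of the stack handles a record and still lets it travel on, so stdout and the rotating file both see every record, and the NullHandler at the bottom of the list swallows whatever is left instead of letting it fall through to logbook's default stderr handler (assuming the caller wraps this list in a NestedSetup, as the other examples do).
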
Example No. 5
    def test_custom_handling_tester(self):
        flag = True

        class MyTestHandler(logbook.TestHandler):
            def should_handle(self, record):
                return flag
        with logbook.NullHandler():
            with MyTestHandler() as handler:
                self.log.warn('1')
                flag = False
                self.log.warn('2')
                self.assert_(handler.has_warning('1'))
                self.assert_(not handler.has_warning('2'))
Example No. 6
    def setup_simple_file_handler(cls, file_path):
        """
        Push a file handler that logs only the message (no timestamp).
        """
        null_handler = logbook.NullHandler()
        handler = logbook.FileHandler(file_path, format_string='{record.message}', level=2, bubble=False)

        # Push the null handler first so nothing else gets logged.
        null_handler.push_application()
        # Then push the file handler on top of it.
        handler.push_application()
Example No. 7
def setup_logbook(**kwargs):
    import logbook
    from logbook.ticketing import TicketingHandler
    from .ticketing import DjangoORMBackend

    null_handler = logbook.NullHandler()
    null_handler.push_application()

    ticketing_handler = TicketingHandler('',
                                         backend=DjangoORMBackend,
                                         level=logbook.INFO,
                                         bubble=True)
    ticketing_handler.push_application()
Example No. 8
    def thread_server(wait_for_start, wait_for_close):
        try:
            print("starting server, hub: {}".format(gevent.hub.get_hub()))
            with logbook.NullHandler().applicationbound():
                with server_context(FooService(), max_response_time=0.1):
                    print("server started.")
                    wait_for_start.set()
                    while not wait_for_close.is_set():
                        gevent.sleep(0.1)
        except:
            import traceback
            traceback.print_exc()
Example No. 9
def setup_script_logging():
    """
    Use this logger for standalone scripts, or script-like subcommands,
    such as bcbio_prepare_samples and bcbio_nextgen.py -w template.
    """
    handlers = [logbook.NullHandler()]
    format_str = ("[{record.time:%Y-%m-%dT%H:%MZ}] "
                  "{record.level_name}: {record.message}")

    handler = logbook.StreamHandler(sys.stderr, format_string=format_str,
                                    level="DEBUG")
    handler.push_thread()
    return handler
Example No. 10
def main(args):
#    signal.signal(signal.SIGHUP, politwoops.utils.restart_process)

    log_handler = politwoops.utils.configure_log_handler(_script_, args.loglevel, args.output)
    with logbook.NullHandler():
        with log_handler.applicationbound():
            try:
                app = FillTwitterID()
                if args.authtest:
                    return
                else:
                    return app.run()
            except KeyboardInterrupt:
                log.notice("Killed by CTRL-C")
Example No. 11
def _create_log_handler(config, add_hostname=False):
    logbook.set_datetime_format("local")
    handlers = [logbook.NullHandler()]
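    # NullHandler goes first so it sits at the bottom of the resulting NestedSetup
    # and swallows any record that the handlers appended afterwards let bubble past.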
    format_str = " ".join([
        "[{record.time:%Y-%m-%d %H:%M}]",
        "{record.extra[source]}:" if add_hostname else "", "{record.message}"
    ])

    log_dir = get_log_dir(config)
    if log_dir:
        if not os.path.exists(log_dir):
            utils.safe_makedir(log_dir)
            # Wait for the new directory to propagate; otherwise we see logging errors on distributed filesystems.
            time.sleep(5)
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir, "%s.log" % LOG_NAME),
                                format_string=format_str,
                                level="INFO",
                                filter=_not_cl))
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir,
                                             "%s-debug.log" % LOG_NAME),
                                format_string=format_str,
                                level="DEBUG",
                                bubble=True,
                                filter=_not_cl))
        handlers.append(
            logbook.FileHandler(os.path.join(log_dir,
                                             "%s-commands.log" % LOG_NAME),
                                format_string=format_str,
                                level="DEBUG",
                                filter=_is_cl))

    email = config.get("email",
                       config.get("resources", {}).get("log", {}).get("email"))
    if email:
        email_str = u'''Subject: [bcbio-nextgen] {record.extra[run]} \n\n {record.message}'''
        handlers.append(
            logbook.MailHandler(email, [email],
                                format_string=email_str,
                                level='INFO',
                                bubble=True))

    handlers.append(
        logbook.StreamHandler(sys.stderr,
                              format_string=format_str,
                              bubble=True,
                              filter=_not_cl))
    return CloseableNestedSetup(handlers)
Example No. 12
def test_null_handler_filtering(activation_strategy):
    logger1 = logbook.Logger("1")
    logger2 = logbook.Logger("2")
    outer = logbook.TestHandler()
    inner = logbook.NullHandler()

    inner.filter = lambda record, handler: record.dispatcher is logger1
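    # With this filter the inner NullHandler only swallows records dispatched by
    # logger1; logger2's record passes through to the outer TestHandler.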

    with activation_strategy(outer):
        with activation_strategy(inner):
            logger1.warn("1")
            logger2.warn("2")

    assert outer.has_warning('2', channel='2')
    assert (not outer.has_warning('1', channel='1'))
Example No. 13
def setup_logging():
    os.makedirs(LOG_DIR, exist_ok=True)

    format_string = "[{record.time:%H:%M:%S}] {record.level_name}: {record.channel}:{record.extra[strat_id]} {record.message}"

    handlers = [logbook.NullHandler()]

    if CLOUD_LOGGING:
        cloud_handler = GoogleCloudHandler(level="DEBUG",
                                           bubble=True,
                                           format_string=format_string)
        handlers.append(cloud_handler)

    file_handler = logbook.RotatingFileHandler(APP_LOG,
                                               level="DEBUG",
                                               bubble=True,
                                               format_string=format_string)

    stream_handler = logbook.StreamHandler(sys.stdout,
                                           level="INFO",
                                           bubble=True)
    stream_handler.format_string = format_string

    error_file_handler = logbook.RotatingFileHandler(ERROR_LOG,
                                                     level="ERROR",
                                                     bubble=True)
    error_file_handler.format_string = """
----------------------------------------------------------------------------------
{record.time:%H:%M:%S} KRYPTOS:{record.channel}:{record.level_name}:

{record.message}

Module: {record.module}:{record.lineno}
Function: {record.func_name}

Channel: {record.channel}
Trade Date: {record.extra[strat_date]}

Exception: {record.formatted_exception}

----------------------------------------------------------------------------------
"""

    handlers.extend([file_handler, stream_handler, error_file_handler])

    setup = logbook.NestedSetup(handlers)

    setup.push_thread()
Example No. 14
def get_logger(perform_rollover=False):
    """
    Push to the app stack the needed handlers and return a Logger object.

    :rtype: logbook.Logger
    """
    # NOTE: make sure that the folder exists, the logger is created before
    # saving settings on the first run.
    _base = os.path.join(get_path_prefix(), "leap")
    mkdir_p(_base)
    bitmask_log_file = os.path.join(_base, 'bitmask.log')

    level = logbook.WARNING
    if flags.DEBUG:
        level = logbook.NOTSET

    # This handler consumes logs not handled by the others
    null_handler = logbook.NullHandler()
    null_handler.push_application()

    silencer = SelectiveSilencerFilter()

    zmq_handler = SafeZMQHandler('tcp://127.0.0.1:5000', multi=True,
                                 level=level, filter=silencer.filter)
    zmq_handler.push_application()

    file_handler = logbook.RotatingFileHandler(
        bitmask_log_file, format_string=LOG_FORMAT, bubble=True,
        filter=silencer.filter, max_size=sys.maxint)

    if perform_rollover:
        file_handler.perform_rollover()

    file_handler.push_application()

    # don't use simple stream, go for colored log handler instead
    # stream_handler = logbook.StreamHandler(sys.stdout,
    #                                        format_string=LOG_FORMAT,
    #                                        bubble=True)
    # stream_handler.push_application()
    stream_handler = ColorizedStderrHandler(
        level=level, format_string=LOG_FORMAT, bubble=True,
        filter=silencer.filter)
    stream_handler.push_application()

    logger = logbook.Logger('leap')

    return logger
Example No. 15
    def test_blackhole_setting(self):
        null_handler = logbook.NullHandler()
        heavy_init = logbook.LogRecord.heavy_init
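        # heavy_init is patched to raise below, so the test fails if logbook
        # initializes the record at all while the NullHandler blackholes the stack.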
        try:
            def new_heavy_init(self):
                raise RuntimeError('should not be triggered')
            logbook.LogRecord.heavy_init = new_heavy_init
            with null_handler:
                logbook.warn('Awesome')
        finally:
            logbook.LogRecord.heavy_init = heavy_init

        null_handler.bubble = True
        with capture_stderr() as captured:
            logbook.warning('Not a blockhole')
            self.assertNotEqual(captured.getvalue(), '')
Example No. 16
def main(args):
    signal.signal(signal.SIGHUP, politwoops.utils.restart_process)

    log_handler = politwoops.utils.configure_log_handler(
        _script_, args.loglevel, args.output)
    with logbook.NullHandler():
        with log_handler.applicationbound():
            log.debug("Starting tweets-client.py")
            try:
                app = TweetStreamClient()
                if args.restart:
                    return politwoops.utils.run_with_restart(app.run)
                else:
                    return app.run()
            except KeyboardInterrupt:
                log.notice("Killed by CTRL-C")
Example No. 17
    def _get_file_log_handler(self, subpath, symlink, bubble=False, filter=None):
        root_path = config.root.log.root
        if root_path is None or subpath is None:
            log_path = None
            if bubble:
                handler = NoopHandler()
            else:
                handler = logbook.NullHandler(filter=filter)
        else:
            log_path = self._normalize_path(os.path.join(root_path, _format_log_path(subpath)))
            ensure_containing_directory(log_path)
            handler = self._get_file_handler_class()(log_path, bubble=bubble, filter=filter)
            if symlink:
                self._try_create_symlink(log_path, symlink)
            self._set_formatting(handler, config.root.log.format)
        return handler, log_path
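
When no log root or subpath is configured, the method still has to return something: a bubbling slot gets a NoopHandler so records pass through untouched, while a non-bubbling slot gets a NullHandler carrying the same filter, so the records it matches are silently dropped.
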
Example No. 18
    def test_flush(self, logger):
        from logbook.more import RiemannHandler
        riemann_handler = RiemannHandler("127.0.0.1",
                                         5555,
                                         message_type="test",
                                         flush_threshold=2,
                                         level=logbook.INFO)
        null_handler = logbook.NullHandler()
        with null_handler.applicationbound():
            with riemann_handler:
                logger.info("Msg #1")
                logger.info("Msg #2")
                logger.info("Msg #3")

        q = riemann_handler.queue
        assert len(q) == 1
        assert q[0]["description"] == "Msg #3"
Example No. 19
    def _log_file_handler_context(self,
                                  subpath,
                                  symlink,
                                  bubble=False,
                                  filter=_slash_logs_filter,
                                  use_compression=False,
                                  use_rotation=False):
        if subpath is None or config.root.log.root is None:
            yield NoopHandler() if bubble else logbook.NullHandler(filter=filter)
        else:
            log_path = self._get_log_file_path(subpath, use_compression)
            handler = self._log_path_to_handler.get(log_path, None)
            if handler is not None:
                yield handler
            else:
                result = context.result
                ensure_containing_directory(log_path)
                if symlink:
                    self._try_create_symlink(log_path, symlink)
                handler = self._create_log_file_handler(
                    log_path,
                    bubble=bubble,
                    use_compression=use_compression,
                    use_rotation=use_rotation,
                    filter=filter)
                try:
                    self._log_path_to_handler[log_path] = handler
                    self._set_formatting(handler, config.root.log.format)
                    with handling_exceptions():
                        yield handler
                finally:
                    handler.close()
                    self._log_path_to_handler[log_path] = None
                    with handling_exceptions(swallow=True):
                        hooks.log_file_closed(path=log_path, result=result)  # pylint: disable=no-member
                    if config.root.log.cleanup.enabled and self._should_delete_log(result):
                        with handling_exceptions(swallow=True):
                            os.remove(log_path)
                            dir_path = os.path.dirname(log_path)
                            if not os.listdir(dir_path) and dir_path != self._normalize_path(config.root.log.root):
                                os.rmdir(dir_path)
Example No. 20
def setup(level='debug', show_log=False, filename=settings.LOG['file']):
    ''' Hivy formatted logger '''

    level = level.upper()
    handlers = [logbook.NullHandler()]
    if show_log:
        handlers.append(
            logbook.StreamHandler(sys.stdout,
                                  format_string=settings.LOG['format'],
                                  level=level))
    else:
        handlers.append(
            logbook.FileHandler(filename,
                                format_string=settings.LOG['format'],
                                level=level))

    return logbook.NestedSetup(handlers)
Example No. 21
def xcat(ctx, target, arguments, target_parameter, match_string, method,
         detection_method, loglevel, logfile, limit, public_ip):
    null_handler = logbook.NullHandler()
    null_handler.push_application()

    out_handler = logbook.StreamHandler(logfile,
                                        level=getattr(logbook,
                                                      loglevel.upper()))
    out_handler.push_application()

    if detection_method == "true":
        checker = lambda r, b: match_string in b
    else:
        checker = lambda r, b: match_string not in b

    public_ip, public_port = public_ip.split(
        ":") if public_ip and ":" in public_ip else (public_ip, "0")
    if not public_port.isdigit():
        print("Error: Port is not a number")
        ctx.exit(-1)

    public_port = int(public_port)

    if public_ip == "autodetect":
        try:
            public_ip = ipgetter.IPgetter().get_externalip()
        except Exception:
            click.echo("Could not detect public IP, please explicitly specify")
            ctx.exit()
        click.echo("External IP: {}".format(public_ip))

    if public_ip is not None:
        # Hack Hack Hack:
        # Setup an OOB http server instance on the doc feature class
        OOBDocFeature.server = OOBHttpServer(host=public_ip, port=public_port)

    ctx.obj["target_param"] = target_parameter
    request_maker = RequestMaker(
        target,
        method,
        arguments,
        target_parameter if target_parameter != "*" else None,
        checker=checker,
        limit_request=limit)
    ctx.obj["detector"] = detector.Detector(checker, request_maker)
Example No. 22
def _get_log_handlers():
    """
    Initializes all relevant log handlers.

    :return: A list of log handlers.
    """
    handlers = [
        logbook.NullHandler(),
        logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True),
    ]
    if LOG_FILE_PATH:
        handlers.append(
            logbook.RotatingFileHandler(LOG_FILE_PATH,
                                        level=logbook.DEBUG,
                                        backup_count=1,
                                        max_size=5 * 1024 * 1024,
                                        bubble=True))
    return handlers
Example No. 23
    def read_metadata(self, post, lang=None):
        """Read the metadata from a post, and return a metadata dict."""
        if lang is None:
            lang = LocaleBorg().current_lang
        source_path = post.translated_source_path(lang)

        # Silence reST errors, some of which are due to a different
        # environment. Real issues will be reported while compiling.
        null_logger = logbook.Logger('NULL')
        null_logger.handlers = [logbook.NullHandler()]
        with io.open(source_path, 'r', encoding='utf-8') as inf:
            data = inf.read()
            _, _, _, document = rst2html(data,
                                         logger=null_logger,
                                         source_path=source_path,
                                         transforms=self.site.rst_transforms,
                                         no_title_transform=False)
        meta = {}
        if 'title' in document:
            meta['title'] = document['title']
        for docinfo in document.traverse(docutils.nodes.docinfo):
            for element in docinfo.children:
                if element.tagname == 'field':  # custom fields (e.g. summary)
                    name_elem, body_elem = element.children
                    name = name_elem.astext()
                    value = body_elem.astext()
                elif element.tagname == 'authors':  # author list
                    name = element.tagname
                    value = [element.astext() for element in element.children]
                else:  # standard fields (e.g. address)
                    name = element.tagname
                    value = element.astext()
                name = name.lower()

                meta[name] = value

        # Put 'authors' meta field contents in 'author', too
        if 'authors' in meta and 'author' not in meta:
            meta['author'] = '; '.join(meta['authors'])

        # Map metadata from other platforms to names Nikola expects (Issue #2817)
        map_metadata(meta, 'rest_docinfo', self.site.config)
        return meta
Example No. 24
    def __init__(self, stdout=colorama_stdout, stderr=sys.stderr):
        self.stdout = stdout
        self.stderr = stderr
        self._null_handler = logbook.NullHandler()
        self._output_handler = OutputHandler(self.stdout)
        self._file_handler = DelayedFileHandler()
        self._relevel_processor = Relevel(allowed=['dbt', 'werkzeug'])
        self._state_processor = DbtProcessState('internal')
        # keep track of whether we've already entered to decide if we should
        # actually be pushing. This allows us to log in main() and also
        # support entering dbt execution via handle_and_check.
        self._stack_depth = 0
        super().__init__([
            self._null_handler,
            self._output_handler,
            self._file_handler,
            self._relevel_processor,
            self._state_processor,
        ])
Example No. 25
def test_blackhole_setting(activation_strategy):
    null_handler = logbook.NullHandler()
    heavy_init = logbook.LogRecord.heavy_init
    with activation_strategy(null_handler):

        def new_heavy_init(self):
            raise RuntimeError('should not be triggered')

        logbook.LogRecord.heavy_init = new_heavy_init
        try:
            with activation_strategy(null_handler):
                logbook.warn('Awesome')
        finally:
            logbook.LogRecord.heavy_init = heavy_init

    null_handler.bubble = True
    with capturing_stderr_context() as captured:
        logbook.warning('Not a blockhole')
        assert captured.getvalue() != ''
Example No. 26
def main(args):
    signal.signal(signal.SIGHUP, politwoops.utils.restart_process)

    log_handler = politwoops.utils.configure_log_handler(_script_, args.loglevel, args.output)
    with logbook.NullHandler():
        with log_handler.applicationbound():
            try:
                log.notice("Log level {0}".format(log_handler.level_name))
                ensure_phantomjs_is_runnable()

                with politwoops.utils.Heart() as heart:
                    politwoops.utils.start_watchdog_thread(heart)
                    worker = TweetEntityWorker(heart)
                    if args.restart:
                        politwoops.utils.run_with_restart(worker.run)
                    else:
                        worker.run()
            except KeyboardInterrupt:
                log.notice("Killed by CTRL-C")
Example No. 27
def connect(login):
    '''Connect to IMAP server with gmvault credential'''
    logbook.NullHandler().push_application()
    credential = credential_utils.CredentialHelper.get_credential({
        'email': login,
        'passwd': 'not_seen',
        'oauth': 'empty'
    })

    imap = GIMAPFetcher('imap.gmail.com',
                        993,
                        login,
                        credential,
                        readonly_folder=True)
    imap.connect(go_to_all_folder=True)
    return imap
Example No. 28
    def handle(self, *args, **options):
        log_handler = configure_log_handler('mirrormirror',
                                            options['loglevel'],
                                            options['output'])
        with logbook.NullHandler():
            with log_handler.applicationbound():

                local_timezone = pytz.timezone(settings.TIME_ZONE)
                now = pytz.datetime.datetime.now(tz=local_timezone)

                if ElectionMirror.objects.count() == 0:
                    urls = ElectionUrl.objects.all()
                else:
                    # URLs that have never been mirrored
                    urls = ElectionUrl.objects.filter(
                        mirrors__isnull=True)[:settings.MIRROR_BATCH_SIZE]
                    if not urls:
                        mirror_timestamps = list(
                            ElectionMirror.objects.values('election_url').
                            annotate(timestamp=Max('timestamp')))
                        mirror_timestamps.sort(key=itemgetter('timestamp'))
                        batch_urls = [
                            m['election_url'] for m in mirror_timestamps
                        ][:settings.MIRROR_BATCH_SIZE]
                        urls = ElectionUrl.objects.filter(pk__in=batch_urls)

                for url in urls:
                    previous = url.latest_mirror()
                    if previous:
                        # Don't mirror URLs too often
                        since_last = now - previous.timestamp
                        if since_last < settings.MIRROR_WAIT:
                            wait = math.floor((settings.MIRROR_WAIT -
                                               since_last).total_seconds())
                            log.notice(
                                "Waiting {0} seconds before mirroring again: {1}"
                                .format(wait, url.url))
                            time.sleep(wait)

                    mirror_url(url)

                restart_process()
Example No. 29
def get_nestedlog(level='DEBUG', filename='quantrade.log', uri=None):
    # Default uri: tcp://127.0.0.1:5540
    if uri is not None:
        log_setup = NestedSetup([
            ZeroMQHandler(uri),
        ])
    else:
        log_setup = NestedSetup([
            logbook.NullHandler(level=logbook.DEBUG, bubble=True),
            logbook.StreamHandler(sys.stdout,
                                  level=logbook.INFO,
                                  format_string=log_format),
            logbook.StreamHandler(sys.stderr,
                                  level=logbook.ERROR,
                                  format_string=log_format),
            logbook.FileHandler('{}/{}'.format(log_destination, filename),
                                level=level),
        ])

    return log_setup
Example No. 30
    def setup_cli_app_handler(cls, activate_log_file=False, console_level='CRITICAL', file_path=DEFAULT_LOG, log_file_level='DEBUG'):
        """
        Set up a handler for communicating with the user while still logging everything to a log file.
        """
        null_handler = logbook.NullHandler()
        out_handler = StdoutHandler(format_string='{record.message}', level=console_level, bubble=False)

        # Push the null handler first so nothing else gets logged.
        null_handler.push_application()

        # Add the stdout handler.
        out_handler.push_application()

        # Optionally add a file handler.
        if activate_log_file:
            file_handler = logbook.FileHandler(file_path, mode='w', format_string='[{record.time:%Y-%m-%d %H:%M}]:{record.level_name}:{record.channel}:{record.message}', level=log_file_level, bubble=True)
            file_handler.push_application()