Example #1
def setup_logging():
    os.makedirs(LOG_DIR, exist_ok=True)

    format_string = "[{record.time:%H:%M:%S}] {record.level_name}: {record.channel}:{record.extra[strat_id]} {record.message}"

    handlers = [logbook.NullHandler()]

    if CLOUD_LOGGING:
        cloud_handler = GoogleCloudHandler(level="DEBUG",
                                           bubble=True,
                                           format_string=format_string)
        handlers.append(cloud_handler)

    file_handler = logbook.RotatingFileHandler(APP_LOG,
                                               level="DEBUG",
                                               bubble=True,
                                               format_string=format_string)

    stream_handler = logbook.StreamHandler(sys.stdout,
                                           level="INFO",
                                           bubble=True)
    stream_handler.format_string = format_string

    error_file_handler = logbook.RotatingFileHandler(ERROR_LOG,
                                                     level="ERROR",
                                                     bubble=True)
    error_file_handler.format_string = """
----------------------------------------------------------------------------------
{record.time:%H:%M:%S} KRYPTOS:{record.channel}:{record.level_name}:

{record.message}

Module: {record.module}:{record.lineno}
Function: {record.func_name}

Channel: {record.channel}
Trade Date: {record.extra[strat_date]}

Exception: {record.formatted_exception}

----------------------------------------------------------------------------------
"""

    handlers.extend([file_handler, stream_handler, error_file_handler])

    setup = logbook.NestedSetup(handlers)

    setup.push_thread()
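
The format strings above reference {record.extra[strat_id]} and {record.extra[strat_date]}, so callers presumably attach those fields to every record. A minimal sketch of one way to do that with logbook.Processor; the channel name and field values below are purely illustrative, not from the original project:

import logbook

def inject_strategy_fields(record):
    # Hypothetical values; the original project supplies real ones.
    record.extra['strat_id'] = 'demo_strategy'
    record.extra['strat_date'] = '2021-01-01'

setup_logging()
with logbook.Processor(inject_strategy_fields).applicationbound():
    logbook.Logger('kryptos').info('logging with extra fields attached')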
Example #2
    def _setup_file_handler(self):
        """
        This helper method sets up a rotating file logger based on the base
        path supplied in the configuration. No file logging is performed if
        the base path is not supplied.
        """
        log_path_base = self.app.config.get('LOGGER_PATH_BASE')
        if log_path_base is None:
            return

        log_path_base = os.path.expanduser(log_path_base)
        if not os.path.exists(log_path_base):
            os.makedirs(log_path_base)

        log_file_name = self.app.config.get('LOGGER_FILE_NAME')
        if log_file_name is None:
            log_file_name = os.path.basename(sys.argv[0])

        level_name = self.app.config.get('LOGGER_LEVEL', 'INFO')

        logbook.RotatingFileHandler(
            os.path.join(log_path_base, log_file_name + '.log'),
            level=logbook.lookup_level(level_name),
            bubble=True,
            format_string=self.app.config.get('LOGGER_FORMAT'),
        ).push_application()
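
The helper above reads four optional app-config keys. A minimal sketch of a matching configuration, assuming a Flask-style app object with a dict-like config; all values are illustrative:

app.config.update(
    LOGGER_PATH_BASE='~/logs',         # expanded with expanduser, created if missing
    LOGGER_FILE_NAME='myservice',      # defaults to the basename of sys.argv[0]
    LOGGER_LEVEL='DEBUG',              # resolved via logbook.lookup_level
    LOGGER_FORMAT='{record.time} {record.level_name}: {record.message}',
)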
Example #3
def test_rotating_file_handler(logfile, activation_strategy, logger):
    basename = os.path.basename(logfile)
    handler = logbook.RotatingFileHandler(
        logfile,
        max_size=2048,
        backup_count=3,
    )
    handler.format_string = '{record.message}'
    with activation_strategy(handler):
        for c, x in zip(LETTERS, xrange(32)):
            logger.warn(c * 256)
    files = [
        x for x in os.listdir(os.path.dirname(logfile))
        if x.startswith(basename)
    ]
    files.sort()

    assert files == [
        basename, basename + '.1', basename + '.2', basename + '.3'
    ]
    with open(logfile) as f:
        assert f.readline().rstrip() == ('C' * 256)
        assert f.readline().rstrip() == ('D' * 256)
        assert f.readline().rstrip() == ('E' * 256)
        assert f.readline().rstrip() == ('F' * 256)
Example #4
def init_logging_file(filename, log_level='notset', rotate_log=True, rotate_max_size=10485760):
    log_dir = os.path.dirname(filename)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    if rotate_log is True:
        handler = logbook.RotatingFileHandler(filename, level=figure_out_log_level(log_level),
                                              max_size=int(rotate_max_size), bubble=True)
    else:
        handler = logbook.FileHandler(filename, level=figure_out_log_level(log_level), bubble=True)
    handler.push_application()
    get_logger().debug("file based logging initialized in directory: " + log_dir)
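
A hedged usage sketch for the helper above; figure_out_log_level and get_logger are project helpers not shown here, and the path and level are assumptions:

# Illustrative only: creates logs/ if needed and rotates at ~5 MiB.
init_logging_file('logs/app.log', log_level='debug',
                  rotate_log=True, rotate_max_size=5 * 1024 * 1024)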
Example #5
def _get_log_handlers():
    """
    Initializes all relevant log handlers.

    :return: A list of log handlers.
    """
    return [
        logbook.NullHandler(),
        logbook.StreamHandler(sys.stdout, level=logbook.DEBUG, bubble=True),
        logbook.RotatingFileHandler(config.LOGFILE, level=logbook.DEBUG, max_size=5 * 1024 * 1024, bubble=True)
    ]
Example #6
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--hostname', default='0.0.0.0')
    parser.add_argument('--port', default=8080)
    parser.add_argument('--database', default='rss.db')
    parser.add_argument('--logfile', default='webapp.log')
    parser.add_argument('-d', '--debug', action='store_true')
    args = parser.parse_args()

    log_handler = logbook.RotatingFileHandler(
        args.logfile, level=logbook.DEBUG if args.debug else logbook.INFO)
    with log_handler.applicationbound():
        webapp = WebApp(args.hostname, args.port, args.database, args.debug)
        webapp.start()
Example #7
def get_logger(perform_rollover=False):
    """
    Push to the app stack the needed handlers and return a Logger object.

    :rtype: logbook.Logger
    """
    # NOTE: make sure that the folder exists, the logger is created before
    # saving settings on the first run.
    _base = os.path.join(get_path_prefix(), "leap")
    mkdir_p(_base)
    bitmask_log_file = os.path.join(_base, 'bitmask.log')

    level = logbook.WARNING
    if flags.DEBUG:
        level = logbook.NOTSET

    # This handler consumes logs not handled by the others
    null_handler = logbook.NullHandler()
    null_handler.push_application()

    silencer = SelectiveSilencerFilter()

    zmq_handler = SafeZMQHandler('tcp://127.0.0.1:5000', multi=True,
                                 level=level, filter=silencer.filter)
    zmq_handler.push_application()

    file_handler = logbook.RotatingFileHandler(
        bitmask_log_file, format_string=LOG_FORMAT, bubble=True,
        filter=silencer.filter, max_size=sys.maxint)

    if perform_rollover:
        file_handler.perform_rollover()

    file_handler.push_application()

    # don't use simple stream, go for colored log handler instead
    # stream_handler = logbook.StreamHandler(sys.stdout,
    #                                        format_string=LOG_FORMAT,
    #                                        bubble=True)
    # stream_handler.push_application()
    stream_handler = ColorizedStderrHandler(
        level=level, format_string=LOG_FORMAT, bubble=True,
        filter=silencer.filter)
    stream_handler.push_application()

    logger = logbook.Logger('leap')

    return logger
Example #8
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--loop', action='store_true')
    parser.add_argument('--interval', default=60 * 30, type=float)
    parser.add_argument('--database', default='rss.db')
    parser.add_argument('--logfile', default='crawler.log')
    parser.add_argument('-d', '--debug', action='store_true')
    args = parser.parse_args()

    log_handler = logbook.RotatingFileHandler(
        args.logfile, level=logbook.DEBUG if args.debug else logbook.INFO)
    with log_handler.applicationbound():
        if args.loop:
            crawler = LoopCrawler(args.interval, args.database, args.debug)
        else:
            crawler = Crawler(args.database, args.debug)
        crawler.start()
Example #9
def _get_log_handlers():
    """
    Initializes all relevant log handlers.

    :return: A list of log handlers.
    """
    handlers = [
        logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True),
    ]
    if config.LOG_FILE_PATH:
        handlers.append(
            logbook.RotatingFileHandler(config.LOG_FILE_PATH,
                                        level=logbook.DEBUG,
                                        backup_count=1,
                                        max_size=5 * 1024 * 1024,
                                        bubble=True))
    return handlers
Example #10
    def test_rotating_file_handler(self):
        basename = os.path.join(self.dirname, 'rot.log')
        handler = logbook.RotatingFileHandler(basename, max_size=2048,
                                              backup_count=3,
                                              )
        handler.format_string = '{record.message}'
        with handler:
            for c, x in izip(LETTERS, xrange(32)):
                self.log.warn(c * 256)
        files = [x for x in os.listdir(self.dirname)
                 if x.startswith('rot.log')]
        files.sort()

        self.assertEqual(files, ['rot.log', 'rot.log.1', 'rot.log.2',
                                 'rot.log.3'])
        with open(basename) as f:
            self.assertEqual(f.readline().rstrip(), 'C' * 256)
            self.assertEqual(f.readline().rstrip(), 'D' * 256)
            self.assertEqual(f.readline().rstrip(), 'E' * 256)
            self.assertEqual(f.readline().rstrip(), 'F' * 256)
Example #11
def _get_log_handlers(logs_directory_path=None):
    """
    Returns a list of the nested log handlers setup.
    """
    handlers_list = list()
    handlers_list.append(logbook.NullHandler())
    # Add the rotating file handler, if a logs directory path was supplied.
    if logs_directory_path is not None:
        if not os.path.exists(logs_directory_path):
            os.makedirs(logs_directory_path)
        handlers_list.append(
            logbook.RotatingFileHandler(os.path.join(logs_directory_path,
                                                     LOG_FILE_NAME),
                                        max_size=1024 * 1024,
                                        backup_count=5,
                                        bubble=True))
    handlers_list.append(
        logbook.StreamHandler(sys.stdout, level='DEBUG', bubble=True))
    handlers_list.append(
        logbook.StreamHandler(sys.stderr, level='ERROR', bubble=True))
    return handlers_list
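
A handler list like the one returned above is typically activated with logbook.NestedSetup, as Examples #1 and #12 do. A minimal sketch; the directory path is illustrative:

import logbook

setup = logbook.NestedSetup(_get_log_handlers('/tmp/myapp-logs'))
setup.push_application()
logbook.Logger('worker').info('handlers are active')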
Example #12
def post_fork(server, worker):
    server.log.info('Worker spawned (pid: %s)', worker.pid)

    logging_rotating_file_handler = logging.handlers.RotatingFileHandler(
        config.LOG_FILE_PATH.replace('.log', f'.{worker.pid}.flask.log'),
        maxBytes=5 * 1024 * 1024,
        backupCount=5)

    root_logger = logging.getLogger()
    root_logger.addHandler(logging_rotating_file_handler)
    root_logger.setLevel(logging.CRITICAL)

    logger_setup = logbook.NestedSetup([
        logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True),
        logbook.RotatingFileHandler(config.LOG_FILE_PATH.replace(
            '.log', f'.{worker.pid}.log'),
                                    level=logbook.INFO,
                                    max_size=5 * 1024 * 1024,
                                    bubble=True)
    ])
    logger_setup.push_application()
Example #13
def get_logger(perform_rollover=False):
    """
    Push to the app stack the needed handlers and return a Logger object.

    :rtype: logbook.Logger
    """
    # NOTE: make sure that the folder exists, the logger is created before
    # saving settings on the first run.
    _base = os.path.join(get_path_prefix(), "leap")
    mkdir_p(_base)
    bitmask_log_file = os.path.join(_base, 'bitmask.log')

    # level = logbook.WARNING
    # if flags.DEBUG:
    #     level = logbook.NOTSET
    level = logbook.NOTSET

    # This handler consumes logs not handled by the others
    null_handler = logbook.NullHandler()
    null_handler.push_application()

    file_handler = logbook.RotatingFileHandler(bitmask_log_file,
                                               format_string=LOG_FORMAT,
                                               bubble=True,
                                               max_size=sys.maxint)

    if perform_rollover:
        file_handler.perform_rollover()

    file_handler.push_application()

    stream_handler = ColorizedStderrHandler(level=level,
                                            format_string=LOG_FORMAT,
                                            bubble=True)
    stream_handler.push_application()

    logger = logbook.Logger('leap')

    return logger
Example #14
import sys
import logbook
from logbook import Logger

from pyexpander import config


file_handler = logbook.RotatingFileHandler(config.LOGFILE, level=logbook.DEBUG)
console_handler = logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True)

file_handler.push_application()
console_handler.push_application()


def get_logger(name):
    """
    Return the logger for the given name.

    :param name: The name of the logger.
    :return: A logbook Logger.
    """
    logger = Logger(name, level=logbook.DEBUG)
    return logger
Example #15
# -*- coding: utf-8 -*-
""" local logger """

import logbook

from conf import Conf

conf = Conf()

logbook.set_datetime_format(conf.datetime_format)

log = logbook.RotatingFileHandler(conf.logfile,
                                  max_size=conf.max_size,
                                  backup_count=conf.backup_count,
                                  level=conf.level,
                                  bubble=True)
#print dir(log)

log.format_string = "[{record.time:%Y-%m-%d %H:%M:%S.%f}][{record.thread},{record.module},{record.func_name},{record.lineno}] {record.level_name}: {record.channel}: {record.message}"

log.default_format_string = "[{record.time:%Y-%m-%d %H:%M:%S.%f}][{record.thread},{record.module},{record.func_name},{record.lineno}] {record.level_name}: {record.channel}: {record.message}"

log.push_application()


def get_logger(logger_name):
    """ get logger """

    logger = logbook.Logger(logger_name)
    #print dir(logger)
    return logger
Example #16
def main():
    with spin_consumers(), \
         logbook.RotatingFileHandler(DEFAULT_LOG_PATH).applicationbound(), \
         logbook.StreamHandler(sys.stdout).applicationbound():
        app.run()
Example #17
"""
import logbook

import socket
import gevent

import toml

logbook.set_datetime_format("local")

logger = logbook.Logger('hekac')

#log = logbook.FileHandler('heka_tcp.log')

log = logbook.RotatingFileHandler('heka_tcp.log', max_size=1024, backup_count=5)

log.push_application()


def get_conf(conf_fn):
    """ get configuration from .toml """
    with open(conf_fn) as conf_fh:
        conf = toml.loads(conf_fh.read())
        #print(conf)
        return conf
Example #18
import api
import logbook
from webbrowser import open as web_open

logbook.RotatingFileHandler('my_search_api',
                            level=logbook.TRACE).push_application()
logger = logbook.Logger("Main")


def main():
    logger.info('Taking user input...')
    user_input = input('Enter search criteria: ')
    logger.info(f'Searching for {user_input}')
    articles = api.search(user_input)
    print(f'Found {len(articles)} articles')
    id_to_article_dict = {}
    for r in articles:
        id_to_article_dict[r.id] = r
        print(f"[{r.id}] {r.title}: {r.category}")

    print("*" * 20)

    user_input = input(
        'For which article do you need more information [q for quit]: ')
    if user_input != 'q':
        web_open("http://talkpython.fm" +
                 id_to_article_dict[int(user_input)].url,
                 new=2)
    logger.info('End of execution')

Example #19
File: main.py Project: zig2015/dbop
from tornado import tcpserver
from tornado import gen

from configs import db as m_configs_db
from configs import logs as m_configs_logs
from configs.codec import *

import drivers
from drivers import redis as m_d_redis
from drivers import mysql as m_d_mysql

logbook.set_datetime_format("local")
logbook.RotatingFileHandler(
    m_configs_logs.logpath,
    level=m_configs_logs.loglevel,
    backup_count=100,
    max_size=1024 * 1024 * 10,
    format_string=
    '[{record.time}] {record.level_name} {record.filename} {record.lineno}: {record.message}'
).push_application()

LOG = logbook.Logger("***Main***")

__id__ = 0
__registered_dbs__ = {
    # handle id: {
    #   dbname: db name
    #   tables: {
    #       tablename: table name
    #       columns: {
    #           colname: (bare/hash/list/set/sset, pos, int/text)
    #       }
Example #20
from flask import jsonify
from gevent.pywsgi import LoggingLogAdapter
from gevent.pywsgi import WSGIServer
from sqlalchemy_utils import create_database
from sqlalchemy_utils import database_exists

import config
from api import users_blueprint
from app import app
from app import db

# Create log directory
pathlib.Path(config.LOG_PATH).mkdir(parents=True, exist_ok=True)

logbook.StreamHandler(sys.stdout).push_application()
logbook.RotatingFileHandler(config.LOGFILE, max_size=52428800,
                            bubble=True).push_application()
logbook.compat.redirect_logging(set_root_logger_level=False)
logging.root.setLevel(config.LOGLEVEL)

logger = logbook.Logger('[SERVER]', getattr(logbook, config.LOGLEVEL))

wsgi_logger = LoggingLogAdapter(logging.getLogger('wsgi'), level=logging.DEBUG)
wsgi_server = WSGIServer((config.API_IP, config.API_PORT),
                         app,
                         log=wsgi_logger,
                         error_log=wsgi_logger)

app.register_blueprint(users_blueprint)


@app.errorhandler(404)
Example #21
import logbook

h1 = logbook.StderrHandler(bubble=True)

h1.push_application()

h2 = logbook.RotatingFileHandler("a.log", bubble=True)

h2.push_application()

logger = logbook.Logger("cli")

logger.debug("info")
Example #22
def get_logger(setting_getter, name, fail_to_local=False, filter=None):
    global got_logger
    if got_logger:
        # Yes, this means that if you try to change your logging within an
        # application, it won't work. This is intentional. You shouldn't do
        # that.
        return got_logger

    if filter:

        def log_filter(r, h):
            if server_pipe_log_filter_re.search(r.message):
                return False
            return filter(r, h)
    else:

        def log_filter(r, h):
            return not server_pipe_log_filter_re.search(r.message)

    logger_name = 'penguindome-' + name
    file_safe_logger_name = logger_name.replace(os.sep, '_')

    logger = logbook.Logger(logger_name)

    internal_log_dir = os.path.join(var_dir, 'log')
    internal_log_file = os.path.join(internal_log_dir,
                                     file_safe_logger_name + '.log')

    os.makedirs(internal_log_dir, 0o0700, exist_ok=True)

    # We always do local debug logging, regardless of whether we're also
    # logging elsewhere.
    logbook.RotatingFileHandler(internal_log_file,
                                bubble=True,
                                filter=log_filter).push_application()

    handler_name = setting_getter('logging:handler')
    if handler_name:
        handler_name = handler_name.lower()
        handler_name += 'handler'
        handler_name = next(d for d in dir(logbook)
                            if d.lower() == handler_name)
        handler = logbook.__dict__[handler_name]
        kwargs = {'bubble': True, 'filter': log_filter}
        level = setting_getter('logging:level')
        kwargs['level'] = logbook.__dict__[level.upper()]
        if handler_name == 'SyslogHandler':
            kwargs['facility'] = setting_getter('logging:syslog:facility')
            hostname = setting_getter('logging:syslog:host')
            if hostname:
                port = setting_getter('logging:syslog:port')
                try:
                    addrinfo = socket.getaddrinfo(hostname, port,
                                                  socket.AF_INET,
                                                  socket.SOCK_STREAM)[0]
                except:
                    if not fail_to_local:
                        raise
                    logger.warn(
                        'Failed to resolve {}:{}, falling back to '
                        'local-only logging', hostname, port)
                    handler = None
                else:
                    kwargs['socktype'] = addrinfo[1]
                    kwargs['address'] = addrinfo[4]

        if handler:
            if fail_to_local:
                try:
                    with ThreadingTimeout(5, swallow_exc=False):
                        handler = handler(**kwargs)
                except:
                    logger.warn(
                        'Failed to create {}, falling back to '
                        'local-only logging', handler_name)
                else:
                    handler.push_application()
            else:
                handler(**kwargs).push_application()

    logbook.compat.redirect_logging()
    got_logger = logger
    return got_logger
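
The logging:handler setting above is resolved by a case-insensitive match against the names exported by logbook. A standalone sketch of that lookup; the 'stderr' value is illustrative:

import logbook

setting = 'stderr'                        # e.g. the value of 'logging:handler'
wanted = setting.lower() + 'handler'
cls_name = next(d for d in dir(logbook) if d.lower() == wanted)
handler_cls = getattr(logbook, cls_name)  # -> logbook.StderrHandler
print(cls_name)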
Example #23
import feedparser
import logbook

logbook.RotatingFileHandler('parser.log').push_application()
logger = logbook.Logger('feed_parser')


def print_feed(feed, selected_tag):
    logger.debug("Trying to print 'published', 'title' and 'link'...")
    selected_feeds = [
        entry for entry in feed.entries if selected_tag in entry.tags[0].term
    ]
    for entry in selected_feeds:
        try:
            print(
                f'[{entry.tags[0].term}] {entry.published} - {entry.title}: {entry.link}'
            )
        except AttributeError as error:
            logger.error(
                f'Could not print some of the attributes of the feed.')
            print(f'Cannot find some of the attributes: {error}')


def main():
    logger.info('Starting the application...')
    selected_tag = input('Preferred tag name:')
    # day1_file = 'my_file_day1.xml'
    # feed = feedparser.parse(day1_file)
    day2_file = 'my_file_day2.xml'
    feed = feedparser.parse(day2_file)
    print_feed(feed, selected_tag)
Example #24
def main():  # pragma: no cover
    config = SafeConfigParser()
    dirs = ('.', '/etc', '/usr/local/etc')
    if not config.read([os.path.join(dir, config_file) for dir in dirs]):
        sys.exit('Could not find {} in {}'.format(config_file, dirs))

    try:
        logfile = config.get('logging', 'file')
        rotating = config.getboolean('logging', 'rotate', fallback=False)
        if rotating:
            max_size = config.get('logging', 'max_size', fallback=1048576)
            backup_count = config.get('logging', 'backup_count', fallback=5)
            handler = logbook.RotatingFileHandler(logfile,
                                                  max_size=max_size,
                                                  backup_count=backup_count)
        else:
            handler = logbook.FileHandler(logfile)
        handler.push_application()
    except Exception:
        logbook.StderrHandler().push_application()

    try:
        kwargs = dict(config.items('mongodb'))
    except NoSectionError:
        sys.exit('No "mongodb" section in config file')
    args = []
    for arg in ('hosts', 'database', 'username', 'password'):
        try:
            args.append(config.get('mongodb', arg))
        except NoOptionError:
            sys.exit(
                'No "{}" setting in "mongodb" section of config file'.format(
                    arg))
        kwargs.pop(arg)
    args[0] = [s.strip() for s in args[0].split(',')]
    store = MongoStore(*args, **kwargs)

    try:
        email_sender = config.get('email', 'sender')
    except NoSectionError:
        sys.exit('No "email" section in config file')
    except NoOptionError:
        sys.exit('No "sender" setting in "email" section of config file')

    business_logic = BusinessLogic(store, email_sender)

    try:
        listen_port = int(config.get('wsgi', 'port'))
        log.info('Binding to port {}'.format(listen_port))
    except Exception:
        listen_port = 80
        log.info('Binding to default port {}'.format(listen_port))

    try:
        auth_key = config.get('wsgi', 'auth_key')
        log.info('Server authentication enabled')
    except Exception:
        log.warning('Server authentication DISABLED')
        auth_key = None

    httpd = make_server('',
                        listen_port,
                        partial(application, business_logic, auth_key),
                        handler_class=LogbookWSGIRequestHandler)
    business_logic.schedule_next_deadline()
    httpd.serve_forever()
Example #25
import struct

import logbook
#import gevent

from time import strftime, localtime

from utils import get_conf

logbook.set_datetime_format("local")

logger = logbook.Logger('AKSrv')

#log = logbook.FileHandler('heka_tcp.log')

log = logbook.RotatingFileHandler('logs/ak_server.log', max_size=102400,
                                  backup_count=5, bubble=True)

log.push_application()

# static
STX = 0x02
ETX = 0x03
BLANK = 0x20  #


def pack(cmd):
    """ pack """

    clen = len(cmd)

    dt = strftime("%Y-%m-%d %H:%M:%S", localtime())
Example #26
import logbook
import api

logbook.RotatingFileHandler('sportal_scraper.log').push_application()
logger = logbook.Logger('Main')


def main():
    logger.info('Starting application...')
    api.download_site('https://www.sportal.bg')
    top_news = api.get_top_news()
    api.look_for_pattern(top_news, 'Мадрид')
    api.look_for_pattern(top_news, 'Барселона')

if __name__ == '__main__':
    main()
Example #27
import re
from pprint import pprint
import logbook

import requests
import bs4

logbook.RotatingFileHandler('scrapper.log',
                            level=logbook.TRACE).push_application()
logger = logbook.Logger('scraper')


def get_site(URL):
    logger.debug(f'Getting url {URL}')
    response = requests.get(URL)
    response.raise_for_status()
    logger.debug(f'Successfully retrieved url {URL}')
    return response.text


def main():
    logger.info('Start scraping...')
    site = get_site('http://codechalleng.es/challenges/')
    soup = bs4.BeautifulSoup(site, 'html.parser')

    css_class = '.challengeTitle'
    logger.debug(f'Looking for class {css_class}')
    challenges = soup.select(css_class)

    challenges_names = []
    logger.info('Iterating over challenges...')