Example #1
def setup_logger():
    """
    Install a logger into the global namespace
    """

    global _logger
    global _has_logbook

    if _has_logbook:
        _logger = Logger('UoM_WIFI')
        try:
            log_path = join(sys.argv[1], '%s.log' % USERNAME)
        except IndexError:
            log_path = join(split(abspath(__file__))[0], '%s.log' % USERNAME)

        # because the log file is owned by root, if this program is run by a
        # regular user we need to prevent it from crashing when it tries to
        # write to a file owned by root
        try:
            # create the handler
            log_handler = RotatingFileHandler(log_path)

            # push the context object to the application stack
            log_handler.push_application()
        except IOError:
            _has_logbook = False
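setup_logger() assumes the module has already tried to import logbook and recorded the result in _has_logbook. A minimal sketch of that module-level guard, assuming the names used above (USERNAME is a placeholder here; the real value is defined elsewhere in the module):

import sys
from os.path import abspath, join, split

# guard the logbook import so the script still runs without logbook installed
try:
    from logbook import Logger, RotatingFileHandler
    _has_logbook = True
except ImportError:
    _has_logbook = False

_logger = None
USERNAME = 'wifi_user'  # placeholder value for illustration only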
Example #2
    def _get_event_handler(self):
        """
        Retrieves the correct event handler. Returns a StreamHandler
        if events should be written to STDOUT; otherwise it returns
        a ready-to-use RotatingFileHandler.

        Both handler classes inherit from the StreamHandler base class.

        :return: Event handler
        :rtype: StreamHandler
        """
        if self.filename.upper() == 'STDOUT':
            return StreamHandler(sys.stdout)
        else:
            fs.create_dir(os.path.dirname(self.filename))
            previous_log_file_exists = os.path.exists(self.filename)

            event_handler = RotatingFileHandler(
                filename=self.filename,
                max_size=Configuration['max_eventlog_file_size'],
                backup_count=Configuration['max_eventlog_file_backups'])
            if previous_log_file_exists:
                # force starting a new eventlog file on application startup
                event_handler.perform_rollover()

            return event_handler
Example #3
    def _get_event_handler(self):
        """
        Retrieves the correct event handler. Returns a StreamHandler
        if events should be written to STDOUT; otherwise it returns
        a ready-to-use RotatingFileHandler.

        Both handler classes inherit from the StreamHandler base class.

        :return: Event handler
        :rtype: StreamHandler
        """
        if self.filename.upper() == "STDOUT":
            return StreamHandler(sys.stdout)
        else:
            fs.create_dir(os.path.dirname(self.filename))
            previous_log_file_exists = os.path.exists(self.filename)

            event_handler = RotatingFileHandler(
                filename=self.filename,
                max_size=Configuration["max_eventlog_file_fize"],
                backup_count=Configuration["max_eventlog_file_backups"],
            )
            if previous_log_file_exists:
                # force starting a new eventlog file on application startup
                event_handler.perform_rollover()

            return event_handler
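This method depends on an fs.create_dir helper and a Configuration mapping defined elsewhere in the project. A rough sketch of what such a helper might look like (an assumption, not the project's actual implementation):

import os


def create_dir(dir_path):
    """Create dir_path (including parents) if it does not already exist."""
    if dir_path and not os.path.exists(dir_path):
        os.makedirs(dir_path)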
Example #4
def setup_logger():
    """
    Install a logger into the global namespace
    """

    global _logger
    global _has_logbook

    if _has_logbook:
        _logger = Logger('UoM_WIFI')
        try:
            log_path = join(sys.argv[1], '%s.log' % USERNAME)
        except IndexError:
            log_path = join(split(abspath(__file__))[0], '%s.log' % USERNAME)

        # because the log file is owned by root, if this program is run by a
        # regular user we need to prevent it from crashing when it tries to
        # write to a file owned by root
        try:
            # create the handler
            log_handler = RotatingFileHandler(log_path)

            # push the context object to the application stack
            log_handler.push_application()
        except IOError:
            _has_logbook = False
Example #5
def main(botcls, config):
    if 'logfile' in config.data:
        handler = RotatingFileHandler(os.path.expanduser(config.logfile))
    else:
        handler = StreamHandler(sys.stdout)

    handler.push_application()

    bot = botcls(config)
    bot.run()
Example #6
def setup_logger(config):
    if config.has_option(SLACK_SECTION_NAME, "log_output"):
        output_path = config.get(SLACK_SECTION_NAME, "log_output")
        dir_path, file_name = os.path.split(output_path)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        file_handler = RotatingFileHandler(output_path, backup_count=5)
        file_handler.push_application()
    else:
        stream_handler = StreamHandler(sys.stdout)
        stream_handler.push_application()
Example #7
def setup_logger(config):
    if config.has_option(SLACK_SECTION_NAME, "log_output"):
        output_path = config.get(SLACK_SECTION_NAME, "log_output")
        dir_path, file_name = os.path.split(output_path)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        file_handler = RotatingFileHandler(output_path, backup_count=5)
        file_handler.push_application()
    else:
        stream_handler = StreamHandler(sys.stdout)
        stream_handler.push_application()
Example #8
    def __getattr__(self, level):
        if level not in self.levels:
            if not os.path.isdir(self.log_dir):
                os.makedirs(self.log_dir)

            log_file = os.path.join(self.log_dir,
                                    '{}.{}'.format(self.name, level))
            file_handler = RotatingFileHandler(
                filename=log_file,
                level=getattr(logbook, level.upper()),
                max_size=1000000000)

            file_handler.formatter = self.user_handler_log_formatter
            self.logger.handlers.append(file_handler)

        return getattr(self.logger, level)
Example #9
    def _setup_file_log(self):
        """Add a file log handler."""
        file_path = os.path.expandvars(
            fs.abspath(self.app.config.get('log', 'file')))
        log_dir = os.path.dirname(file_path)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
            fmt_string = self._meta.debug_format
        else:
            fmt_string = self._meta.file_format

        if self.app.config.get('log', 'rotate'):
            from logbook import RotatingFileHandler
            file_handler = RotatingFileHandler(
                file_path,
                max_size=int(self.app.config.get('log', 'max_bytes')),
                backup_count=int(self.app.config.get('log', 'max_files')),
                format_string=fmt_string,
                level=logbook.lookup_level(self.get_level()),
                bubble=True,
            )
        else:
            from logbook import FileHandler
            file_handler = FileHandler(
                file_path,
                format_string=fmt_string,
                level=logbook.lookup_level(self.get_level()),
                bubble=True,
            )

        self._file_handler = file_handler
        self.backend.handlers.append(file_handler)
Example #10
    def __init__(self,
                 log_folder,
                 log_name=None,
                 max_size=100,
                 backup_count=10):
        """
        log_folder:     日志文件夹
        log_name:       日志文件名称
        max_size:       单个日志文件的大小,单位 MB
        backup_count:   总备份数量,默认为 5
        log_path:       日志文件全路径

        注意:所有 handler 中的 bubble 表示记录是否给下个 handler 用。
        """
        # 设置日志信息时间的格式
        set_datetime_format('local')

        self.log_folder = log_folder
        self.log_name = str(log_name) if log_name else 'pms'
        self.log_path = self.__file_path()

        # make sure the log folder exists, creating it if necessary
        self.__check_path()

        self.log_ = Logger(self.log_name.split('.')[0])
        self.log_.handlers.append(
            RotatingFileHandler(filename=self.log_path,
                                mode='a',
                                level='INFO',
                                max_size=max_size * 1024**2,
                                backup_count=backup_count,
                                bubble=True))
        self.log_.handlers.append(ColorizedStderrHandler(bubble=False))
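Assuming the class above is called something like RotatingLogger (the actual class name is not shown), usage could look like the sketch below; messages should end up both in the rotating file and, via the second handler, on stderr with colors:

# hypothetical usage; the class name RotatingLogger is assumed
logger = RotatingLogger(log_folder='/var/log/pms', log_name='pms.log',
                        max_size=50, backup_count=5)
logger.log_.info('service started')
logger.log_.error('something went wrong')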
Example #11
def setup_logging(config):
    log_file = os.path.join(config['daemon']['app_path'],
                            config['daemon']['log']['file'])
    # if running in debug mode, disable log rotation because it makes
    # things confusing
    if config['daemon']['debug']:
        log_handler = FileHandler(log_file)
    else:
        max_size = config['daemon']['log']['rotate_size']
        backup_count = config['daemon']['log']['rotate_count']
        log_handler = RotatingFileHandler(log_file,
                                          max_size=max_size,
                                          backup_count=backup_count)
    log_handler.push_application()
    log = Logger('edgy_crits')
    return log
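A sketch of the nested config dict this function expects, based only on the keys it reads (values are illustrative):

# illustrative config matching the keys read by setup_logging()
config = {
    'daemon': {
        'app_path': '/var/lib/edgy_crits',
        'debug': False,
        'log': {
            'file': 'edgy_crits.log',
            'rotate_size': 10 * 1024 * 1024,  # bytes per log file
            'rotate_count': 5,
        },
    },
}

log = setup_logging(config)
log.info('daemon started')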
Example #12
def setup_logger(conf):
    """ setup logger for app

    take conf, and set it.
    :param conf: a dict of logbook conf.

    """
    level_dict = {
        'notset': 9,
        'debug': 10,
        'info': 11,
        'warning': 12,
        'error': 13,
        'critical': 14
    }
    debug = conf['debug']
    logfile = conf['logfile']
    backup_count = conf['backup_count']
    max_size = conf['max_size']
    format_string = conf['format_string']
    level = level_dict[conf['level']]

    if debug:
        StreamHandler(sys.stdout, format_string=format_string,
                      level=level).push_application()
    else:

        full_log_path = os.path.abspath(logfile)
        dir_path = os.path.dirname(full_log_path)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        RotatingFileHandler(logfile,
                            mode='a',
                            encoding='utf-8',
                            level=level,
                            format_string=format_string,
                            delay=False,
                            max_size=max_size,
                            backup_count=backup_count,
                            filter=None,
                            bubble=True).push_application()
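For reference, a conf dict with the keys this function reads might look like this (values are illustrative; 'level' must be one of the keys of level_dict):

conf = {
    'debug': False,
    'logfile': 'logs/app.log',
    'backup_count': 5,
    'max_size': 5 * 1024 * 1024,  # bytes per log file
    'format_string': '[{record.time}] {record.level_name}: {record.message}',
    'level': 'info',
}

setup_logger(conf)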
Example #13
def initialize(eventlog_file=None):
    """
    Initialize the analytics output. This will cause analytics events to be output to either a file or stdout.

    If this function is not called, analytics events will not be output. If it is called with a filename, the events
    will be output to that file. If it is called with 'STDOUT' or None, the events will be output to stdout.

    :param eventlog_file: The filename to output events to, 'STDOUT' to output to stdout, None to disable event logging
    :type eventlog_file: str | None
    """
    global _analytics_logger, _eventlog_file

    _eventlog_file = eventlog_file
    if not eventlog_file:
        _analytics_logger = None
        return

    if eventlog_file.upper() == 'STDOUT':
        event_handler = StreamHandler(sys.stdout)
    else:
        fs.create_dir(os.path.dirname(eventlog_file))
        previous_log_file_exists = os.path.exists(eventlog_file)

        event_handler = RotatingFileHandler(
            filename=eventlog_file,
            max_size=Configuration['max_eventlog_file_size'],
            backup_count=Configuration['max_eventlog_file_backups'],
        )
        if previous_log_file_exists:
            # force starting a new eventlog file on application startup
            event_handler.perform_rollover()

    # only output the raw log message -- no timestamp or log level
    event_handler.format_string = '{record.message}'

    # enable logging to the event_handler with the event() method
    handler = TaggingHandler(
        {'event': event_handler},
        bubble=True,
    )
    handler.push_application()

    _analytics_logger = TaggingLogger('analytics', ['event'])
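After initialize() has run, analytics events are presumably emitted through _analytics_logger.event(...): the TaggingLogger created above gets an event() method for the 'event' tag, and the TaggingHandler routes those records to event_handler. A hedged sketch of a small wrapper (the module's real helper may look different):

import json


def record_event(tag, **event_data):
    """Sketch: emit one analytics event as a JSON line, if event logging is enabled."""
    if _analytics_logger is None:
        return
    event_data['type'] = tag
    _analytics_logger.event(json.dumps(event_data, sort_keys=True))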
Example #14
def setup_log(conf):
    """ setup logger for app """
    debug = conf['debug']

    logfile = conf['logfile']
    backup_count = conf['backup_count']
    max_size = conf['max_size']
    format_string = conf['format_string']

    if debug:
        StreamHandler(sys.stdout, format_string=format_string).push_application()
    else:

        full_log_path = os.path.abspath(logfile)
        dir_path = os.path.dirname(full_log_path)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)

        RotatingFileHandler(logfile, mode='a', encoding='utf-8',
                            level=0, format_string=format_string, delay=False, max_size=max_size,
                            backup_count=backup_count, filter=None, bubble=True).push_application()
Example #15
def setup_logger(conf):
    """
    setup logbook
    :param conf:  toml[logging]
    :return: None
    """
    console = conf['console']  # console output
    console_level = conf['console_level']  # choose console log level to print
    file = conf['file']  # local log file output
    file_level = conf['file_level']  # choose log file level to save
    logfile = conf['log_file']  # local log file save position
    backup_count = conf['backup_count']  # count of local log files
    max_size = conf['max_size']  # size of each local log file
    format_string = conf['format_string']  # log message format
    # open console print
    if console:
        StreamHandler(sys.stdout,
                      level=console_level,
                      format_string=format_string,
                      bubble=True).push_application()
    # open local log file output
    if file:
        dir_path = os.path.dirname(logfile)
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        RotatingFileHandler(logfile,
                            mode='a',
                            encoding='utf-8',
                            level=file_level,
                            format_string=format_string,
                            delay=False,
                            max_size=max_size,
                            backup_count=backup_count,
                            filter=None,
                            bubble=True).push_application()

    return None
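A sketch of the toml [logging] table this function expects, shown as the dict it would be parsed into (values are illustrative):

conf = {
    'console': True,
    'console_level': 'INFO',
    'file': True,
    'file_level': 'DEBUG',
    'log_file': 'logs/app.log',
    'backup_count': 10,
    'max_size': 10 * 1024 * 1024,  # bytes per log file
    'format_string': '[{record.time}] {record.level_name}: {record.message}',
}

setup_logger(conf)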
Example #16
def log_setup():
    log = RotatingFileHandler(applicationconfig.LOG_PATH,
                              max_size=104857600,
                              backup_count=5)
    log.push_application()
Example #17
from configparser import ConfigParser
from time import sleep
import traceback

import tweepy
import ujson as json
from logbook import Logger, RotatingFileHandler
from redis import StrictRedis
from requests import ConnectionError, Timeout
from requests.packages.urllib3.exceptions import ProtocolError, ReadTimeoutError

from tasks_workers import find_keywords_and_groups

RotatingFileHandler("twitter.log", backup_count=5).push_application()
log = Logger("main")
log.info("Started")

redis = StrictRedis()


class StreamListener(tweepy.StreamListener):
    """Tweepy will continuously receive notices from Twitter and dispatches
    them to one of the event handlers.
    """
    def __init__(self, api):
        self.api = api

    def on_status(self, status):
        """Handle arrival of a new tweet."""
        j = filter_tweet(clean_tweet(status._json))
        redis.set("t:" + j["id_str"], json.dumps(j))
Example #18
File: log.py Project: mnms/LTCLI
stream_handler.push_application()

# for rolling file log
p = os.environ['FBPATH']
if not os.path.isdir(p):
    os.makedirs(p)
file_path = os.path.expanduser(os.path.join(p, 'logs'))
if os.path.isdir(file_path):
    backup_count = 7
    max_size = 1024 * 1024 * 1024  # 1Gi
    file_level = DEBUG
    each_size = max_size / (backup_count + 1)
    filename = os.path.join(file_path, 'ltcli-rotate.log')
    rotating_file_handler = RotatingFileHandler(filename=filename,
                                                level=file_level,
                                                bubble=True,
                                                max_size=each_size,
                                                backup_count=backup_count)
    rotating_file_handler.format_string = formatter['file']
    rotating_file_handler.push_application()
    logger.debug('start logging on file: {}'.format(filename))
else:
    try:
        os.mkdir(file_path)
    except Exception:
        logger.error("CreateDirError: {}".format(file_path))
        msg = message.get('error_logging_in_file')
        logger.warning(msg)


def set_level(level):
Example #19
from hashlib import sha256
from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError
from logbook import Logger, RotatingFileHandler
from botocore.exceptions import ClientError
import os
import yaml
import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq
import s3fs
import sys

app_name = sys.argv[1]

RotatingFileHandler("{}_etl.log".format(app_name)).push_application()
log = Logger("{}_etl".format(app_name))

try:
    with open(
            os.path.join(os.path.dirname(__file__),
                         "{}_settings.yml".format(app_name))) as etl_settings:
        settings = yaml.safe_load(etl_settings)
except FileNotFoundError:
    log.error(
        "No settings file present. Please add {}_settings.yml and try again.".
        format(app_name))
    sys.exit(1)

dest_file = "{}_user_map.parquet".format(app_name)
Example #20
        elapsed = end_time - start_time
        pom_seconds = elapsed.seconds

        info(
            "Ending session. Hit 'i' if this was an interrupt. Anything else to count this pom."
        )
        interrupt_prompt = readchar.readchar()
        if interrupt_prompt.lower() != 'i':
            finished_one_pom = start_time + timedelta(minutes=pom_length)
            if datetime.now() > finished_one_pom:
                completed_poms = completed_poms + 1

        info(f"Pom seconds: {pom_seconds} Finished poms: {completed_poms}")

        Pom['end_time'] = str(end_time)
        Pom['elapsed'] = str(elapsed)
        Pom['completed'] = completed_poms

        Pomodoros.append(Pom)
        with open(poms_file, 'w') as pfile:
            json.dump(Pomodoros, pfile)


if __name__ == '__main__':
    RotatingFileHandler("Pompom.log").push_application()
    StreamHandler(sys.stderr, level='INFO', bubble=True).push_application()

    args = parse_arguments()
    main(args.short_break_time, args.long_break_time)
Example #21
    def __init__(self, log_file):
        super().__init__()
        self.handlers.append(RotatingFileHandler(log_file, bubble=True))
        self.handlers.append(StreamHandler(sys.stdout))
Example #22
now = datetime.now()
date_suffix = now.strftime('%Y%m%d')
dir_path = os.path.dirname(os.path.realpath(__file__))

# Read settings_file and load env
try:
    settings = json.load(open(os.path.join(dir_path,
                                           './settings.json')))  # noqa: WPS515
    load_envbash('/edx/app/edxapp/edxapp_env')
except IOError:
    sys.exit('[-] Failed to read settings file')

# Configure logbook logging
log_handler = RotatingFileHandler(
    settings['Logs']['logfile'],
    max_size=int(settings['Logs']['max_size']),
    backup_count=int(settings['Logs']['backup_count']),
    level=int(settings['Logs']['level']),
)
log_handler.push_application()
logger = Logger('mitx_etl')

# Set some needed variables
course_ids = []
exported_courses_folder = settings['Paths']['courses'] + date_suffix + '/'
daily_folder = settings['Paths']['csv_folder'] + date_suffix + '/'


def set_environment_variables():
    """Set some of the read settings as environment variables."""
    os.environ['AWS_ACCESS_KEY_ID'] = settings['AWS']['AWS_ACCESS_KEY_ID']
    os.environ['AWS_SECRET_ACCESS_KEY'] = settings['AWS'][
Example #23
import pywinauto
import os.path
import sys
from pprint import pprint, pformat
from logbook import Logger, RotatingFileHandler, StreamHandler

# print("./{}.log".format(os.path.basename(__file__)))
# send log output to a rotating log file (create the handler & push it)
RotatingFileHandler("./{}.log".format(os.path.basename(__file__)),
                    max_size=10000).push_application()
# send log output to sys.stdout as well
StreamHandler(sys.stdout).push_application()
log = Logger('test')

# log.info("xaaa")
# create a new Application object
app = pywinauto.Application()
# look up windows via find_windows
handles = pywinauto.findwindows.find_windows(title_re=".+BitComet.+")
log.debug("matched main window handle(s): {}".format(list(map(lambda x: hex(x), handles))))
handle = handles[0]
# create a new Application object, connecting it to this handle
# app = pywinauto.Application().connect(handle=handle)
# top_window = app.window()
child_handles = pywinauto.findwindows.find_windows(title_re="splitter",
                                                   top_level_only=False,
                                                   parent=handle)
log.debug("method 1: matched child window handle(s): {}".format(
    list(map(lambda x: hex(x), child_handles))))

# method 2
Example #24
def log_setup():
    log = RotatingFileHandler(applicationconfig.LOG_PATH, max_size=104857600, backup_count=5)
    log.push_application()
Example #25
import os
import sys
from hashlib import sha256

import pandas as pd
import pyarrow as pa
import s3fs
import yaml
from botocore.exceptions import ClientError
from logbook import Logger, RotatingFileHandler
from pyarrow import parquet as pq

from sqlalchemy import create_engine
from sqlalchemy.exc import OperationalError

app_name = sys.argv[1]

RotatingFileHandler('{0}_etl.log'.format(app_name)).push_application()
log = Logger('{0}_etl'.format(app_name))

try:
    with open(
            os.path.join(os.path.dirname(__file__),
                         '{0}_settings.yml'.format(app_name))) as etl_settings:
        settings = yaml.safe_load(etl_settings)
except FileNotFoundError:
    log.error(
        'No settings file present. Please add {0}_settings.yml and try again.'.
        format(app_name))
    sys.exit(1)

dest_file = '{0}_user_map.parquet'.format(app_name)
Example #26
USER_DOCS_DIR = user_documents_dir()
ROGAME_PATH = USER_DOCS_DIR / Path("My Games\\Rising Storm 2\\ROGame")
CACHE_DIR = ROGAME_PATH / Path("Cache")
PUBLISHED_DIR = ROGAME_PATH / Path("Published")
LOGS_DIR = ROGAME_PATH / Path("Logs")
WW_INT_PATH = ROGAME_PATH / Path("Localization\\INT\\WinterWar.int")
WW_INI_PATH = ROGAME_PATH / Path("Config\\ROGame_WinterWar.ini")
ROUI_INI_PATH = ROGAME_PATH / Path("Config\\ROUI.ini")
SCRIPT_LOG_PATH = LOGS_DIR / Path("LaunchWinterWar.log")
SWS_WW_CONTENT_PATH = Path("steamapps\\workshop\\content\\418460\\") / str(WW_WORKSHOP_ID)

logger = Logger(__name__)
if LOGS_DIR.exists():
    logbook.set_datetime_format("local")
    _rfh_bubble = not hasattr(sys, "frozen")
    _rfh = RotatingFileHandler(SCRIPT_LOG_PATH, level="INFO", bubble=_rfh_bubble)
    _rfh.format_string = (
        "[{record.time}] {record.level_name}: {record.channel}: "
        "{record.func_name}(): {record.message}"
    )
    _rfh.push_application()
    logger.handlers.append(_rfh)

# Check if running as PyInstaller generated frozen executable.
FROZEN = hasattr(sys, "frozen")

# No console window in frozen mode.
if FROZEN:
    logger.info("not adding stdout logging handler in frozen mode")
else:
    _sh = StreamHandler(sys.stdout, level="INFO")
Example #27
    metavar="FILE",
)
args = parser.parse_args()
settings_file = args.settings_file

# Read settings_file
config = ConfigParser(interpolation=ExtendedInterpolation())
try:
    config.read(settings_file)
except IOError:
    sys.exit("[-] Failed to read settings file")

# Configure logbook logging
log_handler = RotatingFileHandler(
    config["Logs"]["logfile"],
    max_size=int(config["Logs"]["max_size"]),
    backup_count=int(config["Logs"]["backup_count"]),
    level=int(config["Logs"]["level"]),
)
log_handler.push_application()
logger = Logger(__name__)

# Get Computer name
computer_name = os.environ["COMPUTERNAME"]


def set_environment_variables():
    """
    Set some of the read settings as environment variables.
    """
    os.environ["AWS_ACCESS_KEY_ID"] = config["AWS"]["AWS_ACCESS_KEY_ID"]
    os.environ["AWS_SECRET_ACCESS_KEY"] = config["AWS"]["AWS_SECRET_ACCESS_KEY"]