Example no. 1
0
def _create_logger(log_dir_path, logger_name, log_file_enabled, log_params):
    logger = logging.getLogger(logger_name)

    # Setup format
    formatter = Formatter(fmt='%(asctime)s %(levelname)7s: %(message)s', datefmt='%Y/%m/%d %p %I:%M:%S',)

    # Enable console output
    console = logging.StreamHandler()
    console.level = log_params['level']
    console.formatter = formatter
    logger.addHandler(console)

    # Enable file output with rotation
    if log_file_enabled:
        log_file_path = os.path.join(log_dir_path, logger_name + ".log")
        file_handler = RotatingFileHandler(
            filename=log_file_path,
            mode='a',
            maxBytes=log_params['max_bytes'],
            backupCount=log_params['backup_count']
        )
        file_handler.level = log_params['level']
        file_handler.formatter = formatter
        logger.addHandler(file_handler)

    logging.getLogger().setLevel(logging.DEBUG)

    return logger
Example no. 2
0
def init_logging(bot_name,
                 path=None,
                 level=logging.INFO,
                 encoding="gbk",
                 format_string=_FORMATTER,
                 **kwargs):
    """Attach console and rotating-file handlers to the "scrapy" logger.

    Args:
        bot_name: Used to build the log file name ``scrapy-<bot_name>.log``.
        path: Optional directory for the log file; current dir when omitted.
        level: Numeric logging level, or a level name such as ``"info"``.
        encoding: Log file encoding (default ``"gbk"``).
        format_string: Format passed to ``logging.Formatter``.
        **kwargs: Optional ``maxBytes``, ``backupCount`` and ``delay``
            overrides for the module-level rotation defaults.
    """
    global _MAX_BYTES, _BACKUP_COUNT, _FILE_DELAY

    # Accept a level name ("info", "DEBUG", ...) as well as a number.
    if isinstance(level, str):
        level = getattr(logging, level.upper())

    # `in kwargs` — no need for .keys().
    if "maxBytes" in kwargs:
        _MAX_BYTES = kwargs["maxBytes"]
    if "backupCount" in kwargs:
        _BACKUP_COUNT = kwargs["backupCount"]
    if "delay" in kwargs:
        _FILE_DELAY = kwargs["delay"]

    fn = "scrapy-{}.log".format(bot_name)
    ph = join(path, fn) if path else fn

    formatter = logging.Formatter(format_string)

    ch = logging.StreamHandler(sys.stdout)
    ch.name = "ext_ch"
    ch.setLevel(level)
    ch.setFormatter(formatter)

    fh = RotatingFileHandler(
        filename=ph,
        maxBytes=_MAX_BYTES,
        backupCount=_BACKUP_COUNT,
        encoding=encoding,
        delay=_FILE_DELAY,
    )
    fh.name = "ext_fh"
    fh.setLevel(level)  # setLevel() instead of assigning .level directly
    fh.setFormatter(formatter)

    scrapy_logger = logging.getLogger("scrapy")
    existing = {h.name for h in scrapy_logger.handlers}
    # BUG FIX: the console handler was created and configured but never
    # attached to any logger; add it with the same name-based
    # de-duplication guard already used for the file handler.
    if ch.name not in existing:
        scrapy_logger.addHandler(ch)
    if fh.name not in existing:
        scrapy_logger.addHandler(fh)
Example no. 3
0
def init_logger(app: Flask):
    """
    Initialize logging for the given Flask application.

    Attaches a stdout handler to both the app logger and SQLAlchemy's
    engine logger; outside debug mode also attaches a rotating file
    handler whose path comes from ``app.config['API_LOGFILE']``.

    @param app: the Flask app
    """
    sqlalchemy_logger = logging.getLogger('sqlalchemy.engine')

    log_level = logging.INFO
    if app.config['LOGLEVEL']:
        # logging.getLevelName() maps a level *name* back to its numeric
        # value -- the public API, replacing the private
        # logging._nameToLevel dict used previously.
        log_level = logging.getLevelName(app.config['LOGLEVEL'])

    app.logger.setLevel(log_level)
    # SQLAlchemy's INFO output is too verbose, so raise its threshold one
    # step -- except at DEBUG, where full verbosity is intended.
    if log_level == logging.DEBUG:
        sqlalchemy_logger.setLevel(log_level)
    else:
        sqlalchemy_logger.setLevel(log_level + 10)

    # One formatter shared by both handlers (was duplicated inline).
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s '
        '[in %(pathname)s:%(lineno)d]'
    )

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(formatter)

    app.logger.addHandler(stdout_handler)
    sqlalchemy_logger.addHandler(stdout_handler)

    # If we are not in debug mode, also persist logs to a rotating file.
    if not app.debug:
        # NOTE(review): maxBytes=500 rotates after ~500 bytes, i.e. every
        # few records -- confirm this tiny size is intentional.
        file_handler = RotatingFileHandler(app.config['API_LOGFILE'],
                                           encoding='utf-8', maxBytes=500)
        file_handler.setLevel(log_level)  # setLevel(), not .level =
        file_handler.setFormatter(formatter)

        app.logger.addHandler(file_handler)
        sqlalchemy_logger.addHandler(file_handler)

    app.logger.info('Logger correctly set')
    app.logger.info('Encoding : ' + getpreferredencoding())
Example no. 4
0
    def __init__(
            self,
            name=sys.argv[0],  # type: Optional[str]
            *,
            stdout=None,  # type: Optional[bool]
            filepath=None,  # type: Optional[str]
            level='',  # type: str
            propagate=False  # type: bool
    ):
        # type: (...) -> None
        """
        Args:
            name: Set Logger name. When you want to use root logger, set None.
            stdout: Set True when needing stdout.
            filepath: Set file path to output logs (rotating, 100 kB x 100).
            level: Choose from `Logger.LEVELS`.
            propagate: Value assigned to ``logger.propagate`` when any
                configuration is applied.

        Exceptions:
            TypeError: Log level is empty when setting some configurations,
                       because empty level would make bug.
        """

        # BUG FIX: getLogger(str(None)) returns a logger literally named
        # "None", not the root logger the docstring promises.  Pass None
        # through so getLogger(None) yields the root logger.
        self._logger = getLogger(None if name is None else str(name))

        if not level and (stdout or filepath):
            raise TypeError("Set log level when setting some configurations")
        elif level:
            self._logger.setLevel(self.LEVELS[level])
            # Assigned once here; it was previously re-assigned (to the
            # same value) after each handler was attached.
            self._logger.propagate = propagate

        # `x is not None and x` collapses to plain truthiness.
        if stdout:
            stdout_handler = SH(sys.stdout)
            stdout_handler.setFormatter(Formatter(self.LOGFMT))
            stdout_handler.setLevel(self.LEVELS[level])
            self._logger.addHandler(stdout_handler)

        if filepath:
            file_handler = RFH(filepath, 'a+', 100000, 100)
            file_handler.setFormatter(Formatter(self.LOGFMT))
            # setLevel() instead of assigning .level directly.
            file_handler.setLevel(self.LEVELS[level])
            self._logger.addHandler(file_handler)
import time
import splunk.entity as entity
import csv
import logging
from logging.handlers import RotatingFileHandler

# Module-level logger for the webhook alert action.  Handler setup is
# best-effort: the Splunk log directory may be unavailable (e.g. when the
# script runs outside a Splunk install), and logging must never break the
# alert action itself.
logger = logging.getLogger('ts_webhook_alert')
logger.setLevel(logging.DEBUG)
try:
    # NOTE(review): `os` is used here but is not in the visible import
    # block -- confirm it is imported earlier in the file.
    fh = RotatingFileHandler('%s/var/log/splunk/ts_webhook.log' %
                             os.environ['SPLUNK_HOME'],
                             backupCount=3)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    fh.setLevel(logging.DEBUG)  # setLevel() instead of assigning .level
    logger.addHandler(fh)
except Exception:
    # Deliberate silent fallback: without a file handler, records are
    # simply dropped (or handled by ancestors) rather than crashing.
    pass


def gunzip(gzfile):
    """
    Uncompress the results file and sanitize it for import.
    Splunk results file by default contains columns which don't play nice with TS import
    """

    logger.info("Processing results file %s" % gzfile)
    if gzfile.endswith(".gz"):
        results_file = gzip.open(gzfile, 'rb')
    else:
Example no. 6
0
import json

from config import windowsoptions
from login import login
from effects import *
from childpages import *
from guiutil import set_skin, set_bg
import utildialog


# Main log is written to log/config.log (rotating, 10 MiB x 100 backups).
# NOTE(review): the original comment said log/ifpms.log, which does not
# match the path below -- confirm which is intended.
logging.root.setLevel(logging.INFO)
# NOTE(review): propagate has no effect on the root logger (it has no
# parent); kept for fidelity with the original configuration.  It was
# previously assigned twice; once is enough.
logging.root.propagate = 0
loghandler = RotatingFileHandler(os.path.join("log", "config.log"),
                                 maxBytes=10 * 1024 * 1024, backupCount=100)
loghandler.setFormatter(logging.Formatter(
    '%(asctime)s %(levelname)8s [%(filename)16s:%(lineno)04s] %(message)s'))
loghandler.setLevel(logging.INFO)  # setLevel() instead of assigning .level
logging.root.addHandler(loghandler)
logger = logging.root


class MetroWindow(QtGui.QWidget):

    def __init__(self, parent=None):
        # Initialize the metro-style main window.
        # parent: optional parent QWidget (passed through to QWidget).
        super(MetroWindow, self).__init__(parent)

        # Page identifiers and their Chinese display labels, read from the
        # global `windowsoptions` configuration mapping.
        self.page_tag = windowsoptions['mainwindow']['centralwindow']['page_tag']
        self.page_tag_zh = windowsoptions['mainwindow']['centralwindow']['page_tag_zh']
        self.initUI()

    def initUI(self):