Example #1
def setup():
    """Setup for the startstop logger

    This configures the startstop logger, adding the handlers and
    setting the log levels.

    """
    logger.setLevel(level=logging.DEBUG)

    # Make sure the directory exists
    LOG_DIRNAME = '../../persistent/logs/startstop/'
    if not os.access(LOG_DIRNAME, os.F_OK):
        os.makedirs(LOG_DIRNAME)
    LOG_FILENAME = os.path.join(LOG_DIRNAME, 'startstop')

    # Remove any existing handlers
    logger.handlers = []

    # Use UTC time
    logging.Formatter.converter = time.gmtime

    # Add file handler
    handler = TimedRotatingFileHandler(LOG_FILENAME, when='midnight',
                                       backupCount=14, utc=True)
    handler.setLevel(level=logging.DEBUG)
    handler.setFormatter(logging.Formatter('%(asctime)s %(name)s.%(funcName)s.'
                                           '%(levelname)s: %(message)s',
                                           '%Y-%m-%d %H:%M:%S'))
    logger.addHandler(handler)

    # Add handler which prints to the screen
    handler = logging.StreamHandler()
    handler.setLevel(level=logging.INFO)
    handler.setFormatter(logging.Formatter('%(message)s'))
    logger.addHandler(handler)
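Note that the example above pins UTC in two separate places: logging.Formatter.converter = time.gmtime makes every formatter render its timestamps in UTC, while utc=True on the handler makes the midnight rollover itself be computed in UTC. A minimal standalone sketch of the same idea (the file name is a stand-in):

import logging
import time
from logging.handlers import TimedRotatingFileHandler

# Render all formatted timestamps in UTC (class-level, affects every Formatter).
logging.Formatter.converter = time.gmtime

logger = logging.getLogger("utc_demo")
logger.setLevel(logging.DEBUG)

# Compute the midnight rollover in UTC as well, not in local time.
handler = TimedRotatingFileHandler("utc_demo.log", when="midnight", utc=True)
handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s: %(message)s"))
logger.addHandler(handler)

logger.info("timestamps and the rotation schedule both use UTC")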
Example #2
def setup_logger(name=__name__, cfg_path="."):
    """setup logging

    """
    if not os.path.isdir(cfg_path):
        os.makedirs(cfg_path)
    LOGFILE = os.path.join(cfg_path, "placement.log")
    commandslog = TimedRotatingFileHandler(LOGFILE, when="D", interval=1, backupCount=7)

    commandslog.setLevel("DEBUG")
    commandslog.setFormatter(
        logging.Formatter(
            "[%(levelname)-4s: %(asctime)s, " "%(filename)s:%(lineno)d] %(message)s", datefmt="%m-%d %H:%M:%S"
        )
    )

    logger = logging.getLogger("")
    logger.setLevel("DEBUG")

    no_filter = os.environ.get("PLACEMENT_NOFILTER", None)
    if no_filter is None:
        f = logging.Filter(name="placement")
        commandslog.addFilter(f)

    logger.addHandler(commandslog)

    return logger
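For context, the logging.Filter(name="placement") attached above makes the file handler emit only records coming from the "placement" logger or its children; everything else reaching the root logger is dropped by that handler unless PLACEMENT_NOFILTER is set. A minimal sketch of that filter behaviour, using only the standard library:

import logging

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(name)s: %(message)s"))
# Only records from the "placement" logger (and its descendants) pass this filter.
handler.addFilter(logging.Filter(name="placement"))

root = logging.getLogger("")
root.setLevel(logging.DEBUG)
root.addHandler(handler)

logging.getLogger("placement.scheduler").info("shown: matches the filter")
logging.getLogger("thirdparty.http").info("dropped by this handler's filter")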
Example #3
    def _app_logging(self):
        logfmt = logging.Formatter('%(asctime)sUTC - %(threadName)s - %(levelname)s - %(message)s')
        logfmt.converter = time.gmtime
        if 'file:logging' in self._config.sections():
            log_type = 'file:logging'
            app_log = self.config.get(log_type, 'file')
            app_retention = self.config.getint(log_type, 'retention')
            log_handler = TimedRotatingFileHandler(app_log, 'd', 1, app_retention)
            log_handler.setFormatter(logfmt)
            app_level = self.config.get(log_type, 'level')
            log_handler.setLevel(app_level)
            self.log.addHandler(log_handler)
            self.log.debug("file logger is up")
        if 'syslog:logging' in self._config.sections():
            log_type = 'syslog:logging'
            facility = self.config.get(log_type, 'syslog_facility')
            address_string = self.config.get(log_type, 'address')
            address_split = address_string.split(":")
            if len(address_split) == 2:
                # "host:port" becomes the (host, port) tuple SysLogHandler expects
                address = (address_split[0], int(address_split[1]))
            else:
                # a single value is treated as a Unix socket path
                address = address_split[0]
            log_handler = SysLogHandler(address=address, facility=facility)
            app_level = self.config.get(log_type, 'level')
            log_handler.setLevel(app_level)
            self.log.addHandler(log_handler)
            self.log.debug("syslog logger is up")
Example #4
def set_logger_params(app):
    global _logger
    _logger = app.logger
    handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='D', interval=1)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(app.config['LOG_FORMAT']))
    _logger.addHandler(handler)
Example #5
def setup_logger(config):
    from logging.handlers import TimedRotatingFileHandler
    global LOGGER

    # Log file rotation scheduling
    when, interval, backupCount = config.LOG_ROTATION_TIME, \
        config.LOG_ROTATION_INTERVAL, config.LOG_BACKUP_COUNT

    # Defensive assertions
    assert when.lower() in ('s', 'm', 'h', 'd', 'midnight',
                            'w0', 'w1', 'w2', 'w3', 'w4', 'w5', 'w6',)
    assert interval > 0
    assert backupCount > 0

    if not os.path.exists(config.LOG_DIR):
        os.mkdir(config.LOG_DIR)
    log_file_path = os.path.join(config.LOG_DIR, config.LOG_FILENAME)

    formatter = logging.Formatter(config.LOG_FORMAT_STR)

    file_handler = TimedRotatingFileHandler(
        log_file_path,
        when=when,
        interval=interval,
        backupCount=backupCount)
    file_handler.setLevel(config.FILE_LOG_LEVEL)
    file_handler.setFormatter(formatter)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(config.CONSOLE_LOG_LEVEL)
    console_handler.setFormatter(formatter)

    LOGGER.addHandler(file_handler)
    LOGGER.addHandler(console_handler)
    LOGGER.setLevel(config.LOG_LEVEL)
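A hypothetical config object that would satisfy the assertions above, assuming it lives in the same module as setup_logger and its module-level LOGGER; the attribute names mirror exactly what the function reads, the values are only examples:

import logging

class DemoConfig:
    # Rotation schedule: rotate daily at midnight, keep a week of backups.
    LOG_ROTATION_TIME = "midnight"
    LOG_ROTATION_INTERVAL = 1
    LOG_BACKUP_COUNT = 7
    # File locations.
    LOG_DIR = "logs"
    LOG_FILENAME = "app.log"
    # Format and levels.
    LOG_FORMAT_STR = "%(asctime)s %(levelname)s %(name)s: %(message)s"
    FILE_LOG_LEVEL = logging.DEBUG
    CONSOLE_LOG_LEVEL = logging.INFO
    LOG_LEVEL = logging.DEBUG

LOGGER = logging.getLogger("demo")   # the module-level LOGGER the function updates
setup_logger(DemoConfig)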
Example #6
def create_app(config_name):
    app = Flask(__name__)
    app.config.from_object(config[config_name])

    bootstrap.init_app(app)
    mongo.init_app(app)
    login_manager.init_app(app)
    babel.init_app(app)
    moment.init_app(app)

    config[config_name].init_app(app)

    login_manager.session_protection = 'strong'
    login_manager.login_view = 'login'

    if not app.debug:
        import logging
        from logging.handlers import TimedRotatingFileHandler
        warn_file_handler = TimedRotatingFileHandler(filename="pyblog.warn.log", when='midnight', interval=1,
                                                     encoding="utf8")
        warn_file_handler.setLevel(logging.WARNING)
        app.logger.addHandler(warn_file_handler)

        error_file_handler = TimedRotatingFileHandler(filename="pyblog.error.log", when='midnight', interval=1,
                                                      encoding="utf8")
        error_file_handler.setLevel(logging.ERROR)
        app.logger.addHandler(error_file_handler)

    return app
Example #7
def setup_logging(debugging=False):

    os.makedirs(CRASH_DIR, exist_ok=True)
    error_handler = logging.handlers.TimedRotatingFileHandler(
        filename=os.path.join(CRASH_DIR, 'crash'), when='s', interval=1, delay=True)
    error_handler.setLevel(logging.ERROR)

    os.makedirs(QUERY_DIR, exist_ok=True)
    query_handler = TimedRotatingFileHandler(
        filename=os.path.join(QUERY_DIR, 'queries'), when='midnight')
    query_handler.setLevel(logging.INFO)

    query_handler.addFilter(query_filter)
    query_handler.setFormatter(logging.Formatter("%(asctime)s: %(filename)s: %(message)s"))

    logger = logging.getLogger()
    logger.addHandler(error_handler)
    logger.addHandler(query_handler)
    if debugging:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.DEBUG)
        console_handler.setFormatter(logging.Formatter("%(levelname)s:%(name)s [%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"))
        logger.addHandler(console_handler)
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    return logger
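A detail worth calling out in the crash handler above: delay=True defers creating the file until the first ERROR-level record is actually emitted, so no empty crash file is written on startup. A minimal sketch of that behaviour (the file name is a stand-in):

import logging
from logging.handlers import TimedRotatingFileHandler

# delay=True: nothing is created on disk yet.
crash_handler = TimedRotatingFileHandler("crash_demo.log", when="s", interval=1, delay=True)
crash_handler.setLevel(logging.ERROR)

log = logging.getLogger("crash_demo")
log.addHandler(crash_handler)

log.error("first error: crash_demo.log is only created now")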
Example #8
def configure_loggers(min_level, log_file, log_format=None):
    DEFAULT_LOG_FORMAT = '%(asctime)s(%(name)s)[%(process)d--%(threadName)s]::'\
        '%(levelname)s - %(funcName)s(%(message)s)'
    blue_pref = '\x1b[' + BLUE
    red_pref = '\x1b[' + RED
    green_pref = '\x1b[' + GREEN
    yellow_pref = '\x1b[' + YELLOW
    suffix = '\x1b[0m'
    COLOR_LOG_FORMAT = '%(asctime)s(' + \
        blue_pref + '%(name)s' + suffix + \
        ')[%(process)d--%(threadName)s]::' + \
        red_pref + '%(levelname)s ' + suffix + '- ' + \
        green_pref + '%(funcName)s' + suffix + \
        yellow_pref + '(%(message)s)' + suffix

    if not os.path.exists(os.path.dirname(log_file)):
        os.makedirs(os.path.dirname(log_file))

    if log_format:
        _format = log_format
    else:
        if "NO_COLORS" in os.environ:
            _format = DEFAULT_LOG_FORMAT
        else:
            _format = COLOR_LOG_FORMAT
    logging.basicConfig(level=min_level, format=_format)

    formatter = logging.Formatter(DEFAULT_LOG_FORMAT)
    file_handler = TimedRotatingFileHandler(log_file, when='midnight')
    file_handler.setLevel(min_level)
    file_handler.setFormatter(formatter)

    logging.getLogger('').addHandler(file_handler)
    logging.getLogger('').setLevel(min_level)
Example #9
  def get_log_handler(self, command):
    """Configure and return log handler.

    :param command: The command to load the configuration for. All options will
      be looked up in the `[COMMAND.command]` section. This is currently only
      used for configuring the file handler for logging. If logging is disabled
      for the command, a :class:`NullHandler` will be returned, else a
      :class:`TimedRotatingFileHandler`.

    """
    section = '%s.command' % (command, )
    path = osp.join(gettempdir(), '%s.log' % (command, ))
    level = lg.DEBUG
    if self.has_section(section):
      key = 'log.disable'
      if self.has_option(section, key) and self.getboolean(section, key):
        return NullHandler()
      if self.has_option(section, 'log.path'):
        path = self.get(section, 'log.path') # Override default path.
      if self.has_option(section, 'log.level'):
        level = getattr(lg, self.get(section, 'log.level').upper())
    file_handler = TimedRotatingFileHandler(
      path,
      when='midnight', # Daily backups.
      backupCount=1,
      encoding='utf-8',
    )
    fmt = '%(asctime)s\t%(name)-16s\t%(levelname)-5s\t%(message)s'
    file_handler.setFormatter(lg.Formatter(fmt))
    file_handler.setLevel(level)
    return file_handler
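As a rough illustration of what the method above reads, take a made-up command name and a hypothetical [distcp.command] section containing log.disable = false, log.path = /tmp/distcp.log and log.level = info (the section name and option keys come from the code, the values do not). get_log_handler('distcp') would then return the equivalent of:

import logging as lg
from logging.handlers import TimedRotatingFileHandler

file_handler = TimedRotatingFileHandler(
  '/tmp/distcp.log',       # 'log.path' overrides the tempdir default
  when='midnight',         # daily backups
  backupCount=1,
  encoding='utf-8',
)
file_handler.setFormatter(
  lg.Formatter('%(asctime)s\t%(name)-16s\t%(levelname)-5s\t%(message)s'))
file_handler.setLevel(lg.INFO)   # 'log.level = info' overrides the DEBUG default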
Example #10
def run():
    """
    Run the server.
    """

    # Set up the logger.
    if not os.path.isdir(os.path.join(script_dir, 'logs')):
        os.makedirs(os.path.join(script_dir, 'logs'))
    # Format the logs.
    formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    # Enable the logs to split files at midnight.
    handler = TimedRotatingFileHandler(
            os.path.join(script_dir, 'logs', 'TorSpider.log'),
            when='midnight', backupCount=7, interval=1)
    handler.setLevel(app.config['LOG_LEVEL'])
    handler.setFormatter(formatter)
    log = logging.getLogger('werkzeug')
    log.setLevel(app.config['LOG_LEVEL'])
    log.addHandler(handler)
    app.logger.addHandler(handler)
    app.logger.setLevel(app.config['APP_LOG_LEVEL'])

    # Set up the app server, port, and configuration.
    port = int(environ.get('PORT', app.config['LISTEN_PORT']))
    addr = environ.get('LISTEN_ADDR', app.config['LISTEN_ADDR'])
    if app.config['USETLS']:
        context = (app.config['CERT_FILE'], app.config['CERT_KEY_FILE'])
        app.run(host=addr, port=port, threaded=True, ssl_context=context)
    else:
        app.run(host=addr, port=port, threaded=True)
Example #11
    def create_logger(self):
        """
        Create a logger object that writes both file and console logs;
        DEBUG-level messages are only printed to the console.
        :return: logger object
        """
        LOG_FILE = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                "logs", "{}.log".format(date.today()))
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s[line:%(lineno)d] - %(message)s',
                                      datefmt='%Y-%m-%d %H:%M:%S')   # log record format
        file_handler = TimedRotatingFileHandler(LOG_FILE, 'D', 1, 0)  # instantiate the file handler
        file_handler.suffix = "{}-%Y-%m-%d.log".format(self.config.Name)
        file_handler.setFormatter(formatter)
        file_handler.setLevel(logging.INFO)        # file log level

        console_handler = logging.StreamHandler()  # console handler
        console_handler.setLevel(logging.DEBUG)    # console log level
        console_handler.setFormatter(formatter)    # console log format

        logger = logging.getLogger(self.config.Name)     # get the logger named after the spider config
        logger.addHandler(file_handler)            # add the file handler
        logger.addHandler(console_handler)         # add the console handler
        logger.setLevel(logging.INFO)              # overall logger level (INFO)

        return logger
Example #12
def create_application():
	global app

	if not config.check():
		return None

	if not os.path.exists(config.get('base', 'cache_dir')):
		os.makedirs(config.get('base', 'cache_dir'))

	app = Flask(__name__)
	app.secret_key = '?9huDM\\H'

	app.teardown_appcontext(teardown_db)

	if config.get('base', 'log_file'):
		import logging
		from logging.handlers import TimedRotatingFileHandler
		handler = TimedRotatingFileHandler(config.get('base', 'log_file'), when = 'midnight')
		handler.setLevel(logging.WARNING)
		app.logger.addHandler(handler)

	from supysonic import frontend
	from supysonic import api

	return app
Example #13
def setup_logging(app, logfile, debug=False):
    cmdslog = TimedRotatingFileHandler(logfile,
                                       when='D',
                                       interval=1,
                                       backupCount=7)
    if debug:
        env = logging.DEBUG
        cmdslog.setFormatter(logging.Formatter(
            "%(name)s: [%(levelname)s] %(filename)s:%(lineno)d - %(message)s"))
    else:
        env = logging.INFO
        cmdslog.setFormatter(logging.Formatter(
            "%(name)s: [%(levelname)s] %(message)s"))

    cmdslog.setLevel(env)

    logger = logging.getLogger(app)
    logger.setLevel(env)
    logger.addHandler(cmdslog)
    if os.path.exists('/dev/log'):
        st_mode = os.stat('/dev/log').st_mode
        if stat.S_ISSOCK(st_mode):
            syslog_h = SysLogHandler(address='/dev/log')
            syslog_h.set_name(app)
            logger.addHandler(syslog_h)

    return _log(app, logger)
Example #14
def create_application():
	global app

	if not config.check():
		return None

	if not os.path.exists(config.get('webapp', 'cache_dir')):
		os.makedirs(config.get('webapp', 'cache_dir'))

	app = Flask(__name__)
	app.secret_key = '?9huDM\\H'

	app.teardown_appcontext(teardown_db)

	if config.get('webapp', 'log_file'):
		import logging
		from logging.handlers import TimedRotatingFileHandler
		handler = TimedRotatingFileHandler(config.get('webapp', 'log_file'), when = 'midnight')
		if config.get('webapp', 'log_level'):
			mapping = {
				'DEBUG':   logging.DEBUG,
				'INFO':    logging.INFO,
				'WARNING': logging.WARNING,
				'ERROR':   logging.ERROR,
				'CRITICAL': logging.CRITICAL
			}
			handler.setLevel(mapping.get(config.get('webapp', 'log_level').upper(), logging.NOTSET))
		app.logger.addHandler(handler)

	from supysonic import frontend
	from supysonic import api

	return app
Example #15
def create_timed_file_handler(level, format, ttl, filename, path):
	'''
	Creates a TimedRotatingFileHandler for the logging module
	that outputs log records to a file. This file will roll over
	given the ttl (time to live), which will create a new log file
	and back up the existing one.

	@param level The logging level of the file handler
	@param format The format of the file output for each LogRecord
	@param ttl The time to live for the log file before it rolls over
	@param filename The name of the log file (e.g. system.log)
	@param path The directory that will contain the log file (e.g. /logs)

	@return A new TimedRotatingFileHandler
	'''
	# Create all the directories in the path
	if not os.path.exists(path):
		os.makedirs(path)

	# Configure the TimedRotatingFileHandler
	file_handler = TimedRotatingFileHandler(path + '/' + filename, ttl)
	file_handler.setLevel(level)
	file_handler.setFormatter(logging.Formatter(format))
	return file_handler
Example #16
class RotatingLog(object):

    # Get a logger with the provided name when initializing
    # this class and use a handler that rotates the logfiles
    # based on time as seen below.
    def __init__(self, logger_name):
        self._logger = logging.getLogger(logger_name)

        # We only want one handler, so only add a handler
        # if there isn't already one configured. 
        if len(self._logger.handlers) == 0:
            # The messages shouldn't be sent to other logs 
            self._logger.propagate = False

            # We only use one logger and don't differentiate
            # between the importance of different messages,
            # and therefore use DEBUG as the only log level.
            self._logger.setLevel(logging.DEBUG)

            # Rotate the log, if not empty, at midnight
            # and save up to 90 days of log-files.
            self._handler = TimedRotatingFileHandler(
                LOGFILE, when = ROTATE_TIME, backupCount = BACKUP_DAYS, encoding = u'UTF-8')
            
            self._handler.setLevel(logging.DEBUG)
            self._handler.setFormatter(logging.Formatter(LOGFORMAT, TIMEFORMAT))
            self._logger.addHandler(self._handler)

    # Write the message, if not empty, to the log-file
    def write(self, message):
        if not message.lstrip().rstrip() == u'':
            self._logger.debug(message)
Example #17
def configure_logging(app_mode, app):
    logHandler = None
    if app_mode == 'DEBUG':
        # create console handler
        logHandler = logging.StreamHandler()
    elif app_mode == 'PROD':
        # create file time rotating handler
        logHandler = TimedRotatingFileHandler(
            filename=os.environ.get('APP_LOG_FILENAME', 'app.log'),
            when='D',
            backupCount=5,
            encoding='UTF-8'
        )
    if logHandler is None:
        return
    logHandler.setLevel(logging.DEBUG)
    logHandler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(name)-10s %(levelname)-7s %(message)s',
        datefmt='%H:%M:%S'))
    # get root logger
    logger = logging.getLogger()
    logger.addHandler(logHandler)
    logger.setLevel(logging.DEBUG)
    app.logger.addHandler(logHandler)
    app.logger.setLevel(logging.DEBUG)
    return
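One thing to be aware of with the pattern above: if app.logger propagates to the root logger, which is the default behaviour for an ordinary logging.Logger, attaching the same handler to both the root logger and app.logger makes each record pass through that handler twice. A standalone illustration with plain loggers:

import logging

shared = logging.StreamHandler()
shared.setFormatter(logging.Formatter("%(name)s: %(message)s"))

root = logging.getLogger()
child = logging.getLogger("app")
root.addHandler(shared)
child.addHandler(shared)
root.setLevel(logging.DEBUG)

# Printed twice: once via child's own handler, once via propagation to root.
child.warning("duplicated line")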
Example #18
def init_logger(name):
    """Initialize the logging system for a given module/process.

        Args:
            name: name of the logger to be initialized.
        Returns:
            logger: the logger object.
    """
    log_file_name = name + '.log'
    formatter     = logging.Formatter('%(asctime)s %(levelname)-8s - %(module)s - %(message)s')

    # prepare file name
    current_directory = os.path.abspath(os.path.dirname(__file__))
    log_directory = os.path.join(current_directory, 'logs')
    if not os.path.exists(log_directory):
        os.makedirs(log_directory)
    file_name = os.path.join(log_directory, log_file_name)

    # create and configure a rotating file handler
    file_handler = TimedRotatingFileHandler(filename=file_name, when='H', interval=8, backupCount=6)
    file_handler.setFormatter(formatter)
    file_handler.setLevel(logging.DEBUG)

    # create logger and add handlers
    logger = logging.getLogger(name)
    logger.addHandler(file_handler)
    logger.setLevel(logging.DEBUG)
    return logger
Example #19
def _setup_task_logger(logger):
    """Configure a task logger to generate site- and task-specific logs."""
    if logger.handlers:  # Already processed
        return

    parts = logger.name.split(".")
    if len(parts) < 4:  # Malformed
        return
    site = parts[2]
    task = parts[3]

    _ensure_dirs(os.path.join(_log_dir, site))

    formatter = Formatter(
        fmt="[%(asctime)s %(levelname)-7s] %(message)s",
        datefmt=_DATE_FORMAT)

    infohandler = TimedRotatingFileHandler(
        os.path.join(_log_dir, site, task + ".log"), "midnight", 1, 30)
    infohandler.setLevel("INFO")

    debughandler = FileHandler(
        os.path.join(_log_dir, site, task + ".log.verbose"), "w")
    debughandler.setLevel("DEBUG")

    errorhandler = RotatingFileHandler(
        os.path.join(_log_dir, site, task + ".err"), maxBytes=1024**2,
        backupCount=4)
    errorhandler.setLevel("WARNING")

    for handler in [infohandler, debughandler, errorhandler]:
        handler.setFormatter(formatter)
        logger.addHandler(handler)
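A usage sketch for the helper above, with an illustrative logger name; _log_dir, _ensure_dirs, _DATE_FORMAT and the handler imports are assumed to come from the surrounding module. A name with at least four dot-separated parts yields parts[2] as the site and parts[3] as the task:

import logging

# Illustrative name: parts[2] == "enwiki" (site), parts[3] == "cleanup" (task).
task_logger = logging.getLogger("bot.tasks.enwiki.cleanup")
task_logger.setLevel(logging.DEBUG)   # the helper itself leaves the logger level alone
_setup_task_logger(task_logger)

# The call attaches three handlers writing under <_log_dir>/enwiki/:
#   cleanup.log          INFO and above, rotated at midnight, 30 backups kept
#   cleanup.log.verbose  DEBUG and above, truncated on each start (mode "w")
#   cleanup.err          WARNING and above, rotated at 1 MiB, 4 backups kept
task_logger.info("written to cleanup.log and cleanup.log.verbose")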
Example #20
def get_logger():
    global logger, _logging_level

    if logger:
        return logger

    logger = logging.getLogger("MongoctlLogger")

    log_file_name="mongoctl.log"
    conf_dir = mongoctl_globals.DEFAULT_CONF_ROOT
    log_dir = utils.resolve_path(os.path.join(conf_dir, LOG_DIR))
    utils.ensure_dir(log_dir)


    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(levelname)8s | %(asctime)s | %(message)s")
    logfile = os.path.join(log_dir, log_file_name)
    fh = TimedRotatingFileHandler(logfile, backupCount=50, when="midnight")

    fh.setFormatter(formatter)
    fh.setLevel(logging.DEBUG)
    # add the handler to the root logger
    logging.getLogger().addHandler(fh)

    global _log_to_stdout
    if _log_to_stdout:
        sh = logging.StreamHandler(sys.stdout)
        std_formatter = logging.Formatter("%(message)s")
        sh.setFormatter(std_formatter)
        sh.setLevel(_logging_level)
        logging.getLogger().addHandler(sh)

    return logger
Example #21
def init_logger(config):
    """
        Initializes and returns a logger object
    """
    LEVELS = {'debug': logging.DEBUG,
              'info': logging.INFO,
              'warning': logging.WARNING,
              'error': logging.ERROR,
              'critical': logging.CRITICAL}

    logfile = config.get('global', 'LOG_FILE')
    loglevel_console = config.get('global', 'LOG_LEVEL_CONSOLE')
    loglevel_file = config.get('global', 'LOG_LEVEL_FILE')

    logger = logging.getLogger("murray")
    logger.setLevel(logging.DEBUG)
    fh = TimedRotatingFileHandler(logfile, when='D', interval=1,
        backupCount=5)
    fh.setLevel(LEVELS[loglevel_file])
    ch = logging.StreamHandler()
    ch.setLevel(LEVELS[loglevel_console])
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - " +
                                  "%(message)s")
    fh.setFormatter(formatter)
    ch.setFormatter(formatter)
    logger.addHandler(fh)
    logger.addHandler(ch)
    return logger
Example #22
def get_logger(logger_name='General'):
    if logger_name not in Logger.manager.loggerDict:
        _logger = logging.getLogger(logger_name)
        _logger.setLevel(logging.DEBUG)
        # handler all
        all_handler = TimedRotatingFileHandler(__all_log_file,
                                           when='midnight',
                                           backupCount=7)
        all_formatter = logging.Formatter(__log_formatter, __date_formatter)
        all_handler.setFormatter(all_formatter)
        all_handler.setLevel(logging.INFO)
        _logger.addHandler(all_handler)
        # handler error
        error_handler = TimedRotatingFileHandler(__error_log_file,
                                           when='midnight',
                                           backupCount=7)
        error_formatter = logging.Formatter(__log_formatter, __date_formatter)
        error_handler.setFormatter(error_formatter)
        error_handler.setLevel(logging.ERROR)
        _logger.addHandler(error_handler)

        if __TESTING__:
            console_handler = logging.StreamHandler()
            console_formatter = logging.Formatter(__simple_formatter)
            console_handler.setFormatter(console_formatter)
            console_handler.setLevel(logging.DEBUG)
            _logger.addHandler(console_handler)

    _logger = logging.getLogger(logger_name)
    return _logger
Example #23
File: app.py Project: willingc/mu
def setup_logging():
    """
    Configure logging.
    """
    if not os.path.exists(LOG_DIR):
        os.makedirs(LOG_DIR)

    # set logging format
    log_fmt = ('%(asctime)s - %(name)s:%(lineno)d(%(funcName)s) '
               '%(levelname)s: %(message)s')
    formatter = logging.Formatter(log_fmt)

    # define log handlers such as for rotating log files
    handler = TimedRotatingFileHandler(LOG_FILE, when='midnight',
                                       backupCount=5, delay=0,
                                       encoding=ENCODING)
    handler.setFormatter(formatter)
    handler.setLevel(logging.DEBUG)

    # set up primary log
    log = logging.getLogger()
    log.setLevel(logging.DEBUG)
    log.addHandler(handler)
    sys.excepthook = excepthook
    print(_('Logging to {}').format(LOG_FILE))
Example #24
def create_app(config_name):
	app = Flask(__name__)
	app.config.from_object(config[config_name])
	config[config_name].init_app(app)
	db.init_app(app)
	from .mainpage import mainpage as main_blueprint
	app.register_blueprint(main_blueprint)

	from .user import user as user_blueprint
	app.register_blueprint(user_blueprint,url_prefix='/user')

	from .admin import admin as admin_blueprint
	app.register_blueprint(admin_blueprint,url_prefix='/admin')

	from .api import api as api_blueprint
	app.register_blueprint(api_blueprint,url_prefix='/api')

	formatter = logging.Formatter('%(name)-12s %(asctime)s level-%(levelname)-8s thread-%(thread)-8d %(message)s')   # prefix format for every log line
	fileTimeHandler = TimedRotatingFileHandler("logmessage", "M", 5, 0)
	fileTimeHandler.suffix = "%Y%m%d.log"  # time format for rotated file names; the default rotated name is filename + "." + suffix, changing that scheme requires patching the logging source
	fileTimeHandler.setFormatter(formatter)
	fileTimeHandler.setLevel(logging.INFO)
	app.logger.addHandler(fileTimeHandler)


	return app
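A note on the suffix override above: when a TimedRotatingFileHandler rolls over, the old file is renamed to the base filename plus "." plus time.strftime(suffix), so the handler above produces names like logmessage.20240101.log every five minutes. A minimal sketch of the same override (names and date are stand-ins):

import logging
from logging.handlers import TimedRotatingFileHandler

# Rotated files become "demo_log." + strftime("%Y%m%d.log"), e.g. demo_log.20240101.log
handler = TimedRotatingFileHandler("demo_log", when="M", interval=5, backupCount=0)
handler.suffix = "%Y%m%d.log"
# Caveat: with a non-zero backupCount, handler.extMatch would also need to match the
# new suffix, otherwise old rotated files are never pruned.
handler.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
logging.getLogger("rotation_demo").addHandler(handler)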
Example #25
def setup_logger():
    logger = logging.getLogger('UploadFarm')
    log_level = LOGGING_LEVELS[os.environ.get('SDV_LOGGING_LEVEL', 'info')]
    logger.setLevel(log_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.DEBUG)

    if not os.path.isdir('logs'):
        os.mkdir('logs')

    log_file = 'logs/sdv.log'
    file_handler = TimedRotatingFileHandler(log_file, when='midnight', interval=1)
    file_handler.setLevel(log_level)

    file_handler.suffix = "%Y%m%d"

    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(name)s - %(message)s')

    console_handler.setFormatter(formatter)
    file_handler.setFormatter(formatter)

    logger.addHandler(console_handler)
    logger.addHandler(file_handler)

    return logger
Example #26
def setup_logging(log_file):
    # TODO: more advanced filters, logging info like when rooms go live to console
    # https://docs.python.org/3/library/logging.config.html#logging-config-dictschema
    log_backup_time = dt_time(tzinfo=TOKYO_TZ)
    log_filter = logging.Filter(name="showroom")

    file_log_handler = TimedRotatingFileHandler(log_file, encoding='utf8',
                                                when='midnight', atTime=log_backup_time)
    file_log_formatter = logging.Formatter(fmt='%(asctime)s %(name)-12s %(levelname)-8s %(threadName)s:\n%(message)s',
                                           datefmt='%m-%d %H:%M:%S')
    file_log_handler.setFormatter(file_log_formatter)
    # leave this in local time?
    file_log_handler.addFilter(log_filter)
    file_log_handler.setLevel(logging.DEBUG)

    console_handler = logging.StreamHandler()
    console_formatter = logging.Formatter(fmt='%(asctime)s %(message)s', datefmt=HHMM_FMT)
    console_formatter.converter = tokyotime

    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(console_formatter)
    console_handler.addFilter(log_filter)

    logger = logging.getLogger('showroom')
    logger.setLevel(logging.DEBUG)
    logger.propagate = False

    # At this moment there shouldn't be any handlers on the showroom logger,
    # but I can't preclude the possibility of there ever being such handlers.
    for handler in (file_log_handler, console_handler):
        if handler not in logger.handlers:
            logger.addHandler(handler)
Example #27
def get_logger(name):
    """ Get the logger for logging events.

        Args:
            name: string. Name of the logger, should be the respective python
                module.

        Returns:
            logger: logger. A logger ready for use.
    """
    # Get the logger.
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)

    # The handler. Create new log file every Sunday at 16:00.
    time = datetime.time(16, 0, 0)
    handler = TimedRotatingFileHandler(LOG_FILE, when='W6', atTime=time)
    handler.setLevel(logging.INFO)

    # The formatter.
    log_string = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(log_string)

    # Set the formatter and handler.
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    return logger
Example #28
    def setupLogging(self):
        LOG_DIR = os.path.dirname(os.path.abspath(sys.argv[0]))+"/log"
        logfile = self.config.get("logging", "logfile")
        print "logging to %s/lampstand.log" % LOG_DIR

        self.logger = logging.getLogger('lampstand')
        logging.getLogger('').setLevel(logging.DEBUG)
        formatter = logging.Formatter('%(asctime)s [%(name)s] %(message)s')

        #console = logging.StreamHandler()
        console = logging.getLogger('').handlers[0]
        console.setLevel(logging.DEBUG)
        console.setFormatter(formatter)
        logging.getLogger('').addHandler(console)

        filename = "%s/lampstand.log" % LOG_DIR
        logfile = TimedRotatingFileHandler(
            filename, when='W0', interval=1, utc=True)
        logfile.setLevel(logging.DEBUG)
        logfile.setFormatter(formatter)
        logging.getLogger('').addHandler(logfile)

        self.logger.debug("Hello Debug")
        self.logger.info("Hello Info")
        self.logger.warning("Hello Warn")
        self.logger.error("Hello Error")
Example #29
def init(app):
    """
    Initialize logging for the application, (only if its not in debug mode)
    """
    if not app.debug:
        from logging import Formatter
        from logging.handlers import SMTPHandler, TimedRotatingFileHandler
        
        # File handler
        file_formatter = Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
        file_handler = TimedRotatingFileHandler(app.config['LOG_FILE_NAME'], when='midnight', backupCount=31)
        file_handler.setFormatter(file_formatter)
        file_handler.setLevel(app.config['LOG_FILE_LEVEL'])
        app.logger.addHandler(file_handler)
        
        # Email handler
        mail_formatter = Formatter('''
Message type:       %(levelname)s
Location:           %(pathname)s:%(lineno)d
Module:             %(module)s
Function:           %(funcName)s
Time:               %(asctime)s

Message:

%(message)s
''')
        mail_handler = SMTPHandler(app.config['LOG_EMAIL_SERVER'], app.config['LOG_EMAIL_SENDER'],
                                   app.config['ADMIN_EMAILS'].split(','), '[%s] Error' % app.config['HOST_DOMAIN'])
        mail_handler.setFormatter(mail_formatter)
        mail_handler.setLevel(app.config['LOG_EMAIL_LEVEL'])
        app.logger.addHandler(mail_handler)
        
Example #30
def main():
    # TODO:  delete this default dict after testing fallback.
    # default_confs = {
    #    "filter_query" : "status eq Active or status eq 'Pending - Renewal'",
    #    "csv_backup_filename_root" : "erras_backup_members_",
    #    "csv_filename_temp" : "erras_members_new.csv",
    #    "csv_filename" : "erras_members.csv",
    #    "apricot_response_root" : "wild_apricot_response_",
    #    "members_log_filename" : "erras_members.log",
    #    "keypad_field_names" : "Keypad",
    #    "rfid_field_names" : "RFID",
    #    "loop_delay" : 500,
    #    "csv_prune_max" : 5,
    #    "json_prune_max" : 10
    # }
    # parser = ConfigParser(default_confs)
    # TODO:  delete the above default dict after testing fallback.
    
    parser = ConfigParser()
    config_file_name = 'erras.ini'
    section_name = "erras"
    # This constructs a file path to the config_file_name in the same directory as the script file.
    config_path = str(pathlib.Path(__file__).with_name(config_file_name))
    with open(config_path) as config_file:
        parser.read_file(config_file)
    
    wa_api_client_id = parser.get(section_name, "wa_api_client_id")
    wa_api_client_secret = parser.get(section_name, "wa_api_client_secret")
    credential_name = parser.get(section_name, "credential_name")
    credential_key = parser.get(section_name, "credential_key")
    api_key = parser.get(section_name, "api_key")
    filter_query = parser.get(section_name, "filter_query", fallback="status eq Active or status eq 'Pending - Renewal'")
    request_url_root = parser.get(section_name, "request_url_root")
    csv_backup_filename_root = parser.get(section_name, "csv_backup_filename_root", fallback="erras_backup_members_")
    csv_filename_temp = parser.get(section_name, "csv_filename_temp", fallback="erras_members_new.csv")
    csv_filename = parser.get(section_name, "csv_filename", fallback="erras_members.csv")
    apricot_response_root = parser.get(section_name, "apricot_response_root", fallback="wild_apricot_response_")
    loop_delay = parser.getint(section_name, "loop_delay", fallback=500)
    csv_prune_max = parser.getint(section_name, "csv_prune_max", fallback=5)
    json_prune_max = parser.getint(section_name, "json_prune_max", fallback=5)
    log_filename = parser.get(section_name, "members_log_filename", fallback="erras_members.log")
    keypad_field_names_string = parser.get(section_name, "keypad_field_names", fallback="Keypad")
    rfid_field_names_string = parser.get(section_name, "rfid_field_names", fallback="RFID")
    # Split up the key_fields_string into a list.
    # TODO: look into this later for split with escape
    # https://stackoverflow.com/questions/18092354/python-split-string-without-splitting-escaped-character
    keypad_field_names = keypad_field_names_string.split(",")
    rfid_field_names = rfid_field_names_string.split(",")
    
    # set up logger
    logger_name = "erras_members"
    logger_format = '%(asctime)s %(levelname)s %(message)s'
    log = logging.getLogger(logger_name)
    log.setLevel(logging.DEBUG)
    formatter = logging.Formatter(logger_format)
    
    # https://docs.python.org/2/library/logging.handlers.html
    # how often the log file is rotated is interval * when
    # when = S/M/H/D/W0-W6/midnight
    # so when='S', interval=500 means every 500 seconds.
    handler = TimedRotatingFileHandler(log_filename, when='D', interval=1, backupCount=20)
    handler.setFormatter(formatter)
    # handler.setLevel(logging.INFO)
    handler.setLevel(logging.DEBUG)
    
    log.addHandler(handler)
    # log.addHandler(JournalHandler())
    
    # Log the field names
    for field in keypad_field_names:
        log.debug("keypad field names: %s" % field)
    for field in rfid_field_names:
        log.debug("RFID field names: %s" % field)
    
    errasfiles = ErrasFiles(keypad_field_names, rfid_field_names, log)
    
    api = WaApiClient(wa_api_client_id, wa_api_client_secret, log, debug=True)
    api.authenticate_with_contact_credentials(credential_name, credential_key)
    
    log.info("Starting request loop.")
    while(True):
        log.info("########################### requesting member data ############################")
        params = { '$filter': filter_query,
                   '$async': 'false' }
    
        request_url = request_url_root + '?' + urllib.parse.urlencode(params)
        log.debug("Request url is: %s" % request_url)
    
        contacts = api.execute_request(request_url)
        # each contact is an ApiObject instance
        contact_list = contacts.Contacts
        log.info("There are %d contacts in results." % len(contact_list))
        
        # Save the newly downloaded member data in a backup file
        csv_backup_filename = csv_backup_filename_root + errasfiles.get_timestamp() + ".csv"
        errasfiles.print_contacts_csv(contact_list, csv_backup_filename)
        # And in a temp file.
        errasfiles.print_contacts_csv(contact_list, csv_filename_temp)
    
        # Note, do not use across filesystem boundaries.  File rename is
        # only atomic on unix if both new and old are on the same
        # filesystem.
        os.rename(csv_filename_temp, csv_filename)
        log.info("Member data saved in filename %s" % csv_filename)
        directory = os.getcwd()
        errasfiles.prune(directory, csv_backup_filename_root, ".csv", csv_prune_max)
        errasfiles.prune(directory, apricot_response_root, ".json", json_prune_max)
    
        log.info("Sleeping for %d seconds." % loop_delay)
        time.sleep(loop_delay)
Example #31
class LogEngine(object, metaclass=stSingleton):
    # Log levels
    LEVEL_DEBUG = logging.DEBUG
    LEVEL_INFO = logging.INFO
    LEVEL_WARN = logging.WARN
    LEVEL_ERROR = logging.ERROR
    LEVEL_CRITICAL = logging.CRITICAL

    # ----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.logger = logging.getLogger()
        # self.formatter = logging.Formatter('%(asctime)s  %(levelname)s: %(message)s')
        self.formatter = logging.Formatter(
            "%(levelname)s  [%(asctime)s.%(msecs)d][%(filename)s:%(lineno)d][%(process)d:%(threadName)s] %(message)s"
        )
        self.level = self.LEVEL_CRITICAL

        self.consoleHandler = None
        self.fileHandler = None
        self.timedRotatingFileHandler = None

        # Add a NullHandler to prevent "no handlers could be found" errors
        nullHandler = logging.NullHandler()
        self.logger.addHandler(nullHandler)

        # Map log levels to their output methods
        self.levelFunctionDict = {
            self.LEVEL_DEBUG: self.debug,
            self.LEVEL_INFO: self.info,
            self.LEVEL_WARN: self.warn,
            self.LEVEL_ERROR: self.error,
            self.LEVEL_CRITICAL: self.critical,
        }

    # ----------------------------------------------------------------------
    def setLogLevel(self, level):
        """设置日志级别"""
        self.logger.setLevel(level)
        self.level = level

    # ----------------------------------------------------------------------
    def addConsoleHandler(self):
        """添加终端输出"""
        if not self.consoleHandler:
            self.consoleHandler = logging.StreamHandler()
            self.consoleHandler.setLevel(self.level)
            self.consoleHandler.setFormatter(self.formatter)
            self.logger.addHandler(self.consoleHandler)

    # ----------------------------------------------------------------------
    def addFileHandler(self, filename=''):
        """添加文件输出"""
        if not self.fileHandler:
            if not filename:
                filename = 'st_' + datetime.now().strftime('%Y%m%d') + '.log'
            filepath = getTempPath(filename)
            self.fileHandler = logging.FileHandler(filepath)
            self.fileHandler.setLevel(self.level)
            self.fileHandler.setFormatter(self.formatter)
            self.logger.addHandler(self.fileHandler)

    # ----------------------------------------------------------------------
    def addTimedRotatingFileHandler(self,
                                    filename='',
                                    when='H',
                                    interval=1,
                                    backupCount=0):
        """添加文件输出"""
        if not self.fileHandler:
            if not filename:
                filename = 'stlog'
            filepath = getTempPath(filename)
            self.timedRotatingFileHandler = TimedRotatingFileHandler(
                filename=filepath,
                when=when,
                interval=interval,
                backupCount=backupCount)
            self.timedRotatingFileHandler.setLevel(self.level)
            self.timedRotatingFileHandler.setFormatter(self.formatter)
            self.logger.addHandler(self.timedRotatingFileHandler)

    # ----------------------------------------------------------------------
    def debug(self, msg):
        """开发时用"""
        self.logger.debug(msg)

    # ----------------------------------------------------------------------
    def info(self, msg):
        """正常输出"""
        self.logger.info(msg)

    # ----------------------------------------------------------------------
    def warn(self, msg):
        """警告信息"""
        self.logger.warn(msg)

    # ----------------------------------------------------------------------
    def error(self, msg):
        """报错输出"""
        self.logger.error(msg)

    # ----------------------------------------------------------------------
    def exception(self, msg):
        """报错输出+记录异常信息"""
        self.logger.exception(msg)

    # ----------------------------------------------------------------------
    def critical(self, msg):
        """影响程序运行的严重错误"""
        self.logger.critical(msg)

    # ----------------------------------------------------------------------
    def processLogEvent(self, event):
        """处理日志事件"""
        log = event.dict_['data']
        function = self.levelFunctionDict[log.logLevel]  # look up the output method for this log level
        msg = '\t'.join([log.gatewayName, log.logContent])
        function(msg)
Example #32
def main():
    global cal, debug_mode, display_meeting_summary, particle, use_remote_notify

    # Logging
    # Setup the basic console logger
    format_str = '%(asctime)s %(levelname)s %(message)s'
    date_format = '%Y-%m-%d %H:%M:%S'
    logging.basicConfig(format=format_str, level=logging.INFO, datefmt=date_format)
    logger = logging.getLogger()
    # Add a file handler as well; roll at midnight and keep 7 copies
    file_handler = TimedRotatingFileHandler("remind_log", when="midnight", backupCount=6)
    log_formatter = logging.Formatter(format_str, datefmt=date_format)
    file_handler.setFormatter(log_formatter)
    # file log always gets debug; console log level set in the config
    file_handler.setLevel(logging.DEBUG)
    logger.addHandler(file_handler)


    # tell the user what we're doing...
    print('\n')
    print(HASHES)
    print(HASH, 'Pi Remind HD Notify                      ', HASH)
    print(HASH, 'By John M. Wargo (https://johnwargo.com) ', HASH)
    print(HASHES)
    print('From: ' + PROJECT_URL + '\n')

    settings = Settings.get_instance()
    settings.validate_config_options()

    debug_mode = settings.get_debug_mode()
    if debug_mode:
        logging.info('Remind: Enabling debug mode')
        logger.setLevel(logging.DEBUG)

    display_meeting_summary = settings.get_display_meeting_summary()

    use_remote_notify = settings.get_use_remote_notify()
    if use_remote_notify:
        logging.info('Remind: Remote Notify Enabled')
        access_token = settings.get_access_token()
        device_id = settings.get_device_id()
        # Check to see if the string values we need are populated
        if len(access_token) < 1 or len(device_id) < 1:
            logging.error('One or more values are missing from the project configuration file')
            logging.error(CONFIG_ERROR_STR)
            sys.exit(0)
        logging.debug('Remind: Creating Particle object')
        particle = ParticleCloud(access_token, device_id)

        logging.info('Remind: Resetting Remote Notify status')
        particle.set_status(Status.FREE.value)
        time.sleep(1)
        particle.set_status(Status.OFF.value)

    # is the reboot counter in play?
    use_reboot_counter = settings.get_use_reboot_counter()
    if use_reboot_counter:
        # then get the reboot counter limit
        reboot_counter_limit = settings.get_reboot_counter_limit()
        # and tell the user the feature is enabled
        logging.info('Remind: Reboot enabled ({} retries)'.format(reboot_counter_limit))

    logging.info('Remind: Initializing Google Calendar interface')
    try:
        cal = GoogleCalendar()
        # Set the timeout for the rest of the Google API calls.
        # need this at its default during the registration process.
        socket.setdefaulttimeout(5)  # seconds
    except Exception as e:
        logging.error('Remind: Unable to initialize Google Calendar API')
        logging.error('Exception type: {}'.format(type(e)))
        logging.error('Error: {}'.format(sys.exc_info()[0]))
        unicorn.set_all(unicorn.FAILURE_COLOR)
        time.sleep(5)
        unicorn.off()
        sys.exit(0)

    logging.info('Remind: Application initialized')

    # flash some random LEDs just for fun...
    unicorn.flash_random(5, 0.5)
    # blink all the LEDs GREEN to let the user know the hardware is working
    unicorn.flash_all(3, 0.10, unicorn.GREEN)
    # get to work
    processing_loop()
Example #33
# Configure logging
logFormatter = logging.Formatter(
    "[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s][%(threadName)s] - %(message)s"
)
rootLogger = logging.getLogger('werkzeug')
rootLogger.handlers.clear()

# time rotating handler
rotatingHandler = TimedRotatingFileHandler(Path(base_dir, 'logs',
                                                'takehome.log'),
                                           when="D",
                                           interval=1,
                                           backupCount=30,
                                           encoding='utf-8')
rotatingHandler.setFormatter(logFormatter)
rotatingHandler.setLevel(logging.DEBUG)
logging.getLogger().handlers.clear()
logging.getLogger().addHandler(rotatingHandler)
# stream handler
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
consoleHandler.setLevel(logging.INFO)
rootLogger.addHandler(consoleHandler)

# ORM model file
DB_MODEL_FILE = Path(base_dir, 'backend', 'sqllib', 'models',
                     'backend/sqllib/models/db_model.py')


def allowed_file(filename):
    return filename.rsplit(
Example #34
    return jsonify(description="Firecrest's parameters",
                   out=parameters_list), 200


if __name__ == "__main__":
    # log handler definition
    # timed rotation: 1 (interval) rotation per day (when="D")
    logHandler = TimedRotatingFileHandler('/var/log/status.log',
                                          when='D',
                                          interval=1)

    logFormatter = logging.Formatter(
        '%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
        '%Y-%m-%dT%H:%M:%S')
    logHandler.setFormatter(logFormatter)
    logHandler.setLevel(logging.DEBUG)

    # get app log (Flask+werkzeug+python)
    logger = logging.getLogger()

    # set handler to logger
    logger.addHandler(logHandler)

    # run app
    if USE_SSL:
        app.run(debug=debug,
                host='0.0.0.0',
                port=STATUS_PORT,
                ssl_context=(SSL_CRT, SSL_KEY))
    else:
        app.run(debug=debug, host='0.0.0.0', port=STATUS_PORT)
Example #35
mail_buffer = StringIO()

if not root_logger.handlers:
    log_format = logging.Formatter('%(asctime)s %(levelname)s:  %(message)s')
    root_logger.setLevel(logging.INFO)

    log_dir = os.path.abspath('logs')
    if os.path.exists(log_dir):
        if not os.path.isdir(log_dir):
            raise(Exception("Logging directory {d} exists but as a file.".format(d=log_dir)))
    else:
        os.makedirs(log_dir)

    file_handler = TimedRotatingFileHandler('logs/safeway_coupon.log', when="midnight", interval=1, backupCount=30)
    file_handler.setFormatter(log_format)
    file_handler.setLevel(logging.INFO)
    root_logger.addHandler(file_handler)

    console_handler = logging.StreamHandler()
    console_handler.setFormatter(log_format)
    root_logger.addHandler(console_handler)

    mail_handler = logging.StreamHandler(mail_buffer)
    mail_formatter = logging.Formatter('%(message)s')
    mail_handler.setFormatter(mail_formatter)
    mail_handler.setLevel(logging.INFO)
    root_logger.addHandler(mail_handler)


sleep_multiplier = 1.0
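For context on the block above: mail_handler streams INFO-and-above records into mail_buffer, so after a run the accumulated text can be read back, for example as an email body. A minimal standalone sketch of the same capture-to-string pattern:

import logging
from io import StringIO

buffer = StringIO()
buffer_handler = logging.StreamHandler(buffer)
buffer_handler.setFormatter(logging.Formatter('%(message)s'))
buffer_handler.setLevel(logging.INFO)

log = logging.getLogger('buffer_demo')
log.setLevel(logging.INFO)
log.addHandler(buffer_handler)

log.info('captured for the email body')
email_body = buffer.getvalue()   # 'captured for the email body\n'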
Example #36
LOG_HOME = '/data/log/jiemo-html'

if not os.path.exists(LOG_HOME):
    os.makedirs(LOG_HOME)

# logging.basicConfig(level=logging.INFO,
#                     format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
#                     datefmt='%a, %d %b %Y %H:%M:%S',
#                     filename='{0}/main.log'.format(LOG_HOME),
#                     filemode='w')

logging.basicConfig(level=logging.INFO, filemode='w')

Rthandler = TimedRotatingFileHandler('{0}/main.log'.format(LOG_HOME), 'D', 1, 0, encoding='utf8')
Rthandler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s [line:%(lineno)d] %(levelname)s %(message)s')
Rthandler.setFormatter(formatter)
logging.getLogger('').addHandler(Rthandler)

TEXT_LENGTH = 1


def download(url):
    headers = {'User-agent':
                   'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36'
               # 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1'
               }
    page = None
    err_info = None
    try:
Example #37
def configure_file_logging():
    if LOG_FILE:
        file_handler = TimedRotatingFileHandler(LOG_FILE, when="midnight")
        file_handler.setFormatter(formatter)
        file_handler.setLevel(logging.DEBUG)
        logger.addHandler(file_handler)
Example #38
def create_app(config_name='default', jobs_enabled=True):
    """
    Set up the Flask Application context.

    :param config_name: Configuration for specific application context.

    :return: Flask application
    """

    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    # TODO: handler_info, handler_debug, handler_warn
    mail_handler = SMTPHandler(mailhost=(app.config['MAIL_SERVER'],
                                         app.config['MAIL_PORT']),
                               fromaddr=app.config['MAIL_SENDER'],
                               toaddrs=OPENRECORDS_DL_EMAIL,
                               subject='OpenRecords Error')
    mail_handler.setLevel(logging.ERROR)
    mail_handler.setFormatter(
        Formatter('''
    Message Type:       %(levelname)s
    Location:           %(pathname)s:%(lineno)d
    Module:             %(module)s
    Function:           %(funcName)s
    Time:               %(asctime)s
    
    Message:
    %(message)s
    '''))
    app.logger.addHandler(mail_handler)

    handler_error = TimedRotatingFileHandler(os.path.join(
        app.config['LOGFILE_DIRECTORY'],
        'openrecords_{}_error.log'.format(app.config['APP_VERSION_STRING'])),
                                             when='midnight',
                                             interval=1,
                                             backupCount=60)
    handler_error.setLevel(logging.ERROR)
    handler_error.setFormatter(
        Formatter(
            '------------------------------------------------------------------------------- \n'
            '%(asctime)s %(levelname)s: %(message)s '
            '[in %(pathname)s:%(lineno)d]\n'))
    app.logger.addHandler(handler_error)

    app.jinja_env.filters[
        'format_event_type'] = jinja_filters.format_event_type
    app.jinja_env.filters[
        'format_response_type'] = jinja_filters.format_response_type
    app.jinja_env.filters[
        'format_response_privacy'] = jinja_filters.format_response_privacy
    app.jinja_env.filters[
        'format_ultimate_determination_reason'] = jinja_filters.format_ultimate_determination_reason

    recaptcha.init_app(app)
    bootstrap.init_app(app)
    es.init_app(app,
                use_ssl=app.config['ELASTICSEARCH_USE_SSL'],
                verify_certs=app.config['ELASTICSEARCH_VERIFY_CERTS'])
    db.init_app(app)
    csrf.init_app(app)
    moment.init_app(app)
    login_manager.init_app(app)
    mail.init_app(app)
    celery.conf.update(app.config)
    sentry.init_app(app, logging=app.config["USE_SENTRY"], level=logging.INFO)

    if jobs_enabled:
        scheduler.init_app(app)

    with app.app_context():
        from app.models import Anonymous
        login_manager.login_view = 'auth.login'
        login_manager.anonymous_user = Anonymous
        KVSessionExtension(session_redis, app)

    # schedule jobs
    if jobs_enabled:
        # NOTE: if running with reloader, jobs will execute twice
        import jobs
        scheduler.add_job(
            'update_request_statuses',
            jobs.update_request_statuses,
            name="Update requests statuses every day at 3 AM.",
            trigger=CronTrigger(hour=3),
        )
        scheduler.add_job(
            'check_sanity',
            jobs.check_sanity,
            name="Check if scheduler is running every morning at 8 AM.",
            # trigger=IntervalTrigger(minutes=1)  # TODO: switch to cron below after testing
            trigger=CronTrigger(hour=8))

        scheduler.start()

    # Error Handlers
    @app.errorhandler(400)
    def bad_request(e):
        return render_template("error/generic.html",
                               status_code=400,
                               message=e.description or None)

    @app.errorhandler(403)
    def forbidden(e):
        return render_template("error/generic.html", status_code=403)

    @app.errorhandler(404)
    def page_not_found(e):
        return render_template("error/generic.html", status_code=404)

    @app.errorhandler(500)
    def internal_server_error(e):
        error_id = str(uuid.uuid4())
        app.logger.error("""Request:   {method} {path}
    IP:        {ip}
    User:      {user}
    Agent:     {agent_platform} | {agent_browser} {agent_browser_version}
    Raw Agent: {agent}
    Error ID:  {error_id}
            """.format(method=flask_request.method,
                       path=flask_request.path,
                       ip=flask_request.remote_addr,
                       agent_platform=flask_request.user_agent.platform,
                       agent_browser=flask_request.user_agent.browser,
                       agent_browser_version=flask_request.user_agent.version,
                       agent=flask_request.user_agent.string,
                       user=current_user,
                       error_id=error_id),
                         exc_info=e)
        return render_template("error/generic.html",
                               status_code=500,
                               error_id=error_id)

    @app.errorhandler(503)
    def maintenance(e):
        with open(os.path.join(app.instance_path, 'maintenance.json')) as f:
            maintenance_info = json.load(f)
        return render_template('error/maintenance.html',
                               description=maintenance_info['description'],
                               outage_time=maintenance_info['outage_time'])

    @app.before_request
    def check_maintenance_mode():
        if os.path.exists(os.path.join(app.instance_path, 'maintenance.json')):
            if not flask_request.cookies.get('authorized_maintainer', None):
                return abort(503)

    @app.context_processor
    def add_session_config():
        """Add current_app.permanent_session_lifetime converted to milliseconds
        to context. The config variable PERMANENT_SESSION_LIFETIME is not
        used because it could be either a timedelta object or an integer
        representing seconds.
        """
        return {
            'PERMANENT_SESSION_LIFETIME_MS':
            (app.permanent_session_lifetime.seconds * 1000),
        }

    @app.context_processor
    def add_debug():
        """Add current_app.debug to context."""
        return dict(debug=app.debug)

    # Register Blueprints
    from .main import main
    app.register_blueprint(main)

    from .auth import auth
    app.register_blueprint(auth, url_prefix="/auth")

    from .request import request
    app.register_blueprint(request, url_prefix="/request")

    from .request.api import request_api_blueprint
    app.register_blueprint(request_api_blueprint,
                           url_prefix="/request/api/v1.0")

    from .report import report
    app.register_blueprint(report, url_prefix="/report")

    from .response import response
    app.register_blueprint(response, url_prefix="/response")

    from .upload import upload
    app.register_blueprint(upload, url_prefix="/upload")

    from .user import user
    app.register_blueprint(user, url_prefix="/user")

    from .agency import agency
    app.register_blueprint(agency, url_prefix="/agency")

    from .agency.api import agency_api_blueprint
    app.register_blueprint(agency_api_blueprint, url_prefix="/agency/api/v1.0")

    from .search import search
    app.register_blueprint(search, url_prefix="/search")

    from .admin import admin
    app.register_blueprint(admin, url_prefix="/admin")

    from .user_request import user_request
    app.register_blueprint(user_request, url_prefix="/user_request")

    from .permissions import permissions
    app.register_blueprint(permissions, url_prefix="/permissions/api/v1.0")

    # exit handling
    if jobs_enabled:
        atexit.register(lambda: scheduler.shutdown())

    return app
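
The add_session_config context processor above deliberately reads app.permanent_session_lifetime, which Flask normalizes to a timedelta, rather than the raw PERMANENT_SESSION_LIFETIME config value that may be either a timedelta or an int of seconds. A minimal stand-alone sketch of that normalization (the helper name is made up for illustration):

from datetime import timedelta

def session_lifetime_ms(lifetime):
    """Hypothetical helper: accept a timedelta or an int of seconds and
    return the lifetime in milliseconds, mirroring what the context
    processor exposes to templates as PERMANENT_SESSION_LIFETIME_MS."""
    if isinstance(lifetime, timedelta):
        return int(lifetime.total_seconds() * 1000)
    return int(lifetime) * 1000

assert session_lifetime_ms(timedelta(minutes=30)) == 1_800_000
assert session_lifetime_ms(1800) == 1_800_000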
Example #39
0
app.config.from_pyfile('config.py')

# HTTP security headers
Talisman(app, content_security_policy={})

# CSRF protection; cookie and header names match Angular's $httpProvider defaults
app.config['CSRF_COOKIE_NAME'] = 'XSRF-TOKEN'
app.config['CSRF_HEADER_NAME'] = 'X-XSRF-TOKEN'
app.config['CSRF_COOKIE_PATH'] = '/locator-tool/'
SeaSurf(app)

logfile = TimedRotatingFileHandler(filename='locator-tool.log',
                                   when='midnight')
logfile.setFormatter(
    logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
logfile.setLevel(logging.INFO)
app.logger.addHandler(logfile)
logging.getLogger('werkzeug').addHandler(logfile)

mwoauth = MWOAuth(base_url='https://commons.wikimedia.org/w',
                  clean_url='https://commons.wikimedia.org/wiki',
                  consumer_key=app.config['OAUTH_CONSUMER_KEY'],
                  consumer_secret=app.config['OAUTH_CONSUMER_SECRET'])
app.register_blueprint(mwoauth.bp)


@app.route('/')
def index():
    return app.send_static_file('index.html')

Example #40
0
def get_file_handler():
    file_handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    file_handler.setFormatter(FORMATTER)
    file_handler.setLevel(logging.WARNING)
    return file_handler
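
Example #40's factory relies on module-level LOG_FILE and FORMATTER constants that are not shown. A self-contained sketch of how the surrounding module might look, with those two names assumed rather than taken from the original source:

import logging
from logging.handlers import TimedRotatingFileHandler

# Assumed constants (not part of the original snippet):
LOG_FILE = "app.log"
FORMATTER = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")

def get_file_handler():
    file_handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    file_handler.setFormatter(FORMATTER)
    file_handler.setLevel(logging.WARNING)
    return file_handler

logger = logging.getLogger("example40")
logger.setLevel(logging.DEBUG)
logger.addHandler(get_file_handler())
logger.warning("written to app.log")  # INFO and DEBUG records are dropped by the handler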
Example #41
0
import subprocess

from toolbox import mask_is_valid, ipv6_is_valid, ipv4_is_valid, resolve, save_cache_pickle, load_cache_pickle, get_asname_from_whois, unescape

from dns.resolver import NXDOMAIN
from flask import Flask, render_template, jsonify, redirect, session, request, abort, Response, Markup
import pydot

app = Flask(__name__)
app.config.from_pyfile('lg.cfg')
app.secret_key = app.config["SESSION_KEY"]
app.debug = app.config["DEBUG"]

file_handler = TimedRotatingFileHandler(filename=app.config["LOG_FILE"],
                                        when="midnight")
file_handler.setLevel(getattr(logging, app.config["LOG_LEVEL"].upper()))
app.logger.addHandler(file_handler)

memcache_server = app.config.get("MEMCACHE_SERVER", "127.0.0.1:11211")
memcache_expiration = int(app.config.get("MEMCACHE_EXPIRATION",
                                         "1296000"))  # 15 days by default
mc = memcache.Client([memcache_server])

default_lookup_table = " table %s" % app.config.get("DEFAULT_TABLE", "master")


def get_asn_from_as(n):
    asn_zone = app.config.get("ASN_ZONE", "asn.cymru.com")
    try:
        data = resolve("AS%s.%s" % (n, asn_zone),
                       "TXT").replace("'", "").replace('"', '')
Example #42
0
__all__ = [
    'event_pack_tick_data',
]

LOG_TIME_FMT = "%Y-%m-%d %H:%M:%S"  # date format used by the Formatter below

# logging.basicConfig(filename=LOG_FILE)
# step 1: create a logger
neulog = logging.getLogger()
# step 2: set logger level
neulog.setLevel(logging.INFO)
# step 3: create a handler
Time_Handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
# step 4: set handler level
Time_Handler.setLevel(logging.INFO)
# step 5: create log format
LOG_FORMAT = "%(asctime)s [%(levelname)s]: %(message)s"
# step 6: add log format to handler
neu_format = logging.Formatter(LOG_FORMAT, LOG_TIME_FMT)
Time_Handler.setFormatter(neu_format)
# step 7: add handler to logger


def info_log(msg: str):
    neulog.addHandler(Time_Handler)
    neulog.info(msg)
    neulog.removeHandler(Time_Handler)


def error_log(msg: str):
Example #43
0
def create_logger(loggername: str = 'logger', levelname: str = 'DEBUG'):
    filename = 'logger.log'
    logger = logging.getLogger(loggername)
    logger.setLevel(levels[levelname])

    logger_format = logging.Formatter(
        "[%(asctime)s][%(levelname)s][%(filename)s][%(funcName)s][%(lineno)03s]: %(message)s"
    )
    console_format = logging.Formatter("%(message)s")

    handler_console = logging.StreamHandler()
    handler_console.setFormatter(console_format)
    handler_console.setLevel(logging.INFO)

    now = time.strftime("%Y%m%d")
    common_filename = path / 'LOG' / f'{now}.log'
    handler_common = logging.FileHandler(str(common_filename),
                                         mode='a+',
                                         encoding='utf-8')
    handler_common.setLevel(levels[levelname])
    handler_common.setFormatter(logger_format)

    for key in levels:
        # now = time.strftime("%Y%m%d")
        filename = path / key / f'logger.log'
        handler = TimedRotatingFileHandler(filename,
                                           encoding='utf-8',
                                           when='D',
                                           interval=1,
                                           backupCount=7)
        handler.suffix = "%Y-%m-%d.log"
        handler.setFormatter(logger_format)
        handler.setLevel(levels[key])
        flt = logging.Filter()
        # Bind the level as a default argument; a plain closure over `key` is
        # evaluated late, so every filter would end up using the last level.
        flt.filter = lambda record, level=levels[key]: record.levelno == level
        handler.addFilter(flt)
        logger.addHandler(handler)

    # Note: earlier revisions created the DEBUG/INFO/WARNING/ERROR/CRITICAL
    # handlers one by one (a TimedRotatingFileHandler per level under basedir,
    # each with an exact-level filter); the loop above now builds the same
    # per-level handlers and filters.

    logger.addHandler(handler_console)
    logger.addHandler(handler_common)

    return logger
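
create_logger depends on module-level `path` and `levels` objects that the snippet does not show; the sketch below assumes a plausible shape for them and that create_logger itself is in scope. The per-level directories (and the LOG directory used by the common handler) must exist before the call:

import logging
from pathlib import Path

# Assumed module-level globals (not shown in the original snippet):
path = Path(__file__).resolve().parent
levels = {
    'DEBUG': logging.DEBUG,
    'INFO': logging.INFO,
    'WARNING': logging.WARNING,
    'ERROR': logging.ERROR,
    'CRITICAL': logging.CRITICAL,
}

for name in list(levels) + ['LOG']:
    (path / name).mkdir(parents=True, exist_ok=True)

log = create_logger('demo', 'DEBUG')
log.warning("goes to WARNING/logger.log, LOG/<date>.log and the console")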
Example #44
0
def configure_bluesky_logging(ipython, appdirs_appname="bluesky"):
    """
    Configure a TimedRotatingFileHandler log handler and attach it to
    bluesky, ophyd, caproto, and nslsii loggers.

    The log file path is taken from environment variable BLUESKY_LOG_FILE, if
    that variable has been set. If not the default log file location is determined
    by the appdirs package. The default log directory will be created if it does
    not exist.

    Parameters
    ----------
    ipython: InteractiveShell
        IPython InteractiveShell used to attach bluesky log handler to ipython
    appdirs_appname: str
        appname passed to appdirs.user_log_dir() when the BLUESKY_LOG_FILE
        environment variable has not been set; use the default for production,
        set to something else for testing

    Returns
    -------
    bluesky_log_file_path: Path
        log file path

    """
    global bluesky_log_file_path

    if "BLUESKY_LOG_FILE" in os.environ:
        bluesky_log_file_path = Path(os.environ["BLUESKY_LOG_FILE"])
        print(
            f"bluesky log file path configured from environment variable"
            f" BLUESKY_LOG_FILE: '{bluesky_log_file_path}'",
            file=sys.stderr,
        )
    else:
        bluesky_log_dir = Path(appdirs.user_log_dir(appname=appdirs_appname))
        if not bluesky_log_dir.exists():
            bluesky_log_dir.mkdir(parents=True, exist_ok=True)
        bluesky_log_file_path = bluesky_log_dir / Path("bluesky.log")
        print(
            f"environment variable BLUESKY_LOG_FILE is not set,"
            f" using default log file path '{bluesky_log_file_path}'",
            file=sys.stderr,
        )

    log_file_handler = TimedRotatingFileHandler(
        filename=str(bluesky_log_file_path), when="W0", backupCount=10
    )
    log_file_handler.setLevel("INFO")
    log_file_format = (
        "[%(levelname)1.1s %(asctime)s.%(msecs)03d %(name)s"
        "  %(module)s:%(lineno)d] %(message)s"
    )
    log_file_handler.setFormatter(logging.Formatter(fmt=log_file_format))
    logging.getLogger("bluesky").addHandler(log_file_handler)
    logging.getLogger("caproto").addHandler(log_file_handler)
    logging.getLogger("ophyd").addHandler(log_file_handler)
    logging.getLogger("nslsii").addHandler(log_file_handler)
    ipython.log.addHandler(log_file_handler)
    # set the loggers to send INFO and higher log
    # messages to their handlers
    logging.getLogger("bluesky").setLevel("INFO")
    logging.getLogger("caproto").setLevel("INFO")
    logging.getLogger("ophyd").setLevel("INFO")
    logging.getLogger("nslsii").setLevel("INFO")
    ipython.log.setLevel("INFO")

    return bluesky_log_file_path
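
A sketch of how this might be called from an IPython startup script; it assumes configure_bluesky_logging as defined above is importable in that context, and the BLUESKY_LOG_FILE value is only an example:

# Hypothetical startup file, e.g. ~/.ipython/profile_collection/startup/00-logging.py
import os
from IPython import get_ipython

# Optional: point the log somewhere explicit instead of the appdirs default.
os.environ.setdefault("BLUESKY_LOG_FILE", "/tmp/bluesky.log")

log_path = configure_bluesky_logging(ipython=get_ipython())
print(f"bluesky/ophyd/caproto/nslsii logs rotate weekly at {log_path}")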
Example #45
0
        loglevel = logging.getLevelName("INFO")
    logger = logging.getLogger()
    logger.setLevel(loglevel)

    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
    console_handler = logging.StreamHandler()
    console_handler.setLevel(loglevel)
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)
    file_handler = TimedRotatingFileHandler(
        '/var/log/backup/backup.log',
        when='d',
        interval=1,
        backupCount=5
    )
    file_handler.setLevel(loglevel)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
    
    # Load and check settings
    configService = Config("config")
    
    # Just check parameters
    if len(sys.argv) > 1 and sys.argv[1] == "--checkParameters":
        
        try:
            getAndcheckAllParameters()
        except Exception as e:
            logger.error("Error - %s", e.message)
            sys.exit(1)
    
Example #46
0
 def set_file_handler(self):
     handler = TimedRotatingFileHandler(
         self.log_file, when='D', interval=1, backupCount=3)
     handler.setFormatter(self.formatter)
     handler.setLevel(logging.DEBUG)
     return handler
import logging
from logging.handlers import TimedRotatingFileHandler
import time

formatter = logging.Formatter("%(asctime)s:%(name)s:%(levelname)s:%(message)s")

timed_handler = TimedRotatingFileHandler(filename="timed.log",
                                         when="S",
                                         interval=1,
                                         backupCount=1)
timed_handler.setFormatter(formatter)
timed_handler.setLevel(logging.INFO)

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(timed_handler)

start = now = time.time()
ind = 0

while (now - start) < 5:
    logger.info(f"Ind: {ind}, Time passed: {round(now-start, 2)}")
    time.sleep(0.25)
    now = time.time()
    ind += 1
    # Create one file for each day. Delete logs over 7 days old.
    file_handler = TimedRotatingFileHandler(filename=app.config["ERROR_LOG_PATH"], when="D", backupCount=7)

    # Use a multi-line format for this logger, for easier scanning
    file_formatter = logging.Formatter('''
    Time: %(asctime)s
    Level: %(levelname)s
    Method: %(method)s
    Path: %(url)s
    IP: %(ip)s
    Message: %(message)s
    ---------------------''')
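    # Note: %(method)s, %(url)s and %(ip)s are not standard LogRecord attributes;
    # they have to be supplied per call via extra={...} or injected by a logging
    # Filter, otherwise the formatter cannot resolve them.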

    # Only keep log messages at ERROR level or above.
    file_handler.setLevel(logging.ERROR)

    file_handler.setFormatter(file_formatter)
    app.logger.addHandler(file_handler)


# Initialize all Flask API views
from api.views import CsvSimple
from api.views import DataAnalysis

flask_api.add_resource(CsvSimple, '/api/csv')
flask_api.add_resource(DataAnalysis, '/api/analyze/<string:experiment_id>', endpoint='analyze')
#flask_api.add_resource(DownloadCsv, '/api/download/<int:id>')

from app.machine_learning.wrangle import api_serialize, api_test
Example #49
0
 def __fileHandler(logObj, logFile, format):
     handler = TimedRotatingFileHandler(filename=logFile, when="D", interval=1, backupCount=7)
     handler.setLevel(logging.INFO)
     handler.setFormatter(format)
     logObj.addHandler(handler)
Example #50
0
'''
  A simple example of a continuously updated, rotating log file
'''

import logging
from logging.handlers import TimedRotatingFileHandler
# create logger with 'spam_application'
logger = logging.getLogger('study and test')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
#fh = logging.FileHandler('myapp.log')
#fh.setLevel(logging.DEBUG)
# create console handler (also set to DEBUG here)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# Rotate the file on a timed schedule (hourly here, when="h"), keeping 5 backups
th = TimedRotatingFileHandler(filename="myapp1.log",backupCount=5,when="h")
th.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
th.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
logger.addHandler(th)

logger.info("hello12!")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#

from pathlib import Path
import logging
from logging.handlers import TimedRotatingFileHandler
from .loadconfig import config, get_path

logging.basicConfig(level=logging.DEBUG,
                    format=config['log_format'],
                    datefmt=config['date_time_format'])
logger = logging.getLogger()

if config['log_to_file']:
    log_dir = get_path('log_folder')
    Path(log_dir).mkdir(parents=True, exist_ok=True)

    file_path = log_dir.joinpath(config['log_file'])
    fl = TimedRotatingFileHandler(file_path,
                                  when='midnight',
                                  interval=1,
                                  backupCount=31)
    fl.setLevel(logging.INFO)
    formatter = logging.Formatter(config['log_format'])
    fl.setFormatter(formatter)
    logger.addHandler(fl)
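
The module above reads log_format, date_time_format, log_to_file and log_file from the config mapping, plus a log_folder path via get_path(); the real values live in .loadconfig and are not shown, so the following is only a hypothetical example of the shape it expects:

# Hypothetical stand-ins for .loadconfig (not the project's real values):
config = {
    'log_format': '%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    'date_time_format': '%Y-%m-%d %H:%M:%S',
    'log_to_file': True,
    'log_file': 'app.log',
}

def get_path(key):
    # Stand-in for loadconfig.get_path(); here it always returns ./logs.
    from pathlib import Path
    return Path('./logs')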
Example #52
0
 def add_handler(filename, level):
     ensure_dir(os.path.dirname(filename))
     h = TimedRotatingFileHandler(filename, "D", encoding="utf8")
     h.setLevel(level)
     h.setFormatter(formatter)
     handlers.append(h)
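
This helper leans on an outer formatter, a handlers list and an ensure_dir utility that the snippet does not include; a self-contained sketch of the same pattern, with those three names assumed:

import logging
import os
from logging.handlers import TimedRotatingFileHandler

formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
handlers = []

def ensure_dir(dirname):
    # Assumed shape of the missing utility: create the directory if needed.
    if dirname:
        os.makedirs(dirname, exist_ok=True)

def add_handler(filename, level):
    ensure_dir(os.path.dirname(filename))
    h = TimedRotatingFileHandler(filename, "D", encoding="utf8")
    h.setLevel(level)
    h.setFormatter(formatter)
    handlers.append(h)

# One daily-rotated file per severity, all attached to the same logger.
add_handler("logs/info.log", logging.INFO)
add_handler("logs/error.log", logging.ERROR)
logger = logging.getLogger("example52")
for h in handlers:
    logger.addHandler(h)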
Example #53
0
logger = logging.getLogger("application")

logger.setLevel(logging.DEBUG)
logger.propagate = False

sh = logging.StreamHandler()
sh.setLevel(logging.ERROR)

sh.setFormatter(
    logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s CONSOLE %(message)s'))
logger.addHandler(sh)

# INFO LOGGER
fh_info = TimedRotatingFileHandler(file_path_info, backupCount=5)
fh_info.setLevel(logging.INFO)
fh_info.setFormatter(
    logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s  %(message)s'))
logger.addHandler(fh_info)

# ERROR LOGGER
fh_error = TimedRotatingFileHandler(file_path_error, backupCount=5)
fh_error.setLevel(logging.ERROR)
fh_error.setFormatter(
    logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
logger.addHandler(fh_error)

# DEBUG LOGGER
fh_debug = TimedRotatingFileHandler(file_path_debug, backupCount=5)
fh_debug.setLevel(logging.DEBUG)
fh_debug.setFormatter(
Example #54
0
incubatorsactive = [True, True, True, True]

###Initialize global variables
startupdate = datetime.now()
weeklytest = False #Whether we are in the weekly heartbeat announcement
alarm = False #Whether we are in an alarm state
curAlarmRepeat = 0 #The number of main loop iterations since we last sent a reminder of an ongoing alarm condition
status = [1, 1, 1, 1] # State vector of the incubators this cycle; initialize to all clear
priorstatus = status # State vector of the incubators last cycle

###Configure logging
logger = logging.getLogger("IncubatorAlarmLog")
logger.setLevel(logging.DEBUG)

handler1 = TimedRotatingFileHandler(basedir + 'Log/incubatoralarm.log', when="D", interval=30, backupCount=1000)
handler1.setLevel(logging.INFO)
logger.addHandler(handler1)

handler2 = logging.StreamHandler()
logger.addHandler(handler2)

#alarmname will be updated after reading config file
handler1.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s ' + alarmname + ' %(message)s', datefmt='%Y-%m-%d %H:%M:%S'))
handler2.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s ' + alarmname + ' %(message)s', datefmt='%Y-%m-%d %H:%M:%S'))

logger.info("Beginning startup")

#Set up handler to log a shutdown notice
def sigterm_handler(_signo, _stack_frame):
    #logger is a global variable
    logger.critical("Alarm monitor shutting down!")
Example #55
0
from logging.handlers import TimedRotatingFileHandler
from tools import os_tool
"""
封装log方法
"""
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
root_path = os_tool.get_root_path() + 'logs/'
os_tool.mkdir(root_path)

handler = TimedRotatingFileHandler(root_path + 'info.log',
                                   when='d',
                                   interval=1,
                                   backupCount=30,
                                   encoding='utf-8')
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)

handler2 = TimedRotatingFileHandler(root_path + 'error.log',
                                    when='d',
                                    interval=1,
                                    backupCount=30,
                                    encoding='utf-8')
handler2.setLevel(logging.ERROR)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler2.setFormatter(formatter)
logger.addHandler(handler2)
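
Handler levels are thresholds rather than exact matches, so with the two handlers above every ERROR record is written to both info.log and error.log. If strictly separated files were wanted instead, an exact-level filter could be attached; a small sketch of that variation:

import logging
from logging.handlers import TimedRotatingFileHandler

def exact_level(level):
    # Callable filter that only passes records of exactly this level.
    return lambda record: record.levelno == level

demo = logging.getLogger("exact-level-demo")
demo.setLevel(logging.DEBUG)

info_only = TimedRotatingFileHandler('info_only.log', when='d', backupCount=30, encoding='utf-8')
info_only.setLevel(logging.INFO)
info_only.addFilter(exact_level(logging.INFO))
demo.addHandler(info_only)

demo.info("written to info_only.log")
demo.error("filtered out here; a separate handler would capture it")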
Example #56
0
import json
import time
import re
import requests

import logging
from logging.handlers import TimedRotatingFileHandler
from config import Config

logger = logging.getLogger('es mapping backup logger')
logger.setLevel(logging.DEBUG)
handler = TimedRotatingFileHandler('./logs/es_mapping_backup.log',
                                   when='midnight',
                                   interval=1,
                                   backupCount=30)
handler.setLevel(logging.DEBUG)
handler.setFormatter(
    logging.Formatter(
        fmt='[%(asctime)s.%(msecs)03d] [%(levelname)s]: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'))
handler.suffix = "%Y%m%d"
handler.extMatch = re.compile(r"^\d{8}$")
logger.addHandler(handler)

rs = requests.session()
_http_headers = {'Content-Type': 'application/json'}
_es_size = 100
_es_type = '_doc'


def store_mapping(mapping, file_name):
Example #57
0
from dockerplace import app

if __name__ == '__main__':
    if not app.debug:
        import logging
        from logging.handlers import TimedRotatingFileHandler
        file_handler = TimedRotatingFileHandler(
            "dockerplace.log",
            when="D",
            backupCount=10)
        file_handler.setLevel(logging.WARNING)
        app.logger.addHandler(file_handler)

    app.debug = app.config['DEBUG']
    app.run( port=app.config['PORT'])
Example #58
0
def main():
    logger.info("代理服务(Proxy Server)启动")
    factory = ProxyServerFactory()
    from twisted.internet import reactor
    proxy_port = int(config["proxyServer"]["proxy_port"])
    port = reactor.listenTCP(proxy_port, factory)
    logger.info('Proxy server listening on port (proxy_port): %d' % port.getHost().port)
    reactor.suggestThreadPoolSize(40)
    reactor.run()


logger = logging.getLogger('Proxy')
logger.setLevel(logging.DEBUG)

real_path = os.path.split(os.path.realpath(__file__))[0]
fn = real_path + '/log/log.log'

fh = TimedRotatingFileHandler(fn, when='D', interval=1, backupCount=10, encoding='utf-8')
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)

config = configparser.ConfigParser(delimiters='=')
config.read(real_path + "/config.conf", encoding="utf-8")

if __name__ == '__main__':
    main()
Example #59
0
import logging
import coloredlogs
from logging.handlers import TimedRotatingFileHandler

# log formatter
formatter_pattern = "%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s"
time_pattern = '%Y-%m-%d %H:%M:%S'
formatter = coloredlogs.ColoredFormatter(formatter_pattern, time_pattern)

# StreamHandler that prints DEBUG-and-above records to standard error
stream_handler = logging.StreamHandler()
stream_handler.addFilter(coloredlogs.HostNameFilter())
stream_handler.setLevel(logging.DEBUG)
stream_handler.setFormatter(formatter)

# TimedRotatingFileHandler that rotates the file at midnight
rotate_handler = TimedRotatingFileHandler(filename='./log/scrapy.log',
                                          when='midnight',
                                          interval=1,
                                          backupCount=10)
rotate_handler.setLevel(logging.INFO)
rotate_handler.suffix = '%Y%m%d.log'
rotate_handler.setFormatter(formatter)

# Module-level logger for use throughout the project
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(stream_handler)
logger.addHandler(rotate_handler)
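
One caveat with the snippet above: TimedRotatingFileHandler prunes old backups by matching rotated filenames against its extMatch regex, so a customized suffix such as '%Y%m%d.log' usually needs a matching extMatch as well (Example #56 pairs the two). A sketch of that pairing, under the assumption the default pattern no longer matches:

import re
from logging.handlers import TimedRotatingFileHandler

handler = TimedRotatingFileHandler('./log/scrapy.log', when='midnight', interval=1, backupCount=10)
handler.suffix = '%Y%m%d.log'
# Keep extMatch in sync with the custom suffix so rotated files like
# scrapy.log.20240101.log are recognized and deleted once backupCount is exceeded.
handler.extMatch = re.compile(r"^\d{8}\.log$")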
Example #60
-1
def configure_logging(app):
    subject = '[Error] %s encountered errors on %s' % (app.config['DOMAIN'], datetime.now().strftime('%Y/%m/%d'))
    subject += (' [DEV]' if app.debug else '')
    mail_config = [(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                   app.config['MAIL_DEFAULT_SENDER'], app.config['ADMINS'],
                   subject,
                   (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])]
    if app.config['MAIL_USE_SSL']:
        mail_handler = SSLSMTPHandler(*mail_config)
    else:
        mail_handler = SMTPHandler(*mail_config)

    mail_handler.setLevel(logging.ERROR)
    app.logger.addHandler(mail_handler)

    formatter = logging.Formatter(
        '%(asctime)s %(process)d-%(thread)d %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')

    debug_log = os.path.join(app.root_path, app.config['DEBUG_LOG'])
    debug_file_handler = TimedRotatingFileHandler(debug_log, when='midnight', interval=1, backupCount=90)
    debug_file_handler.setLevel(logging.DEBUG)
    debug_file_handler.setFormatter(formatter)
    app.logger.addHandler(debug_file_handler)

    error_log = os.path.join(app.root_path, app.config['ERROR_LOG'])
    error_file_handler = TimedRotatingFileHandler(error_log, when='midnight', interval=1, backupCount=90)
    error_file_handler.setLevel(logging.ERROR)
    error_file_handler.setFormatter(formatter)
    app.logger.addHandler(error_file_handler)

    # When Flask runs in production mode its logger only emits ERROR by default; lower the threshold to INFO here.
    if not app.config['DEBUG']:
        app.logger.setLevel(logging.INFO)
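
SSLSMTPHandler in Example #60 is not part of the standard library (logging.handlers only provides SMTPHandler, which speaks STARTTLS via its secure argument). One plausible implementation, assumed here rather than taken from the project, is a subclass that sends over an implicit-TLS connection with smtplib.SMTP_SSL:

import logging.handlers
import smtplib
from email.message import EmailMessage

class SSLSMTPHandler(logging.handlers.SMTPHandler):
    """Assumed implementation: like SMTPHandler, but over an implicit-TLS (SMTPS) connection."""

    def emit(self, record):
        try:
            msg = EmailMessage()
            msg['From'] = self.fromaddr
            msg['To'] = ', '.join(self.toaddrs)
            msg['Subject'] = self.getSubject(record)
            msg.set_content(self.format(record))
            port = self.mailport or smtplib.SMTP_SSL_PORT
            with smtplib.SMTP_SSL(self.mailhost, port) as server:
                if self.username:
                    server.login(self.username, self.password)
                server.send_message(msg)
        except Exception:
            self.handleError(record)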