Exemplo n.º 1
def _setup_task_logger(logger):
    """Configure a task logger to generate site- and task-specific logs."""
    if logger.handlers:  # Already processed
        return

    parts = logger.name.split(".")
    if len(parts) < 4:  # Malformed
        return
    site = parts[2]
    task = parts[3]

    _ensure_dirs(os.path.join(_log_dir, site))

    formatter = Formatter(
        fmt="[%(asctime)s %(levelname)-7s] %(message)s",
        datefmt=_DATE_FORMAT)

    infohandler = TimedRotatingFileHandler(
        os.path.join(_log_dir, site, task + ".log"), "midnight", 1, 30)
    infohandler.setLevel("INFO")

    debughandler = FileHandler(
        os.path.join(_log_dir, site, task + ".log.verbose"), "w")
    debughandler.setLevel("DEBUG")

    errorhandler = RotatingFileHandler(
        os.path.join(_log_dir, site, task + ".err"), maxBytes=1024**2,
        backupCount=4)
    errorhandler.setLevel("WARNING")

    for handler in [infohandler, debughandler, errorhandler]:
        handler.setFormatter(formatter)
        logger.addHandler(handler)
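
A minimal usage sketch for the helper above; the dotted logger name is hypothetical, since _setup_task_logger only requires at least four dot-separated parts and reads the third as the site and the fourth as the task:

import logging

# Hypothetical logger name: parts[2] -> site, parts[3] -> task
task_logger = logging.getLogger("scheduler.tasks.example_site.sync_task")
_setup_task_logger(task_logger)
task_logger.warning("task started")  # routed to the per-site, per-task log files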
Exemplo n.º 2
def get_logger(logger_name, logger_module, has_formatter=True):
    """申明logger

    Args:
        logger: str
        logger_moduls: logger所属的模块,方便后面按照不同的路径进行切分
    """
    log_dir = "%s/unknown/" % (config.LOG_DIR)
    if logger_module:
        log_dir = "%s/%s/" % (config.LOG_DIR, logger_module)
    if not os.path.exists(log_dir):
        os.mkdir(log_dir)

    log = logging.getLogger(logger_name)
    file_handler = TimedRotatingFileHandler("%s/%s.log" % (log_dir, logger_name), 'W0')
    if has_formatter:
        logger_format = '[%(asctime)s][%(thread)d][%(levelname)s][%(filename)s][line:%(lineno)d] [func:%(funcName)s] [msg:%(message)s]'
        formatter = logging.Formatter(logger_format)
        file_handler.setFormatter(formatter)
    else:
        # No detailed format, only print the timestamp and the message
        formatter = logging.Formatter('%(asctime)s\t%(message)s')
        file_handler.setFormatter(formatter)

    log.addHandler(file_handler)
    return log
Exemplo n.º 3
    def set_file_logger(self,
                        path='logs', filename='backend.log',
                        when="D", interval=1, backupCount=6):  # pragma: no cover
        """
        Configure handler for file logging ...

        :param test: test mode for application
        :type test: boolean
        """
        try:
            os.makedirs(path)
        except OSError:
            if not os.path.isdir(path):
                raise

        # Store logs in a daily file, keeping 6 days of backups by default
        fh = TimedRotatingFileHandler(
            filename=os.path.join(path, filename),
            when=when, interval=interval,
            backupCount=backupCount
        )

        # create formatter and add it to the handler
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)

        # add the handler to logger
        self.log.addHandler(fh)
Exemplo n.º 4
    def _set_defaults(self,
                      logLevel='WARNING',
                      logFileLevel='INFO',
                      logFilePath='~/.totoro/totoro.log',
                      mode='append',
                      wrapperLength=70):
        """Reset logger to its initial state."""

        # Remove all previous handlers
        for handler in self.handlers[:]:
            self.removeHandler(handler)

        # Set levels
        self.setLevel('DEBUG')

        # Set up the stdout handler
        self.sh = logging.StreamHandler()
        self.sh.emit = self._stream_formatter
        self.addHandler(self.sh)

        self.wrapperLength = wrapperLength

        # Set up the main log file handler if requested (but this might fail if
        # configuration directory or log file is not writeable).

        logFilePath = os.path.expanduser(logFilePath)
        logDir = os.path.dirname(logFilePath)
        if not os.path.exists(logDir):
            os.mkdir(logDir)

        try:
            if mode.lower() == 'overwrite':
                self.fh = FileHandler(logFilePath, mode='w')
            elif mode.lower() == 'append':
                self.fh = TimedRotatingFileHandler(
                    logFilePath, when='midnight', utc=True)
            else:
                raise TotoroError('logger mode {0} not recognised'
                                  .format(mode))
        except (IOError, OSError) as e:
            warnings.warn(
                'log file {0!r} could not be opened for writing: '
                '{1}'.format(logFilePath, unicode(e)), RuntimeWarning)
        else:
            self.fh.setFormatter(fmt)
            self.addHandler(self.fh)

        # Adds a header only to the file handler
        self.sh.setLevel(logging.CRITICAL)
        self.fh.setLevel(logging.DEBUG)
        self.debug('')
        self.debug('--------------------------------')
        self.debug('----- Restarting logger. -------')
        self.debug('--------------------------------')

        self.sh.setLevel(logLevel)
        self.fh.setLevel(logFileLevel)

        self.logFilename = logFilePath
        warnings.showwarning = self._showwarning
Exemplo n.º 5
 def __init__(self):
   #Setup log files
   self.infoLogFile = os.path.join(settings.LOG_DIR,"info.log")
   self.exceptionLogFile = os.path.join(settings.LOG_DIR,"exception.log")
   self.cacheLogFile = os.path.join(settings.LOG_DIR,"cache.log")
   self.renderingLogFile = os.path.join(settings.LOG_DIR,"rendering.log")
   self.metricAccessLogFile = os.path.join(settings.LOG_DIR,"metricaccess.log")
   #Setup loggers
   self.infoLogger = logging.getLogger("info")
   self.infoLogger.setLevel(logging.INFO)
   self.exceptionLogger = logging.getLogger("exception")
   self.cacheLogger = logging.getLogger("cache")
   self.renderingLogger = logging.getLogger("rendering")
   self.metricAccessLogger = logging.getLogger("metric_access")
   #Setup formatter & handlers
   self.formatter = logging.Formatter("%(asctime)s :: %(message)s","%a %b %d %H:%M:%S %Y")
   self.infoHandler = Rotater(self.infoLogFile,when="midnight",backupCount=1)
   self.infoHandler.setFormatter(self.formatter)
   self.infoLogger.addHandler(self.infoHandler)
   self.exceptionHandler = Rotater(self.exceptionLogFile,when="midnight",backupCount=1)
   self.exceptionHandler.setFormatter(self.formatter)
   self.exceptionLogger.addHandler(self.exceptionHandler)
   if settings.LOG_CACHE_PERFORMANCE:
     self.cacheHandler = Rotater(self.cacheLogFile,when="midnight",backupCount=1)
     self.cacheHandler.setFormatter(self.formatter)
     self.cacheLogger.addHandler(self.cacheHandler)
   if settings.LOG_RENDERING_PERFORMANCE:
     self.renderingHandler = Rotater(self.renderingLogFile,when="midnight",backupCount=1)
     self.renderingHandler.setFormatter(self.formatter)
     self.renderingLogger.addHandler(self.renderingHandler)
   if settings.LOG_METRIC_ACCESS:
     self.metricAccessHandler = Rotater(self.metricAccessLogFile,when="midnight",backupCount=10)
     self.metricAccessHandler.setFormatter(self.formatter)
     self.metricAccessLogger.addHandler(self.metricAccessHandler)
Exemplo n.º 6
def setup_logger(config):
    from logging.handlers import TimedRotatingFileHandler
    global LOGGER

    # Log file rotation scheduling
    when, interval, backupCount = config.LOG_ROTATION_TIME, \
        config.LOG_ROTATION_INTERVAL, config.LOG_BACKUP_COUNT

    # Defensive assertions
    assert when.lower() in ('s', 'm', 'h', 'd', 'midnight',
                            'w0', 'w1', 'w2', 'w3', 'w4', 'w5', 'w6',)
    assert interval > 0
    assert backupCount > 0

    if not os.path.exists(config.LOG_DIR):
        os.mkdir(config.LOG_DIR)
    log_file_path = os.path.join(config.LOG_DIR, config.LOG_FILENAME)

    formatter = logging.Formatter(config.LOG_FORMAT_STR)

    file_handler = TimedRotatingFileHandler(
        log_file_path,
        when=when,
        interval=interval,
        backupCount=backupCount)
    file_handler.setLevel(config.FILE_LOG_LEVEL)
    file_handler.setFormatter(formatter)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(config.CONSOLE_LOG_LEVEL)
    console_handler.setFormatter(formatter)

    LOGGER.addHandler(file_handler)
    LOGGER.addHandler(console_handler)
    LOGGER.setLevel(config.LOG_LEVEL)
Exemplo n.º 7
def create_multiprocess_logger(logger_name, persist_logger_name, log_level, log_format, log_queue, log_file_path,
                               when_to_rotate, keep_log_days):
    """
    Creates queue logger and persist logger.

    Queue logger should be used to log into. It is Thread and Process safe.
    Persist logger is logger which persist data to disk. LogCollector moves data from queue log into persist log.
    """

    queue_log_formatter = logging.Formatter(log_format)
    queue_log_handler = QueueHandler(log_queue, persist_logger_name)
    queue_log_handler.setFormatter(queue_log_formatter)
    queue_logger = logging.getLogger(logger_name)
    queue_logger.setLevel(log_level)
    queue_logger.handlers = []
    queue_logger.addHandler(queue_log_handler)
    queue_logger.propagate = False

    persist_log_formatter = logging.Formatter('%(message)s')
    persist_log_handler = TimedRotatingFileHandler(log_file_path, when=when_to_rotate, interval=1, backupCount=keep_log_days)
    persist_log_handler.setFormatter(persist_log_formatter)
    persist_logger = logging.getLogger(persist_logger_name)
    persist_logger.setLevel(log_level)
    persist_logger.handlers = []
    persist_logger.addHandler(persist_log_handler)
    persist_logger.propagate = False
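
A minimal calling sketch for create_multiprocess_logger; all names and values below are illustrative, and the QueueHandler above takes a project-specific signature, so only the public call is shown:

import logging
import multiprocessing

log_queue = multiprocessing.Queue()
create_multiprocess_logger(
    logger_name="worker",
    persist_logger_name="worker.persist",
    log_level=logging.INFO,
    log_format="%(asctime)s %(levelname)s %(message)s",
    log_queue=log_queue,
    log_file_path="worker.log",
    when_to_rotate="midnight",
    keep_log_days=7)
logging.getLogger("worker").info("hello")  # enqueued; a LogCollector would persist it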
Exemplo n.º 8
def set_logger_params(app):
    global _logger
    _logger = app.logger
    handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='D', interval=1)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(app.config['LOG_FORMAT']))
    _logger.addHandler(handler)
Exemplo n.º 9
    def __init__(self, config, SMPPClientFactory, amqpBroker, redisClient, RouterPB=None, interceptorpb_client=None):
        self.config = config
        self.SMPPClientFactory = SMPPClientFactory
        self.SMPPOperationFactory = SMPPOperationFactory(self.SMPPClientFactory.config)
        self.amqpBroker = amqpBroker
        self.redisClient = redisClient
        self.RouterPB = RouterPB
        self.interceptorpb_client = interceptorpb_client
        self.submit_sm_q = None
        self.qos_last_submit_sm_at = None
        self.rejectTimers = {}
        self.submit_retrials = {}
        self.qosTimer = None

        # Set pickleProtocol
        self.pickleProtocol = SMPPClientPBConfig(self.config.config_file).pickle_protocol

        # Set up a dedicated logger
        self.log = logging.getLogger(LOG_CATEGORY)
        if len(self.log.handlers) != 1:
            self.log.setLevel(self.config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file,
                                               when=self.config.log_rotate)
            formatter = logging.Formatter(self.config.log_format, self.config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            self.log.propagate = False
Exemplo n.º 10
  def get_log_handler(self, command):
    """Configure and return log handler.

    :param command: The command to load the configuration for. All options will
      be looked up in the `[COMMAND.command]` section. This is currently only
      used for configuring the file handler for logging. If logging is disabled
      for the command, a :class:`NullHandler` will be returned, else a
      :class:`TimedRotatingFileHandler`.

    """
    section = '%s.command' % (command, )
    path = osp.join(gettempdir(), '%s.log' % (command, ))
    level = lg.DEBUG
    if self.has_section(section):
      key = 'log.disable'
      if self.has_option(section, key) and self.getboolean(section, key):
        return NullHandler()
      if self.has_option(section, 'log.path'):
        path = self.get(section, 'log.path') # Override default path.
      if self.has_option(section, 'log.level'):
        level = getattr(lg, self.get(section, 'log.level').upper())
    file_handler = TimedRotatingFileHandler(
      path,
      when='midnight', # Daily backups.
      backupCount=1,
      encoding='utf-8',
    )
    fmt = '%(asctime)s\t%(name)-16s\t%(levelname)-5s\t%(message)s'
    file_handler.setFormatter(lg.Formatter(fmt))
    file_handler.setLevel(level)
    return file_handler
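
For illustration, a hedged sketch of how this handler might be attached; 'download' and the surrounding config object are hypothetical, not taken from the original project:

import logging as lg

# config is assumed to be an instance of the class defining get_log_handler
handler = config.get_log_handler('download')  # reads the [download.command] section, if present
lg.getLogger('download').addHandler(handler)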
Exemplo n.º 11
  def get_file_handler(self, name):
    """Create and configure logging file handler.

    :param name: Section name used to find the path to the log file. If no
      `log` option exists in this section, the path will default to
      `<name>.log`.

    The default path can be configured via the `default.log` option in the
    `hdfs` section.

    """
    try:
      handler_path = self.parser.get(name, 'log')
    except (NoOptionError, NoSectionError):
      handler_path = osp.join(gettempdir(), '%s.log' % (name, ))
    try:
      handler = TimedRotatingFileHandler(
        handler_path,
        when='midnight', # daily backups
        backupCount=1,
        encoding='utf-8',
      )
    except IOError:
      wr.warn('Unable to write to log file at %s.' % (handler_path, ))
    else:
      handler_format = (
        '%(asctime)s | %(levelname)4.4s | %(name)s > %(message)s'
      )
      handler.setFormatter(lg.Formatter(handler_format))
      return handler
Exemplo n.º 12
def start():
    #NOTE bots is always on PYTHONPATH!!! - otherwise it will not start.
    #***command line arguments**************************
    configdir = 'config'
    for arg in sys.argv[1:]:
        if not arg:
            continue
        if arg.startswith('-c'):
            configdir = arg[2:]
            if not configdir:
                print 'Configuration directory indicated, but no directory name.'
                sys.exit(1)
        elif arg in ["?", "/?"] or arg.startswith('-'):
            showusage()
        else:
            showusage()
    
    #***init general: find location of bots, config files, init paths etc.***********************
    botsinit.generalinit(configdir)

    #***initialise logging. This logging only contains the logging from bots-webserver, not from cherrypy.
    botsglobal.logger = logging.getLogger('bots-webserver')
    botsglobal.logger.setLevel(logging.DEBUG)
    h = TimedRotatingFileHandler(botslib.join(botsglobal.ini.get('directories','logging'),'webserver.log'), backupCount=10)
    fileformat = logging.Formatter("%(asctime)s %(levelname)-8s: %(message)s",'%Y%m%d %H:%M:%S')
    h.setFormatter(fileformat)
    botsglobal.logger.addHandler(h)
    
    #***init cherrypy as webserver*********************************************
    #global configuration for cherrypy
    cherrypy.config.update({'global': {'log.screen': False, 'server.environment': botsglobal.ini.get('webserver','environment','production')}})
    #cherrypy handling of static files
    conf = {'/': {'tools.staticdir.on' : True,'tools.staticdir.dir' : 'media' ,'tools.staticdir.root': botsglobal.ini.get('directories','botspath')}}
    servestaticfiles = cherrypy.tree.mount(None, '/media', conf)    #None: no cherrypy application (as this only serves static files)
    #cherrypy handling of django
    servedjango = WSGIHandler()     #was: servedjango = AdminMediaHandler(WSGIHandler())  but django does not need the AdminMediaHandler in this setup. is much faster.
    #cherrypy uses a dispatcher in order to handle the serving of static files and django.
    dispatcher = wsgiserver.WSGIPathInfoDispatcher({'/': servedjango, '/media': servestaticfiles})
    botswebserver = wsgiserver.CherryPyWSGIServer(bind_addr=('0.0.0.0', botsglobal.ini.getint('webserver','port',8080)), wsgi_app=dispatcher, server_name=botsglobal.ini.get('webserver','name','bots-webserver'))
    botsglobal.logger.info(_(u'Bots web-server started.'))
    #handle ssl: cherrypy < 3.2 always uses pyOpenSSL. cherrypy >= 3.2 uses python built-in ssl (python >= 2.6 has built-in support for ssl).
    ssl_certificate = botsglobal.ini.get('webserver','ssl_certificate',None)
    ssl_private_key = botsglobal.ini.get('webserver','ssl_private_key',None)
    if ssl_certificate and ssl_private_key:
        if cherrypy.__version__ >= '3.2.0':
            adapter_class = wsgiserver.get_ssl_adapter_class('builtin')
            botswebserver.ssl_adapter = adapter_class(ssl_certificate,ssl_private_key)
        else:
            #but: pyOpenssl should be there!
            botswebserver.ssl_certificate = ssl_certificate
            botswebserver.ssl_private_key = ssl_private_key
        botsglobal.logger.info(_(u'Bots web-server uses ssl (https).'))
    else:
        botsglobal.logger.info(_(u'Bots web-server uses plain http (no ssl).'))
    
    #***start the cherrypy webserver.
    try:
        botswebserver.start()
    except KeyboardInterrupt:
        botswebserver.stop()
Exemplo n.º 13
    def __init__(self, envir):
        super(Logger, self).__init__(envir)
        logger = logging.getLogger()
        hdlr = TimedRotatingFileHandler("log/sensor.log", "midnight", 1, 10)
        formatter = logging.Formatter('\n%(levelname)-7s | %(asctime)-23s | === %(message)s ===')
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)
        logger.setLevel(logging.NOTSET)
        logger.info("begin")

        formatter = logging.Formatter('%(levelname)-7s | %(asctime)-23s | %(message)s')
        hdlr.setFormatter(formatter)
        self._logger = logger

        # Map events to handler callables; calling e.g. self.info("start")
        # here would log immediately and register None as the handler.
        from functools import partial
        handlers = {
            "start": partial(self.info, "start"),
            "post": partial(self.info, "post"),
            "register": partial(self.info, "register"),
            "start post": partial(self.info, "start post"),
            "call register": partial(self.info, "call register"),
            "set period": partial(self.info, "set period"),
            "cancel category": partial(self.info, "cancel category"),
            "display": partial(self.info, "display"),
            "init info": partial(self.info, "init"),
            "exit": self.exit,
            "warning": partial(self.warning, "warning"),
            "error": partial(self.error, "error"),
        }
        for event, handler in handlers.iteritems():
            self.listen(event, handler)
        self.state = "running"
        self.trace_IO()
Exemplo n.º 14
	def __init__(self, dbName, uri, dataPath, logPath):

		"""
		Initialization of a database.

		Args:
			dbName: Name of the database.
			uri: Database connection uri.
			dataPath: Path to where auxiliary file will be created.
			logPath: Path to where log files will be created.
		"""
		self.uri = uri
		self.dbName = dbName

		logFile = logPath + 'mongodb.log'
		handler = TimedRotatingFileHandler(logFile, when="midnight", backupCount=6)
		handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s : %(message)s'))

		logger.setLevel(logging.INFO)
		logger.addHandler(handler)

		try:
			self.client=pymongo.MongoClient(uri)
		except Exception as e:
			print ("Could not connect to MongoDB: %s" % e)

		self.jsonFile = dataPath + 'dataToUpdate.json'
		if not os.path.exists(self.jsonFile):
			with open(self.jsonFile, 'w+') as f:
				f.write('[]')
		self.existsDataToDump = self.dumpJson()
Exemplo n.º 15
def configure_loggers(min_level, log_file, log_format=None):
    DEFAULT_LOG_FORMAT = '%(asctime)s(%(name)s)[%(process)d--%(threadName)s]::'\
        '%(levelname)s - %(funcName)s(%(message)s)'
    blue_pref = '\x1b[' + BLUE
    red_pref = '\x1b[' + RED
    green_pref = '\x1b[' + GREEN
    yellow_pref = '\x1b[' + YELLOW
    suffix = '\x1b[0m'
    COLOR_LOG_FORMAT = '%(asctime)s(' + \
        blue_pref + '%(name)s' + suffix + \
        ')[%(process)d--%(threadName)s]::' + \
        red_pref + '%(levelname)s ' + suffix + '- ' + \
        green_pref + '%(funcName)s' + suffix + \
        yellow_pref + '(%(message)s)' + suffix

    if not os.path.exists(os.path.dirname(log_file)):
        os.makedirs(os.path.dirname(log_file))

    if log_format:
        _format = log_format
    else:
        if "NO_COLORS" in os.environ:
            _format = DEFAULT_LOG_FORMAT
        else:
            _format = COLOR_LOG_FORMAT
    logging.basicConfig(level=min_level, format=_format)

    formatter = logging.Formatter(DEFAULT_LOG_FORMAT)
    file_handler = TimedRotatingFileHandler(log_file, when='midnight')
    file_handler.setLevel(min_level)
    file_handler.setFormatter(formatter)

    logging.getLogger('').addHandler(file_handler)
    logging.getLogger('').setLevel(min_level)
Exemplo n.º 16
    def __init__(self, config, auth_portal, RouterPB = None, SMPPClientManagerPB = None):
        self.config = config
        # A dict of protocol instances for each of the current connections,
        # indexed by system_id 
        self.bound_connections = {}
        self._auth_portal = auth_portal
        self.RouterPB = RouterPB
        self.SMPPClientManagerPB = SMPPClientManagerPB

        # Setup statistics collector
        self.stats = SMPPServerStatsCollector().get(cid = self.config.id)
        self.stats.set('created_at', datetime.now())

        # Set up a dedicated logger
        self.log = logging.getLogger(LOG_CATEGORY_SERVER_BASE+".%s" % config.id)
        if len(self.log.handlers) != 1:
            self.log.setLevel(config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file, 
                when = self.config.log_rotate)
            formatter = logging.Formatter(config.log_format, config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            self.log.propagate = False

        self.msgHandler = self.submit_sm_event
Exemplo n.º 17
    def __init__(self, config, msgHandler = None):
        self.reconnectTimer = None
        self.smpp = None
        self.connectionRetry = True
        self.config = config

        # Setup statistics collector
        self.stats = SMPPClientStatsCollector().get(cid = self.config.id)
        self.stats.set('created_at', datetime.now())
                
        # Set up a dedicated logger
        self.log = logging.getLogger(LOG_CATEGORY_CLIENT_BASE+".%s" % config.id)
        if len(self.log.handlers) != 1:
            self.log.setLevel(config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file, 
                when = self.config.log_rotate)
            formatter = logging.Formatter(config.log_format, config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            self.log.propagate = False

        if msgHandler is None:
            self.msgHandler = self.msgHandlerStub
        else:
            self.msgHandler = msgHandler
Exemplo n.º 18
def create_application():
	global app

	if not config.check():
		return None

	if not os.path.exists(config.get('base', 'cache_dir')):
		os.makedirs(config.get('base', 'cache_dir'))

	app = Flask(__name__)
	app.secret_key = '?9huDM\\H'

	app.teardown_appcontext(teardown_db)

	if config.get('base', 'log_file'):
		import logging
		from logging.handlers import TimedRotatingFileHandler
		handler = TimedRotatingFileHandler(config.get('base', 'log_file'), when = 'midnight')
		handler.setLevel(logging.WARNING)
		app.logger.addHandler(handler)

	from supysonic import frontend
	from supysonic import api

	return app
Exemplo n.º 19
def run():
    """
    Run the server.
    """

    # Set up the logger.
    if not os.path.isdir(os.path.join(script_dir, 'logs')):
        os.makedirs(os.path.join(script_dir, 'logs'))
    # Format the logs.
    formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    # Enable the logs to split files at midnight.
    handler = TimedRotatingFileHandler(
            os.path.join(script_dir, 'logs', 'TorSpider.log'),
            when='midnight', backupCount=7, interval=1)
    handler.setLevel(app.config['LOG_LEVEL'])
    handler.setFormatter(formatter)
    log = logging.getLogger('werkzeug')
    log.setLevel(app.config['LOG_LEVEL'])
    log.addHandler(handler)
    app.logger.addHandler(handler)
    app.logger.setLevel(app.config['APP_LOG_LEVEL'])

    # Set up the app server, port, and configuration.
    port = int(environ.get('PORT', app.config['LISTEN_PORT']))
    addr = environ.get('LISTEN_ADDR', app.config['LISTEN_ADDR'])
    if app.config['USETLS']:
        context = (app.config['CERT_FILE'], app.config['CERT_KEY_FILE'])
        app.run(host=addr, port=port, threaded=True, ssl_context=context)
    else:
        app.run(host=addr, port=port, threaded=True)
Exemplo n.º 20
 def __init__(self,
              filename,
              when='h',
              interval=1,
              encoding=None,
              delay=False,
              utc=False,
              account_name=None,
              account_key=None,
              protocol='https',
              container='logs',
              zip_compression=False,
              max_connections=1,
              max_retries=5,
              retry_wait=1.0,
              is_emulated=False):
     meta = {'hostname': gethostname(), 'process': os.getpid()}
     TimedRotatingFileHandler.__init__(self,
                                       filename % meta,
                                       when=when,
                                       interval=interval,
                                       backupCount=1,
                                       encoding=encoding,
                                       delay=delay,
                                       utc=utc)
     _BlobStorageFileHandler.__init__(self,
                                      account_name=account_name,
                                      account_key=account_key,
                                      protocol=protocol,
                                      container=container,
                                      zip_compression=zip_compression,
                                      max_connections=max_connections,
                                      max_retries=max_retries,
                                      retry_wait=retry_wait,
                                      is_emulated=is_emulated)
Exemplo n.º 21
def create_timed_file_handler(level, format, ttl, filename, path):
	'''
	Creates a TimedRotatingFileHandler for the logging module
	that outputs log records to a file. This file will roll over
	given the ttl (time to live) which will create a new log file
	and back up the existing one.

	@param path The directory of the log file (e.g. /logs)
	@param filename The name of the log file (e.g. system.log)
	@param level The logging level of the file handler
	@param format The format of the file output for each LogRecord
	@param ttl The time to live for the log file before it rolls over

	@return A new TimedRotatingFileHandler
	'''
	# Create all the directories in the path; creating each path component
	# with os.mkdir would make them relative to the cwd instead of nested
	if path and not os.path.exists(path):
		os.makedirs(path)

	# Configure the TimedRotatingFileHandler
	file_handler = TimedRotatingFileHandler(path + '/' + filename, ttl)
	file_handler.setLevel(level)
	file_handler.setFormatter(logging.Formatter(format))
	return file_handler
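
A short usage sketch under assumed, illustrative values; 'midnight' is a valid 'when' value for the underlying TimedRotatingFileHandler:

import logging

handler = create_timed_file_handler(
    level=logging.INFO,
    format='%(asctime)s %(levelname)s %(message)s',
    ttl='midnight',
    filename='system.log',
    path='logs')
logging.getLogger(__name__).addHandler(handler)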
Exemplo n.º 22
def create_application():
	global app

	if not config.check():
		return None

	if not os.path.exists(config.get('webapp', 'cache_dir')):
		os.makedirs(config.get('webapp', 'cache_dir'))

	app = Flask(__name__)
	app.secret_key = '?9huDM\\H'

	app.teardown_appcontext(teardown_db)

	if config.get('webapp', 'log_file'):
		import logging
		from logging.handlers import TimedRotatingFileHandler
		handler = TimedRotatingFileHandler(config.get('webapp', 'log_file'), when = 'midnight')
		if config.get('webapp', 'log_level'):
			mapping = {
				'DEBUG':   logging.DEBUG,
				'INFO':    logging.INFO,
				'WARNING': logging.WARNING,
				'ERROR':   logging.ERROR,
				'CRITICAL': logging.CRITICAL
			}
			handler.setLevel(mapping.get(config.get('webapp', 'log_level').upper(), logging.NOTSET))
		app.logger.addHandler(handler)

	from supysonic import frontend
	from supysonic import api

	return app
Exemplo n.º 23
    def __init__(self, config):
        self.config = config

        # Check if callbacks are defined in the child class
        if self.callback is None:
            self.callback = self.throwing_callback
        if self.errback is None:
            self.errback = self.throwing_errback

        # Force these values to None since they must be defined through .addSmpps()
        self.smpps = None
        self.smpps_access = None

        # Set up a dedicated logger
        self.log = logging.getLogger(self.log_category)
        if len(self.log.handlers) != 1:
            self.log.setLevel(self.config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file,
                                               when=self.config.log_rotate)
            formatter = logging.Formatter(self.config.log_format, self.config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            self.log.propagate = False

        self.log.info('Thrower configured and ready.')
Exemplo n.º 24
    def _configLogs(self, sFile):
        """
        This method configures logging for this object.

        Two handlers are used: one printing to stdout and one logging to a file.
        For the file, a TimedRotatingFileHandler is used to rotate the log file every day.

        @param sFile: Log file name
        @type sFile: string
        @todo: Set log level from config or user input
        """
        config = CNBConfig.getInstance()

        # Print output if not in daemon mode
        if not config.get('global', 'daemon'):
            ch = logging.StreamHandler()
            ch.setFormatter(logging.Formatter(config.get('global', 'log-format')))
            self.log.addHandler(ch)

        # Write also to file
        fh = TimedRotatingFileHandler(\
            os.path.join(config.get('global', 'log-dir'), sFile), \
            backupCount=0, \
            when='d', \
            interval=1)
        fh.setFormatter(logging.Formatter(config.get('global', 'log-format')))
        self.log.addHandler(fh)
       
        # Must stay at DEBUG to log errors
        self.log.setLevel(logging.DEBUG)
Exemplo n.º 25
class RotatingLog(object):

    # Get a logger with the provided name when initializing
    # this class and use a handler that rotates the logfiles
    # based on time as seen below.
    def __init__(self, logger_name):
        self._logger = logging.getLogger(logger_name)

        # We only want one handler, so only add a handler
        # if there isn't already one configured. 
        if len(self._logger.handlers) == 0:
            # The messages shouldn't be sent to other logs 
            self._logger.propagate = False

            # We only use one logger and don't differentiate
            # between the importance of different messages
            # and therefore use DEBUG as the only log level.
            self._logger.setLevel(logging.DEBUG)

            # Rotate the log, if not empty, at midnight
            # and save up to 90 days of log-files.
            self._handler = TimedRotatingFileHandler(
                LOGFILE, when = ROTATE_TIME, backupCount = BACKUP_DAYS, encoding = u'UTF-8')
            
            self._handler.setLevel(logging.DEBUG)
            self._handler.setFormatter(logging.Formatter(LOGFORMAT, TIMEFORMAT))
            self._logger.addHandler(self._handler)

    # Write the message, if not empty, to the log-file
    def write(self, message):
        if not message.lstrip().rstrip() == u'':
            self._logger.debug(message)
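
An illustrative usage sketch; LOGFILE, ROTATE_TIME, BACKUP_DAYS, LOGFORMAT and TIMEFORMAT are module-level constants the class above relies on:

log = RotatingLog("my_service")
log.write("service started")  # logged at DEBUG level
log.write("   ")              # ignored: whitespace-only messages are skipped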
Exemplo n.º 26
    def __init__(self, RouterPB, SMPPClientManagerPB, config, interceptor=None):
        Resource.__init__(self)

        # Setup stats collector
        stats = HttpAPIStatsCollector().get()
        stats.set('created_at', datetime.now())

        # Set up a dedicated logger
        log = logging.getLogger(LOG_CATEGORY)
        if len(log.handlers) != 1:
            log.setLevel(config.log_level)
            handler = TimedRotatingFileHandler(filename=config.log_file, when=config.log_rotate)
            formatter = logging.Formatter(config.log_format, config.log_date_format)
            handler.setFormatter(formatter)
            log.addHandler(handler)
            log.propagate = False

        # Set http url routings
        log.debug("Setting http url routing for /send")
        self.putChild('send', Send(config, RouterPB, SMPPClientManagerPB, stats, log, interceptor))
        log.debug("Setting http url routing for /rate")
        self.putChild('rate', Rate(config, RouterPB, stats, log, interceptor))
        log.debug("Setting http url routing for /balance")
        self.putChild('balance', Balance(RouterPB, stats, log))
        log.debug("Setting http url routing for /ping")
        self.putChild('ping', Ping(log))
Exemplo n.º 27
 def __init__(self, config, SMPPClientManagerPB, RouterPB, loadConfigProfileWithCreds = {'username', 'password'}):
     self.config = config
     self.pb = {'smppcm': SMPPClientManagerPB, 'router': RouterPB}
     # Protocol sessions are kept here:
     self.sessions = {}
     self.sessionRef = 0
     self.sessionsOnline = 0
     # When defined, configuration profile will be loaded on startup
     self.loadConfigProfileWithCreds = loadConfigProfileWithCreds
           
     # Set up and configure a dedicated logger
     self.log = logging.getLogger('jcli')
     if len(self.log.handlers) != 1:
         self.log.setLevel(config.log_level)
         handler = TimedRotatingFileHandler(filename=self.config.log_file, 
             when = self.config.log_rotate)
         formatter = logging.Formatter(config.log_format, config.log_date_format)
         handler.setFormatter(formatter)
         self.log.addHandler(handler)
     
     
     # Init protocol
     self.protocol = lambda: JCliTelnetTransport(TelnetBootstrapProtocol,
                                                 insults.ServerProtocol,
                                                 JCliProtocol)
Exemplo n.º 28
        def configure_from_relative_path(path):
            config_path = os.path.join(ROOT, 'configs', path)

            with open(config_path) as fd:
                self.__config__ = json.load(fd)

            logger.setLevel(getattr(logging, self.get('log_level', 'INFO')))

            formatter = logging.Formatter(
                '[L:%(lineno)d]# %(levelname)-8s [%(asctime)s]  %(message)s',
                datefmt='%d-%m-%Y %H:%M:%S'
            )

            # StreamHandler
            """
            sh = logging.StreamHandler()
            sh.setFormatter(formatter)
            logger.addHandler(sh)
            """

            # FileHandler
            fh = TimedRotatingFileHandler(
                os.path.join(ROOT, 'logs', 'server.log'),
                when="midnight"
            )
            fh.setFormatter(formatter)
            logger.addHandler(fh)
Exemplo n.º 29
def configure_logging(app_mode, app):
    logHandler = None
    if app_mode == 'DEBUG':
        # create console handler
        logHandler = logging.StreamHandler()
    elif app_mode == 'PROD':
        # create file time rotating handler
        logHandler = TimedRotatingFileHandler(
            filename=os.environ.get('APP_LOG_FILENAME', 'app.log'),
            when='D',
            backupCount=5,
            encoding='UTF-8'
        )
    if logHandler is None:
        return
    logHandler.setLevel(logging.DEBUG)
    logHandler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(name)-10s %(levelname)-7s %(message)s',
        datefmt='%H:%M:%S'))
    # get root logger
    logger = logging.getLogger()
    logger.addHandler(logHandler)
    logger.setLevel(logging.DEBUG)
    app.logger.addHandler(logHandler)
    app.logger.setLevel(logging.DEBUG)
    return
Exemplo n.º 30
from flask import Flask
import logging
from logging.handlers import TimedRotatingFileHandler

from flask import request

server_port = 5000
app = Flask(__name__)
formatter = logging.Formatter("%(asctime)s - %(levelname)-7s - %(message)s")
handler = TimedRotatingFileHandler('log/flask_server.log',
                                   when="midnight",
                                   interval=1,
                                   encoding='utf8')
handler.suffix = "%Y-%m-%d"
handler.setFormatter(formatter)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(handler)

mylogger = logging.getLogger('myapp')
my_formatter = logging.Formatter(
    "[%(asctime)s][%(name)s][%(process)d][%(thread)d][%(message)s][[in %(pathname)s:%(lineno)d]"
)
my_handler = TimedRotatingFileHandler("log/flask_app.log",
                                      when="D",
                                      interval=1,
                                      backupCount=15,
                                      encoding="UTF-8",
                                      delay=False,
                                      utc=True)
app.logger.addHandler(handler)
Exemplo n.º 31
"""
Hi, I am ObsPy's docs deploy bot. I request github runs and
extract uploaded doc artifacts to the ObsPy server.
Outdated PR docs older than 90 days will be deleted.
"""
import logging
from logging.handlers import TimedRotatingFileHandler
import sched
import signal
import sys

import requests

handlers = [TimedRotatingFileHandler('log.txt', 'D', 30, 5)]
format_ = '%(levelname)s:%(name)s:%(asctime)s %(message)s'
datefmt = '%Y-%m-%d %H:%M:%S'
logging.basicConfig(level=logging.INFO,
                    format=format_,
                    datefmt=datefmt,
                    handlers=handlers)

from deploy_docs import deploy
from remove_old_pr_docs import remove_old_docs

log = logging.getLogger('docsdeploybot')
log.info(' '.join(__doc__.strip().splitlines()))
T1 = 60
T2 = 24 * 3600


def sdeploy():
Exemplo n.º 32
def access_to_website(url):
    chrome.get(url)


# To send an email after the export
from email import encoders
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# Save the logs to a file and handle log rotation

formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
rotation_handler = TimedRotatingFileHandler(
    filename=r"C://Users//alain.singaye//Documents//GLPI//Logs//log.txt",
    when='H',
    interval=5)
rotation_handler.setFormatter(formatter)
logger = logging.getLogger()
logger.addHandler(rotation_handler)
logger.setLevel(logging.DEBUG)

# The folder where the export will be saved.
download_dir = r"C:\Users\alain.singaye\Documents\GLPI"
chrome_options = webdriver.ChromeOptions()
path = r'C:/bin/chromedriver.exe'
preferences = {
    "download.default_directory": download_dir,
    "directory_upgrade": True,
    "safebrowsing.enabled": True,
    "plugins.always_open_pdf_externally": True
Exemplo n.º 33
    def __init__(
        self,
        name: str,
        filename: str = None,
        root: str = None,
        cmd_output: bool = True,
        level: str = "INFO",
        colors=None,
        database=None,
        excludes=None,
        config={}
    ):
        self.date_str: str = ""
        self.database_name: str = database
        self.database = None
        self.excludes = excludes
        self.config = config

        if "ALPHA_LOG_CMD_OUTPUT" in os.environ:
            cmd_output = "Y" in os.environ["ALPHA_LOG_CMD_OUTPUT"].upper()

        if filename is None:
            filename = name
        if root is None:
            """
            parentframe     = inspect.stack()[1]
            module          = inspect.getmodule(parentframe[0])
            root            = os.path.abspath(module.__file__).replace(module.__file__,'')"""
            root = _utils.get_alpha_logs_root()

        self.root = _utils.check_root(root)
        log_path = self.root + os.sep + filename + ".log"

        # Create logger
        self.logger = logging.getLogger(name)
        self.set_level(level)

        # File handler
        if config is not None and len(config) != 0:
            handler = TimedRotatingFileHandler(
                log_path, **config
            )
        else:
            handler = TimedRotatingFileHandler(
                log_path, when="midnight", interval=1, backupCount=90
            )

        if PLATFORM == "windows":
            handler = ConcurrentRotatingFileHandler(log_path, "a", 512 * 1024, 5)
        # handler.suffix  = "%Y%m%d"

        self.logger.addHandler(handler)

        if cmd_output:
            handler = logging.StreamHandler(sys.stdout)
            if colors:
                handler.addFilter(_colorations.ColorFilter(colors))
            self.logger.addHandler(handler)

        if self.excludes and len(self.excludes):
            self.logger.addFilter(
                NoParsingFilter(excludes=self.excludes, level=self.level)
            )

        self.pid = os.getpid()
        self.name = name
        # self.cmd_output     = cmd_output if cmd_output is not None else True

        self.last_level = None
        self.last_message = None
Exemplo n.º 34
from lexi.core.util.util import read_blacklist_words
from lexi.server.util import statuscodes
from lexi.server.util.html import process_html
from lexi.server.util.communication import make_response

SCRIPTDIR = os.path.dirname(os.path.realpath(__file__))

# os.makedirs(MODELS_DIR, exist_ok=True)

# LOGGING
# os.makedirs(LOG_DIR, exist_ok=True)
logger = logging.getLogger('lexi')
log_level = logging.DEBUG
# get logging level from CL argument, if set
logger.setLevel(log_level)
fh = TimedRotatingFileHandler(LOG_DIR+'/lexi.log', when="midnight",
                              interval=1, encoding="UTF-8")
fh.suffix = "%Y-%m-%d"
fh.setLevel(log_level)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(log_level)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - '
                              '{%(filename)s:%(lineno)d} '
                              '%(levelname)s - %(message)s')
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(fh)
logger.addHandler(ch)
Exemplo n.º 35
from logging.handlers import TimedRotatingFileHandler
import logging
import sys
import os

if not os.path.exists('logs'):
    os.mkdir('logs')

if not os.path.exists('logs/developer_entry_task.log'):
    open("logs/developer_entry_task.log", "w+").close()

formatter = logging.Formatter(
    '[%(levelname)s] - %(name)s - %(asctime)s - %(message)s')
file_handler = TimedRotatingFileHandler('logs/developer_entry_task.log',
                                        when='midnight',
                                        backupCount=20)
file_handler.setFormatter(formatter)
file_handler.suffix = "%Y-%m-%d"


def get_logger(name):
    log = logging.getLogger(name)
    if '--debug' not in sys.argv:
        log.setLevel(logging.INFO)
    else:
        log.setLevel(logging.DEBUG)
    log.addHandler(file_handler)
    return log
Exemplo n.º 36
    os.environ['USERNAME'] = user
    os.environ['UID'] = "%s" % uid
    os.environ['GID'] = "%s" % gid


cnt = 0
url = "http://localhost/go-to-bed/"
pp = pprint.PrettyPrinter(indent=4)
active_crons = {}
testing = False
logger = logging.getLogger("go-to-bed")
logger.setLevel(logging.INFO)
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
handler = TimedRotatingFileHandler(os.path.expanduser("~/.go-to-bed.log"),
                                   when="midnight",
                                   backupCount=20)
handler.setFormatter(formatter)
logger.addHandler(handler)
session_re = re.compile(r"(Session[0-9]+):")
var_val_re = re.compile(r"\t(.*)\s=\s'(.*)'")
var_val_bool_re = re.compile(r"\t(.*)\s=\s(TRUE|FALSE)")
ps_re = re.compile(r"(\d+)\s(.*?)\s+(.*)")

if "--test" in sys.argv:
    testing = True
    idx = sys.argv.index("--test")
    if len(sys.argv) > idx + 1:
        testing = sys.argv[idx + 1]
    logger.info("testing:%s", testing)
Exemplo n.º 37
import pickle
import urllib3
from bs4 import BeautifulSoup
import time
import logging
from logging.handlers import TimedRotatingFileHandler
from twilio.rest import Client

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

handler = TimedRotatingFileHandler('########/OINP.log',
                                   when="midnight",
                                   interval=1)

formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)

logger.addHandler(handler)

account_sid = "########"
auth_token = "########"

pickle_in = open("#########/OINP.pickle", "rb")
archieve = pickle.load(pickle_in)
pickle_in.close()

for i in range(26):
    try:
        page_url = 'http://www.ontarioimmigration.ca/en/pnp/OI_PNPNEW.html'
Exemplo n.º 38
        """
        add some params for honeypot
        """
        msg['machine_id'] = os.getenv("MACHINE_ID", "")
        msg['honey_name'] = os.getenv("HONEY_NAME", "")
        msg['honey_type'] = os.getenv("HONEY_TYPE", "")
        msg['dst_ip'] = os.getenv("DOCKER_HOST", "")
        msg['dst_port'] = os.getenv("OUT_PORT", "")
        msg = json.dumps(msg)
        return msg


logger = logging.getLogger()
logger.addHandler(logging.StreamHandler())
logger.addHandler(
    TimedRotatingFileHandler(filename=filename, when='D', backupCount=3))

logging = TophantLoggerAdapter(logger, '')

from rdpy.core import rss
from rdpy.protocol.rdp import rdp
from twisted.internet import reactor


class HoneyPotServer(rdp.RDPServerObserver):
    def __init__(self, controller, rssFileSizeList):
        """
        @param controller: {RDPServerController}
        @param rssFileSizeList: {Tuple} Tuple(Tuple(width, height), rssFilePath)
        """
        rdp.RDPServerObserver.__init__(self, controller)
Exemplo n.º 39
import logging
from logging.handlers import TimedRotatingFileHandler
import time

from volta_plus.models import VoltaNetwork

logging.basicConfig(level=logging.WARNING,
                    format='[%(levelname)s][%(asctime)s] %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S',
                    handlers=[
                        TimedRotatingFileHandler('volta_plus.log',
                                                 when='midnight',
                                                 backupCount=3,
                                                 utc=True)
                    ])

if __name__ == '__main__':
    volta_network = VoltaNetwork(poor=True)

    while True:
        try:
            volta_network.update()
            logging.debug("updated Volta Network")
            time.sleep(15)
        except Exception as e:
            logging.exception(e)
            time.sleep(30)
Exemplo n.º 40
import logging
from logging.handlers import TimedRotatingFileHandler
from logging.handlers import RotatingFileHandler
import traceback
import queue
import tkinter as tk
from tkinter.scrolledtext import ScrolledText
from email_crawler import DEFAULT_SITE, crawl, crawler_main, export_emails, OutputUIInterface

# Debugging
# import pdb;pdb.set_trace()

# Logging
#logging.config.dictConfig(LOGGING)
logger = logging.getLogger("crawler_logger")
logger.setLevel(logging.INFO)
handler = TimedRotatingFileHandler('logs/log','midnight',1,30)
formatter = logging.Formatter('%(asctime)s %(name)-2s %(levelname)-2s %(message)s','%y-%m-%d %H:%M:%S')
handler.setFormatter(formatter)
logger.addHandler(handler)

global main_window
ui_callback_queue = queue.Queue()

class OutputUI(OutputUIInterface):
	def __init__(self, ctrl: tk.Text):
		self._ctrl = ctrl

	def append(self, ls):
		for line in ls:
			self.append_line(line)
Exemplo n.º 41
def setup_logging():
    log_dir = create_log_dir()
    logging.root.setLevel(logging.DEBUG)

    # StreamHandler logging to console (stderr) at level INFO
    console_handler = StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(Formatter(CONSOLE_FORMAT))
    logging.root.addHandler(console_handler)

    # FileHandler logging to 'debug.log' at level DEBUG
    debug_file_handler = TimedRotatingFileHandler(
        os.path.join(log_dir, 'debug.log'), when='midnight', backupCount=30)
    debug_file_handler.setLevel(logging.DEBUG)
    debug_file_handler.setFormatter(file_formatter)
    logging.root.addHandler(debug_file_handler)

    # FileHandler logging to 'info.log' at level INFO
    info_file_handler = TimedRotatingFileHandler(
        os.path.join(log_dir, 'info.log'), when='midnight', backupCount=30)
    info_file_handler.setLevel(logging.INFO)
    info_file_handler.setFormatter(file_formatter)
    logging.root.addHandler(info_file_handler)

    # FileHandler logging to 'warn.log' at level WARN
    warn_file_handler = TimedRotatingFileHandler(
        os.path.join(log_dir, 'warn.log'), when='midnight', backupCount=30)
    warn_file_handler.setLevel(logging.WARN)
    warn_file_handler.setFormatter(file_formatter)
    logging.root.addHandler(warn_file_handler)

    # Only log messages from the requests library with at least level WARN
    requests_logger = logging.getLogger('requests')
    requests_logger.setLevel(logging.WARN)
Exemplo n.º 42
from flask_moment import Moment
from werkzeug.http import HTTP_STATUS_CODES

from get_staticfeature import find_doc, preprocess_doc
from data_processor import DataProcess
from model import Model, ModelConfig
from feature_extrator import FeatureExtrator
from test_data_collator import TestDataCollator

app = Flask(__name__)
app.config['SECRET_KEY'] = 'dev'

if __name__ != '__main__':
    gunicorn_logger = logging.getLogger('gunicorn.error')
    logHandler = TimedRotatingFileHandler('logs/prod_pipeline.log',
                                          when='midnight',
                                          interval=1,
                                          backupCount=30)
    formatter = logging.Formatter(
        '[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s')
    logHandler.setFormatter(formatter)
    gunicorn_logger.addHandler(logHandler)
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)

bootstrap = Bootstrap(app)
moment = Moment(app)


def error_response(status_code, message=None):
    payload = {'error': HTTP_STATUS_CODES.get(status_code, 'Unknown error')}
    if message:
Exemplo n.º 43
from logging.handlers import TimedRotatingFileHandler
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from tempfile import NamedTemporaryFile
from secrets import *

# Setup logger
logging.basicConfig(
    encoding='utf-8',
    datefmt="%d-%m-%y %H:%M:%S",
    level=logging.INFO,
)
logger = logging.getLogger('moebot')
handler = TimedRotatingFileHandler(
    filename=f'moebot-{strftime("%d-%m-%y")}.log',
    when="D",
    interval=1,
    backupCount=5,
    encoding='utf-8',
    delay=False)
handler.setFormatter(fmt=Formatter(
    "%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)s] %(message)s"))
logger.addHandler(handler)


async def main() -> None:
    """Main loop of the entire bot"""
    logger.info("Starting main loop...")

    # Avoid rate limiting
    retries = 0
Exemplo n.º 44
# -*- coding: utf-8 -*-

import logging

from logging.handlers import TimedRotatingFileHandler

from scihub_eva.utils.path_utils import *

DEFAULT_LOGGER = logging.getLogger('default')
DEFAULT_LOGGER.setLevel(logging.INFO)

DEFAULT_LOG_DIRECTORY = logs_dir()
DEFAULT_LOG_FILE = DEFAULT_LOG_DIRECTORY / 'SciHubEVA.log'
DEFAULT_LOG_HANDLER = TimedRotatingFileHandler(
    DEFAULT_LOG_FILE.resolve().as_posix(), when='d', encoding='utf-8')
DEFAULT_LOG_HANDLER.setLevel(logging.INFO)

DEFAULT_LOG_FORMATTER = logging.Formatter(
    '%(asctime)s - %(levelname)s - %(message)s')
DEFAULT_LOG_HANDLER.setFormatter(DEFAULT_LOG_FORMATTER)

DEFAULT_LOGGER.addHandler(DEFAULT_LOG_HANDLER)

LOGGER_SEP = '-' * 100


def format_log_message(message):
    return DEFAULT_LOG_FORMATTER.format(message)


__all__ = [
Exemplo n.º 45
def initLogging():
	if not os.path.exists("log"):
		os.mkdir("log")
	logger.setLevel(logging.DEBUG)
	hConsole = logging.StreamHandler()
	# rotating every wednesday at 04:00
	rotSettings = {"when": "W2", "interval" : 1, "atTime": datetime.time(4, 0), 
								"backupCount": 10}
	formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(threadName)s: %(message)s')

	hDebug = TimedRotatingFileHandler("log/debug.txt", **rotSettings)
	hInfo  = TimedRotatingFileHandler("log/info.txt",  **rotSettings)
	hError = TimedRotatingFileHandler("log/error.txt", **rotSettings)
	hCrit  = TimedRotatingFileHandler("log/crit.txt",  **rotSettings)
	
	hConsole.setLevel(logging.INFO)
	hDebug.setLevel(logging.DEBUG)
	hInfo.setLevel(logging.INFO)
	hError.setLevel(logging.ERROR)
	hCrit.setLevel(logging.CRITICAL)

	for h in [hConsole, hDebug, hInfo, hError, hCrit]:
		h.setFormatter(formatter)
		logger.addHandler(h)

	# init performance logger
	perfLogger.setLevel(logging.DEBUG)
	hPerf = TimedRotatingFileHandler("log/perf.txt", **rotSettings)
	hPerf.setLevel(logging.DEBUG)
	perfFormatter = logging.Formatter('%(asctime)s - %(levelname)s - %(threadName)s: %(message)s')
	hPerf.setFormatter(perfFormatter)
	perfLogger.addHandler(hPerf)
	perfLogger.propagate = False
Exemplo n.º 46
def init_logger(logger_name, logging_path):
    if logger_name not in Logger.manager.loggerDict:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.DEBUG)
        handler = TimedRotatingFileHandler(filename=logging_path + "/all.log",
                                           when='D',
                                           backupCount=7)
        datefmt = '%Y-%m-%d %H:%M:%S'
        format_str = '[%(asctime)s]: %(name)s %(filename)s[line:%(lineno)s] %(levelname)s  %(message)s'
        formatter = logging.Formatter(format_str, datefmt)
        handler.setFormatter(formatter)
        handler.setLevel(logging.INFO)
        logger.addHandler(handler)
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(formatter)
        logger.addHandler(console)

        handler = TimedRotatingFileHandler(filename=logging_path +
                                           "/error.log",
                                           when='D',
                                           backupCount=7)
        datefmt = '%Y-%m-%d %H:%M:%S'
        format_str = '[%(asctime)s]: %(name)s %(filename)s[line:%(lineno)s] %(levelname)s  %(message)s'
        formatter = logging.Formatter(format_str, datefmt)
        handler.setFormatter(formatter)
        handler.setLevel(logging.ERROR)
        logger.addHandler(handler)
    logger = logging.getLogger(logger_name)
    return logger
Exemplo n.º 47
def init_logger(logger_name, log_path=PROJ_LOG_FILE):
    if logger_name not in Logger.manager.loggerDict:
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.DEBUG)  # set the lowest level
        df = '%Y-%m-%d %H:%M:%S'
        format_str = '[%(asctime)s]: %(name)s %(levelname)s %(lineno)s %(message)s'
        formatter = logging.Formatter(format_str, df)
        # handler all
        try:
            handler1 = TimedRotatingFileHandler(log_path, when='D', interval=1, backupCount=7)
        except Exception:
            raise IOError('log path error !')
        handler1.setFormatter(formatter)
        handler1.setLevel(logging.DEBUG)
        logger.addHandler(handler1)
        # handler error
        try:
            handler2 = TimedRotatingFileHandler(log_path + '.wf', when='D', interval=1, backupCount=7)
        except Exception:
            raise IOError('log path error !')
        handler2.setFormatter(formatter)
        handler2.setLevel(logging.ERROR)
        logger.addHandler(handler2)

        # console
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        # set the log output format
        console.setFormatter(formatter)
        # add the configured console handler to the logger
        logger.addHandler(console)
    
    logger = logging.getLogger(logger_name)
    return logger
Exemplo n.º 48
def get_file_handler(logger_name):
    file_handler = TimedRotatingFileHandler(logger_name, when="midnight", backupCount=7)  # Only keep 7 previous logs.
    file_handler.setFormatter(FORMATTER)
    return file_handler
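
A short usage sketch; FORMATTER is the module-level formatter this helper relies on, and the logger_name argument doubles as the log file path:

import logging

logger = logging.getLogger("app")
logger.setLevel(logging.INFO)
logger.addHandler(get_file_handler("app.log"))
logger.info("rotating file handler attached")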
Exemplo n.º 49
# from logging.handlers import RotatingFileHandler
from logging.handlers import TimedRotatingFileHandler
import logging

loglevel = logging.INFO
logfile = "./autodial.log"

# logHandler = RotatingFileHandler(logfile, mode='a', maxBytes=50*1024*1024, backupCount=10, encoding=None, delay=0)
logHandler = TimedRotatingFileHandler(logfile,
                                      when='D',
                                      interval=1,
                                      backupCount=20)
#logFormatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-6s %(message)s')
logFormatter = logging.Formatter(
    '%(asctime)s %(filename)-12s[line:%(lineno)d] %(thread)d %(levelname)s %(message)s'
)

logHandler.setFormatter(logFormatter)
logger = logging.getLogger('')
logger.addHandler(logHandler)
logger.setLevel(loglevel)
Exemplo n.º 50
0
    def create_logger(
            self,
            log_file_name='{0}/e2etest_running.log'.format(config.BASE_DIR),
            log_level=logging.DEBUG,
            log_date_format='%Y-%m-%d %H:%M:%S%z',
            log_formater='%(asctime)s %(filename)s:%(funcName)s %(levelname)s [line:%(lineno)d] %(message)s',
            max_log_files=3,
            one_day_one_file=True,
            max_log_file_size=10485760,
            log_to_standard_output=False):
        '''
        @summary: create the logger
        @param log_file_name: the log file name, should be an absolute path. default value is config.BASE_DIR/e2etest_running.log
                             if the value is None or "", print the log to standard output
        @param log_level: Integer of the log level. default value is logging.DEBUG
        @param max_log_files: the max number of files. It is valid when one_day_one_file equal False. default value is 3
        @param one_day_one_file: whether only create a file in one day. default value is True, one day one log file
        @param max_log_file_size: the max size of the log file. unit is byte. default value is 10 MB
        @param log_date_format: String of log date format. default value is '%Y-%m-%d %H:%M:%S%z', like 2017-06-01 11:44:06+0000
        @param log_to_standard_output: whether to also print logs to standard output in addition to the log file
        @return: the logger
        '''
        # initialize log file
        if log_file_name:
            log_file_name = os.path.abspath(
                log_file_name)  # change path to absolute path
            if not os.path.exists(os.path.dirname(log_file_name)):
                os.makedirs(os.path.dirname(log_file_name))

        # write log into file or standard output
        if log_file_name and isinstance(log_file_name, str):
            # write log to file
            logger = logging.getLogger(log_file_name)
            logger.setLevel(log_level)

            # write a new log file every day
            if one_day_one_file:
                Rthandler = TimedRotatingFileHandler(log_file_name,
                                                     when='D',
                                                     backupCount=max_log_files)
            else:
                Rthandler = RotatingFileHandler(log_file_name,
                                                maxBytes=max_log_file_size,
                                                backupCount=max_log_files)
            formatter = logging.Formatter(fmt=log_formater,
                                          datefmt=log_date_format)
            Rthandler.setFormatter(formatter)
            logger.addHandler(Rthandler)

            # also write log to standard output
            if log_to_standard_output:
                console = logging.StreamHandler()
                console.setLevel(log_level)
                console.setFormatter(formatter)
                logger.addHandler(console)

        # write log to standard output by default
        else:
            logging.basicConfig(level=log_level,
                                format=log_formater,
                                datefmt=log_date_format)
            logger = logging

        return logger
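
A hedged usage sketch for create_logger above; the instance name and file path are hypothetical, not taken from the original project:

# hypothetical call on an instance of the class that defines create_logger
logger = helper.create_logger(log_file_name='/tmp/e2e/run.log',
                              log_level=logging.INFO,
                              one_day_one_file=True,
                              log_to_standard_output=True)
logger.info("written to a daily-rotated file and echoed to the console")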
Exemplo n.º 51
0
# coding=utf-8
Exemplo n.º 52
0
db = SQLA(app)

cache = Cache(app, config=app.config.get('CACHE_CONFIG'))

migrate = Migrate(app, db, directory=APP_DIR + "/migrations")

# Logging configuration
logging.basicConfig(format=app.config.get('LOG_FORMAT'))
logging.getLogger().setLevel(app.config.get('LOG_LEVEL'))

if app.config.get('ENABLE_TIME_ROTATE'):
    logging.getLogger().setLevel(app.config.get('TIME_ROTATE_LOG_LEVEL'))
    handler = TimedRotatingFileHandler(
        app.config.get('FILENAME'),
        when=app.config.get('ROLLOVER'),
        interval=app.config.get('INTERVAL'),
        backupCount=app.config.get('BACKUP_COUNT'))
    logging.getLogger().addHandler(handler)


class MyIndexView(IndexView):
    @expose('/')
    def index(self):
        return redirect('/caravel/welcome')


appbuilder = AppBuilder(
    app,
    db.session,
    base_template='caravel/base.html',
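
The TimedRotatingFileHandler block above is driven entirely by app config; a sketch of what those keys might contain (illustrative values only, not taken from the project):

# illustrative config values assumed by the ENABLE_TIME_ROTATE block above
ENABLE_TIME_ROTATE = True
FILENAME = '/var/log/caravel/caravel.log'
ROLLOVER = 'midnight'   # any 'when' value accepted by TimedRotatingFileHandler
INTERVAL = 1
BACKUP_COUNT = 30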
Exemplo n.º 53
0
import ConfigParser
import logging
import os
import sys
import json
import urllib
import commands
import urllib2
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler

# log
LOG_FILE = "/home/opvis/opvis_agent/agent_service/log/update.log"
logger = logging.getLogger()
logger.setLevel(logging.INFO)
fh = TimedRotatingFileHandler(LOG_FILE, when='D', interval=1, backupCount=30)
datefmt = '%Y-%m-%d %H:%M:%S'
format_str = '%(asctime)s %(levelname)s %(message)s '
formatter = logging.Formatter(format_str, datefmt)
fh.setFormatter(formatter)
logger.addHandler(fh)


def monitor():
    with open("/home/opvis/opvis_agent/agent_service/agent.lock", "r") as fd:
        jifangip = fd.read()
    url_base = "http://" + jifangip
    # fetch the MD5 of authorized_keys
    get_md5_url = url_base + "/hostPlugInOperation/getHostTrustRelationshipMD5"
    print(get_md5_url)
    # report the returned result
Exemplo n.º 54
0
    def doRollover(self):
        TimedRotatingFileHandler.doRollover(self)
        gid = grp.getgrnam('docker').gr_gid
        os.chown(self.baseFilename, -1, gid)
        os.chmod(self.baseFilename, 0o664)
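
The doRollover override above is shown without its class; a self-contained sketch of the surrounding handler, assuming it simply subclasses TimedRotatingFileHandler (the class name here is hypothetical), might look like:

import grp
import os
from logging.handlers import TimedRotatingFileHandler

class DockerGroupRotatingHandler(TimedRotatingFileHandler):
    """Assumed wrapper: after each rollover, hand the new log file to the
    'docker' group and make it group-writable."""

    def doRollover(self):
        TimedRotatingFileHandler.doRollover(self)
        gid = grp.getgrnam('docker').gr_gid
        os.chown(self.baseFilename, -1, gid)   # -1 keeps the current owner
        os.chmod(self.baseFilename, 0o664)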
Exemplo n.º 55
0
from dockerplace import app

if __name__ == '__main__':
    if not app.debug:
        import logging
        from logging.handlers import TimedRotatingFileHandler
        file_handler = TimedRotatingFileHandler(
            "dockerplace.log",
            when="D",
            backupCount=10)
        file_handler.setLevel(logging.WARNING)
        app.logger.addHandler(file_handler)

    app.debug = app.config['DEBUG']
    app.run(port=app.config['PORT'])
Exemplo n.º 56
0
def get_file_handler():
    file_handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    file_handler.setFormatter(FORMATTER)
    return file_handler
Exemplo n.º 57
0
    logging.basicConfig(
        format=
        '%(asctime)s %(levelname)s %(module)s - %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S")
    logger = logging.getLogger()

    lshandler = None

    if os.environ["USE_LOGSTASH"] == "true":
        logger.info("Adding logstash appender")
        lshandler = AsynchronousLogstashHandler(
            "logstash", 5001, database_path='logstash_test.db')
        lshandler.setLevel(logging.ERROR)
        logger.addHandler(lshandler)

    handler = TimedRotatingFileHandler("logs/" + MODULE + ".log",
                                       when="d",
                                       interval=1,
                                       backupCount=30)

    logFormatter = logging.Formatter(
        '%(asctime)s.%(msecs)03d %(levelname)s %(module)s - %(funcName)s: %(message)s'
    )
    handler.setFormatter(logFormatter)
    logger.addHandler(handler)

    logger.info("==============================")
    logger.info("Starting: %s" % MODULE)
    logger.info("Module:   %s" % (VERSION))
    logger.info("==============================")

    #>> AMQC
    server = {
Exemplo n.º 58
0
        influx_port = int(influx_port)
else:
    raise Exception("Wasn't able to find the 'InfluxDB' section in the config")

if influx_ip is None or influx_port is None or influx_token is None or influx_org is None or influx_bucket is None:
    raise Exception(
        "At least one piece of Influx connection information is missing from the config"
    )

if "Modem" in config:
    modem_ip = config["Modem"].get("ip-address")
    modem_username = config["Modem"].get("username")
    modem_password = config["Modem"].get("password")
else:
    raise Exception("Wasn't able to find the 'Modem' section in the config")

if modem_ip is None or modem_username is None or modem_password is None:
    raise Exception(
        "At least one piece of Modem connection information is missing from the config"
    )

logger = logging.getLogger("Rotating Error Log")
logger.setLevel(logging.ERROR)
handler = TimedRotatingFileHandler("dsl-stats-to-influxdb-3.log",
                                   when="midnight",
                                   backupCount=5)
formatter = logging.Formatter(fmt="%(asctime)s - %(levelname)s - %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
main()
Exemplo n.º 59
0
if not LOG_FILE_FOLDER_DIR.exists():
    os.mkdir(LOG_FILE_FOLDER_DIR)
app.config['UPLOAD_FOLDER_DIR'] = UPLOAD_FOLDER_DIR
app.config['LOG_FILE_FOLDER_DIR'] = LOG_FILE_FOLDER_DIR
# configure logging
logFormatter = logging.Formatter(
    "[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s][%(threadName)s] - %(message)s"
)
rootLogger = logging.getLogger('werkzeug')
rootLogger.handlers.clear()

# time rotating handler
rotatingHandler = TimedRotatingFileHandler(Path(base_dir, 'logs',
                                                'takehome.log'),
                                           when="D",
                                           interval=1,
                                           backupCount=30,
                                           encoding='utf-8')
rotatingHandler.setFormatter(logFormatter)
rotatingHandler.setLevel(logging.DEBUG)
logging.getLogger().handlers.clear()
logging.getLogger().addHandler(rotatingHandler)
# stream handler
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
consoleHandler.setLevel(logging.INFO)
rootLogger.addHandler(consoleHandler)

# ORM model file
DB_MODEL_FILE = Path(base_dir, 'backend', 'sqllib', 'models', 'db_model.py')
Exemplo n.º 60
-1
def configure_logging(app):
    subject = '[Error] %s encountered errors on %s' % (app.config['DOMAIN'], datetime.now().strftime('%Y/%m/%d'))
    subject += (' [DEV]' if app.debug else '')
    mail_config = [(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                   app.config['MAIL_DEFAULT_SENDER'], app.config['ADMINS'],
                   subject,
                   (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])]
    if app.config['MAIL_USE_SSL']:
        mail_handler = SSLSMTPHandler(*mail_config)
    else:
        mail_handler = SMTPHandler(*mail_config)

    mail_handler.setLevel(logging.ERROR)
    app.logger.addHandler(mail_handler)

    formatter = logging.Formatter(
        '%(asctime)s %(process)d-%(thread)d %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')

    debug_log = os.path.join(app.root_path, app.config['DEBUG_LOG'])
    debug_file_handler = TimedRotatingFileHandler(debug_log, when='midnight', interval=1, backupCount=90)
    debug_file_handler.setLevel(logging.DEBUG)
    debug_file_handler.setFormatter(formatter)
    app.logger.addHandler(debug_file_handler)

    error_log = os.path.join(app.root_path, app.config['ERROR_LOG'])
    error_file_handler = TimedRotatingFileHandler(error_log, when='midnight', interval=1, backupCount=90)
    error_file_handler.setLevel(logging.ERROR)
    error_file_handler.setFormatter(formatter)
    app.logger.addHandler(error_file_handler)

    # When Flask runs in production mode it only logs ERROR by default; lower the level here so INFO is recorded as well
    if not app.config['DEBUG']:
        app.logger.setLevel(logging.INFO)
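
SSLSMTPHandler above appears to be a project-specific subclass; for mail servers that use STARTTLS rather than implicit SSL, the standard library SMTPHandler can be wired in directly. A sketch with placeholder hosts and credentials:

# stdlib-only alternative; every value below is a placeholder
from logging.handlers import SMTPHandler

mail_handler = SMTPHandler(
    mailhost=('smtp.example.com', 587),
    fromaddr='noreply@example.com',
    toaddrs=['admin@example.com'],
    subject='[Error] application errors',
    credentials=('user', 'password'),
    secure=())                                 # empty tuple switches on STARTTLS
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)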