Example #1
0
        def configure_from_relative_path(path):
            """Load a JSON config from ROOT/configs/<path> and configure logging.

            NOTE(review): the signature has no ``self`` yet the body uses it —
            presumably this is a nested function capturing ``self`` from an
            enclosing method; confirm against the surrounding code.
            """
            config_path = os.path.join(ROOT, 'configs', path)

            # Parse the JSON config and stash it on the instance.
            with open(config_path) as fd:
                self.__config__ = json.load(fd)

            # Log level name comes from the config, defaulting to INFO.
            logger.setLevel(getattr(logging, self.get('log_level', 'INFO')))

            formatter = logging.Formatter(
                '[L:%(lineno)d]# %(levelname)-8s [%(asctime)s]  %(message)s',
                datefmt='%d-%m-%Y %H:%M:%S'
            )

            # StreamHandler (left disabled: the string below is dead code)
            """
            sh = logging.StreamHandler()
            sh.setFormatter(formatter)
            logger.addHandler(sh)
            """

            # FileHandler: rotate the server log daily at midnight.
            fh = TimedRotatingFileHandler(
                os.path.join(ROOT, 'logs', 'server.log'),
                when="midnight"
            )
            fh.setFormatter(formatter)
            logger.addHandler(fh)
Example #2
0
def get_logger(logger_name, logger_module, has_formatter=True):
    """Create and return a logger writing to a weekly-rotated file.

    Args:
        logger_name: Name of the logger; also used as the log file name.
        logger_module: Module the logger belongs to — logs for different
            modules are split into different sub-directories. Falls back
            to an "unknown" sub-directory when empty.
        has_formatter: When True use the verbose record format (time,
            thread, level, file, line, function); otherwise log only the
            time and the message.
    """
    log_dir = "%s/unknown/" % (config.LOG_DIR)
    if logger_module:
        log_dir = "%s/%s/" % (config.LOG_DIR, logger_module)
    if not os.path.exists(log_dir):
        # makedirs (not mkdir) so missing parent directories are created too.
        os.makedirs(log_dir)

    log = logging.getLogger(logger_name)
    # 'W0' rotates the file weekly (every Monday).
    file_handler = TimedRotatingFileHandler("%s/%s.log" % (log_dir, logger_name), 'W0')
    if has_formatter:
        logger_format = '[%(asctime)s][%(thread)d][%(levelname)s][%(filename)s][line:%(lineno)d] [func:%(funcName)s] [msg:%(message)s]'
    else:
        # No detailed format: only print the time and the message.
        logger_format = '%(asctime)s\t%(message)s'
    file_handler.setFormatter(logging.Formatter(logger_format))

    log.addHandler(file_handler)
    return log
Example #3
0
 def _app_logging(self):
     """Configure file and/or syslog logging from the app config.

     Reads the optional ``file:logging`` and ``syslog:logging`` sections
     and attaches the corresponding handlers to ``self.log``. Timestamps
     are rendered in UTC.

     NOTE(review): the original mixes ``self._config`` and ``self.config``;
     both are kept here — confirm they refer to the same parser.
     """
     logfmt = logging.Formatter('%(asctime)sUTC - %(threadName)s - %(levelname)s - %(message)s')
     logfmt.converter = time.gmtime  # render %(asctime)s in UTC
     if 'file:logging' in self._config.sections():
         log_type = 'file:logging'
         app_log = self.config.get(log_type, 'file')
         app_retention = self.config.getint(log_type, 'retention')
         # Rotate daily, keeping `retention` old files.
         log_handler = TimedRotatingFileHandler(app_log, 'd', 1, app_retention)
         log_handler.setFormatter(logfmt)
         aap_level = self.config.get(log_type, 'level')
         log_handler.setLevel(aap_level)
         self.log.addHandler(log_handler)
         self.log.debug("file logger is up")
     if 'syslog:logging' in self._config.sections():
         log_type = 'syslog:logging'
         facility = self.config.get(log_type, 'syslog_facility')
         address_string = self.config.get(log_type, 'address')
         address_split = address_string.split(":")
         if len(address_split) == 2:
             # host:port — SysLogHandler expects (host, int(port)).
             # The original repeated the host in both positions, so the
             # configured port was never used (bug fixed here).
             address = (address_split[0], int(address_split[1]))
         else:
             # A bare value (no colon) is passed as-is, e.g. a unix
             # socket path such as /dev/log.
             address = address_split[0]
         log_handler = SysLogHandler(address=address, facility=facility)
         aap_level = self.config.get(log_type, 'level')
         log_handler.setLevel(aap_level)
         self.log.addHandler(log_handler)
         self.log.debug("syslog logger is up")
class RotatingLog(object):
    """Thin wrapper around a named logger whose single handler rotates
    the log file on a time schedule.
    """

    def __init__(self, logger_name):
        # Fetch (or create) the named logger.
        self._logger = logging.getLogger(logger_name)

        # Configure only once: skip everything when a handler is
        # already attached from a previous instantiation.
        if not self._logger.handlers:
            # Keep messages out of ancestor loggers.
            self._logger.propagate = False

            # A single level is used for all messages; DEBUG lets
            # everything through.
            self._logger.setLevel(logging.DEBUG)

            # Time-based rotation at ROTATE_TIME, keeping BACKUP_DAYS
            # old files, UTF-8 encoded.
            handler = TimedRotatingFileHandler(
                LOGFILE, when = ROTATE_TIME, backupCount = BACKUP_DAYS, encoding = u'UTF-8')
            handler.setLevel(logging.DEBUG)
            handler.setFormatter(logging.Formatter(LOGFORMAT, TIMEFORMAT))
            self._logger.addHandler(handler)
            self._handler = handler

    def write(self, message):
        """Log *message* at DEBUG level, skipping blank messages."""
        if message.strip() != u'':
            self._logger.debug(message)
Example #5
0
    def set_file_logger(self,
                        path='logs', filename='backend.log',
                        when="D", interval=1, backupCount=6):  # pragma: no cover
        """Attach a timed-rotating file handler to the application logger.

        :param path: directory for the log file (created if missing)
        :param filename: name of the log file
        :param when: rotation unit (see TimedRotatingFileHandler)
        :param interval: rotation interval in `when` units
        :param backupCount: number of rotated files to keep
        """
        # Create the log directory; tolerate it already existing but
        # re-raise any other OSError (e.g. permission denied).
        try:
            os.makedirs(path)
        except OSError:
            if not os.path.isdir(path):
                raise

        # One file per day by default, keeping 6 days of history.
        handler = TimedRotatingFileHandler(
            filename=os.path.join(path, filename),
            when=when, interval=interval,
            backupCount=backupCount
        )
        handler.setFormatter(
            logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

        self.log.addHandler(handler)
Example #6
0
def create_multiprocess_logger(logger_name, persist_logger_name, log_level, log_format, log_queue, log_file_path,
                               when_to_rotate, keep_log_days):
    """
    Creates queue logger and persist logger.

    Queue logger should be used to log into. It is Thread and Process safe.
    Persist logger is logger which persist data to disk. LogCollector moves data from queue log into persist log.

    :param logger_name: name of the queue logger (the one callers log into)
    :param persist_logger_name: name of the disk-persisting logger
    :param log_level: level applied to both loggers
    :param log_format: format applied when records enter the queue
    :param log_queue: queue shared between the two loggers
    :param log_file_path: path of the rotated log file
    :param when_to_rotate: TimedRotatingFileHandler `when` value
    :param keep_log_days: number of rotated files to keep
    """

    queue_log_formatter = logging.Formatter(log_format)
    queue_log_handler = QueueHandler(log_queue, persist_logger_name)
    queue_log_handler.setFormatter(queue_log_formatter)
    queue_logger = logging.getLogger(logger_name)
    queue_logger.setLevel(log_level)
    queue_logger.handlers = []
    queue_logger.addHandler(queue_log_handler)
    queue_logger.propagate = False

    # Records are already formatted on the queue side, so the persist
    # side must emit the message untouched. The original mistakenly
    # attached queue_log_formatter here (persist_log_formatter was
    # created but never used), formatting every record twice.
    persist_log_formatter = logging.Formatter('%(message)s')
    persist_log_handler = TimedRotatingFileHandler(log_file_path, when=when_to_rotate, interval=1, backupCount=keep_log_days)
    persist_log_handler.setFormatter(persist_log_formatter)
    persist_logger = logging.getLogger(persist_logger_name)
    persist_logger.setLevel(log_level)
    persist_logger.handlers = []
    persist_logger.addHandler(persist_log_handler)
    persist_logger.propagate = False
Example #7
0
def setup():
    """Configure the startstop logger.

    Attaches a midnight-rotating file handler (DEBUG, 14 backups) and a
    console handler (INFO, plain messages) to the module-level `logger`,
    with all timestamps rendered in UTC.
    """
    logger.setLevel(level=logging.DEBUG)

    # Ensure the log directory exists before opening the file.
    log_dirname = '../../persistent/logs/startstop/'
    if not os.access(log_dirname, os.F_OK):
        os.makedirs(log_dirname)
    log_filename = os.path.join(log_dirname, 'startstop')

    # Drop whatever handlers were configured before.
    logger.handlers = []

    # All timestamps in UTC.
    logging.Formatter.converter = time.gmtime

    # File handler: rotates at midnight, keeps two weeks of backups.
    file_handler = TimedRotatingFileHandler(log_filename, when='midnight',
                                            backupCount=14, utc=True)
    file_handler.setLevel(level=logging.DEBUG)
    file_handler.setFormatter(logging.Formatter('%(asctime)s %(name)s.%(funcName)s.'
                                                '%(levelname)s: %(message)s',
                                                '%Y-%m-%d %H:%M:%S'))
    logger.addHandler(file_handler)

    # Console handler: plain messages, INFO and above only.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(level=logging.INFO)
    console_handler.setFormatter(logging.Formatter('%(message)s'))
    logger.addHandler(console_handler)
Example #8
0
	def __init__(self):
		"""Initialise document state, crawl state and logging."""
		# Document state
		self.is_html = False
		self.charset = 'utf-8'
		self.body    = ''
		self.header  = {}

		# Crawl state
		self.url     = ''      # current URL
		self.load_ok = False   # load status
		self.depth   = 0       # current depth
		self.count   = 0       # visit count so far
		self.elapsed = 0.0     # time spent loading the URL
		self.cached  = False   # whether the cache was used
		self.cache_url    = '' # URL cache file
		self.cache_body   = '' # body cache file
		self.cache_header = '' # header cache file
		self.visited_urls = [] # paths already visited

		# Logging. The original set DEBUG and then immediately INFO on the
		# same logger; only the final INFO ever took effect, so set it once.
		self.logger.setLevel(logging.INFO)
		#formatter = Formatter("[%(asctime)s] %(levelname)s - %(name)s: %(message)s")
		formatter = Formatter("%(levelname)s: %(message)s")

		# File output (TimedRotatingFileHandler's default `when` is hourly).
		handler = TimedRotatingFileHandler(self.CACHE_ROOT+'/crawler.log')
		handler.setFormatter(formatter)
		self.logger.addHandler(handler)
		
		# Console output as well
		handler = StreamHandler()
		handler.setFormatter(formatter)
		self.logger.addHandler(handler)
Example #9
0
def setup_logger(config):
    """Configure the module-level LOGGER from *config*.

    Attaches a timed-rotating file handler and a stdout console handler,
    both formatted with config.LOG_FORMAT_STR, and applies the
    per-handler and overall log levels. Creates LOG_DIR when missing.
    """
    from logging.handlers import TimedRotatingFileHandler
    global LOGGER

    # Log file rotation scheduling
    when, interval, backupCount = config.LOG_ROTATION_TIME, \
        config.LOG_ROTATION_INTERVAL, config.LOG_BACKUP_COUNT

    # Defensive assertions (NOTE: stripped when running under `python -O`)
    assert when.lower() in ('s', 'm', 'h', 'd', 'midnight',
                            'w0', 'w1', 'w2', 'w3', 'w4', 'w5', 'w6',)
    assert interval > 0
    assert backupCount > 0

    # makedirs (not mkdir) so a nested LOG_DIR is created in full.
    if not os.path.exists(config.LOG_DIR):
        os.makedirs(config.LOG_DIR)
    log_file_path = os.path.join(config.LOG_DIR, config.LOG_FILENAME)

    formatter = logging.Formatter(config.LOG_FORMAT_STR)

    file_handler = TimedRotatingFileHandler(
        log_file_path,
        when=when,
        interval=interval,
        backupCount=backupCount)
    file_handler.setLevel(config.FILE_LOG_LEVEL)
    file_handler.setFormatter(formatter)

    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(config.CONSOLE_LOG_LEVEL)
    console_handler.setFormatter(formatter)

    LOGGER.addHandler(file_handler)
    LOGGER.addHandler(console_handler)
    LOGGER.setLevel(config.LOG_LEVEL)
Example #10
0
def set_logger_params(app):
    """Bind the app's logger to this module and attach a daily-rotating
    file handler configured from the app's LOG_FILE / LOG_FORMAT settings.
    """
    global _logger
    _logger = app.logger
    file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='D', interval=1)
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(logging.Formatter(app.config['LOG_FORMAT']))
    _logger.addHandler(file_handler)
Example #11
0
    def __init__(self, config, SMPPClientFactory, amqpBroker, redisClient, RouterPB=None, interceptorpb_client=None):
        # Keeps references to the client factory, AMQP broker, redis and
        # (optionally) router/interceptor PBs, initialises submit_sm
        # related state and wires up a dedicated rotating log.
        self.config = config
        self.SMPPClientFactory = SMPPClientFactory
        self.SMPPOperationFactory = SMPPOperationFactory(self.SMPPClientFactory.config)
        self.amqpBroker = amqpBroker
        self.redisClient = redisClient
        self.RouterPB = RouterPB
        self.interceptorpb_client = interceptorpb_client
        self.submit_sm_q = None
        self.qos_last_submit_sm_at = None
        self.rejectTimers = {}
        self.submit_retrials = {}
        self.qosTimer = None

        # Set pickleProtocol from the PB config file
        self.pickleProtocol = SMPPClientPBConfig(self.config.config_file).pickle_protocol

        # Set up a dedicated logger; (re)configure it only when it does
        # not already hold exactly one handler, to avoid duplicates.
        self.log = logging.getLogger(LOG_CATEGORY)
        if len(self.log.handlers) != 1:
            self.log.setLevel(self.config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file,
                                               when=self.config.log_rotate)
            formatter = logging.Formatter(self.config.log_format, self.config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            # Keep records out of ancestor loggers.
            self.log.propagate = False
Example #12
0
  def get_file_handler(self, name):
    """Create and configure logging file handler.

    :param name: Section name used to find the path to the log file. If no
      `log` option exists in this section, the path will default to
      `<name>.log`.

    The default path can be configured via the `default.log` option in the
    `hdfs` section.

    Returns a daily-rotating UTF-8 file handler, or ``None`` (implicitly)
    when the log file cannot be opened — in that case only a warning is
    emitted.

    """
    try:
      handler_path = self.parser.get(name, 'log')
    except (NoOptionError, NoSectionError):
      # No configured path: fall back to <tmpdir>/<name>.log.
      handler_path = osp.join(gettempdir(), '%s.log' % (name, ))
    try:
      handler = TimedRotatingFileHandler(
        handler_path,
        when='midnight', # daily backups
        backupCount=1,
        encoding='utf-8',
      )
    except IOError:
      # Unwritable path: warn and fall through, returning None.
      wr.warn('Unable to write to log file at %s.' % (handler_path, ))
    else:
      handler_format = (
        '%(asctime)s | %(levelname)4.4s | %(name)s > %(message)s'
      )
      handler.setFormatter(lg.Formatter(handler_format))
      return handler
Example #13
0
def setup_logging(debugging=False):
    """Configure and return the root logger.

    - crash log: second-rotated, ERROR and above, in CRASH_DIR
    - query log: midnight-rotated, INFO and above, filtered by
      query_filter, in QUERY_DIR
    - when `debugging` is set, add a verbose console handler and drop
      the root level to DEBUG (otherwise INFO)
    """
    os.makedirs(CRASH_DIR, exist_ok=True)
    crash_handler = logging.handlers.TimedRotatingFileHandler(
        filename=os.path.join(CRASH_DIR, 'crash'), when='s', interval=1, delay=True)
    crash_handler.setLevel(logging.ERROR)

    os.makedirs(QUERY_DIR, exist_ok=True)
    query_handler = TimedRotatingFileHandler(
        filename=os.path.join(QUERY_DIR, 'queries'), when='midnight')
    query_handler.setLevel(logging.INFO)
    query_handler.addFilter(query_filter)
    query_handler.setFormatter(logging.Formatter("%(asctime)s: %(filename)s: %(message)s"))

    root_logger = logging.getLogger()
    root_logger.addHandler(crash_handler)
    root_logger.addHandler(query_handler)
    if debugging:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.DEBUG)
        console_handler.setFormatter(logging.Formatter("%(levelname)s:%(name)s [%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"))
        root_logger.addHandler(console_handler)
        root_logger.setLevel(logging.DEBUG)
    else:
        root_logger.setLevel(logging.INFO)

    return root_logger
Example #14
0
    def __init__(self, envir):
        """Set up the sensor logger and register event handlers.

        NOTE(review): the original built `handlers` by *calling*
        self.info(...) etc. immediately, so every event message was
        logged once at construction time and `listen()` received the
        calls' return value (None) instead of a callable. Deferred
        callables via functools.partial are almost certainly what was
        intended — confirm against `listen()`'s contract.
        """
        from functools import partial

        super(Logger, self).__init__(envir)
        logger = logging.getLogger()
        handler = TimedRotatingFileHandler("log/sensor.log", "midnight", 1, 10)
        formatter = logging.Formatter('\n%(levelname)-7s | %(asctime)-23s | === %(message)s ===')
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.NOTSET)
        logger.info("begin")

        # Switch to the normal single-line record format after "begin".
        formatter = logging.Formatter('%(levelname)-7s | %(asctime)-23s | %(message)s')
        handler.setFormatter(formatter)
        self._logger = logger

        handlers = {
            "start": partial(self.info, "start"),
            "post": partial(self.info, "post"),
            "register": partial(self.info, "register"),
            "start post": partial(self.info, "start post"),
            "call register": partial(self.info, "call register"),
            "set period": partial(self.info, "set period"),
            "cancel category": partial(self.info, "cancel category"),
            "display": partial(self.info, "display"),
            "init info": partial(self.info, "init"),
            "exit": self.exit,
            "warning": partial(self.warning, "warning"),
            "error": partial(self.error, "error"),
        }
        # .items() works on both Python 2 and 3 (the original .iteritems()
        # is Python-2 only).
        for event, event_handler in handlers.items():
            self.listen(event, event_handler)
        self.state = "running"
        self.trace_IO()
Example #15
0
  def get_log_handler(self, command):
    """Configure and return the log handler for *command*.

    Settings are read from the `[<command>.command]` section:
    `log.disable` -> return a NullHandler; `log.path` -> override the
    default temp-dir path; `log.level` -> override the DEBUG default.
    Otherwise a daily-rotating UTF-8 file handler is returned.

    """
    section = '%s.command' % (command, )
    # Defaults: <tmpdir>/<command>.log at DEBUG level.
    path = osp.join(gettempdir(), '%s.log' % (command, ))
    level = lg.DEBUG
    if self.has_section(section):
      if self.has_option(section, 'log.disable') and self.getboolean(section, 'log.disable'):
        # Logging explicitly disabled for this command.
        return NullHandler()
      if self.has_option(section, 'log.path'):
        path = self.get(section, 'log.path') # Override default path.
      if self.has_option(section, 'log.level'):
        level = getattr(lg, self.get(section, 'log.level').upper())
    handler = TimedRotatingFileHandler(
      path,
      when='midnight', # Daily backups.
      backupCount=1,
      encoding='utf-8',
    )
    handler.setFormatter(lg.Formatter('%(asctime)s\t%(name)-16s\t%(levelname)-5s\t%(message)s'))
    handler.setLevel(level)
    return handler
Example #16
0
def configure_loggers(min_level, log_file, log_format=None):
    """Configure root logging: a console handler (via basicConfig) plus a
    midnight-rotating file handler.

    :param min_level: minimum level applied to handlers and loggers
    :param log_file: path of the rotated log file
    :param log_format: console format override; when None, a colored
        format is used unless NO_COLORS is set in the environment
    """
    DEFAULT_LOG_FORMAT = '%(asctime)s(%(name)s)[%(process)d--%(threadName)s]::'\
        '%(levelname)s - %(funcName)s(%(message)s)'
    blue_pref = '\x1b[' + BLUE
    red_pref = '\x1b[' + RED
    green_pref = '\x1b[' + GREEN
    yellow_pref = '\x1b[' + YELLOW
    suffix = '\x1b[0m'
    COLOR_LOG_FORMAT = '%(asctime)s(' + \
        blue_pref + '%(name)s' + suffix + \
        ')[%(process)d--%(threadName)s]::' + \
        red_pref + '%(levelname)s ' + suffix + '- ' + \
        green_pref + '%(funcName)s' + suffix + \
        yellow_pref + '(%(message)s)' + suffix

    # Guard the directory component: for a bare filename
    # os.path.dirname() returns '' and makedirs('') would raise even
    # though no directory needs creating (bug in the original).
    log_dir = os.path.dirname(log_file)
    if log_dir and not os.path.exists(log_dir):
        os.makedirs(log_dir)

    if log_format:
        _format = log_format
    else:
        if "NO_COLORS" in os.environ:
            _format = DEFAULT_LOG_FORMAT
        else:
            _format = COLOR_LOG_FORMAT
    logging.basicConfig(level=min_level, format=_format)

    # The file always gets the plain (uncolored) format.
    formatter = logging.Formatter(DEFAULT_LOG_FORMAT)
    file_handler = TimedRotatingFileHandler(log_file, when='midnight')
    file_handler.setLevel(min_level)
    file_handler.setFormatter(formatter)

    logging.getLogger('').addHandler(file_handler)
    logging.getLogger('').setLevel(min_level)
Example #17
0
def start():
    """Parse command-line args, initialise bots, configure logging and
    run the cherrypy web-server (serving django plus static files),
    optionally over SSL.

    Python 2 code (print statements); blocks until the server stops.
    """
    #NOTE bots is always on PYTHONPATH!!! - otherwise it will not start.
    #***command line arguments**************************
    configdir = 'config'
    for arg in sys.argv[1:]:
        if not arg:
            continue
        if arg.startswith('-c'):
            # -c<dir>: configuration directory must directly follow the flag.
            configdir = arg[2:]
            if not configdir:
                print 'Configuration directory indicated, but no directory name.'
                sys.exit(1)
        elif arg in ["?", "/?"] or arg.startswith('-'):
            showusage()
        else:
            showusage()
    
    #***init general: find locating of bots, configfiles, init paths etc.***********************
    botsinit.generalinit(configdir)

    #***initialise logging. This logging only contains the logging from bots-webserver, not from cherrypy.
    botsglobal.logger = logging.getLogger('bots-webserver')
    botsglobal.logger.setLevel(logging.DEBUG)
    h = TimedRotatingFileHandler(botslib.join(botsglobal.ini.get('directories','logging'),'webserver.log'), backupCount=10)
    fileformat = logging.Formatter("%(asctime)s %(levelname)-8s: %(message)s",'%Y%m%d %H:%M:%S')
    h.setFormatter(fileformat)
    botsglobal.logger.addHandler(h)
    
    #***init cherrypy as webserver*********************************************
    #global configuration for cherrypy
    cherrypy.config.update({'global': {'log.screen': False, 'server.environment': botsglobal.ini.get('webserver','environment','production')}})
    #cherrypy handling of static files
    conf = {'/': {'tools.staticdir.on' : True,'tools.staticdir.dir' : 'media' ,'tools.staticdir.root': botsglobal.ini.get('directories','botspath')}}
    servestaticfiles = cherrypy.tree.mount(None, '/media', conf)    #None: no cherrypy application (as this only serves static files)
    #cherrypy handling of django
    servedjango = WSGIHandler()     #was: servedjango = AdminMediaHandler(WSGIHandler())  but django does not need the AdminMediaHandler in this setup. is much faster.
    #cherrypy uses a dispatcher in order to handle the serving of static files and django.
    dispatcher = wsgiserver.WSGIPathInfoDispatcher({'/': servedjango, '/media': servestaticfiles})
    botswebserver = wsgiserver.CherryPyWSGIServer(bind_addr=('0.0.0.0', botsglobal.ini.getint('webserver','port',8080)), wsgi_app=dispatcher, server_name=botsglobal.ini.get('webserver','name','bots-webserver'))
    botsglobal.logger.info(_(u'Bots web-server started.'))
    #handle ssl: cherrypy < 3.2 always uses pyOpenssl. cherrypy >= 3.2 uses python buildin ssl (python >= 2.6 has buildin support for ssl).
    ssl_certificate = botsglobal.ini.get('webserver','ssl_certificate',None)
    ssl_private_key = botsglobal.ini.get('webserver','ssl_private_key',None)
    if ssl_certificate and ssl_private_key:
        if cherrypy.__version__ >= '3.2.0':
            adapter_class = wsgiserver.get_ssl_adapter_class('builtin')
            botswebserver.ssl_adapter = adapter_class(ssl_certificate,ssl_private_key)
        else:
            #but: pyOpenssl should be there!
            botswebserver.ssl_certificate = ssl_certificate
            botswebserver.ssl_private_key = ssl_private_key
        botsglobal.logger.info(_(u'Bots web-server uses ssl (https).'))
    else:
        botsglobal.logger.info(_(u'Bots web-server uses plain http (no ssl).'))
    
    #***start the cherrypy webserver: blocks until interrupted.
    try:
        botswebserver.start()
    except KeyboardInterrupt:
        botswebserver.stop()
Example #18
0
    def __init__(self, config, msgHandler = None):
        """SMPP client service setup: connection state, stats collector,
        a dedicated rotating log and the message-handler wiring.
        """
        self.reconnectTimer = None
        self.smpp = None
        self.connectionRetry = True
        self.config = config

        # Setup statistics collector keyed by this client's id
        self.stats = SMPPClientStatsCollector().get(cid = self.config.id)
        self.stats.set('created_at', datetime.now())

        # Set up a dedicated logger; configure it only when it does not
        # already hold exactly one handler, to avoid duplicates.
        self.log = logging.getLogger(LOG_CATEGORY_CLIENT_BASE+".%s" % config.id)
        if len(self.log.handlers) != 1:
            self.log.setLevel(config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file, 
                when = self.config.log_rotate)
            formatter = logging.Formatter(config.log_format, config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            # Keep records out of ancestor loggers.
            self.log.propagate = False

        # Fall back to the stub when no message handler is provided.
        if msgHandler is None:
            self.msgHandler = self.msgHandlerStub
        else:
            self.msgHandler = msgHandler
Example #19
0
	def __init__(self, dbName, uri, dataPath, logPath):

		"""
		Initialization of a database.

		Args:
			dbName: Name of the database.
			uri: Database connection uri.
			dataPath: Path to where auxiliary file will be created.
			logPath: Path to where log files will be created.
		"""
		self.uri = uri
		self.dbName = dbName

		# os.path.join works with or without a trailing slash; the
		# original plain concatenation silently produced e.g.
		# "logsmongodb.log" when logPath had no trailing separator.
		logFile = os.path.join(logPath, 'mongodb.log')
		handler = TimedRotatingFileHandler(logFile, when="midnight", backupCount=6)
		handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s : %(message)s'))

		logger.setLevel(logging.INFO)
		logger.addHandler(handler)

		try:
			self.client=pymongo.MongoClient(uri)
		except Exception as e:
			print ("Could not connect to MongoDB: %s" % e)

		# Auxiliary JSON file; create an empty list file on first run.
		self.jsonFile = os.path.join(dataPath, 'dataToUpdate.json')
		if not os.path.exists(self.jsonFile):
			with open(self.jsonFile, 'w+') as f:
				f.write('[]')
		self.existsDataToDump = self.dumpJson()
Example #20
0
    def create_logger(self):
        """Build a logger writing to both a daily-rotated file and the
        console; DEBUG records appear only on the console (the file
        handler filters at INFO).

        :return: configured logging.Logger named after self.config.Name
        """
        log_file = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                "logs", "{}.log".format(date.today()))
        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s[line:%(lineno)d] - %(message)s',
                                      datefmt='%Y-%m-%d %H:%M:%S')

        # File side: rotate daily, keep every backup (backupCount=0).
        file_handler = TimedRotatingFileHandler(log_file, 'D', 1, 0)
        file_handler.suffix = "{}-%Y-%m-%d.log".format(self.config.Name)
        file_handler.setFormatter(formatter)
        file_handler.setLevel(logging.INFO)

        # Console side: everything from DEBUG up, same format.
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.DEBUG)
        console_handler.setFormatter(formatter)

        logger = logging.getLogger(self.config.Name)
        logger.addHandler(file_handler)
        logger.addHandler(console_handler)
        logger.setLevel(logging.INFO)

        return logger
Example #21
0
    def __init__(self, config, auth_portal, RouterPB = None, SMPPClientManagerPB = None):
        """SMPP server factory setup: connection registry, auth portal,
        stats collector, dedicated rotating log and the submit_sm event
        handler.
        """
        self.config = config
        # A dict of protocol instances for each of the current connections,
        # indexed by system_id 
        self.bound_connections = {}
        self._auth_portal = auth_portal
        self.RouterPB = RouterPB
        self.SMPPClientManagerPB = SMPPClientManagerPB

        # Setup statistics collector keyed by this server's id
        self.stats = SMPPServerStatsCollector().get(cid = self.config.id)
        self.stats.set('created_at', datetime.now())

        # Set up a dedicated logger; configure it only when it does not
        # already hold exactly one handler, to avoid duplicates.
        self.log = logging.getLogger(LOG_CATEGORY_SERVER_BASE+".%s" % config.id)
        if len(self.log.handlers) != 1:
            self.log.setLevel(config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file, 
                when = self.config.log_rotate)
            formatter = logging.Formatter(config.log_format, config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            # Keep records out of ancestor loggers.
            self.log.propagate = False

        # Incoming submit_sm PDUs are dispatched to this handler.
        self.msgHandler = self.submit_sm_event
Example #22
0
def setup_logging(app, logfile, debug=False):
    """Create the application logger.

    Logs to a daily-rotated file (7 backups) and, when /dev/log exists
    and is a socket, also to syslog. `debug` selects a verbose format
    and DEBUG level; otherwise INFO.

    Returns the `_log` wrapper around the configured logger.
    """
    file_handler = TimedRotatingFileHandler(logfile,
                                            when='D',
                                            interval=1,
                                            backupCount=7)
    if debug:
        level = logging.DEBUG
        file_handler.setFormatter(logging.Formatter(
            "%(name)s: [%(levelname)s] %(filename)s:%(lineno)d - %(message)s"))
    else:
        level = logging.INFO
        file_handler.setFormatter(logging.Formatter(
            "%(name)s: [%(levelname)s] %(message)s"))

    file_handler.setLevel(level)

    logger = logging.getLogger(app)
    logger.setLevel(level)
    logger.addHandler(file_handler)

    # Mirror to syslog when /dev/log is a real socket.
    if os.path.exists('/dev/log'):
        if stat.S_ISSOCK(os.stat('/dev/log').st_mode):
            syslog_handler = SysLogHandler(address='/dev/log')
            syslog_handler.set_name(app)
            logger.addHandler(syslog_handler)

    return _log(app, logger)
Example #23
0
def configure_logging(app_mode, app):
    """Attach a mode-appropriate handler to the root logger and to
    `app.logger`, both at DEBUG level.

    DEBUG mode logs to the console; PROD mode logs to a daily-rotated
    file (APP_LOG_FILENAME env var, default 'app.log', 5 backups).
    Any other mode leaves logging untouched.
    """
    if app_mode == 'DEBUG':
        # Console handler for development.
        handler = logging.StreamHandler()
    elif app_mode == 'PROD':
        # Daily-rotating file handler for production.
        handler = TimedRotatingFileHandler(
            filename=os.environ.get('APP_LOG_FILENAME', 'app.log'),
            when='D',
            backupCount=5,
            encoding='UTF-8'
        )
    else:
        # Unknown mode: configure nothing.
        return
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter(
        fmt='%(asctime)s %(name)-10s %(levelname)-7s %(message)s',
        datefmt='%H:%M:%S'))
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(logging.DEBUG)
    app.logger.addHandler(handler)
    app.logger.setLevel(logging.DEBUG)
    return
Example #24
0
 def __init__(self, config, SMPPClientManagerPB, RouterPB, loadConfigProfileWithCreds=None):
     """jCli factory setup: PB references, session tracking, a dedicated
     'jcli' rotating log and the telnet protocol factory.

     :param loadConfigProfileWithCreds: credentials used to load a
         configuration profile on startup. Defaults to the historical
         {'username', 'password'} value; the original declared that set
         directly as the default argument — a mutable default shared
         across every call — which this avoids.
     """
     if loadConfigProfileWithCreds is None:
         # Recreate the historical default per-instance instead of
         # sharing one mutable object between all calls.
         loadConfigProfileWithCreds = {'username', 'password'}

     self.config = config
     self.pb = {'smppcm': SMPPClientManagerPB, 'router': RouterPB}
     # Protocol sessions are kept here:
     self.sessions = {}
     self.sessionRef = 0
     self.sessionsOnline = 0
     # When defined, configuration profile will be loaded on startup
     self.loadConfigProfileWithCreds = loadConfigProfileWithCreds

     # Set up and configure a dedicated logger; configure only when it
     # does not already hold exactly one handler (avoids duplicates).
     self.log = logging.getLogger('jcli')
     if len(self.log.handlers) != 1:
         self.log.setLevel(config.log_level)
         handler = TimedRotatingFileHandler(filename=self.config.log_file,
             when = self.config.log_rotate)
         formatter = logging.Formatter(config.log_format, config.log_date_format)
         handler.setFormatter(formatter)
         self.log.addHandler(handler)

     # Init protocol
     self.protocol = lambda: JCliTelnetTransport(TelnetBootstrapProtocol,
                                                 insults.ServerProtocol,
                                                 JCliProtocol)
Example #25
0
    def __init__(self, config):
        """Thrower setup: callback/errback defaults, smpps placeholders
        and a dedicated rotating log.
        """
        self.config = config

        # Check if callbacks are defined in child class ?
        if self.callback is None:
            self.callback = self.throwing_callback
        if self.errback is None:
            self.errback = self.throwing_errback

        # For these values to None since they must be defined through .addSmpps()
        self.smpps = None
        self.smpps_access = None

        # Set up a dedicated logger; configure it only when it does not
        # already hold exactly one handler, to avoid duplicates.
        self.log = logging.getLogger(self.log_category)
        if len(self.log.handlers) != 1:
            self.log.setLevel(self.config.log_level)
            handler = TimedRotatingFileHandler(filename=self.config.log_file,
                                               when=self.config.log_rotate)
            formatter = logging.Formatter(self.config.log_format, self.config.log_date_format)
            handler.setFormatter(formatter)
            self.log.addHandler(handler)
            # Keep records out of ancestor loggers.
            self.log.propagate = False

        self.log.info('Thrower configured and ready.')
Example #26
0
    def _configLogs(self, sFile):
        """Configure logging for this object.

        A StreamHandler prints to stdout unless running in daemon mode,
        and a TimedRotatingFileHandler rotates the log file daily.

        @param sFile: Log file name
        @type sFile: string
        @todo: Set log level from config or user input
        """
        config = CNBConfig.getInstance()

        # Console output only when not daemonised.
        if not config.get('global', 'daemon'):
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(logging.Formatter(config.get('global', 'log-format')))
            self.log.addHandler(console_handler)

        # Always log to a daily-rotated file, keeping every backup
        # (backupCount=0 means no old file is deleted).
        file_handler = TimedRotatingFileHandler(
            os.path.join(config.get('global', 'log-dir'), sFile),
            backupCount=0,
            when='d',
            interval=1)
        file_handler.setFormatter(logging.Formatter(config.get('global', 'log-format')))
        self.log.addHandler(file_handler)

        # Must stay at DEBUG to log errors
        self.log.setLevel(logging.DEBUG)
def run():
    """
    Run the TorSpider web server.

    Creates the log directory, wires a midnight-rotating file handler into
    both the werkzeug request logger and the Flask app logger, then starts
    the app on the configured address/port (with TLS when USETLS is set).
    """
    # Set up the logger.
    log_dir = os.path.join(script_dir, 'logs')
    # exist_ok avoids the check-then-create race the old isdir() test had.
    os.makedirs(log_dir, exist_ok=True)

    # Format the logs.
    formatter = logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    # Enable the logs to split files at midnight.
    handler = TimedRotatingFileHandler(
            os.path.join(log_dir, 'TorSpider.log'),
            when='midnight', backupCount=7, interval=1)
    handler.setLevel(app.config['LOG_LEVEL'])
    handler.setFormatter(formatter)

    # The request logger and the app logger share the same file handler,
    # but the app logger gets its own level (APP_LOG_LEVEL).
    log = logging.getLogger('werkzeug')
    log.setLevel(app.config['LOG_LEVEL'])
    log.addHandler(handler)
    app.logger.addHandler(handler)
    app.logger.setLevel(app.config['APP_LOG_LEVEL'])

    # Set up the app server, port, and configuration.
    port = int(environ.get('PORT', app.config['LISTEN_PORT']))
    addr = environ.get('LISTEN_ADDR', app.config['LISTEN_ADDR'])
    if app.config['USETLS']:
        context = (app.config['CERT_FILE'], app.config['CERT_KEY_FILE'])
        app.run(host=addr, port=port, threaded=True, ssl_context=context)
    else:
        app.run(host=addr, port=port, threaded=True)
Example #28
0
    def __init__(self, RouterPB, SMPPClientManagerPB, config, interceptor=None):
        """Build the HTTP API root resource and wire its child endpoints."""
        Resource.__init__(self)

        # API-level metrics collector; record creation time.
        stats = HttpAPIStatsCollector().get()
        stats.set('created_at', datetime.now())

        # Dedicated logger: configure a rotating file handler only once per
        # process (the logger is shared by name).
        log = logging.getLogger(LOG_CATEGORY)
        if len(log.handlers) != 1:
            log.setLevel(config.log_level)
            rotating = TimedRotatingFileHandler(filename=config.log_file,
                                                when=config.log_rotate)
            rotating.setFormatter(logging.Formatter(config.log_format,
                                                    config.log_date_format))
            log.addHandler(rotating)
            log.propagate = False

        # Child URL routes.
        log.debug("Setting http url routing for /send")
        self.putChild('send', Send(config, RouterPB, SMPPClientManagerPB, stats, log, interceptor))
        log.debug("Setting http url routing for /rate")
        self.putChild('rate', Rate(config, RouterPB, stats, log, interceptor))
        log.debug("Setting http url routing for /balance")
        self.putChild('balance', Balance(RouterPB, stats, log))
        log.debug("Setting http url routing for /ping")
        self.putChild('ping', Ping(log))
Example #29
0
def create_timed_file_handler(level, format, ttl, filename, path):
	'''
	Creates a TimedRotatingFileHandler for the logging module
	that outputs log records to a file. This file will roll over
	given the ttl (time to live) which will create a new log file
	and back up the existing one.

	@param level The logging level of the file handler
	@param format The format of the file output for each LogRecord
	(note: parameter name shadows the builtin; kept for compatibility)
	@param ttl The rollover interval, passed as the handler's "when"
	value (e.g. 'midnight', 'D')
	@param filename The log file name (e.g. system.log)
	@param path The directory that will contain the log file

	@return A new TimedRotatingFileHandler
	'''
	# Create the whole directory tree in one call. The previous loop split
	# the path and created each component in the current working directory,
	# so nested directories were never actually created and absolute paths
	# failed outright.
	os.makedirs(path, exist_ok=True)

	# Configure the TimedRotatingFileHandler
	file_handler = TimedRotatingFileHandler(path + '/' + filename, ttl)
	file_handler.setLevel(level)
	file_handler.setFormatter(logging.Formatter(format))
	return file_handler
Example #30
0
import logging
from logging.handlers import TimedRotatingFileHandler
from datetime import time

# One formatter shared by all three business loggers.
formatter = logging.Formatter(
    '%(asctime)s | %(levelname)s | %(filename)s | %(module)s | %(lineno)d | %(funcName)s | %(message)s'
)


def _midnight_handler(path):
    """Return a handler that rolls *path* at midnight, keeping 7 backups."""
    return TimedRotatingFileHandler(filename=path,
                                    when='MIDNIGHT',
                                    backupCount=7,
                                    atTime=time(0, 0, 0, 0))


# Payment log.
paylogger = logging.getLogger("paylog")
payhandler = _midnight_handler('logs/paylog.log')
payhandler.setFormatter(formatter)
paylogger.addHandler(payhandler)
paylogger.setLevel('INFO')

# Bank log.
banklogger = logging.getLogger("banklog")
bankhandler = _midnight_handler('logs/banklog.log')
bankhandler.setFormatter(formatter)
banklogger.addHandler(bankhandler)
banklogger.setLevel('INFO')

# Agent payment log.
agentpaylogger = logging.getLogger("agentpaylog")
agentpayhandler = _midnight_handler('logs/agentpaylog.log')
Example #31
0
# coding=utf-8
Example #32
0
    os.environ['GID'] = "%s" % gid


# Module state for the go-to-bed watcher.
cnt = 0
url = "http://localhost/go-to-bed/"
pp = pprint.PrettyPrinter(indent=4)
active_crons = {}
testing = False

# Midnight-rotating log in the user's home directory, 20 backups kept.
logger = logging.getLogger("go-to-bed")
logger.setLevel(logging.INFO)
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(message)s")
handler = TimedRotatingFileHandler(os.path.expanduser("~/.go-to-bed.log"),
                                   when="midnight",
                                   backupCount=20)
handler.setFormatter(formatter)
logger.addHandler(handler)

# Raw strings: sequences such as '\:' and '\=' are invalid escapes in
# normal string literals (SyntaxWarning on Python 3.12+); the raw forms
# below compile to byte-identical regex patterns.
session_re = re.compile(r"(Session[0-9]+)\:")
var_val_re = re.compile(r"\t(.*)\s\=\s\'(.*)\'")
var_val_bool_re = re.compile(r"\t(.*)\s\=\s(TRUE|FALSE)")
ps_re = re.compile(r"(\d+)\s(.*?)\s+(.*)")

# --test [value]: enable test mode, optionally with an argument value.
if "--test" in sys.argv:
    testing = True
    idx = sys.argv.index("--test")
    if len(sys.argv) > idx + 1:
        testing = sys.argv[idx + 1]
    logger.info("testing:%s", testing)

if "--url" in sys.argv:
    _url = ""
Example #33
0
# Setup logger: console via basicConfig, plus a daily-rotating file.
# NOTE(review): basicConfig's `encoding` argument only applies when a
# `filename` is given; here it is silently ignored.
logging.basicConfig(
    encoding='utf-8',
    datefmt="%d-%m-%y %H:%M:%S",
    level=logging.INFO,
)
logger = logging.getLogger('moebot')

# File name carries the start date; the handler itself also rotates daily
# and keeps the 5 most recent files.
log_file = f'moebot-{strftime("%d-%m-%y")}.log'
handler = TimedRotatingFileHandler(
    filename=log_file,
    when="D",
    interval=1,
    backupCount=5,
    encoding='utf-8',
    delay=False)
handler.setFormatter(fmt=Formatter(
    "%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)s] %(message)s"))
logger.addHandler(handler)


async def main() -> None:
    """Main loop of the entire bot"""
    logger.info("Starting main loop...")

    # Avoid rate limiting
    retries = 0

    # Get valid image
    image_regex = "https?://\S+?/\S+?\.(?:jpg|jpeg|gif|png)"

    while True:
        # Limit to only 5 retires
Example #34
0
from model import Model, ModelConfig
from feature_extrator import FeatureExtrator
from test_data_collator import TestDataCollator

app = Flask(__name__)
app.config['SECRET_KEY'] = 'dev'

# When served by gunicorn (i.e. not run directly), route the app's logs
# through gunicorn's error logger with a 30-day midnight rotation.
if __name__ != '__main__':
    formatter = logging.Formatter(
        '[%(asctime)s] [%(process)d] [%(levelname)s] %(message)s')
    logHandler = TimedRotatingFileHandler('logs/prod_pipeline.log',
                                          when='midnight',
                                          interval=1,
                                          backupCount=30)
    logHandler.setFormatter(formatter)
    gunicorn_logger = logging.getLogger('gunicorn.error')
    gunicorn_logger.addHandler(logHandler)
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)

bootstrap = Bootstrap(app)
moment = Moment(app)


def error_response(status_code, message=None):
    """Build a JSON error response for *status_code*.

    The body always carries the standard reason phrase under 'error';
    an optional human-readable 'message' is added when provided.
    """
    body = {'error': HTTP_STATUS_CODES.get(status_code, 'Unknown error')}
    if message:
        body['message'] = message
    resp = jsonify(body)
    resp.status_code = status_code
    return resp
Example #35
0
from logging.handlers import TimedRotatingFileHandler

from scihub_eva.utils.path_utils import *

# Application-wide default logger, INFO level.
DEFAULT_LOGGER = logging.getLogger('default')
DEFAULT_LOGGER.setLevel(logging.INFO)

# Log file lives inside the application's logs directory.
DEFAULT_LOG_DIRECTORY = logs_dir()
DEFAULT_LOG_FILE = DEFAULT_LOG_DIRECTORY / 'SciHubEVA.log'

# Formatter is created first so the handler can be configured in one pass.
DEFAULT_LOG_FORMATTER = logging.Formatter(
    '%(asctime)s - %(levelname)s - %(message)s')

# Daily-rotating UTF-8 file handler.
DEFAULT_LOG_HANDLER = TimedRotatingFileHandler(
    DEFAULT_LOG_FILE.resolve().as_posix(), when='d', encoding='utf-8')
DEFAULT_LOG_HANDLER.setLevel(logging.INFO)
DEFAULT_LOG_HANDLER.setFormatter(DEFAULT_LOG_FORMATTER)

DEFAULT_LOGGER.addHandler(DEFAULT_LOG_HANDLER)

# Visual separator for log sections.
LOGGER_SEP = '-' * 100


def format_log_message(message):
    # Render *message* through the module's default formatter.
    # NOTE(review): logging.Formatter.format() expects a LogRecord, not a
    # plain string — despite the parameter name, callers presumably pass a
    # LogRecord here; confirm before passing a str.
    return DEFAULT_LOG_FORMATTER.format(message)


# Public API of this module.
# NOTE(review): format_log_message is deliberately(?) not exported here.
__all__ = [
    'DEFAULT_LOG_DIRECTORY', 'DEFAULT_LOG_FILE', 'DEFAULT_LOG_HANDLER',
    'DEFAULT_LOG_FORMATTER', 'DEFAULT_LOGGER', 'LOGGER_SEP'
]
Example #36
0
import logging
from logging.handlers import TimedRotatingFileHandler

# Daily-rotating log for the autodial script; the 20 most recent days are kept.
loglevel = logging.INFO
logfile = "./autodial.log"

logFormatter = logging.Formatter(
    '%(asctime)s %(filename)-12s[line:%(lineno)d] %(thread)d %(levelname)s %(message)s'
)
logHandler = TimedRotatingFileHandler(logfile,
                                      when='D',
                                      interval=1,
                                      backupCount=20)
logHandler.setFormatter(logFormatter)

# Attach to the root logger so every module's records land in the file.
logger = logging.getLogger('')
logger.setLevel(loglevel)
logger.addHandler(logHandler)
Example #37
0
import json
import urllib
import commands
import urllib2
from datetime import datetime
from logging.handlers import TimedRotatingFileHandler

# log: root logger writes to the agent's update.log, rotating daily and
# keeping 30 backups.
LOG_FILE = "/home/opvis/opvis_agent/agent_service/log/update.log"

datefmt = '%Y-%m-%d %H:%M:%S'
format_str = '%(asctime)s %(levelname)s %(message)s '
formatter = logging.Formatter(format_str, datefmt)

logger = logging.getLogger()
logger.setLevel(logging.INFO)
fh = TimedRotatingFileHandler(LOG_FILE, when='D', interval=1, backupCount=30)
fh.setFormatter(formatter)
logger.addHandler(fh)


def monitor():
    with open("/home/opvis/opvis_agent/agent_service/agent.lock", "r") as fd:
        jifangip = fd.read()
    url_base = "http://" + jifangip
    # 获取authorized——keys的md5值
    get_md5_url = url_base + "/hostPlugInOperation/getHostTrustRelationshipMD5"
    print(get_md5_url)
    # 上报返回结果
    upload_status_url = url_base + "/hostPlugInOperation/uploadOperationMD5Information"
    print(upload_status_url)
    headers = {"Content-Type": "application/json"}
    data = urllib.urlopen(get_md5_url).read()
Example #38
0
def get_file_handler(log_file):
    """Return a midnight-rotating handler for *log_file* using the module FORMATTER."""
    handler = TimedRotatingFileHandler(log_file, when='midnight')
    handler.setFormatter(FORMATTER)
    return handler
Example #39
0
def get_file_handler(logger_name):
    """Return a midnight-rotating handler that retains the last 7 log files."""
    handler = TimedRotatingFileHandler(logger_name, when="midnight", backupCount=7)
    handler.setFormatter(FORMATTER)
    return handler
from flask import Flask
import logging
from logging.handlers import TimedRotatingFileHandler

from flask import request

# Flask server with two rotating log files: one for the root logger
# (flask_server.log) and one intended for the 'myapp' logger (flask_app.log).
server_port = 5000
app = Flask(__name__)
formatter = logging.Formatter("%(asctime)s - %(levelname)-7s - %(message)s")
# Root-logger file: rotates at midnight; rotated files are suffixed %Y-%m-%d.
handler = TimedRotatingFileHandler('log/flask_server.log',
                                   when="midnight",
                                   interval=1,
                                   encoding='utf8')
handler.suffix = "%Y-%m-%d"
handler.setFormatter(formatter)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(handler)

mylogger = logging.getLogger('myapp')
# NOTE(review): the bracket nesting '[[in ...' in this format string looks
# unbalanced — presumably intentional markup; confirm.
my_formatter = logging.Formatter(
    "[%(asctime)s][%(name)s][%(process)d][%(thread)d][%(message)s][[in %(pathname)s:%(lineno)d]"
)
# NOTE(review): my_handler is created but never given my_formatter nor
# attached to mylogger within this span, and app.logger below reuses the
# root handler instead — possibly wired up later in the file; confirm.
my_handler = TimedRotatingFileHandler("log/flask_app.log",
                                      when="D",
                                      interval=1,
                                      backupCount=15,
                                      encoding="UTF-8",
                                      delay=False,
                                      utc=True)
app.logger.addHandler(handler)
Example #41
0
    def create_logger(
            self,
            log_file_name='{0}/e2etest_running.log'.format(config.BASE_DIR),
            log_level=logging.DEBUG,
            log_date_format='%Y-%m-%d %H:%M:%S%z',
            log_formater='%(asctime)s %(filename)s:%(funcName)s %(levelname)s [line:%(lineno)d] %(message)s',
            max_log_files=3,
            one_day_one_file=True,
            max_log_file_size=10485760,
            log_to_standard_output=False):
        '''
        @summary: create and configure a logger writing to a file and/or
                  standard output
        @param log_file_name: the log file path (made absolute); a falsy
                              value means "standard output only"
        @param log_level: integer logging level. default is logging.DEBUG
        @param log_date_format: strftime date format, e.g. 2017-06-01 11:44:06+0000
        @param log_formater: record format string (parameter name kept for
                             backward compatibility)
        @param max_log_files: max number of rotated backup files kept by
                              either rotation strategy. default is 3
        @param one_day_one_file: True -> rotate daily; False -> rotate by size
        @param max_log_file_size: max file size in bytes when rotating by
                                  size. default is 10 MB
        @param log_to_standard_output: additionally echo records to standard
                                       output (the file handler is still added)
        @return: the configured logger (the logging module itself when no
                 file name is given)
        @note: repeated calls with the same file name add duplicate handlers
               to the same named logger.
        '''
        # Normalize to an absolute path and make sure its directory exists.
        if log_file_name:
            log_file_name = os.path.abspath(
                log_file_name)  # change path to absolute path
            if not os.path.exists(os.path.dirname(log_file_name)):
                os.makedirs(os.path.dirname(log_file_name))

        # Write log into file (isinstance replaces the old
        # `type(x) == type('')` check; the redundant `!= ''` is covered by
        # truthiness).
        if log_file_name and isinstance(log_file_name, str):
            # The logger is keyed by its file path so each file gets its own.
            logger = logging.getLogger(log_file_name)
            logger.setLevel(log_level)

            # write a new log file every day
            if one_day_one_file:
                Rthandler = TimedRotatingFileHandler(log_file_name,
                                                     when='D',
                                                     backupCount=max_log_files)
            else:
                Rthandler = RotatingFileHandler(log_file_name,
                                                maxBytes=max_log_file_size,
                                                backupCount=max_log_files)
            formatter = logging.Formatter(fmt=log_formater,
                                          datefmt=log_date_format)
            Rthandler.setFormatter(formatter)
            logger.addHandler(Rthandler)

            # write log to standard output synchronously
            if log_to_standard_output:
                console = logging.StreamHandler()
                console.setLevel(log_level)
                console.setFormatter(formatter)
                logger.addHandler(console)

        # write log to standard output default
        else:
            logging.basicConfig(level=log_level,
                                format=log_formater,
                                datefmt=log_date_format)
            logger = logging

        return logger
Example #42
0
def main():
    """Entry point: configure console+file logging, load settings, set up the
    optional Particle remote-notify integration and the Google Calendar
    client, flash the Unicorn HAT as a hardware check, then enter
    processing_loop()."""
    global cal, debug_mode, display_meeting_summary, particle, use_remote_notify

    # Logging
    # Setup the basic console logger
    format_str = '%(asctime)s %(levelname)s %(message)s'
    date_format = '%Y-%m-%d %H:%M:%S'
    logging.basicConfig(format=format_str, level=logging.INFO, datefmt=date_format)
    logger = logging.getLogger()
    # Add a file handler as well; roll at midnight and keep 7 copies
    # (backupCount=6 rotated backups + the active file).
    file_handler = TimedRotatingFileHandler("remind_log", when="midnight", backupCount=6)
    log_formatter = logging.Formatter(format_str, datefmt=date_format)
    file_handler.setFormatter(log_formatter)
    # file log always gets debug; console log level set in the config
    file_handler.setLevel(logging.DEBUG)
    logger.addHandler(file_handler)


    # tell the user what we're doing...
    print('\n')
    print(HASHES)
    print(HASH, 'Pi Remind HD Notify                      ', HASH)
    print(HASH, 'By John M. Wargo (https://johnwargo.com) ', HASH)
    print(HASHES)
    print('From: ' + PROJECT_URL + '\n')

    # Load and validate the project configuration.
    settings = Settings.get_instance()
    settings.validate_config_options()

    # Debug mode raises the root logger (console) to DEBUG as well.
    debug_mode = settings.get_debug_mode()
    if debug_mode:
        logging.info('Remind: Enabling debug mode')
        logger.setLevel(logging.DEBUG)

    display_meeting_summary = settings.get_display_meeting_summary()

    # Optional Particle-cloud "remote notify" device integration.
    use_remote_notify = settings.get_use_remote_notify()
    if use_remote_notify:
        logging.info('Remind: Remote Notify Enabled')
        access_token = settings.get_access_token()
        device_id = settings.get_device_id()
        # Check to see if the string values we need are populated
        if len(access_token) < 1 or len(device_id) < 1:
            logging.error('One or more values are missing from the project configuration file')
            logging.error(CONFIG_ERROR_STR)
            sys.exit(0)
        logging.debug('Remind: Creating Particle object')
        particle = ParticleCloud(access_token, device_id)

        # Reset the remote device to a known (off) state on startup.
        logging.info('Remind: Resetting Remote Notify status')
        particle.set_status(Status.FREE.value)
        time.sleep(1)
        particle.set_status(Status.OFF.value)

    # is the reboot counter in play?
    use_reboot_counter = settings.get_use_reboot_counter()
    if use_reboot_counter:
        # then get the reboot counter limit
        reboot_counter_limit = settings.get_reboot_counter_limit()
        # and tell the user the feature is enabled
        logging.info('Remind: Reboot enabled ({} retries)'.format(reboot_counter_limit))

    # Google Calendar init; on failure flash the failure color and bail.
    logging.info('Remind: Initializing Google Calendar interface')
    try:
        cal = GoogleCalendar()
        # Set the timeout for the rest of the Google API calls.
        # need this at its default during the registration process.
        socket.setdefaulttimeout(5)  # seconds
    except Exception as e:
        logging.error('Remind: Unable to initialize Google Calendar API')
        logging.error('Exception type: {}'.format(type(e)))
        logging.error('Error: {}'.format(sys.exc_info()[0]))
        unicorn.set_all(unicorn.FAILURE_COLOR)
        time.sleep(5)
        unicorn.off()
        sys.exit(0)

    logging.info('Remind: Application initialized')

    # flash some random LEDs just for fun...
    unicorn.flash_random(5, 0.5)
    # blink all the LEDs GREEN to let the user know the hardware is working
    unicorn.flash_all(3, 0.10, unicorn.GREEN)
    # get to work
    processing_loop()
    if os.environ["USE_LOGSTASH"] == "true":
        logger.info("Adding logstash appender")
        lshandler = AsynchronousLogstashHandler(
            "logstash", 5001, database_path='logstash_test.db')
        lshandler.setLevel(logging.ERROR)
        logger.addHandler(lshandler)

    handler = TimedRotatingFileHandler("logs/" + MODULE + ".log",
                                       when="d",
                                       interval=1,
                                       backupCount=30)

    logFormatter = logging.Formatter(
        '%(asctime)s.%(msecs)03d %(levelname)s %(module)s - %(funcName)s: %(message)s'
    )
    handler.setFormatter(logFormatter)
    logger.addHandler(handler)

    logger.info("==============================")
    logger.info("Starting: %s" % MODULE)
    logger.info("Module:   %s" % (VERSION))
    logger.info("==============================")

    #>> AMQC
    server = {
        "ip": os.environ["AMQC_URL"],
        "port": os.environ["AMQC_PORT"],
        "login": os.environ["AMQC_LOGIN"],
        "password": os.environ["AMQC_PASSWORD"]
    }
    logger.info(server)
Example #44
0
def get_file_handler():
    """Create the midnight-rotating file handler for the module's LOG_FILE."""
    handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    handler.setFormatter(FORMATTER)
    return handler
Example #45
0
app.config['LOG_FILE_FOLDER_DIR'] = LOG_FILE_FOLDER_DIR
# Configure logging
logFormatter = logging.Formatter(
    "[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s][%(threadName)s] - %(message)s"
)
# NOTE(review): despite the name, this is the werkzeug logger, not the root
# logger; its handlers are cleared and it only receives the console handler
# below, while the file handler goes to the real root logger — confirm this
# split is intentional.
rootLogger = logging.getLogger('werkzeug')
rootLogger.handlers.clear()

# time rotating handler: daily rotation, 30 backups, attached to the root
# logger (whose existing handlers are cleared first).
rotatingHandler = TimedRotatingFileHandler(Path(base_dir, 'logs',
                                                'takehome.log'),
                                           when="D",
                                           interval=1,
                                           backupCount=30,
                                           encoding='utf-8')
rotatingHandler.setFormatter(logFormatter)
rotatingHandler.setLevel(logging.DEBUG)
logging.getLogger().handlers.clear()
logging.getLogger().addHandler(rotatingHandler)
# stream handler (INFO and up) for the werkzeug logger.
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
consoleHandler.setLevel(logging.INFO)
rootLogger.addHandler(consoleHandler)

# ORM model file.
# NOTE(review): the path repeats 'backend/sqllib/models' (once as separate
# components, once inside the final string) — looks like a mistake; confirm
# against how DB_MODEL_FILE is consumed.
DB_MODEL_FILE = Path(base_dir, 'backend', 'sqllib', 'models',
                     'backend/sqllib/models/db_model.py')


def allowed_file(filename):
Example #46
0
# Make sure the log directory exists before any handler opens a file there.
if not os.path.exists(LOG_HOME):
    os.makedirs(LOG_HOME)

# Console logging via basicConfig; file logging via a daily-rotating
# handler (backupCount=0 keeps every rotated file).
logging.basicConfig(level=logging.INFO, filemode='w')

formatter = logging.Formatter('%(asctime)s [line:%(lineno)d] %(levelname)s %(message)s')
Rthandler = TimedRotatingFileHandler('{0}/main.log'.format(LOG_HOME),
                                     when='D',
                                     interval=1,
                                     backupCount=0,
                                     encoding='utf8')
Rthandler.setLevel(logging.INFO)
Rthandler.setFormatter(formatter)
logging.getLogger('').addHandler(Rthandler)

TEXT_LENGTH = 1


def download(url):
    headers = {'User-agent':
                   'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36'
               # 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_1 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13B143 Safari/601.1'
               }
    page = None
    err_info = None
    try:
        req = request.Request(url, headers=headers)
        response = request.urlopen(req, timeout=10)
# Root logger setup: file + console + an in-memory buffer used to build a
# notification email. Guarded so re-imports don't stack duplicate handlers.
root_logger = logging.getLogger()
mail_buffer = StringIO()

if not root_logger.handlers:
    log_format = logging.Formatter('%(asctime)s %(levelname)s:  %(message)s')
    root_logger.setLevel(logging.INFO)

    # Ensure ./logs exists and really is a directory.
    log_dir = os.path.abspath('logs')
    if os.path.exists(log_dir):
        if not os.path.isdir(log_dir):
            # raise as a statement, not the old raise(...) call style
            raise Exception("Logging directory {d} exists but as a file.".format(d=log_dir))
    else:
        os.makedirs(log_dir)

    # File log: rotate at midnight, keep 30 days. Built from log_dir so the
    # handler uses the same directory that was just validated above.
    file_handler = TimedRotatingFileHandler(os.path.join(log_dir, 'safeway_coupon.log'),
                                            when="midnight", interval=1, backupCount=30)
    file_handler.setFormatter(log_format)
    file_handler.setLevel(logging.INFO)
    root_logger.addHandler(file_handler)

    # Console log mirrors the file format.
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(log_format)
    root_logger.addHandler(console_handler)

    # Mail log: bare messages collected into mail_buffer for later sending.
    mail_handler = logging.StreamHandler(mail_buffer)
    mail_formatter = logging.Formatter('%(message)s')
    mail_handler.setFormatter(mail_formatter)
    mail_handler.setLevel(logging.INFO)
    root_logger.addHandler(mail_handler)


sleep_multiplier = 1.0
Example #48
0
import pymongo
import time, os, sys, logging
from logging.handlers import TimedRotatingFileHandler
sys.path.append('../config')
import read_config


log_file_path = '../log/mongo_utils.log'

# One formatter shared by the console handler and the midnight-rotating
# file handler (7 backups kept).
formatter = logging.Formatter(
    '%(name)s :%(lineno)d ------ %(asctime)s------ %(levelname)s ------ %(message)s',
    '%a, %d %b %Y %H:%M:%S', )

logger = logging.getLogger('mongo_utils.py')
ch = logging.StreamHandler()
th = TimedRotatingFileHandler(log_file_path, when="MIDNIGHT", interval=1,
                              backupCount=7)
for _handler in (ch, th):
    _handler.setFormatter(formatter)
    logger.addHandler(_handler)
logger.setLevel(logging.INFO)

# redis集群
class MongoCluster:
    """Factory for MongoDB client connections built from the configured host string."""

    def connect(self):
        """Open and return a new MongoClient."""
        uri = 'mongodb://%s' % read_config.mongo_prop
        return pymongo.MongoClient(uri)


if __name__ == "__main__":
    client = MongoCluster().connect()
    db = client.get_database('gse-transaction')
    collection = db.get_collection('tokens')
    for result in collection.find({"contractAddress":"0xef1878ace027089520e8825bbdd16ad0048e3288"}):
Example #49
0
import logging
import os
from logging.handlers import TimedRotatingFileHandler

# Log to vigibot.log in the directory above this module, rotating at
# midnight and keeping the 10 most recent files.
logformatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s')
_log_path = os.path.dirname(os.path.realpath(__file__)) + '/../vigibot.log'
loghandler = TimedRotatingFileHandler(_log_path,
                                      when='midnight',
                                      backupCount=10)
loghandler.setFormatter(logformatter)
Example #50
0
import sys
import logging
from chronos.lib.config_parser import cfg
from logging.handlers import TimedRotatingFileHandler

# Silence chatty third-party libraries.
for _noisy in ("socketIO-client", "requests", "pymodbus"):
    logging.getLogger(_noisy).setLevel(logging.ERROR)

# Root logger: DEBUG overall, mirrored to stdout and a midnight-rotating
# file (3 backups) at the configured path.
log_formatter = logging.Formatter("%(asctime)s %(levelname)s:%(message)s",
                                  "%Y-%m-%d %H:%M:%S")
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)

console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(log_formatter)

rotate_logs_handler = TimedRotatingFileHandler(cfg.files.log_path,
                                               when="midnight",
                                               backupCount=3)
rotate_logs_handler.setFormatter(log_formatter)

root_logger.addHandler(console_handler)
root_logger.addHandler(rotate_logs_handler)
Example #51
0
def configure_file_logging():
    """Attach a midnight-rotating DEBUG file handler when LOG_FILE is set."""
    if not LOG_FILE:
        return
    handler = TimedRotatingFileHandler(LOG_FILE, when="midnight")
    handler.setFormatter(formatter)
    handler.setLevel(logging.DEBUG)
    logger.addHandler(handler)
Example #52
0
def get_file_handler():
    """Return a WARNING-level file handler that rotates LOG_FILE at midnight."""
    handler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
    handler.setLevel(logging.WARNING)
    handler.setFormatter(FORMATTER)
    return handler
Example #53
0
def create_app(config_name='default', jobs_enabled=True):
    """
    Set up the Flask Application context.

    Configures error logging (SMTP + rotating file), jinja filters,
    extensions, the job scheduler, error handlers, context processors,
    and registers all blueprints.

    :param config_name: Configuration for specific application context.
    :param jobs_enabled: When True, register and start the scheduled
        background jobs (disable in tests to avoid double execution).

    :return: Flask application
    """

    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    # Mail ERROR-level records to the OpenRecords distribution list.
    # TODO: handler_info, handler_debug, handler_warn
    mail_handler = SMTPHandler(mailhost=(app.config['MAIL_SERVER'],
                                         app.config['MAIL_PORT']),
                               fromaddr=app.config['MAIL_SENDER'],
                               toaddrs=OPENRECORDS_DL_EMAIL,
                               subject='OpenRecords Error')
    mail_handler.setLevel(logging.ERROR)
    mail_handler.setFormatter(
        Formatter('''
    Message Type:       %(levelname)s
    Location:           %(pathname)s:%(lineno)d
    Module:             %(module)s
    Function:           %(funcName)s
    Time:               %(asctime)s
    
    Message:
    %(message)s
    '''))
    app.logger.addHandler(mail_handler)

    # ERROR log file: rotates at midnight, 60 days retained, name carries
    # the app version string.
    handler_error = TimedRotatingFileHandler(os.path.join(
        app.config['LOGFILE_DIRECTORY'],
        'openrecords_{}_error.log'.format(app.config['APP_VERSION_STRING'])),
                                             when='midnight',
                                             interval=1,
                                             backupCount=60)
    handler_error.setLevel(logging.ERROR)
    handler_error.setFormatter(
        Formatter(
            '------------------------------------------------------------------------------- \n'
            '%(asctime)s %(levelname)s: %(message)s '
            '[in %(pathname)s:%(lineno)d]\n'))
    app.logger.addHandler(handler_error)

    # Template filters used throughout the jinja templates.
    app.jinja_env.filters[
        'format_event_type'] = jinja_filters.format_event_type
    app.jinja_env.filters[
        'format_response_type'] = jinja_filters.format_response_type
    app.jinja_env.filters[
        'format_response_privacy'] = jinja_filters.format_response_privacy
    app.jinja_env.filters[
        'format_ultimate_determination_reason'] = jinja_filters.format_ultimate_determination_reason

    # Initialize Flask extensions against this app instance.
    recaptcha.init_app(app)
    bootstrap.init_app(app)
    es.init_app(app,
                use_ssl=app.config['ELASTICSEARCH_USE_SSL'],
                verify_certs=app.config['ELASTICSEARCH_VERIFY_CERTS'])
    db.init_app(app)
    csrf.init_app(app)
    moment.init_app(app)
    login_manager.init_app(app)
    mail.init_app(app)
    celery.conf.update(app.config)
    sentry.init_app(app, logging=app.config["USE_SENTRY"], level=logging.INFO)

    if jobs_enabled:
        scheduler.init_app(app)

    # Login manager needs the app context to resolve the Anonymous model.
    with app.app_context():
        from app.models import Anonymous
        login_manager.login_view = 'auth.login'
        login_manager.anonymous_user = Anonymous
        KVSessionExtension(session_redis, app)

    # schedule jobs
    if jobs_enabled:
        # NOTE: if running with reloader, jobs will execute twice
        import jobs
        scheduler.add_job(
            'update_request_statuses',
            jobs.update_request_statuses,
            name="Update requests statuses every day at 3 AM.",
            trigger=CronTrigger(hour=3),
        )
        scheduler.add_job(
            'check_sanity',
            jobs.check_sanity,
            name="Check if scheduler is running every morning at 8 AM.",
            # trigger=IntervalTrigger(minutes=1)  # TODO: switch to cron below after testing
            trigger=CronTrigger(hour=8))

        scheduler.start()

    # Error Handlers
    @app.errorhandler(400)
    def bad_request(e):
        """Render the generic error page with the request's description."""
        return render_template("error/generic.html",
                               status_code=400,
                               message=e.description or None)

    @app.errorhandler(403)
    def forbidden(e):
        return render_template("error/generic.html", status_code=403)

    @app.errorhandler(404)
    def page_not_found(e):
        return render_template("error/generic.html", status_code=404)

    @app.errorhandler(500)
    def internal_server_error(e):
        """Log the failing request with a generated error id and show it to
        the user so support can correlate reports with log entries."""
        error_id = str(uuid.uuid4())
        app.logger.error("""Request:   {method} {path}
    IP:        {ip}
    User:      {user}
    Agent:     {agent_platform} | {agent_browser} {agent_browser_version}
    Raw Agent: {agent}
    Error ID:  {error_id}
            """.format(method=flask_request.method,
                       path=flask_request.path,
                       ip=flask_request.remote_addr,
                       agent_platform=flask_request.user_agent.platform,
                       agent_browser=flask_request.user_agent.browser,
                       agent_browser_version=flask_request.user_agent.version,
                       agent=flask_request.user_agent.string,
                       user=current_user,
                       error_id=error_id),
                         exc_info=e)
        return render_template("error/generic.html",
                               status_code=500,
                               error_id=error_id)

    @app.errorhandler(503)
    def maintenance(e):
        """Maintenance page fed from instance/maintenance.json."""
        with open(os.path.join(app.instance_path, 'maintenance.json')) as f:
            maintenance_info = json.load(f)
        return render_template('error/maintenance.html',
                               description=maintenance_info['description'],
                               outage_time=maintenance_info['outage_time'])

    @app.before_request
    def check_maintenance_mode():
        """Abort with 503 while maintenance.json exists, unless the client
        carries the maintainer cookie."""
        if os.path.exists(os.path.join(app.instance_path, 'maintenance.json')):
            if not flask_request.cookies.get('authorized_maintainer', None):
                return abort(503)

    @app.context_processor
    def add_session_config():
        """Add current_app.permanent_session_lifetime converted to milliseconds
        to context. The config variable PERMANENT_SESSION_LIFETIME is not
        used because it could be either a timedelta object or an integer
        representing seconds.
        """
        return {
            'PERMANENT_SESSION_LIFETIME_MS':
            (app.permanent_session_lifetime.seconds * 1000),
        }

    @app.context_processor
    def add_debug():
        """Add current_app.debug to context."""
        return dict(debug=app.debug)

    # Register Blueprints
    from .main import main
    app.register_blueprint(main)

    from .auth import auth
    app.register_blueprint(auth, url_prefix="/auth")

    from .request import request
    app.register_blueprint(request, url_prefix="/request")

    from .request.api import request_api_blueprint
    app.register_blueprint(request_api_blueprint,
                           url_prefix="/request/api/v1.0")

    from .report import report
    app.register_blueprint(report, url_prefix="/report")

    from .response import response
    app.register_blueprint(response, url_prefix="/response")

    from .upload import upload
    app.register_blueprint(upload, url_prefix="/upload")

    from .user import user
    app.register_blueprint(user, url_prefix="/user")

    from .agency import agency
    app.register_blueprint(agency, url_prefix="/agency")

    from .agency.api import agency_api_blueprint
    app.register_blueprint(agency_api_blueprint, url_prefix="/agency/api/v1.0")

    from .search import search
    app.register_blueprint(search, url_prefix="/search")

    from .admin import admin
    app.register_blueprint(admin, url_prefix="/admin")

    from .user_request import user_request
    app.register_blueprint(user_request, url_prefix="/user_request")

    from .permissions import permissions
    app.register_blueprint(permissions, url_prefix="/permissions/api/v1.0")

    # exit handling
    if jobs_enabled:
        atexit.register(lambda: scheduler.shutdown())

    return app
Example #54
0
import logging
import os
import re
import time
from logging.handlers import TimedRotatingFileHandler


# In[ ]:

# Daily rolling log: INFO-level root logger writing to /tmp, rotating once
# per day and keeping three days of backups.
log_format = '%(asctime)s--%(filename)s[Line:%(lineno)d]--%(levelname)s--%(message)s'
log_datefmt = '%Y-%m-%d, %A, %H:%M:%S'
formatter = logging.Formatter(log_format, log_datefmt)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()

log_handler = TimedRotatingFileHandler(filename="/tmp/auto_connect_log", when='d', interval=1, backupCount=3)
log_handler.setFormatter(formatter)
log_handler.suffix = "%Y-%m-%d.log"
# Escape the dot so the rotation-cleanup pattern only matches real rotated
# files; the previous bare '.' matched any character before "log".
log_handler.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}\.log$")

logger.addHandler(log_handler)


# In[ ]:

# Portal login URL and a remote host presumably used for connectivity
# checks — TODO confirm against the code that consumes REMOTE_SERVER.
URL = "http://210.77.16.21"
REMOTE_SERVER = "119.75.217.109"
# NOTE(review): credentials appear redacted ("******") in this copy; real
# values should come from environment/config, never hard-coded in source.
USERNAME = "******"
PASSWORD = "******"


# In[ ]:
Example #55
0
class LogEngine(object, metaclass=stSingleton):
    """Singleton log engine wrapping the root logger.

    Console, plain-file and timed-rotating-file handlers can each be
    attached once on demand; level-named convenience methods forward to
    the underlying logger, and processLogEvent dispatches log events to
    the method matching their level.
    """

    # Log levels (aliases of the stdlib logging levels).
    LEVEL_DEBUG = logging.DEBUG
    LEVEL_INFO = logging.INFO
    LEVEL_WARN = logging.WARN
    LEVEL_ERROR = logging.ERROR
    LEVEL_CRITICAL = logging.CRITICAL

    # ----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.logger = logging.getLogger()
        self.formatter = logging.Formatter(
            "%(levelname)s  [%(asctime)s.%(msecs)d][%(filename)s:%(lineno)d][%(process)d:%(threadName)s] %(message)s"
        )
        self.level = self.LEVEL_CRITICAL

        self.consoleHandler = None
        self.fileHandler = None
        self.timedRotatingFileHandler = None

        # NullHandler suppresses the "no handlers could be found" warning
        # until a real handler is attached.
        nullHandler = logging.NullHandler()
        self.logger.addHandler(nullHandler)

        # Map each level to the corresponding logging method for dispatch.
        self.levelFunctionDict = {
            self.LEVEL_DEBUG: self.debug,
            self.LEVEL_INFO: self.info,
            self.LEVEL_WARN: self.warn,
            self.LEVEL_ERROR: self.error,
            self.LEVEL_CRITICAL: self.critical,
        }

    # ----------------------------------------------------------------------
    def setLogLevel(self, level):
        """Set the log level on the underlying logger and remember it."""
        self.logger.setLevel(level)
        self.level = level

    # ----------------------------------------------------------------------
    def addConsoleHandler(self):
        """Attach a console (stream) handler, at most once."""
        if not self.consoleHandler:
            self.consoleHandler = logging.StreamHandler()
            self.consoleHandler.setLevel(self.level)
            self.consoleHandler.setFormatter(self.formatter)
            self.logger.addHandler(self.consoleHandler)

    # ----------------------------------------------------------------------
    def addFileHandler(self, filename=''):
        """Attach a plain file handler, at most once.

        Defaults to st_YYYYMMDD.log in the temp path when no filename is
        given.
        """
        if not self.fileHandler:
            if not filename:
                filename = 'st_' + datetime.now().strftime('%Y%m%d') + '.log'
            filepath = getTempPath(filename)
            self.fileHandler = logging.FileHandler(filepath)
            self.fileHandler.setLevel(self.level)
            self.fileHandler.setFormatter(self.formatter)
            self.logger.addHandler(self.fileHandler)

    # ----------------------------------------------------------------------
    def addTimedRotatingFileHandler(self,
                                    filename='',
                                    when='H',
                                    interval=1,
                                    backupCount=0):
        """Attach a timed rotating file handler, at most once.

        Bug fix: the guard previously tested self.fileHandler (copy-paste
        from addFileHandler), so this method silently did nothing whenever
        a plain file handler was already attached.
        """
        if not self.timedRotatingFileHandler:
            if not filename:
                filename = 'stlog'
            filepath = getTempPath(filename)
            self.timedRotatingFileHandler = TimedRotatingFileHandler(
                filename=filepath,
                when=when,
                interval=interval,
                backupCount=backupCount)
            self.timedRotatingFileHandler.setLevel(self.level)
            self.timedRotatingFileHandler.setFormatter(self.formatter)
            self.logger.addHandler(self.timedRotatingFileHandler)

    # ----------------------------------------------------------------------
    def debug(self, msg):
        """Development-time diagnostics."""
        self.logger.debug(msg)

    # ----------------------------------------------------------------------
    def info(self, msg):
        """Normal output."""
        self.logger.info(msg)

    # ----------------------------------------------------------------------
    def warn(self, msg):
        """Warning output."""
        # Logger.warn is a deprecated alias; call warning() instead.
        self.logger.warning(msg)

    # ----------------------------------------------------------------------
    def error(self, msg):
        """Error output."""
        self.logger.error(msg)

    # ----------------------------------------------------------------------
    def exception(self, msg):
        """Error output plus the current exception traceback."""
        self.logger.exception(msg)

    # ----------------------------------------------------------------------
    def critical(self, msg):
        """Severe errors that affect program execution."""
        self.logger.critical(msg)

    # ----------------------------------------------------------------------
    def processLogEvent(self, event):
        """Dispatch a log event to the method matching its level."""
        log = event.dict_['data']
        function = self.levelFunctionDict[log.logLevel]  # level -> method
        msg = '\t'.join([log.gatewayName, log.logContent])
        function(msg)
Example #56
0
from logging.handlers import TimedRotatingFileHandler
import logging
import sys
import os

# Ensure the log directory exists.  The log file itself is NOT pre-created:
# TimedRotatingFileHandler opens it in append mode on construction, which
# creates it if missing, so the old open(...).close() step was redundant.
os.makedirs('logs', exist_ok=True)

formatter = logging.Formatter(
    '[%(levelname)s] - %(name)s - %(asctime)s - %(message)s')
# Rotate at midnight, keeping 20 days of backups; rotated files get a
# date suffix like developer_entry_task.log.2024-01-31.
file_handler = TimedRotatingFileHandler('logs/developer_entry_task.log',
                                        when='midnight',
                                        backupCount=20)
file_handler.setFormatter(formatter)
file_handler.suffix = "%Y-%m-%d"


def get_logger(name):
    """Return the named logger wired to the shared rotating file handler.

    Level is DEBUG when '--debug' appears on the command line, INFO
    otherwise.
    """
    log = logging.getLogger(name)
    if '--debug' not in sys.argv:
        log.setLevel(logging.INFO)
    else:
        log.setLevel(logging.DEBUG)
    # logging.getLogger returns the same object per name, so guard against
    # re-attaching the handler on repeated calls (which would duplicate
    # every log line).
    if file_handler not in log.handlers:
        log.addHandler(file_handler)
    return log
Example #57
0
import Image
import ImageDraw
import ImageFont

import sys
import logging
from logging.handlers import TimedRotatingFileHandler
import Adafruit_DHT

import dbconnect

# Shared formatter routed to both a midnight-rotating log file and stdout.
LOG_FILE = 'monitor.log'

logFormat = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fileHandler = TimedRotatingFileHandler(LOG_FILE, when='midnight')
consoleHandler = logging.StreamHandler(sys.stdout)

log = logging.getLogger('monitor')
log.setLevel(logging.DEBUG)
for handler in (fileHandler, consoleHandler):
    handler.setFormatter(logFormat)
    log.addHandler(handler)

log.debug('Starting up')


def main():

    epd = epd2in9.EPD()
    epd.init(epd.lut_full_update)
Example #58
0
def main():
    """Mirror Wild Apricot member data into a local CSV, polling forever.

    Reads API credentials, filenames and tuning knobs from erras.ini
    (located next to this script), then loops: download the filtered
    contact list, save a timestamped backup CSV, atomically replace the
    live CSV via os.rename, prune old backup/response files, and sleep
    for loop_delay seconds.
    """
    # TODO:  delete this default dict after testing fallback.
    # default_confs = {
    #    "filter_query" : "status eq Active or status eq 'Pending - Renewal'",
    #    "csv_backup_filename_root" : "erras_backup_members_",
    #    "csv_filename_temp" : "erras_members_new.csv",
    #    "csv_filename" : "erras_members.csv",
    #    "apricot_response_root" : "wild_apricot_response_",
    #    "members_log_filename" : "erras_members.log",
    #    "keypad_field_names" : "Keypad",
    #    "rfid_field_names" : "RFID",
    #    "loop_delay" : 500,
    #    "csv_prune_max" : 5,
    #    "json_prune_max" : 10
    # }
    # parser = ConfigParser(default_confs)
    # TODO:  delete the above default dict after testing fallback.
    
    parser = ConfigParser()
    config_file_name = 'erras.ini'
    section_name = "erras"
    # This constructs a file path to the config_file_name in the same directory as the script file.
    config_path = str(pathlib.Path(__file__).with_name(config_file_name))
    with open(config_path) as config_file:
        parser.read_file(config_file)
    
    # Required settings: no fallback, so a missing key raises a
    # configparser error at startup rather than failing later.
    wa_api_client_id = parser.get(section_name, "wa_api_client_id")
    wa_api_client_secret = parser.get(section_name, "wa_api_client_secret")
    credential_name = parser.get(section_name, "credential_name")
    credential_key = parser.get(section_name, "credential_key")
    api_key = parser.get(section_name, "api_key")
    # Optional settings below all carry fallbacks matching the commented
    # default dict above.
    filter_query = parser.get(section_name, "filter_query", fallback="status eq Active or status eq 'Pending - Renewal'")
    request_url_root = parser.get(section_name, "request_url_root")
    csv_backup_filename_root = parser.get(section_name, "csv_backup_filename_root", fallback="erras_backup_members_")
    csv_filename_temp = parser.get(section_name, "csv_filename_temp", fallback="erras_members_new.csv")
    csv_filename = parser.get(section_name, "csv_filename", fallback="erras_members.csv")
    apricot_response_root = parser.get(section_name, "apricot_response_root", fallback="wild_apricot_response_")
    loop_delay = parser.getint(section_name, "loop_delay", fallback=500)
    csv_prune_max = parser.getint(section_name, "csv_prune_max", fallback=5)
    json_prune_max = parser.getint(section_name, "json_prune_max", fallback=5)
    log_filename = parser.get(section_name, "members_log_filename", fallback="erras_members.log")
    keypad_field_names_string = parser.get(section_name, "keypad_field_names", fallback="Keypad")
    rfid_field_names_string = parser.get(section_name, "rfid_field_names", fallback="RFID")
    # Split up the key_fields_string into a list.
    # TODO: look into this later for split with escape
    # https://stackoverflow.com/questions/18092354/python-split-string-without-splitting-escaped-character
    keypad_field_names = keypad_field_names_string.split(",")
    rfid_field_names = rfid_field_names_string.split(",")
    
    # set up logger
    logger_name = "erras_members"
    logger_format = '%(asctime)s %(levelname)s %(message)s'
    log = logging.getLogger(logger_name)
    log.setLevel(logging.DEBUG)
    formatter = logging.Formatter(logger_format)
    
    # https://docs.python.org/2/library/logging.handlers.html
    # how often the log file is rotated is interval * when
    # when = S/M/H/D/W0-W6/midnight
    # so when='S', interval=500 means every 500 seconds.
    handler = TimedRotatingFileHandler(log_filename, when='D', interval=1, backupCount=20)
    handler.setFormatter(formatter)
    # handler.setLevel(logging.INFO)
    handler.setLevel(logging.DEBUG)
    
    log.addHandler(handler)
    # log.addHandler(JournalHandler())
    
    # Log the field names
    for field in keypad_field_names:
        log.debug("keypad field names: %s" % field)
    for field in rfid_field_names:
        log.debug("RFID field names: %s" % field)
    
    errasfiles = ErrasFiles(keypad_field_names, rfid_field_names, log)
    
    api = WaApiClient(wa_api_client_id, wa_api_client_secret, log, debug=True)
    api.authenticate_with_contact_credentials(credential_name, credential_key)
    
    # NOTE(review): an uncaught network/API error here will kill the loop
    # and the process — presumably an external supervisor restarts it.
    log.info("Starting request loop.")
    while(True):
        log.info("########################### requesting member data ############################")
        params = { '$filter': filter_query,
                   '$async': 'false' }
    
        request_url = request_url_root + '?' + urllib.parse.urlencode(params)
        log.debug("Request url is: %s" % request_url)
    
        contacts = api.execute_request(request_url)
        # each contact is an ApiObject instance
        contact_list = contacts.Contacts
        log.info("There are %d contacts in results." % len(contact_list))
        
        # Save the newly downloaded member data in a backup file
        csv_backup_filename = csv_backup_filename_root + errasfiles.get_timestamp() + ".csv"
        errasfiles.print_contacts_csv(contact_list, csv_backup_filename)
        # And in a temp file.
        errasfiles.print_contacts_csv(contact_list, csv_filename_temp)
    
        # Note, do not use across filesystem boundaries.  File rename is
        # only atomic on unix if both new and old are on the same
        # filesystem.
        os.rename(csv_filename_temp, csv_filename)
        log.info("Member data saved in filename %s" % csv_filename)
        directory = os.getcwd()
        errasfiles.prune(directory, csv_backup_filename_root, ".csv", csv_prune_max)
        errasfiles.prune(directory, apricot_response_root, ".json", json_prune_max)
    
        log.info("Sleeping for %d seconds." % loop_delay)
        time.sleep(loop_delay)
Example #59
0
@app.errorhandler(500)
def internal_error(error):
    """Serve the custom 500 page for unhandled server errors."""
    page = render_template('error/500.html')
    return page, 500


# ###################
# ## Enable ErrorLog
# ###################
if not app.debug:
    import logging
    from logging.handlers import TimedRotatingFileHandler
    # Production error logging goes to a rotating file; mail notification
    # (logging.handlers.SMTPHandler) could be wired in here instead.
    log_file = '{dir}/{file}'.format(dir=app.basedir,
                                     file=app.config['ERROR_LOG'])
    log_dir = os.path.split(log_file)[0]
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    error_formatter = logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
    error_handler = TimedRotatingFileHandler(log_file,
                                             when='midnight',
                                             interval=1,
                                             backupCount=0)
    error_handler.setFormatter(error_formatter)
    app.logger.setLevel(logging.INFO)
    error_handler.setLevel(logging.INFO)
    app.logger.addHandler(error_handler)
    app.logger.info('Project start up...')
Example #60
-1
def configure_logging(app):
    """Attach mail, debug-file and error-file handlers to app.logger.

    Errors are mailed to the admins (SSL or plain SMTP depending on
    config); all records go to a midnight-rotating debug log and errors
    additionally to a rotating error log, each kept for 90 days.
    """
    subject = '[Error] %s encountered errors on %s' % (app.config['DOMAIN'], datetime.now().strftime('%Y/%m/%d'))
    if app.debug:
        subject += ' [DEV]'
    mail_args = [(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                 app.config['MAIL_DEFAULT_SENDER'], app.config['ADMINS'],
                 subject,
                 (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])]
    if app.config['MAIL_USE_SSL']:
        mail_handler = SSLSMTPHandler(*mail_args)
    else:
        mail_handler = SMTPHandler(*mail_args)

    mail_handler.setLevel(logging.ERROR)
    app.logger.addHandler(mail_handler)

    shared_formatter = logging.Formatter(
        '%(asctime)s %(process)d-%(thread)d %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')

    # One rotating file per stream: (config key, minimum level).
    for config_key, min_level in (('DEBUG_LOG', logging.DEBUG),
                                  ('ERROR_LOG', logging.ERROR)):
        log_path = os.path.join(app.root_path, app.config[config_key])
        rotating_handler = TimedRotatingFileHandler(log_path, when='midnight', interval=1, backupCount=90)
        rotating_handler.setLevel(min_level)
        rotating_handler.setFormatter(shared_formatter)
        app.logger.addHandler(rotating_handler)

    # In production Flask's logger only emits ERROR by default; lower it
    # to INFO so informational records reach the handlers too.
    if not app.config['DEBUG']:
        app.logger.setLevel(logging.INFO)