Beispiel #1
0
class ElasticSearchBackend(AbstractBackend):
    """
    Backend that ships log records to an ElasticSearch database.

    It abstracts the handler and formatter concepts from 'logging' around a
    CMRESHandler, part of the external 'cmreslogging' library (itself based on
    'logging'). CMRESHandler sends log records to an ElasticSearch DB and does
    not need a Formatter object of its own.
    """

    def __init__(self):
        """
        Initialize the connection defaults. CMRESHandler needs, at least, a
        hostname, a username, a password, a port and a specific index from the
        ElasticSearch DB to send log records.
        """
        super(ElasticSearchBackend, self).__init__(None, None)
        self.__host = ''
        self.__user = None
        self.__passwd = None
        self.__port = 9203
        self.__index = ''
        self.__bufferSize = 1000
        self.__flushTime = 1

    def createHandler(self, parameters=None):
        """
        Initialize the backend attributes from 'parameters' and create the
        CMRESHandler with them.

        :param parameters: dictionary of parameters. ex: {'FileName': file.log}
        """
        if parameters is not None:
            self.__host = parameters.get('Host', self.__host)
            self.__user = parameters.get('User', self.__user)
            self.__passwd = parameters.get('Password', self.__passwd)
            self.__port = int(parameters.get('Port', self.__port))
            self.__index = parameters.get('Index', self.__index)
            self.__bufferSize = int(
                parameters.get('BufferSize', self.__bufferSize))
            self.__flushTime = int(
                parameters.get('FlushTime', self.__flushTime))

        # Both auth modes share everything except the authentication settings,
        # so build the common keyword arguments once instead of duplicating the
        # whole constructor call.
        handlerKwargs = {
            'hosts': [{'host': self.__host, 'port': self.__port}],
            'es_index_name': self.__index,
            'use_ssl': True,
            'verify_ssl': True,
            'buffer_size': self.__bufferSize,
            'flush_frequency_in_sec': self.__flushTime,
        }
        if self.__user is not None and self.__passwd is not None:
            handlerKwargs['auth_type'] = CMRESHandler.AuthType.BASIC_AUTH
            handlerKwargs['auth_details'] = (self.__user, self.__passwd)
        else:
            handlerKwargs['auth_type'] = CMRESHandler.AuthType.NO_AUTH
        self._handler = CMRESHandler(**handlerKwargs)
        # We give a format containing only asctime to add the field in elasticsearch
        # asctime is not created at the initialization of the LogRecords but built in the format process
        self._handler.setFormatter(logging.Formatter('%(asctime)s'))

    def setLevel(self, level):
        """
        No possibility to set the level of the ElasticSearch handler.
        It is not set by default so it can send all Log Records of all levels
        to ElasticSearch.

        :param level: ignored; kept for interface compatibility
        """
        pass

    def setFormat(self, fmt, datefmt, options):
        """
        Each backend gives a format to its formatter and attaches it to its
        handler. Not applicable here: the ElasticSearch handler formats its
        own records.

        :param fmt: string representing the log format
        :param datefmt: string representing the date format
        :param options: dictionary of logging options. ex: {'Color': True}
        """
        pass
Beispiel #2
0
def get_logger(name=None):
    """
    Get (or lazily create and cache) a logger by name.

    :param name: name of the logger; defaults to this module's __name__
    :return: configured logging.Logger instance
    """

    # Keep only the last 7 days of logs; comment out if unwanted.
    rmNonRecentLog()

    global loggers

    if not name:
        name = __name__

    # Return the cached logger with a single dict lookup
    # (the original called loggers.get(name) twice).
    cached = loggers.get(name)
    if cached:
        return cached

    logger = logging.getLogger(name)
    logger.setLevel(LOG_LEVEL)

    # One Formatter instance can safely be shared by all handlers below.
    formatter = logging.Formatter(LOG_FORMAT)

    # Console output
    if LOG_ENABLED and LOG_TO_CONSOLE:
        stream_handler = logging.StreamHandler(sys.stdout)
        stream_handler.setLevel(level=LOG_LEVEL)
        stream_handler.setFormatter(formatter)
        logger.addHandler(stream_handler)

    # File output
    if LOG_ENABLED and LOG_TO_FILE:
        # Create the log directory if needed; exist_ok avoids the
        # check-then-create race of the original exists()/makedirs() pair.
        makedirs(dirname(LOG_PATH), exist_ok=True)
        file_handler = logging.FileHandler(LOG_PATH, encoding='utf-8')
        file_handler.setLevel(level=LOG_LEVEL)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    # Elasticsearch output
    if LOG_ENABLED and LOG_TO_ES:
        es_handler = CMRESHandler(
            hosts=[{
                'host': ELASTIC_SEARCH_HOST,
                'port': ELASTIC_SEARCH_PORT
            }],
            # Authentication can be configured here if required
            auth_type=CMRESHandler.AuthType.NO_AUTH,
            es_index_name=ELASTIC_SEARCH_INDEX,
            # One index per month
            index_name_frequency=CMRESHandler.IndexNameFrequency.MONTHLY,
            # Tag every record with the deployment environment
            es_additional_fields={'environment': APP_ENVIRONMENT})
        es_handler.setLevel(level=LOG_LEVEL)
        es_handler.setFormatter(formatter)
        logger.addHandler(es_handler)

    # Cache globally so later calls reuse this logger
    loggers[name] = logger
    return logger
Beispiel #3
0
    async def InitServer(cls,
                         servertype="unkown-servertype",
                         startmysql=True,
                         startpg=True):
        """
        Initialize the server process: logging (screen, rotating file, ELK),
        configuration, and the database connection pools.

        :param servertype: label used for the ELK index and server guid
        :param startmysql: create the aiomysql engine when True
        :param startpg: create the asyncpg pool when True
        """
        # Create the log directory; exist_ok avoids the original
        # check-then-create race between exists() and makedirs().
        os.makedirs('logs', exist_ok=True)
        logging.basicConfig(level=logging.DEBUG)  # default: log to the screen
        filehandler = TimedRotatingFileHandler(  # time-rotated file output
            filename="logs/my.log",
            when='D',
            interval=1,
            backupCount=0)
        filehandler.suffix = "%Y%m%d-%H%M%S.log"
        formatter = logging.Formatter(
            "%(asctime)s-%(name)s-%(levelname)s-[%(filename)s:%(lineno)d]-%(message)s"
        )
        filehandler.setFormatter(formatter)
        filehandler.setLevel(logging.INFO)

        logging.getLogger().addHandler(filehandler)  # attach rotating file output

        # Server name and data file path
        cls.myservertype = servertype
        cls.mydatapath = datafilepath()
        # Pre-generate a fresh guid; only used if none is configured yet
        newguid = cls.myservertype + "_" + str(uuid.uuid1())

        await cls.loadconfig()

        if cls.elkdatabase == "":
            cls.elkdatabase = cls.myservertype

        # 'is None', not '== None' (PEP 8; '==' can be hijacked by __eq__)
        if cls.myserverguid is None:
            cls.myserverguid = newguid
            # No configured guid yet, so persist the generated one
            await cls.saveconfig()

        # ELK log output
        es_handler = CMRESHandler(hosts=[{
            'host': cls.elkhost,
            'port': cls.elkport
        }],
                                  auth_type=CMRESHandler.AuthType.NO_AUTH,
                                  es_index_name=cls.elkdatabase,
                                  es_additional_fields={
                                      'App': cls.myservertype,
                                      'AppGuid': cls.myserverguid
                                  })
        es_handler.setLevel(logging.INFO)

        formatter2 = logging.Formatter(
            "%(asctime)s-%(name)s-%(levelname)s-[%(filename)s:%(lineno)d]-%(message)s"
        )
        es_handler.setFormatter(formatter2)

        logging.getLogger().addHandler(es_handler)
        # Silence noisy debug logging from these modules: with ELK attached,
        # their debug records would be echoed to the screen and clutter output.
        logging.getLogger("elasticsearch").setLevel(logging.WARNING)
        logging.getLogger("requests").setLevel(logging.WARNING)
        logging.getLogger("urllib3").setLevel(logging.WARNING)

        # Create the MySQL engine
        if startmysql:
            cls.mysqlengine = await create_engine(user=cls.mysqluser,
                                                  password=cls.mysqlpassword,
                                                  host=cls.mysqlhost,
                                                  port=cls.mysqlport,
                                                  db=cls.mysqldb,
                                                  loop=cls.asyncioloop)
        if startpg:
            # NOTE(review): PG credentials/host are hard-coded (and redacted)
            # here, unlike MySQL which reads from config — move these to the
            # loaded configuration as well.
            cls.pg_pool = await asyncpg.create_pool(
                user='******',
                password='******',
                database='goldhonor',
                host='factory.goldhonor.com',
                port=15432,
                command_timeout=60)
class ElasticSearchBackend(AbstractBackend):
  """
  Abstraction of the logging handler/formatter concepts for ElasticSearch.

  Built around CMRESHandler from the external 'cmreslogging' library (itself
  based on 'logging'): a dedicated handler that sends log records to an
  ElasticSearch DB and needs no Formatter object of its own.
  """

  def __init__(self):
    """
    Set up the connection defaults: CMRESHandler requires at least a hostname,
    a username, a password, a port and a target index in the ElasticSearch DB
    to send log records.
    """
    super(ElasticSearchBackend, self).__init__(None, None)
    self.__host = ''
    self.__user = None
    self.__passwd = None
    self.__port = 9203
    self.__index = ''
    self.__bufferSize = 1000
    self.__flushTime = 1

  def createHandler(self, parameters=None):
    """
    Refresh the backend attributes from 'parameters', then build the
    CMRESHandler from them.

    :params parameters: dictionary of parameters. ex: {'FileName': file.log}
    """
    if parameters is not None:
      self.__host = parameters.get('Host', self.__host)
      self.__user = parameters.get('User', self.__user)
      self.__passwd = parameters.get('Password', self.__passwd)
      self.__port = int(parameters.get('Port', self.__port))
      self.__index = parameters.get('Index', self.__index)
      self.__bufferSize = int(parameters.get('BufferSize', self.__bufferSize))
      self.__flushTime = int(parameters.get('FlushTime', self.__flushTime))

    # Select the authentication keywords up front; everything else is shared.
    haveCredentials = self.__user is not None and self.__passwd is not None
    if haveCredentials:
      authKwargs = dict(auth_type=CMRESHandler.AuthType.BASIC_AUTH,
                        auth_details=(self.__user, self.__passwd))
    else:
      authKwargs = dict(auth_type=CMRESHandler.AuthType.NO_AUTH)
    self._handler = CMRESHandler(hosts=[{'host': self.__host,
                                         'port': self.__port}],
                                 es_index_name=self.__index,
                                 use_ssl=True,
                                 verify_ssl=True,
                                 buffer_size=self.__bufferSize,
                                 flush_frequency_in_sec=self.__flushTime,
                                 **authKwargs)
    # Format contains only asctime so that field appears in elasticsearch:
    # it is not present when a LogRecord is created, it is built during the
    # format step.
    self._handler.setFormatter(logging.Formatter('%(asctime)s'))

  def setLevel(self, level):
    """
    No possibility to set the level of the ElasticSearch handler.
    It is left unset so log records of every level reach ElasticSearch.

    :params level: ignored; kept for interface compatibility
    """
    pass

  def setFormat(self, fmt, datefmt, options):
    """
    Each backend gives a format to its formatter and attaches it to its
    handler; nothing to do for the ElasticSearch handler.

    :params fmt: string representing the log format
    :params datefmt: string representing the date format
    :params options: dictionary of logging options. ex: {'Color': True}
    """
    pass