Esempio n. 1
0
    def test_es_log_extra_argument_insertion(self):
        """Both `extra` kwargs and es_additional_fields must land in the buffer."""
        self.log.info("About to test elasticsearch insertion")
        es_hosts = [{'host': self.getESHost(), 'port': self.getESPort()}]
        handler = CMRESHandler(hosts=es_hosts,
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               use_ssl=False,
                               es_index_name="pythontest",
                               es_additional_fields={'App': 'Test',
                                                     'Environment': 'Dev'},
                               raise_on_indexing_exceptions=True)

        es_test_server_is_up = handler.test_es_source()
        self.log.info("ES services status is:  {0!s}".format(es_test_server_is_up))
        self.assertEqual(True, es_test_server_is_up)

        log = logging.getLogger("PythonTest")
        log.addHandler(handler)
        log.warning("Extra arguments Message", extra={"Arg1": 300, "Arg2": 400})

        # A single buffered record should carry both the per-call extras
        # and the handler-level static fields.
        self.assertEqual(1, len(handler._buffer))
        record = handler._buffer[0]
        self.assertEqual(record['Arg1'], 300)
        self.assertEqual(record['Arg2'], 400)
        self.assertEqual(record['App'], 'Test')
        self.assertEqual(record['Environment'], 'Dev')
        handler.flush()
        self.assertEqual(0, len(handler._buffer))
Esempio n. 2
0
  def createHandler(self, parameters=None):
    """
    Each backend can initialize its attributes and create its handler with them.

    :params parameters: dictionary of parameters. ex: {'FileName': file.log}
    """
    if parameters is not None:
      # Override the stored defaults with whatever the caller supplied.
      self.__host = parameters.get('Host', self.__host)
      self.__user = parameters.get('User', self.__user)
      self.__passwd = parameters.get('Password', self.__passwd)
      self.__port = int(parameters.get('Port', self.__port))
      self.__index = parameters.get('Index', self.__index)
      self.__bufferSize = int(parameters.get('BufferSize', self.__bufferSize))
      self.__flushTime = int(parameters.get('FlushTime', self.__flushTime))

    # Settings shared by both the authenticated and anonymous variants.
    commonArgs = dict(hosts=[{'host': self.__host, 'port': self.__port}],
                      es_index_name=self.__index,
                      use_ssl=True,
                      verify_ssl=True,
                      buffer_size=self.__bufferSize,
                      flush_frequency_in_sec=self.__flushTime)
    if self.__user is not None and self.__passwd is not None:
      # Basic auth only when both credentials are present.
      self._handler = CMRESHandler(auth_type=CMRESHandler.AuthType.BASIC_AUTH,
                                   auth_details=(self.__user, self.__passwd),
                                   **commonArgs)
    else:
      self._handler = CMRESHandler(auth_type=CMRESHandler.AuthType.NO_AUTH,
                                   **commonArgs)
    # We give a format containing only asctime to add the field in elasticsearch
    # asctime is not created at the initialization of the LogRecords but built in the format process
    self._handler.setFormatter(logging.Formatter('%(asctime)s'))
Esempio n. 3
0
    def test_buffered_log_insertion_flushed_when_buffer_full(self):
        """Emitting buffer_size records (2) must trigger an automatic flush."""
        handler = CMRESHandler(hosts=[{'host': self.getESHost(),
                                       'port': self.getESPort()}],
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               use_ssl=False,
                               buffer_size=2,
                               flush_frequency_in_sec=1000,
                               es_index_name="pythontest",
                               es_additional_fields={'App': 'Test',
                                                     'Environment': 'Dev'},
                               raise_on_indexing_exceptions=True)

        server_up = handler.test_es_source()
        self.log.info("ES services status is:  {0!s}".format(server_up))
        self.assertEqual(True, server_up)

        log = logging.getLogger("PythonTest")
        log.setLevel(logging.DEBUG)
        log.addHandler(handler)
        log.warning("First Message")
        log.info("Seccond Message")
        # Buffer is empty because hitting buffer_size forced a flush
        # (the 1000 s flush interval cannot have elapsed).
        self.assertEqual(0, len(handler._buffer))
        handler.close()
    def logData(self, data):
        """Ship one quota document to Elasticsearch via a short-lived handler."""
        # NOTE(review): es_user appears to hold the literal string "null" when
        # auth is disabled -- confirm this against the config loader.
        common = dict(
            hosts=[{"host": str(self.es_host), "port": int(self.es_port)}],
            es_index_name="dockeree",
            es_additional_fields=data,
        )
        if self.es_user != "null":
            self.handler = CMRESHandler(
                auth_type=CMRESHandler.AuthType.BASIC_AUTH,
                use_ssl=True,
                verify_ssl=False,
                auth_details=(self.es_user, self.es_pwd),
                **common)
        else:
            self.handler = CMRESHandler(
                auth_type=CMRESHandler.AuthType.NO_AUTH,
                **common)

        # Attach the handler only long enough to emit this one record.
        self.eslog.addHandler(self.handler)
        self.eslog.debug(
            "[{}] Logging namespace {} quota to {}:{}".format(
                time.time(), data["metadata"]["namespace"], self.es_host, self.es_port
            )
        )
        self.eslog.removeHandler(self.handler)
 def test_ping(self):
     """Check that the handler can reach the local Elasticsearch server.

     Fix: use ``assertEqual`` -- the ``assertEquals`` alias is deprecated
     and was removed in Python 3.12.
     """
     handler = CMRESHandler(hosts=[{'host': 'localhost', 'port': 9200}],
                            auth_type=CMRESHandler.AuthType.NO_AUTH,
                            es_index_name="pythontest",
                            use_ssl=False)
     es_test_server_is_up = handler.test_es_source()
     self.assertEqual(True, es_test_server_is_up)
 def test_ping(self):
     """Smoke-test connectivity to the configured Elasticsearch server."""
     es_host = {'host': self.getESHost(), 'port': self.getESPort()}
     handler = CMRESHandler(hosts=[es_host],
                            auth_type=CMRESHandler.AuthType.NO_AUTH,
                            es_index_name="pythontest",
                            use_ssl=False,
                            raise_on_indexing_exceptions=True)
     self.assertEqual(True, handler.test_es_source())
 def test_ping(self):
     """The handler should report the ES test server as reachable."""
     handler = CMRESHandler(
         hosts=[{'host': self.getESHost(), 'port': self.getESPort()}],
         auth_type=CMRESHandler.AuthType.NO_AUTH,
         es_index_name="pythontest",
         use_ssl=False,
         raise_on_indexing_exceptions=True)
     server_up = handler.test_es_source()
     self.assertEqual(True, server_up)
    def test_index_name_frequency_functions(self):
        """Each IndexNameFrequency value must select its matching name builder."""
        index_name = "pythontest"
        cases = [
            (CMRESHandler.IndexNameFrequency.DAILY,
             CMRESHandler._get_daily_index_name),
            (CMRESHandler.IndexNameFrequency.WEEKLY,
             CMRESHandler._get_weekly_index_name),
            (CMRESHandler.IndexNameFrequency.MONTHLY,
             CMRESHandler._get_monthly_index_name),
            (CMRESHandler.IndexNameFrequency.YEARLY,
             CMRESHandler._get_yearly_index_name),
            (CMRESHandler.IndexNameFrequency.DISABLED,
             CMRESHandler._get_disabled_index_name),
        ]
        for frequency, expected_builder in cases:
            handler = CMRESHandler(hosts=[{'host': self.getESHost(), 'port': self.getESPort()}],
                                   auth_type=CMRESHandler.AuthType.NO_AUTH,
                                   es_index_name=index_name,
                                   use_ssl=False,
                                   index_name_frequency=frequency,
                                   raise_on_indexing_exceptions=True)
            self.assertEqual(
                handler._index_name_func.__func__(index_name),
                expected_builder(index_name)
            )
    def _initialize_logger(self):
        """Build the CMRES Elasticsearch handler/logger pair and seed per-run Ansible metadata."""
        # initialize handler
        self.handler = CMRESHandler(
            hosts=[{
                'host': self._get_option('host'),
                'port': self._get_option('port')
            }],
            auth_details=(self._get_option('username'),
                          self._get_option('password')),
            auth_type=CMRESHandler.AuthType(self._get_option('auth_type')),
            use_ssl=self._get_option('use_ssl'),
            verify_ssl=self._get_option('verify_ssl'),
            es_index_name=self._get_option('es_index_name'),
            es_additional_fields={
                'app': self._get_option('app'),
                'env': self._get_option('env')
            },
            raise_on_indexing_exceptions=self._get_option(
                'raise_on_indexing_exceptions'))

        # check if elastic server is up
        self._es_server_is_up = self.handler.test_es_source()

        # initialize logger
        self._logger = logging.getLogger(self._get_option('es_logger_name'))
        # FIXME self._logger.setLevel(self._get_option('es_logger_level'))
        self._logger.setLevel(logging.DEBUG)

        self._logger.addHandler(self.handler)

        # Unique id for this run plus the local Ansible version banner
        # (first line of `ansible --version`, shelled out via os.popen).
        self.ansible_session = str(uuid.uuid1())
        self.ansible_version = os.popen("ansible --version | head -1").read()

        self.errors = 0
        # Fields attached to every document indexed during this session.
        self.base_data = {
            'ansible_global_session': self.ansible_session,
            'ansible_global_version': self.ansible_version
        }

        # Capture CLI options when available.
        # NOTE(review): self.options is initialized here but self._options is
        # what gets populated below -- confirm which attribute callers read.
        self.options = {}
        if cli:
            self._options = cli.options
            self.base_data['ansible_global_checkmode'] = self._options.check
            self.base_data['ansible_global_tags'] = self._options.tags
            self.base_data[
                'ansible_global_skip_tags'] = self._options.skip_tags
            self.base_data[
                'ansible_global_inventory'] = self._options.inventory
Esempio n. 10
0
class Logger:
    """Process-wide logger wrapper: colored console output plus an Elasticsearch sink."""
    # Guard so handlers are attached to the shared logger only once,
    # on the first Logger() construction.
    __is_initialized = False

    class __CustomFormatter(logging.Formatter):
        """Logging Formatter to add colors and count warning / errors"""

        # ANSI escape sequences used to colorize console output per level.
        grey = "\x1b[38;21m"
        yellow = "\x1b[33;21m"
        red = "\x1b[31;21m"
        bold_red = "\x1b[31;1m"
        reset = "\x1b[0m"
        # NOTE: this string attribute is shadowed by the format() method below;
        # FORMATS captures its value at class-body evaluation time, so it works.
        format = "%(asctime)s %(name)s [%(levelname)s] %(message)s"

        FORMATS = {
            logging.DEBUG: grey + format + reset,
            logging.INFO: grey + format + reset,
            logging.WARNING: yellow + format + reset,
            logging.ERROR: red + format + reset,
            logging.CRITICAL: bold_red + format + reset
        }

        def format(self, record):
            # Pick the level-specific colored format string and delegate to a
            # fresh stdlib Formatter built per record.
            log_fmt = self.FORMATS.get(record.levelno)
            formatter = logging.Formatter(log_fmt)
            return formatter.format(record)

    def __init__(self):
        # All instances share the same underlying "BackupCreator" logger.
        self.__root = logging.getLogger("BackupCreator")
        if Logger.__is_initialized is False:
            Logger.__is_initialized = True
            self.__root.setLevel(logging.DEBUG)

            # Console handler with colored output.
            self.__handler = logging.StreamHandler(sys.stdout)
            self.__handler.setLevel(logging.DEBUG)
            self.__handler.setFormatter(self.__CustomFormatter())
            self.__root.addHandler(self.__handler)

            # Elasticsearch handler on a local node.
            # NOTE(review): "{0:yyyy.MM.dd}" is not Python date formatting --
            # the index name is used verbatim; confirm the intended pattern.
            self.__handler2 = CMRESHandler(
                hosts=[{
                    'host': 'localhost',
                    'port': 9200
                }],
                auth_type=CMRESHandler.AuthType.NO_AUTH,
                es_index_name="logstash_mailsender-{0:yyyy.MM.dd}")
            self.__handler2.setLevel(logging.DEBUG)
            self.__root.addHandler(self.__handler2)

    def get_logger(self):
        # Accessor for the configured shared logger.
        return self.__root
 def test_fast_insertion_of_hundred_logs(self):
     """Burst 100 records through the handler; flush() must drain the buffer."""
     handler = CMRESHandler(hosts=[{'host': self.getESHost(), 'port': self.getESPort()}],
                            auth_type=CMRESHandler.AuthType.NO_AUTH,
                            use_ssl=False,
                            buffer_size=500,
                            flush_frequency_in_sec=0.5,
                            es_index_name="pythontest",
                            raise_on_indexing_exceptions=True)
     log = logging.getLogger("PythonTest")
     log.setLevel(logging.DEBUG)
     log.addHandler(handler)
     for line_num in range(100):
         log.info("Logging line {0:d}".format(line_num), extra={'LineNum': line_num})
     handler.flush()
     self.assertEqual(0, len(handler._buffer))
 def test_fast_insertion_of_hundred_logs(self):
     """Rapid insertion of many records should leave an empty buffer after flush()."""
     es_hosts = [{'host': self.getESHost(), 'port': self.getESPort()}]
     handler = CMRESHandler(hosts=es_hosts,
                            auth_type=CMRESHandler.AuthType.NO_AUTH,
                            use_ssl=False,
                            buffer_size=500,
                            flush_frequency_in_sec=0.5,
                            es_index_name="pythontest",
                            raise_on_indexing_exceptions=True)
     log = logging.getLogger("PythonTest")
     log.setLevel(logging.DEBUG)
     log.addHandler(handler)
     index = 0
     while index < 100:
         log.info("Logging line {0:d}".format(index), extra={'LineNum': index})
         index += 1
     handler.flush()
     self.assertEqual(0, len(handler._buffer))
Esempio n. 13
0
class ESLoggerConfig(BaseConfig):
    """Config class that conditionally wires up an Elasticsearch log handler at import time."""
    # NOTE(review): bool() on any non-empty string is True, so e.g.
    # USE_ES_LOGS="false" would still enable ES logging -- confirm how
    # get_configuration_value returns this flag.
    USE_ELASTIC_SEARCH = bool(get_configuration_value('USE_ES_LOGS', False))
    ES_HANDLER = None
    if USE_ELASTIC_SEARCH:
        from cmreslogging.handlers import CMRESHandler
        es_host = get_configuration_value('ES_HOST', 'localhost')
        es_port = int(get_configuration_value('ES_PORT', 9200))
        es_user = get_configuration_value('ES_USER', None)
        es_password = get_configuration_value('ES_PASSWORD', None)
        es_use_ssl = bool(int(get_configuration_value('ES_USE_SSL', 0)))
        es_verify_ssl = get_configuration_value('ES_VERIFY_SSL', False)
        es_index_name = get_configuration_value('ES_INDEX_NAME',
                                                'tesla_admin_index')
        # Anonymous access unless both credentials are configured.
        es_auth_type = CMRESHandler.AuthType.NO_AUTH
        if es_user is not None and es_password is not None:
            es_auth_type = CMRESHandler.AuthType.BASIC_AUTH

        es_handler = CMRESHandler(hosts=[{
            'host': es_host,
            'port': es_port
        }],
                                  auth_type=es_auth_type,
                                  auth_details=(es_user, es_password),
                                  use_ssl=es_use_ssl,
                                  verify_ssl=es_verify_ssl,
                                  es_index_name=es_index_name,
                                  flush_frequency_in_sec=15,
                                  es_additional_fields={
                                      'Module': BaseConfig.MODULE_NAME,
                                      'Version': BaseConfig.MODULE_VERSION,
                                      'Environment': BaseConfig.CONFIG_NAME
                                  })
        # Only publish the handler if the ES server is reachable; note this
        # network probe runs when the class body is executed (module import).
        if es_handler.test_es_source():
            es_handler.setLevel(logging.INFO)
            ES_HANDLER = es_handler
Esempio n. 14
0
    def add_elasticsearch_handler(self):
        """Attach a CMRES handler built from HANDLERS_ELASTICSEARCH_* env vars."""
        if not ELASTICSEARCH_INSTALLED:
            self.logger.error(
                'It seems that the elasticsearch handler is not installed. '
                'You can install it by running `pip install '
                'wryte[elasticsearch]`')
            return

        es_hosts = self._env('HANDLERS_ELASTICSEARCH_HOST')
        if not es_hosts:
            self.logger.warning('Elasticsearch handler host not set')
            return

        name = self._env('HANDLERS_ELASTICSEARCH_NAME',
                         default='elasticsearch')
        level = self._env('HANDLERS_ELASTICSEARCH_LEVEL', default='info')
        formatter = self._env('HANDLER_ELASTICSEARCH_FORMATTER',
                              default='json')

        # "host:port,host:port" -> [{'host': ..., 'port': ...}, ...]
        # NOTE(review): port stays a string here -- confirm the client
        # accepts string ports.
        hosts = [
            {'host': host, 'port': port}
            for host, port in (entry.split(':', 1)
                               for entry in es_hosts.split(','))
        ]

        handler = CMRESHandler(hosts=hosts,
                               auth_type=CMRESHandler.AuthType.NO_AUTH)

        self.add_handler(handler=handler,
                         name=name,
                         formatter=formatter,
                         level=level)
Esempio n. 15
0
def create_app():
    """Build the Flask app; outside debug mode also ship logs to Elasticsearch."""
    from project.models import db
    from project.views import views_bp as views_blueprint

    app = Flask(__name__)
    app.config.from_object(config[ENVIRONMENT])

    if not app.config["DEBUG"]:
        # Production-style runs mirror app logs into Elasticsearch.
        es_handler = CMRESHandler(
            hosts=[{'host': '192.168.99.100', 'port': 9200}],
            auth_type=CMRESHandler.AuthType.NO_AUTH,
            es_index_name="my_python_index",
            es_additional_fields={'App': app.config["APP_NAME"],
                                  'Environment': ENVIRONMENT})
        app.logger.addHandler(es_handler)

    db.init_app(app)
    bcrypt.init_app(app)
    jwt = JWT(app, authenticate, identity)
    swagger = Swagger(app, config=swagger_config)

    app.register_blueprint(views_blueprint)
    return app, db
Esempio n. 16
0
def configure_logger(app):
    """Attach rotating-file and Elasticsearch handlers to the Flask app logger."""
    logs_folder = app.config.get('LOG_PATH') or os.getcwd()
    logs_folder = os.path.join(logs_folder, 'ecm_logs')
    if not os.path.isdir(logs_folder):
        os.makedirs(logs_folder)

    handler = RotatingFileHandler(os.path.join(logs_folder, 'ecm.log'),
                                  maxBytes=1 * 1024 * 1024,
                                  backupCount=10,
                                  encoding='UTF-8')

    # Outside debug mode, capture INFO and above (the default level is NOTSET).
    if not app.config.get('DEBUG'):
        handler.setLevel(logging.INFO)
        app.logger.setLevel(logging.INFO)

    handler.setFormatter(logging.Formatter(
        '[%(asctime)s] [%(levelname)s] [%(pathname)s:%(lineno)d] [%(message)s]'
    ))
    app.logger.addHandler(handler)

    es_handler = CMRESHandler(hosts=[{'host': 'localhost', 'port': 9200}],
                              auth_type=CMRESHandler.AuthType.NO_AUTH,
                              es_index_name="central_management_log")
    # NOTE(review): this unconditionally forces INFO, overriding the DEBUG
    # branch above -- confirm that is intended.
    app.logger.setLevel(logging.INFO)
    app.logger.addHandler(es_handler)
    return
    def test_index_name_frequency_functions(self):
        """Each rotation frequency must map to its dedicated index-name builder."""
        index_name = "pythontest"
        frequency_to_builder = {
            CMRESHandler.IndexNameFrequency.DAILY: CMRESHandler._get_daily_index_name,
            CMRESHandler.IndexNameFrequency.WEEKLY: CMRESHandler._get_weekly_index_name,
            CMRESHandler.IndexNameFrequency.MONTHLY: CMRESHandler._get_monthly_index_name,
            CMRESHandler.IndexNameFrequency.YEARLY: CMRESHandler._get_yearly_index_name,
        }
        for frequency, builder in frequency_to_builder.items():
            handler = CMRESHandler(hosts=[{'host': self.getESHost(), 'port': self.getESPort()}],
                                   auth_type=CMRESHandler.AuthType.NO_AUTH,
                                   es_index_name=index_name,
                                   use_ssl=False,
                                   index_name_frequency=frequency,
                                   raise_on_indexing_exceptions=True)
            self.assertEqual(handler._index_name_func.__func__(index_name),
                             builder(index_name))
Esempio n. 18
0
    def createHandler(self, parameters=None):
        """
    Each backend can initialize its attributes and create its handler with them.

    :params parameters: dictionary of parameters. ex: {'FileName': file.log}
    """
        if parameters is not None:
            # Caller-supplied values win over the stored defaults.
            self.__host = parameters.get('Host', self.__host)
            self.__user = parameters.get('User', self.__user)
            self.__passwd = parameters.get('Password', self.__passwd)
            self.__port = int(parameters.get('Port', self.__port))
            self.__index = parameters.get('Index', self.__index)
            self.__bufferSize = int(
                parameters.get('BufferSize', self.__bufferSize))
            self.__flushTime = int(
                parameters.get('FlushTime', self.__flushTime))

        # Settings common to both the authenticated and anonymous handlers.
        common = dict(
            hosts=[{'host': self.__host, 'port': self.__port}],
            es_index_name=self.__index,
            use_ssl=True,
            verify_ssl=True,
            buffer_size=self.__bufferSize,
            flush_frequency_in_sec=self.__flushTime)
        if self.__user is not None and self.__passwd is not None:
            self._handler = CMRESHandler(
                auth_type=CMRESHandler.AuthType.BASIC_AUTH,
                auth_details=(self.__user, self.__passwd),
                **common)
        else:
            self._handler = CMRESHandler(
                auth_type=CMRESHandler.AuthType.NO_AUTH,
                **common)
        # asctime only comes into existence during formatting, so setting a
        # formatter that emits it makes CMRESHandler ship it to Elasticsearch.
        self._handler.setFormatter(logging.Formatter('%(asctime)s'))
Esempio n. 19
0
def get_db_logger(
        host='search-parsl-logging-test-2yjkk2wuoxukk2wdpiicl7mcrm.us-east-1.es.amazonaws.com',
        port=443,
        enable_es_logging=False,
        index_name="my_python_index",
        version='1.0.0',
        **kwargs):
    """Return a logger, optionally wired to an Elasticsearch backend.

    Parameters
    ----------
    host : str, optional
        URL to the elasticsearch cluster. Skip the http(s)://
    port : int, optional
        Port to use to access the elasticsearch cluster
    enable_es_logging : Bool, optional
        Set to True to enable logging to elasticsearch
    index_name : str, optional
        Index name to use for elasticsearch

    Returns
    -------
    logging.logger object

    Raises
    ------
    OptionalModuleMissing

    """
    logger = logging.getLogger(__file__)

    if not enable_es_logging:
        # ES disabled: return a no-op logger for this module.
        logger.addHandler(NullHandler())
        return logger

    if not _es_logging_enabled:
        raise OptionalModuleMissing(
            ['CMRESHandler'],
            "Logging to ElasticSearch requires the cmreslogging module")

    es_handler = CMRESHandler(
        hosts=[{'host': host, 'port': port}],
        use_ssl=True,
        auth_type=CMRESHandler.AuthType.NO_AUTH,
        es_index_name=index_name,
        es_additional_fields={
            'Campaign': "test",
            # use the name of the user's home directory as their username since there
            # does not seem to be a portable way to do this
            'Version': version,
            'Username': getpass.getuser()
        })
    logger = logging.getLogger("ParslElasticsearch")
    logger.setLevel(logging.INFO)
    logger.addHandler(es_handler)
    return logger
Esempio n. 20
0
def enrich_es_handler():
    """Build a CMRESHandler for the enrich logs targeting the configured ES node.

    Fix: the hosts entry used the key ``'host_ip'``; the Elasticsearch client
    expects ``'host'``, so the configured address was being ignored and the
    client fell back to its default host.
    """
    return CMRESHandler(hosts=[{
        'host': ES_HOST,
        'port': ES_PORT
    }],
                        auth_type=CMRESHandler.AuthType.NO_AUTH,
                        es_index_name=ES_LOG_INDEX,
                        es_additional_fields={
                            'App': APP,
                            'Environment': ENV
                        })
Esempio n. 21
0
    def __init__(self):
        """Configure console + Elasticsearch logging exactly once per process."""
        self.__root = logging.getLogger("BackupCreator")
        if Logger.__is_initialized is not False:
            return
        Logger.__is_initialized = True
        self.__root.setLevel(logging.DEBUG)

        # Colored console output.
        self.__handler = logging.StreamHandler(sys.stdout)
        self.__handler.setLevel(logging.DEBUG)
        self.__handler.setFormatter(self.__CustomFormatter())
        self.__root.addHandler(self.__handler)

        # Local Elasticsearch sink.
        self.__handler2 = CMRESHandler(
            hosts=[{'host': 'localhost', 'port': 9200}],
            auth_type=CMRESHandler.AuthType.NO_AUTH,
            es_index_name="logstash_mailsender-{0:yyyy.MM.dd}")
        self.__handler2.setLevel(logging.DEBUG)
        self.__root.addHandler(self.__handler2)
    def test_buffered_log_insertion_flushed_when_buffer_full(self):
        """Filling the buffer (buffer_size=2) must trigger an automatic flush.

        Fix: use ``assertEqual`` -- the ``assertEquals`` alias is deprecated
        and was removed in Python 3.12.
        """
        handler = CMRESHandler(hosts=[{'host': 'localhost', 'port': 9200}],
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               use_ssl=False,
                               buffer_size=2,
                               flush_frequency_in_sec=1000,
                               es_index_name="pythontest",
                               es_additional_fields={'App': 'Test', 'Environment': 'Dev'})

        es_test_server_is_up = handler.test_es_source()
        self.log.info("ES services status is:  {0!s}".format(es_test_server_is_up))
        self.assertEqual(True, es_test_server_is_up)

        log = logging.getLogger("PythonTest")
        log.setLevel(logging.DEBUG)
        log.addHandler(handler)
        log.warning("First Message")
        log.info("Seccond Message")
        # Buffer must be empty: the second record hit buffer_size and flushed.
        self.assertEqual(0, len(handler._buffer))
        handler.close()
    def test_es_log_extra_argument_insertion(self):
        """`extra` kwargs and es_additional_fields must both appear in the buffer.

        Fix: use ``assertEqual`` -- the ``assertEquals`` alias is deprecated
        and was removed in Python 3.12.
        """
        self.log.info("About to test elasticsearch insertion")
        handler = CMRESHandler(hosts=[{'host': 'localhost', 'port': 9200}],
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               use_ssl=False,
                               es_index_name="pythontest",
                               es_additional_fields={'App': 'Test', 'Environment': 'Dev'})

        es_test_server_is_up = handler.test_es_source()
        self.log.info("ES services status is:  {0!s}".format(es_test_server_is_up))
        self.assertEqual(True, es_test_server_is_up)

        log = logging.getLogger("PythonTest")
        log.addHandler(handler)
        log.warning("Extra arguments Message", extra={"Arg1": 300, "Arg2": 400})
        self.assertEqual(1, len(handler._buffer))
        self.assertEqual(handler._buffer[0]['Arg1'], 300)
        self.assertEqual(handler._buffer[0]['Arg2'], 400)
        self.assertEqual(handler._buffer[0]['App'], 'Test')
        self.assertEqual(handler._buffer[0]['Environment'], 'Dev')
        handler.flush()
        self.assertEqual(0, len(handler._buffer))
    def test_buffered_log_insertion_after_interval_expired(self):
        """The buffer must drain on its own once flush_frequency_in_sec elapses.

        Fix: use ``assertEqual`` -- the ``assertEquals`` alias is deprecated
        and was removed in Python 3.12.
        """
        handler = CMRESHandler(hosts=[{'host': 'localhost', 'port': 9200}],
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               use_ssl=False,
                               flush_frequency_in_sec=0.1,
                               es_index_name="pythontest",
                               es_additional_fields={'App': 'Test', 'Environment': 'Dev'})

        es_test_server_is_up = handler.test_es_source()
        self.log.info("ES services status is:  {0!s}".format(es_test_server_is_up))
        self.assertEqual(True, es_test_server_is_up)

        log = logging.getLogger("PythonTest")
        log.addHandler(handler)
        log.warning("Extra arguments Message", extra={"Arg1": 300, "Arg2": 400})
        self.assertEqual(1, len(handler._buffer))
        self.assertEqual(handler._buffer[0]['Arg1'], 300)
        self.assertEqual(handler._buffer[0]['Arg2'], 400)
        self.assertEqual(handler._buffer[0]['App'], 'Test')
        self.assertEqual(handler._buffer[0]['Environment'], 'Dev')
        # Wait well past the 0.1 s flush interval for the timer flush.
        time.sleep(1)
        self.assertEqual(0, len(handler._buffer))
Esempio n. 25
0
 def __init__(self):
     """Create the ES log handler, the ES client, and install the index template."""
     es_config = G_CONFIG.config['elasticsearch']
     es_hosts = [{'host': es_config['host'], 'port': es_config['port']}]
     self.handler = CMRESHandler(
         hosts=es_hosts,
         auth_type=CMRESHandler.AuthType.NO_AUTH,
         es_index_name=self.ES_INDEX_NAME,
         index_name_frequency=CMRESHandler.IndexNameFrequency.MONTHLY)
     self.es = Elasticsearch(hosts=es_hosts)
     logger = logging.getLogger(self.LOGGER_NAME)
     logger.setLevel(logging.INFO)
     logger.addHandler(self.handler)
     self.log = logger
     # Register the index template so monthly indices get a known mapping.
     self.es.indices.put_template(name='qlogger_template',
                                  body=self._index_template())
Esempio n. 26
0
def elasticsearch():
    """Attach a CMRESHandler (index ``minc_index``) to the root logger."""
    import logging
    from cmreslogging.handlers import CMRESHandler

    es_handler = CMRESHandler(
        hosts=[{"host": "localhost", "port": 9200}],
        auth_type=CMRESHandler.AuthType.NO_AUTH,
        es_index_name="minc_index",
    )
    root = logging.getLogger()
    root.setLevel(logging.INFO)
    root.addHandler(es_handler)
Esempio n. 27
0
def get_logger():
    """Return the module logger, optionally shipping records to Elasticsearch.

    The Elasticsearch handler is attached only when ELASTIC_HOST,
    ELASTIC_USER, ELASTIC_PASS and SEND_LOGS_TO_ELASTIC are all set.
    """
    # Enable logging
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',
                        level=logging.INFO)
    logger = logging.getLogger(__name__)
    required = ('ELASTIC_HOST', 'ELASTIC_USER', 'ELASTIC_PASS',
                'SEND_LOGS_TO_ELASTIC')
    if all(os.getenv(name) for name in required):
        es_handler = CMRESHandler(
            hosts=[{'host': os.getenv('ELASTIC_HOST'), 'port': 9200}],
            auth_type=CMRESHandler.AuthType.BASIC_AUTH,
            auth_details=(os.getenv('ELASTIC_USER'),
                          os.getenv('ELASTIC_PASS')),
            es_index_name="munich-tg-logs",
            index_name_frequency=CMRESHandler.IndexNameFrequency.MONTHLY)
        logger.addHandler(es_handler)
    return logger
Esempio n. 28
0
def main(args=None):
    """Entry point: sync the system clock via NTP, read the node config,
    and start the BLE scanner reporting to Elasticsearch/Kibana.

    :param args: optional argument list; defaults to sys.argv[1:].
    """
    ####################
    # Synchronize time #
    ####################
    try:
        import ntplib
        client = ntplib.NTPClient()
        response = client.request('pool.ntp.org')
        # Set the system date from the NTP transmit timestamp
        # (shells out to `date MMDDhhmmYYYY.SS`; needs root privileges).
        os.system(
            'date ' +
            time.strftime('%m%d%H%M%Y.%S', time.localtime(response.tx_time)))
    except:
        # NOTE(review): bare except keeps the node booting without network
        # or ntplib installed, but it also hides unrelated failures.
        print('Could not sync with time server.')
    """The main routine."""
    if args is None:
        args = sys.argv[1:]
    # NOTE: Python 2 print statement -- this snippet targets Python 2.
    print "version=" + version

    ################
    # Parse config #
    ################

    config = ConfigParser.RawConfigParser()
    config.file = '/etc/ble_positioning_node/config.conf'
    config.read(config.file)

    ###############################
    # We want reporting to kibana #
    ###############################

    # Elasticsearch endpoint taken from the [Communication] config section.
    LOG_CONN_POINTS = [{
        'host': config.get('Communication', 'elastic'),
        'port': config.getint('Communication', 'elastic_port')
    }]

    handler = CMRESHandler(hosts=LOG_CONN_POINTS,
                           auth_type=CMRESHandler.AuthType.NO_AUTH,
                           use_ssl=True,
                           es_index_name="beacon-scanner")

    # Hand the ES handler and config to the scanner; blocks in scan.main().
    scan = BLEScanner(handler, config)
    scan.main()
Esempio n. 29
0
def init_logging(application):
    """Attach an Elasticsearch log handler to the Flask app, when configured."""
    if not application.config['ELASTIC_HOST']:
        return

    # The active config file name doubles as the environment label.
    env_name = os.environ.get('APP_CONFIG_FILE', 'development')

    es_handler = CMRESHandler(
        hosts=[{'host': application.config["ELASTIC_HOST"], 'port': 9200}],
        auth_type=CMRESHandler.AuthType.NO_AUTH,
        es_index_name="logs-nisse",
        es_additional_fields={'environment': env_name})
    application.logger.addHandler(es_handler)


#    application.logger.setLevel(logging.DEBUG)
Esempio n. 30
0
def create_app():
    """Application factory: build the Flask app, its DB and Swagger docs.

    Returns:
        tuple: ``(app, db)`` -- the configured Flask application and the
        SQLAlchemy database object, with all tables created.
    """
    from project.models import db
    from project.views import views_bp as views_blueprint
    environment = os.environ.get("ENVIRONMENT", "default")

    app = Flask(__name__)
    app.config.from_object(CONFIG[environment])
    app.wsgi_app = PrefixMiddleware(app.wsgi_app,
                                    prefix=app.config["APPLICATION_ROOT"])

    db.init_app(app)

    if not app.config["DEBUG"]:
        # Ship application logs to Elasticsearch outside of debug runs.
        handler = CMRESHandler(hosts=[{
            'host': '192.168.99.100',
            'port': 9200
        }],
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               es_index_name="my_python_index",
                               es_additional_fields={
                                   'App': app.config["APP_NAME"],
                                   # Bug fix: 'ENVIRONMENT' was an undefined
                                   # name (NameError at runtime); use the
                                   # local variable read from the env above.
                                   'Environment': environment
                               })
        app.logger.addHandler(handler)

    # Substitute the application root into every Swagger route template.
    application_root = app.config["APPLICATION_ROOT"]
    SWAGGER_CONFIG["specs"][0]["route"] = SWAGGER_CONFIG["specs"][0][
        "route"].format(application_root=application_root)
    for key in ("static_url_path", "specs_route", "basePath"):
        SWAGGER_CONFIG[key] = SWAGGER_CONFIG[key].format(
            application_root=application_root)
    Swagger(app, config=SWAGGER_CONFIG)

    app.register_blueprint(views_blueprint)
    with app.test_request_context():
        db.create_all()
    return app, db
Esempio n. 31
0
    def test_new_index_has_messages(self):
        """End-to-end check: log two records through CMRESHandler into a
        fresh index and verify Elasticsearch reports both documents.
        """
        index_name = "pythontest-index-messages"
        handler = CMRESHandler(hosts=[{
            'host': self.getESHost(),
            'port': self.getESPort()
        }],
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               use_ssl=False,
                               es_index_name=index_name,
                               es_additional_fields={
                                   'App': 'Test',
                                   'Environment': 'Dev'
                               },
                               raise_on_indexing_exceptions=True)

        message = "Specific message %s"
        es_test_server_is_up = handler.test_es_source()
        # Resolve the dated index name the handler will actually write to.
        index = handler._index_name_func.__func__(index_name)
        client = handler._client
        # Start from a clean slate; ignore 404 when the index does not exist.
        # NOTE(review): positional index/body arguments suggest an older
        # elasticsearch-py client -- confirm against the pinned version.
        client.indices.delete(index, ignore=404)
        self.log.info(
            "ES services status is:  {0!s}".format(es_test_server_is_up))
        self.assertEqual(True, es_test_server_is_up)
        log = logging.getLogger("PythonTest")
        log.setLevel(logging.DEBUG)
        log.addHandler(handler)
        # Two records with the same template but different %-args.
        log.info(message, 1)
        log.info(message, 'str')
        handler.flush()
        self.assertEqual(0, len(handler._buffer))
        # Wait a little while elasticsearch creating mapping and indexing messages
        time.sleep(1)
        # Both documents must be findable by matching the raw template text.
        self.assertEqual(
            2,
            client.search(index,
                          body={"query": {
                              "match": {
                                  'message': message
                              }
                          }})['hits']['total'])
        handler.close()
Esempio n. 32
0
def send_log(message, level='info'):
    """Send *message* to Elasticsearch through a CMRESHandler.

    Any *level* other than ``'info'`` is logged as an error.
    """
    from cmreslogging.handlers import CMRESHandler
    es_handler = CMRESHandler(hosts=[{'host': 'es01', 'port': 9200}],
                              auth_type=CMRESHandler.AuthType.NO_AUTH,
                              es_index_name="my_python_index")
    log = logging.getLogger("PythonTest")
    log.setLevel(logging.INFO)
    log.addHandler(es_handler)
    # Pick the emit function once, then log the stringified payload.
    emit = log.info if level == 'info' else log.error
    emit(str(message))


#log.debug()
#log.info()
#log.warning()
#log.error()
#log.critical()
#send_log('123456789')
Esempio n. 33
0
 def getLogger(self):
     """Build the "BackupMeUp" logger with syslog, console and ES handlers.

     Returns:
         logging.Logger: logger emitting to Sematext syslog, stdout and
         the Elasticsearch index "i_bmu".
     """
     # Remote syslog handler (Sematext Logsene receiver).
     handler = logging.handlers.SysLogHandler(
         address=('logsene-receiver-syslog.sematext.com', 514))
     # Elasticsearch handler; credentials come from the environment.
     handler2 = CMRESHandler(hosts=[{
         'host': os.getenv('BMU_ES_HOST'),
         'port': 9200
     }],
                             auth_type=CMRESHandler.AuthType.BASIC_AUTH,
                             auth_details=(os.getenv('BMU_ES_USR'),
                                           os.getenv('BMU_ES_PWD')),
                             es_index_name="i_bmu")
     # Syslog lines are prefixed with the Logsene app token.
     formater = logging.Formatter(
         "4c971c88-73a9-4557-b534-4fb4cebc6d48:%(message)s")
     console_formatter = logging.Formatter(
         fmt='%(asctime)s %(levelname)-8s %(message)s',
         datefmt='%Y-%m-%d %H:%M:%S')
     handler.setFormatter(formater)
     console = logging.StreamHandler(sys.stdout)
     # Bug fix: console_formatter was created but never attached, so
     # console output lacked timestamps and levels.
     console.setFormatter(console_formatter)
     log = logging.getLogger("BackupMeUp")
     log.setLevel(logging.INFO)
     log.addHandler(console)
     log.addHandler(handler)
     log.addHandler(handler2)
     return log
Esempio n. 34
0
def get_db_logger(logger_name='parsl_db_logger',
                  is_logging_server=False,
                  monitoring_config=None,
                  **kwargs):
    """
    Parameters
    ----------
    logger_name : str, optional
        Name of the logger to use. Prevents adding repeat handlers or incorrect handlers
    is_logging_server : Bool, optional
        Used internally to determine which handler to return when using local db logging
    monitoring_config : MonitoringConfig, optional
        Pass in a logger class object to use for generating loggers.

    Returns
    -------
    logging.logger object

    Raises
    ------
    OptionalModuleMissing

    """
    logger = logging.getLogger(logger_name)
    # No monitoring configured: return a logger that silently drops records.
    if monitoring_config is None:
        logger.addHandler(NullHandler())
        return logger

    if monitoring_config.database_type == 'elasticsearch':
        # cmreslogging is an optional dependency; fail loudly when absent.
        if not _es_logging_enabled:
            raise OptionalModuleMissing(
                ['CMRESHandler'],
                "Logging to ElasticSearch requires the cmreslogging module")

        handler = CMRESHandler(hosts=[{
            'host': monitoring_config.host,
            'port': monitoring_config.port
        }],
                               use_ssl=monitoring_config.enable_ssl,
                               auth_type=CMRESHandler.AuthType.NO_AUTH,
                               es_index_name=monitoring_config.index_name,
                               es_additional_fields={
                                   'Campaign': "test",
                                   'Version': monitoring_config.version,
                                   'Username': getpass.getuser()
                               })
        # NOTE(review): this branch rebinds using monitoring_config.logger_name,
        # not the logger_name argument -- confirm the asymmetry is intended.
        logger = logging.getLogger(monitoring_config.logger_name)
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)
    elif monitoring_config.database_type == 'local_database' and not is_logging_server:
        # add a handler that will pass logs to the logging server
        handler = RemoteHandler(monitoring_config.web_app_host,
                                monitoring_config.web_app_port)
        # use the specific name generated by the server or the monitor wrapper
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)
    elif monitoring_config.database_type == 'local_database' and is_logging_server:
        # add a handler that will take logs being received on the server and log them to the database
        handler = DatabaseHandler(monitoring_config.eng_link)
        # use the specific name generated by the server or the monitor wrapper
        logger = logging.getLogger(logger_name)
        logger.setLevel(logging.INFO)
        logger.addHandler(handler)
    else:
        raise ValueError(
            'database_type must be one of ["local_database", "elasticsearch"]')

    return logger
Esempio n. 35
0
class ElasticSearchBackend(AbstractBackend):
  """
  ElasticsearchBackend is used to create an abstraction of the handler and the formatter concepts from logging.
  Here, we have a CMRESHandler which is part of an external library named 'cmreslogging' based on 'logging'.
  CMRESHandler is a specific handler created to send log records to an ElasticSearch DB. It does not need a Formatter
  object.
  """

  def __init__(self):
    """
    CMRESHandler needs, at least, a hostname, a username, a password, a port and a specific index
    from the ElasticSearch DB to send log records.
    """
    super(ElasticSearchBackend, self).__init__(None, None)
    # Connection defaults; overridden by the parameters of createHandler().
    self.__host = ''
    self.__user = None
    self.__passwd = None
    # NOTE(review): 9203 is not the usual ES port (9200) -- presumably
    # site-specific; confirm against deployment config.
    self.__port = 9203
    self.__index = ''
    self.__bufferSize = 1000
    self.__flushTime = 1

  def createHandler(self, parameters=None):
    """
    Each backend can initialize its attributes and create its handler with them.

    :params parameters: dictionary of parameters. ex: {'FileName': file.log}
    """
    if parameters is not None:
      self.__host = parameters.get('Host', self.__host)
      self.__user = parameters.get('User', self.__user)
      self.__passwd = parameters.get('Password', self.__passwd)
      self.__port = int(parameters.get('Port', self.__port))
      self.__index = parameters.get('Index', self.__index)
      self.__bufferSize = int(parameters.get('BufferSize', self.__bufferSize))
      self.__flushTime = int(parameters.get('FlushTime', self.__flushTime))

    # Use basic authentication only when both credentials are provided;
    # otherwise connect anonymously. Both variants use SSL with verification.
    if self.__user is not None and self.__passwd is not None:
      self._handler = CMRESHandler(hosts=[{'host': self.__host, 'port': self.__port}],
                                   auth_type=CMRESHandler.AuthType.BASIC_AUTH,
                                   auth_details=(self.__user, self.__passwd),
                                   es_index_name=self.__index,
                                   use_ssl=True,
                                   verify_ssl=True,
                                   buffer_size=self.__bufferSize,
                                   flush_frequency_in_sec=self.__flushTime)
    else:
      self._handler = CMRESHandler(hosts=[{'host': self.__host, 'port': self.__port}],
                                   auth_type=CMRESHandler.AuthType.NO_AUTH,
                                   es_index_name=self.__index,
                                   use_ssl=True,
                                   verify_ssl=True,
                                   buffer_size=self.__bufferSize,
                                   flush_frequency_in_sec=self.__flushTime)
    # We give a format containing only asctime to add the field in elasticsearch
    # asctime is not created at the initialization of the LogRecords but built in the format process
    self._handler.setFormatter(logging.Formatter('%(asctime)s'))

  def setLevel(self, level):
    """
    No possibility to set the level of the ElasticSearch handler.
    It is not set by default so it can send all Log Records of all levels to ElasticSearch.
    """
    pass

  def setFormat(self, fmt, datefmt, options):
    """
    Each backend give a format to their formatters and attach them to their handlers.

    :params fmt: string representing the log format
    :params datefmt: string representing the date format
    :params component: string represented as "system/component"
    :params options: dictionary of logging options. ex: {'Color': True}
    """
    pass
Esempio n. 36
0
# Environment-variable names used to configure the service.
sqsChainTxKey = 'SQS_UBIRCH_BIGCHAIN_DB_TX'
sqsRegionKey = 'AWS_REGION'
ipdbAppIdKey = 'IPDB_APP_ID'
ipdbAppKeyKey = 'IPDB_APP_KEY'
bigChainDbHostKey = 'BIG_CHAIN_DB_HOST'
# NOTE(review): the key name contains a typo ('TRHEADS'); deployments must
# set this exact (misspelled) variable name.
numThreadsKey = 'NUM_TRHEADS'
esLoggerHostKey = 'ES_LOG_HOST'
esLoggerPortKey = 'ES_LOG_PORT'

# Optionally ship logs to Elasticsearch when ES_LOG_HOST is set.
# NOTE(review): ES_LOG_PORT is read unconditionally inside this branch -- a
# set host with a missing port raises KeyError; confirm both are always set
# together.
if (esLoggerHostKey in os.environ.keys()):
    esLoggerHost = os.environ[esLoggerHostKey]
    esLoggerPort = int(os.environ[esLoggerPortKey])
    esLoggerHandler = CMRESHandler(
        hosts=[{
            'host': esLoggerHost,
            'port': esLoggerPort
        }],
        auth_type=CMRESHandler.AuthType.NO_AUTH,
        es_index_name="big-chain-store-service-logs")
    logger.addHandler(esLoggerHandler)

# AWS credentials are required; print usage hints when absent.
# NOTE(review): awsAccessKeyId / awsSecretAccessKey and `logger` are defined
# earlier in this module (outside this excerpt).
if (awsAccessKeyId not in os.environ or awsSecretAccessKey not in os.environ):
    logger.error("env vars missing")
    logger.info("AWS_ACCESS_KEY_ID -> AWS access key")
    logger.info("AWS_SECRET_ACCESS_KEY -> AWS secret key")
    logger.info(
        "SQS_CHAIN_IN -> AWS SQS queue name of inbound data which should be stored into bigChainDb (optional), default is 'local_dev_ubirch_bigchaindb_in'"
    )
    logger.info(
        "SQS_CHAIN_TX -> AWS SQS queue name for outbound tx hash publishing (optional), default is 'local_dev_ubirch_bigchaindb_tx'"
    )
Esempio n. 37
0
    async def InitServer(cls,
                         servertype="unkown-servertype",
                         startmysql=True,
                         startpg=True):
        """Initialise server-wide logging (rotating file + ELK) and DB pools.

        :param servertype: logical server name; used for the ELK index and guid.
        :param startmysql: create the aiomysql engine when True.
        :param startpg: create the asyncpg pool when True.
        """

        # Create the log directory
        if not os.path.exists('logs'):
            os.makedirs('logs')
        logging.basicConfig(level=logging.DEBUG)  # default: log to the screen
        filehandler = TimedRotatingFileHandler(  # time-rotated file output
            filename="logs/my.log",
            when='D',
            interval=1,
            backupCount=0)
        filehandler.suffix = "%Y%m%d-%H%M%S.log"
        formatter = logging.Formatter(
            "%(asctime)s-%(name)s-%(levelname)s-[%(filename)s:%(lineno)d]-%(message)s"
        )
        filehandler.setFormatter(formatter)
        filehandler.setLevel(logging.INFO)

        logging.getLogger().addHandler(filehandler)  # attach the rotating file handler

        # Server name and data file path
        cls.myservertype = servertype
        cls.mydatapath = datafilepath()
        # Generate a fresh guid; only used if none was loaded from config
        newguid = cls.myservertype + "_" + str(uuid.uuid1())

        await cls.loadconfig()

        if (cls.elkdatabase == ""):
            cls.elkdatabase = cls.myservertype

        if (cls.myserverguid == None):
            cls.myserverguid = newguid
            # No config file existed yet: persist the generated guid
            await cls.saveconfig()

        # Log output to ELK
        es_handler = CMRESHandler(hosts=[{
            'host': cls.elkhost,
            'port': cls.elkport
        }],
                                  auth_type=CMRESHandler.AuthType.NO_AUTH,
                                  es_index_name=cls.elkdatabase,
                                  es_additional_fields={
                                      'App': cls.myservertype,
                                      'AppGuid': cls.myserverguid
                                  })
        es_handler.setLevel(logging.INFO)

        formatter2 = logging.Formatter(
            "%(asctime)s-%(name)s-%(levelname)s-[%(filename)s:%(lineno)d]-%(message)s"
        )
        es_handler.setFormatter(formatter2)

        logging.getLogger().addHandler(es_handler)
        # Silence these modules' debug chatter: ELK would echo debug logs to
        # the screen, which is distracting.
        logging.getLogger("elasticsearch").setLevel(logging.WARNING)
        logging.getLogger("requests").setLevel(logging.WARNING)
        logging.getLogger("urllib3").setLevel(logging.WARNING)

        # Create the MySQL engine
        if startmysql:
            cls.mysqlengine = await create_engine(user=cls.mysqluser,
                                                  password=cls.mysqlpassword,
                                                  host=cls.mysqlhost,
                                                  port=cls.mysqlport,
                                                  db=cls.mysqldb,
                                                  loop=cls.asyncioloop)
        if startpg:
            cls.pg_pool = await asyncpg.create_pool(
                user='******',
                password='******',
                database='goldhonor',
                host='factory.goldhonor.com',
                port=15432,
                command_timeout=60)

        pass
Esempio n. 38
0
load_dotenv()

# load variables
http_base_container = os.getenv("HTTP_BASE_CONTAINER_IMAIS")
elastic_search_host = os.getenv("ELASTIC_SEARCH_HOST")
elastic_search_port = os.getenv("ELASTIC_SEARCH_PORT")
index_name = os.getenv("INDEX_NAME")
info = os.getenv("INFO")
logger_name = os.getenv("LOGGER_NAME")

# add elasticsearch logging capabilities
# parameters coming from env file
handler = CMRESHandler(
    hosts=[{
        "host": elastic_search_host,
        # Bug fix: os.getenv returns a string; the ES client expects an
        # integer port. Fails fast here if ELASTIC_SEARCH_PORT is unset.
        "port": int(elastic_search_port),
    }],
    auth_type=CMRESHandler.AuthType.NO_AUTH,
    es_index_name=index_name,
)

# name of the logger
# level and handler
log = logging.getLogger(logger_name)
log.setLevel(logging.INFO)
log.addHandler(handler)


class BlobStorage(object):
    """A class that allows you to connect to Azure Blob Storage

    With this class it will be possible to identify the metadata of the files,
Esempio n. 39
0
class ElasticSearchBackend(AbstractBackend):
    """
  ElasticsearchBackend is used to create an abstraction of the handler and the formatter concepts from logging.
  Here, we have a CMRESHandler which is part of an external library named 'cmreslogging' based on 'logging'.
  CMRESHandler is a specific handler created to send log records to an ElasticSearch DB. It does not need a Formatter
  object.
  """
    def __init__(self):
        """
    CMRESHandler needs, at least, a hostname, a username, a password, a port and a specific index
    from the ElasticSearch DB to send log records.
    """
        super(ElasticSearchBackend, self).__init__(None, None)
        # Connection defaults; overridden by the parameters of createHandler().
        self.__host = ''
        self.__user = None
        self.__passwd = None
        self.__port = 9203
        self.__index = ''
        self.__bufferSize = 1000
        self.__flushTime = 1

    def createHandler(self, parameters=None):
        """
    Each backend can initialize its attributes and create its handler with them.

    :params parameters: dictionary of parameters. ex: {'FileName': file.log}
    """
        if parameters is not None:
            self.__host = parameters.get('Host', self.__host)
            self.__user = parameters.get('User', self.__user)
            self.__passwd = parameters.get('Password', self.__passwd)
            self.__port = int(parameters.get('Port', self.__port))
            self.__index = parameters.get('Index', self.__index)
            self.__bufferSize = int(
                parameters.get('BufferSize', self.__bufferSize))
            self.__flushTime = int(
                parameters.get('FlushTime', self.__flushTime))

        # Keyword arguments shared by both the authenticated and anonymous
        # variants (previously duplicated across the two branches).
        handler_kwargs = {
            'hosts': [{
                'host': self.__host,
                'port': self.__port
            }],
            'es_index_name': self.__index,
            'use_ssl': True,
            'verify_ssl': True,
            'buffer_size': self.__bufferSize,
            'flush_frequency_in_sec': self.__flushTime,
        }
        # Use basic authentication only when both credentials are provided.
        if self.__user is not None and self.__passwd is not None:
            handler_kwargs['auth_type'] = CMRESHandler.AuthType.BASIC_AUTH
            handler_kwargs['auth_details'] = (self.__user, self.__passwd)
        else:
            handler_kwargs['auth_type'] = CMRESHandler.AuthType.NO_AUTH
        self._handler = CMRESHandler(**handler_kwargs)
        # We give a format containing only asctime to add the field in elasticsearch
        # asctime is not created at the initialization of the LogRecords but built in the format process
        self._handler.setFormatter(logging.Formatter('%(asctime)s'))

    def setLevel(self, level):
        """
    No possibility to set the level of the ElasticSearch handler.
    It is not set by default so it can send all Log Records of all levels to ElasticSearch.
    """
        pass

    def setFormat(self, fmt, datefmt, options):
        """
    Each backend give a format to their formatters and attach them to their handlers.

    :params fmt: string representing the log format
    :params datefmt: string representing the date format
    :params component: string represented as "system/component"
    :params options: dictionary of logging options. ex: {'Color': True}
    """
        pass