Example no. 1
0
 def __init__(self, t_name, tp):
     """Create the thread that publishes alarms to Kafka.

     :param t_name: name assigned to this thread.
     :param tp: shared thresholding processors (presumably keyed by
         alarm definition -- confirm against the caller).
     """
     threading.Thread.__init__(self, name=t_name)
     # Kafka connection for publishing alarms; topic comes from config.
     # (Dropped the redundant `= None` pre-assignment that was
     # immediately overwritten.)
     topic = cfg.CONF.thresholding_engine.alarm_topic
     self._publish_kafka_conn = kafka_conn.KafkaConnection(topic)
     # Interval between processor refreshes -- units not shown here;
     # confirm in the config option definition.
     self.interval = cfg.CONF.thresholding_engine.check_alarm_interval
     self.thresholding_processors = tp
Example no. 2
0
    def __init__(self, threads=1000):
        """Set up Kafka/ES connections and optional stevedore drivers.

        :param threads: thread-pool size forwarded to the base class.
        """
        super(NotificationEngine, self).__init__(threads)

        def _load_driver(ns, name):
            # Instantiate a stevedore-managed driver and log its interface.
            drv = driver.DriverManager(
                ns, name, invoke_on_load=True, invoke_kwds={}).driver
            LOG.debug(dir(drv))
            return drv

        self._kafka_conn = kafka_conn.KafkaConnection(
            cfg.CONF.notificationengine.topic)
        self.doc_type = cfg.CONF.notificationengine.doc_type

        # load index strategy (None when not configured)
        strategy_name = cfg.CONF.notificationengine.index_strategy
        if strategy_name:
            self.index_strategy = _load_driver(
                namespace.STRATEGY_NS, strategy_name)
        else:
            self.index_strategy = None

        self.index_prefix = cfg.CONF.notificationengine.index_prefix

        self._es_conn = es_conn.ESConnection(
            self.doc_type, self.index_strategy, self.index_prefix)

        # load notification processor (None when not configured)
        processor_name = cfg.CONF.notificationengine.processor
        if processor_name:
            self.notification_processor = _load_driver(
                namespace.PROCESSOR_NS, processor_name)
        else:
            self.notification_processor = None
Example no. 3
0
 def __init__(self, t_name, tp):
     """Create the thread that consumes metrics from Kafka.

     :param t_name: name assigned to this thread.
     :param tp: shared threshold processors (see caller for structure).
     """
     threading.Thread.__init__(self, name=t_name)
     # init kafka connection to metrics topic
     # (Dropped the redundant `= None` pre-assignment that was
     # immediately overwritten.)
     topic = cfg.CONF.thresholdengine.metrics_topic
     self._consume_kafka_conn = kafka_conn.KafkaConnection(topic)
     self.threshold_processors = tp
Example no. 4
0
 def __init__(self, t_name, tp):
     """Create the thread that publishes alarms to Kafka.

     :param t_name: name assigned to this thread.
     :param tp: shared threshold processors (see caller for structure).
     """
     threading.Thread.__init__(self, name=t_name)
     # init kafka connection to alarm topic
     # (Dropped the redundant `= None` pre-assignment that was
     # immediately overwritten.)
     topic = cfg.CONF.thresholdengine.alarm_topic
     self._publish_kafka_conn = kafka_conn.KafkaConnection(topic)
     # set time interval for calling processors to refresh alarms
     self.interval = cfg.CONF.thresholdengine.check_alarm_interval
     self.threshold_processors = tp
Example no. 5
0
    def __init__(self, threads=1000):
        """Wire up Kafka, ElasticSearch and optional stevedore drivers.

        :param threads: thread-pool size forwarded to the base class.
        """
        super(ESPersister, self).__init__(threads)

        def _load_driver(ns, name):
            # Instantiate a stevedore-managed driver and log its interface.
            drv = driver.DriverManager(
                ns, name, invoke_on_load=True, invoke_kwds={}).driver
            LOG.debug(dir(drv))
            return drv

        self._kafka_conn = kafka_conn.KafkaConnection(
            cfg.CONF.es_persister.topic)

        # load index strategy (None when not configured)
        strategy_name = cfg.CONF.es_persister.index_strategy
        if strategy_name:
            self.index_strategy = _load_driver(
                namespace.STRATEGY_NS, strategy_name)
        else:
            self.index_strategy = None

        self.index_prefix = cfg.CONF.es_persister.index_prefix
        # Use doc_type if it is defined, otherwise fall back to the topic.
        self.doc_type = (cfg.CONF.es_persister.doc_type or
                         cfg.CONF.es_persister.topic)

        # create connection to ElasticSearch
        self._es_conn = es_conn.ESConnection(
            self.doc_type, self.index_strategy, self.index_prefix)

        # load message processor (None when not configured)
        processor_name = cfg.CONF.es_persister.processor
        if processor_name:
            self.msg_processor = _load_driver(
                namespace.PROCESSOR_NS, processor_name)
        else:
            self.msg_processor = None
Example no. 6
0
 def __init__(self, t_name, tp):
     """Create the thread that consumes alarm definitions from Kafka.

     :param t_name: name assigned to this thread.
     :param tp: shared thresholding processors (see caller for structure).
     """
     threading.Thread.__init__(self, name=t_name)
     # Kafka connection for consuming alarm definitions.
     # (Dropped the redundant `= None` pre-assignment that was
     # immediately overwritten, and the redundant parentheses.)
     topic = cfg.CONF.thresholding_engine.definition_topic
     self._consume_kafka_conn = kafka_conn.KafkaConnection(topic)
     self.thresholding_processors = tp
Example no. 7
0
    def __init__(self, global_conf):
        """Build the metrics dispatcher: Kafka/ES connections, query URLs
        and the aggregation templates used by the metrics API.

        NOTE(review): this block appears truncated in the extracted
        source -- the `_stats_agg` template below is cut off mid-string.

        :param global_conf: global configuration object; not used here
            (the base-class initializer takes no arguments).
        """
        LOG.debug('initializing V2API!')
        super(MetricDispatcher, self).__init__()
        self.topic = cfg.CONF.metrics.topic
        self.doc_type = cfg.CONF.metrics.doc_type
        # Default result-size limit applied to queries and aggregations.
        self.size = cfg.CONF.metrics.size
        self._kafka_conn = kafka_conn.KafkaConnection(self.topic)

        # load index strategy
        if cfg.CONF.metrics.index_strategy:
            self.index_strategy = driver.DriverManager(
                namespace.STRATEGY_NS,
                cfg.CONF.metrics.index_strategy,
                invoke_on_load=True,
                invoke_kwds={}).driver
            LOG.debug(dir(self.index_strategy))
        else:
            self.index_strategy = None

        self.index_prefix = cfg.CONF.metrics.index_prefix

        self._es_conn = es_conn.ESConnection(
            self.doc_type, self.index_strategy, self.index_prefix)

        # Setup the get metrics query body pattern
        self._query_body = {
            "query": {"bool": {"must": []}},
            "size": self.size}

        self._aggs_body = {}
        self._stats_body = {}
        self._sort_clause = []

        # Setup the get metrics query url, the url should be similar to this:
        # http://host:port/data_20141201/metrics/_search
        # the url should be made of es_conn uri, the index prefix, metrics
        # dispatcher topic, then add the key word _search.
        self._query_url = ''.join([self._es_conn.uri,
                                  self._es_conn.index_prefix, '*/',
                                  cfg.CONF.metrics.topic,
                                  '/_search?search_type=count'])

        # the url to get all the properties of metrics
        self._query_mapping_url = ''.join([self._es_conn.uri,
                                           self._es_conn.index_prefix,
                                           '*/_mappings/',
                                           cfg.CONF.metrics.topic])

        # Setup metrics query aggregation command. To see the structure of
        # the aggregation, copy and paste it to a json formatter.
        # The %(size)d placeholders are presumably filled later with
        # {'size': ...} via %-formatting -- not visible in this block.
        self._metrics_agg = """
        {"by_name":{"terms":{"field":"name","size":%(size)d},
        "aggs":{"by_dim":{"terms":{"field":"dimensions_hash","size":%(size)d},
        "aggs":{"metrics":{"top_hits":{"_source":{"exclude":
        ["dimensions_hash","timestamp","value"]},"size":1}}}}}}}
        """

        # Template for measurement queries: per-name, per-dimension buckets
        # with timestamp/value pairs sorted ascending.
        self._measure_agg = """
        {"by_name":{"terms":{"field":"name","size":%(size)d},
        "aggs":{"by_dim":{"terms":{"field":"dimensions_hash",
        "size": %(size)d},"aggs":{"dimension":{"top_hits":{
        "_source":{"exclude":["dimensions_hash","timestamp",
        "value"]},"size":1}},"measures": {"top_hits":{
        "_source": {"include": ["timestamp", "value"]},
        "sort": [{"timestamp": "asc"}],"size": %(size)d}}}}}}}
        """

        self._stats_agg = """
Example no. 8
0
    def __init__(self, global_conf):
        """Initialize the Kafka dispatcher.

        :param global_conf: global configuration object, forwarded to the
            base-class initializer.
        """
        LOG.debug('initializing KafkaDispatcher!')
        super(KafkaDispatcher, self).__init__(global_conf)

        # Connection created with no explicit topic -- presumably
        # KafkaConnection reads its defaults from config; confirm in
        # kafka_conn (not visible here).
        self._kafka_conn = kafka_conn.KafkaConnection()