    def __init__(self, kafka_url, zookeeper_url, zookeeper_path, group, topic):
        """Init
             kafka_url, group, topic - Kafka connection details
             zookeeper_url, zookeeper_path - ZooKeeper connection details
        """

        self._kafka_topic = topic

        self._zookeeper_url = zookeeper_url
        self._zookeeper_path = zookeeper_path

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        self._kafka = kafka.client.KafkaClient(kafka_url)

        # No auto-commit so that commits only happen after the alarm is processed.
        self._consumer = kafka.consumer.SimpleConsumer(self._kafka,
                                                       group,
                                                       self._kafka_topic,
                                                       auto_commit=False,
                                                       iter_timeout=5)

        # Without this the partition is not provided in the response
        self._consumer.provide_partition_info()
        self._consumer.fetch_last_known_offsets()
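
A minimal consumption sketch for the consumer above (an assumption, not part of the
original snippet): with provide_partition_info() enabled, iterating the consumer yields
(partition, OffsetAndMessage) pairs, and the offset is committed only after a message
has been handled, which is why auto-commit is disabled above.

    for partition, raw_message in self._consumer:
        handle_alarm(raw_message.message.value)  # handle_alarm is a hypothetical callback
        self._consumer.commit()                  # commit only after processing succeeds
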
 def __init__(self, config):
     self.statsd = monascastatsd.Client(name='monasca',
                                        dimensions=BaseProcessor.dimensions)
     notifiers.init(self.statsd)
     notifiers.load_plugins(config['notification_types'])
     notifiers.config(config['notification_types'])
     self._db_repo = get_db_repo(config)
     self.insert_configured_plugins()
Example 3
 def __init__(self, alarm_ttl, config):
     self._alarm_ttl = alarm_ttl
     self._statsd = monascastatsd.Client(name='monasca',
                                         dimensions=BaseProcessor.dimensions)
     if 'database' in config and 'repo_driver' in config['database']:
         self._db_repo = simport.load(config['database']['repo_driver'])(config)
     else:
         self._db_repo = simport.load(
             'monasca_notification.common.repositories.'
             'mysql.mysql_repo:MysqlRepo')(config)
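
A hedged configuration sketch for the snippet above: repo_driver is a simport path of the
form 'module.path:ClassName'. The driver path shown simply repeats the default from the
code, and the class name AlarmProcessor and the TTL value are taken from how this
constructor is instantiated in the engine example further down this page.

    config = {
        'database': {
            'repo_driver':
                'monasca_notification.common.repositories.mysql.mysql_repo:MysqlRepo'
        }
    }
    processor = AlarmProcessor(alarm_ttl=14400, config=config)  # TTL in seconds, arbitrary example
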
Example 4
def get_statsd_client(dimensions=None):
    local_dims = dimensions.copy() if dimensions else {}
    local_dims.update(NOTIFICATION_DIMENSIONS)
    client = monascastatsd.Client(name='monasca',
                                  host=CONF.statsd.host,
                                  port=CONF.statsd.port,
                                  dimensions=local_dims)

    return client
Example 5
    def test_context_manager(self):
        fake_socket = FakeSocket()
        with mstatsd.Connection() as conn:
            conn.socket = fake_socket
            client = mstatsd.Client(name='ContextTester', connection=conn)
            client.get_gauge('page').send('views', 123)
            client.get_timer('page').timing('timer', 12)

        self.assertEqual(
            'ContextTester.page.views:123|g\nContextTester.page.timer:12|ms',
            fake_socket.recv())
Example 6
    def test_batched_buffer_autoflush(self):
        fake_socket = FakeSocket()
        with mstatsd.Connection() as conn:
            conn.socket = fake_socket
            client = mstatsd.Client(name='BufferedTester', connection=conn)
            counter = client.get_counter('mycounter')
            for _ in range(51):
                counter.increment()
            self.assertEqual(
                '\n'.join(['BufferedTester.mycounter:1|c' for _ in range(50)]),
                fake_socket.recv())

        self.assertEqual('BufferedTester.mycounter:1|c', fake_socket.recv())
Example 7
    def __init__(self, url):
        """Init
             url - Kafka connection details
        """
        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        self._kafka = kafka.client.KafkaClient(url)
        self._producer = kafka.producer.KeyedProducer(
            self._kafka,
            async=False,
            req_acks=kafka.producer.KeyedProducer.ACK_AFTER_LOCAL_WRITE,
            ack_timeout=2000)
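
A minimal publish sketch (an assumption, not part of the original snippet): the old
kafka-python KeyedProducer sends one or more payloads to a topic, partitioned by the
given key, and payloads are expected to be bytes. The topic name, key and notification
dict here are illustrative placeholders.

    self._producer.send_messages(
        'notification-topic',                        # topic name is illustrative
        'alarm-id-1'.encode('utf-8'),                # key used to pick the partition
        json.dumps(notification).encode('utf-8'))    # message payloads must be bytes
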
Example 8
def get_client(dimensions=None):
    # type: (dict) -> monascastatsd.Client
    """Creates statsd client

    Creates a monasca-statsd client using the configuration from
    the config file and the supplied dimensions.

    Configuration is composed of ::

        [monitoring]
        statsd_host = 192.168.10.4
        statsd_port = 8125
        statsd_buffer = 50

    Dimensions are appended to the following dictionary ::

        {
            'service': 'monitoring',
            'component': 'monasca-api'
        }

    Note:
        Passed dimensions do not override those specified in the
        dictionary above.

    :param dict dimensions: Optional dimensions
    :return: statsd client
    :rtype: monascastatsd.Client
    """
    dims = _DEFAULT_DIMENSIONS.copy()
    if dimensions:
        for key, val in dimensions.items():
            if key not in _DEFAULT_DIMENSIONS:
                dims[key] = val
            else:
                LOG.warning('Cannot override fixed dimension %s=%s', key,
                            _DEFAULT_DIMENSIONS[key])

    connection = monascastatsd.Connection(
        host=CONF.monitoring.statsd_host,
        port=CONF.monitoring.statsd_port,
        max_buffer_size=CONF.monitoring.statsd_buffer
    )
    client = monascastatsd.Client(name=_CLIENT_NAME,
                                  connection=connection,
                                  dimensions=dims)

    LOG.debug('Created statsd client %s[%s] = %s:%d', _CLIENT_NAME, dims,
              CONF.monitoring.statsd_host, CONF.monitoring.statsd_port)

    return client
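
A hedged usage sketch for get_client() (the 'handler' dimension is an illustrative name):
extra dimensions are merged with the fixed ones, attempts to override a fixed dimension
such as 'service' are logged and ignored, and the returned client is used like any other
monascastatsd.Client.

    client = get_client({'handler': 'metrics', 'service': 'ignored'})
    client.get_counter('request').increment()   # counted under the merged dimensions
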
Example 9
 def __init__(
         self, alarm_ttl, mysql_host, mysql_user,
         mysql_passwd, dbname, mysql_ssl=None):
     self._alarm_ttl = alarm_ttl
     self._statsd = monascastatsd.Client(name='monasca',
                                         dimensions=BaseProcessor.dimensions)
     try:
         self._mysql = MySQLdb.connect(host=mysql_host, user=mysql_user,
                                       passwd=unicode(mysql_passwd).encode('utf-8'),
                                       db=dbname, ssl=mysql_ssl)
         self._mysql.autocommit(True)
     except Exception:
         log.exception('MySQL connect failed')
         raise
 def __init__(self, config):
     self._topics = {}
     self._topics['notification_topic'] = config['kafka'][
         'notification_topic']
     self._topics['retry_topic'] = config['kafka'][
         'notification_retry_topic']
     self._statsd = monascastatsd.Client(
         name='monasca', dimensions=BaseProcessor.dimensions)
     self._consumer = KafkaConsumer(
         config['kafka']['url'], config['zookeeper']['url'],
         config['zookeeper']['notification_path'], config['kafka']['group'],
         config['kafka']['alarm_topic'])
     self._producer = KafkaProducer(config['kafka']['url'])
     self._alarm_ttl = config['processors']['alarm']['ttl']
     self._alarms = AlarmProcessor(self._alarm_ttl, config)
     self._notifier = NotificationProcessor(config['notification_types'])
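
A hypothetical run-loop sketch for the engine wired up above (to_notification(), send()
and publish() are assumed method names, not taken from the snippet): alarms are consumed,
turned into notifications, sent, and only then is the consumer offset committed.

    def run(self):
        for alarm in self._consumer:
            notifications = self._alarms.to_notification(alarm)      # assumed API
            if notifications:
                sent, failed = self._notifier.send(notifications)    # assumed API
                self._producer.publish(
                    self._topics['notification_topic'], sent)        # assumed API
            self._consumer.commit()
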
Example 11
    def send_messages(self):
        '''Main processing for sending messages.'''
        try:
            conn = mstatsd.Connection(host=self.host, port=self.port)
            self.client = mstatsd.Client(name='statsd-generator', connection=conn)
            for index in range(1, self.num_of_iterations + 1):
                print("Starting iteration " + str(index) +
                      " of " + str(self.num_of_iterations))
                counter = self.client.get_counter('teraflops')
                counter.increment(5)
                gauge = self.client.get_gauge()
                gauge.send('num_of_teraflops',
                           random.uniform(1.0, 10.0),
                           dimensions={'origin': 'dev',
                                       'environment': 'test'})
                histogram = self.client.get_histogram('hist')
                histogram.send('file.upload.size',
                               random.randrange(1, 100),
                               dimensions={'version': '1.0'})
                stat_set = self.client.get_set('hist')
                stat_set.send('load_time',
                              random.randrange(1, 100),
                              dimensions={'page_name': 'mypage.html'})

                timer = self.client.get_timer('timer')

                @timer.timed('config_db_time',
                             dimensions={'db_name': 'mydb'})
                def time_db():
                    time.sleep(0.2)
                time_db()

                with timer.time('time_block'):
                    time.sleep(0.3)

                # Send some regular statsd messages
                counter = statsd.Counter('statsd_counter')
                counter += 1
                gauge = statsd.Gauge('statsd_gauge')
                gauge.send('cpu_percent',
                           random.uniform(1.0, 100.0))
                print("Completed iteration " + str(index) +
                      ".  Sleeping for " + str(self.delay) + " seconds...")
                time.sleep(self.delay)
        except Exception:
            print ("Error sending statsd messages...")
            raise
    def __init__(self, config, interval):
        self._topic_name = config['kafka']['periodic'][interval]

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        zookeeper_path = config['zookeeper']['periodic_path'][interval]
        self._consumer = KafkaConsumer(config['kafka']['url'],
                                       config['zookeeper']['url'],
                                       zookeeper_path,
                                       config['kafka']['group'],
                                       self._topic_name)

        self._producer = KafkaProducer(config['kafka']['url'])

        self._notifier = NotificationProcessor(config['notification_types'])
        self._db_repo = get_db_repo(config)
Example 13
def get_statsd_client(dimensions=None):
    local_dims = dimensions.copy() if dimensions else {}
    local_dims.update(NOTIFICATION_DIMENSIONS)
    if CONF.statsd.enable:
        LOG.debug("Stablishing connection with statsd on {0}:{1}".format(
            CONF.statsd.host, CONF.statsd.port))
        client = monascastatsd.Client(name='monasca',
                                      host=CONF.statsd.host,
                                      port=CONF.statsd.port,
                                      dimensions=local_dims)
    else:
        LOG.debug("Overriding monascastatsd.Client to use it offline")
        client = OfflineClient(name='monasca',
                               host=CONF.statsd.host,
                               port=CONF.statsd.port,
                               dimensions=local_dims)
    return client
Example 14
    def __init__(self, config):
        self._retry_interval = config['retry']['interval']
        self._retry_max = config['retry']['max_attempts']

        self._topics = {}
        self._topics['notification_topic'] = config['kafka'][
            'notification_topic']
        self._topics['retry_topic'] = config['kafka'][
            'notification_retry_topic']

        self._statsd = monascastatsd.Client(
            name='monasca', dimensions=BaseProcessor.dimensions)

        self._consumer = KafkaConsumer(
            config['kafka']['url'], config['zookeeper']['url'],
            config['zookeeper']['notification_retry_path'],
            config['kafka']['group'],
            config['kafka']['notification_retry_topic'])

        self._producer = KafkaProducer(config['kafka']['url'])

        self._notifier = NotificationProcessor(config['notification_types'])
Example 15
 def test_client_default_host_port(self, connection_mock):
     mstatsd.Client()
     connection_mock.assert_called_once_with(host='localhost',
                                             port=8125,
                                             max_buffer_size=50)
Example 16
 def test_client_set_host_port(self, connection_mock):
     mstatsd.Client(host='foo.bar', port=5213)
     connection_mock.assert_called_once_with(host='foo.bar',
                                             port=5213,
                                             max_buffer_size=50)
Example 17
 def setUp(self):
     conn = mstatsd.Connection()
     conn.socket = FakeSocket()
     self.client = mstatsd.Client(connection=conn,
                                  dimensions={'env': 'test'})
Example 18
 def __init__(self, config):
     self.statsd = monascastatsd.Client(name='monasca',
                                        dimensions=BaseProcessor.dimensions)
     notifiers.init(self.statsd)
     notifiers.config(config)
 def __init__(self, alarm_ttl, config):
     self._alarm_ttl = alarm_ttl
     self._statsd = monascastatsd.Client(
         name='monasca', dimensions=BaseProcessor.dimensions)
     self._db_repo = get_db_repo(config)