def build_streams_notification_message(env, events, action_type='FIRE'):
    """Build the notification message for a fired or expired stream.

    Looks up the stream definitions for the stream named in env and the
    tenant of the first event, and returns a dict containing the stream
    definition metadata, the requested action and the list of events.
    Returns None if the event list is empty.
    """
    message = None
    if len(events) > 0:
        stream_name = stream_helpers.stream_def_name(env['stream_name'])
        tenant_id = events[0]['_tenant_id']
        streams_repo = StreamsRepository()
        stream_definition_rows = (
            streams_repo.get_stream_definitions(
                tenant_id, stream_name))
        for row in stream_definition_rows:
            # Use the fire or expire action list, depending on the action type.
            if action_type.upper() == 'FIRE':
                action_id = row['fire_actions']
            else:
                action_id = row['expire_actions']
            message = {'tenant_id': tenant_id,
                       'stream_def': {
                           'name': stream_name,
                           'id': row['id'],
                           'description': row['description'],
                           'actions_enabled': row['actions_enabled'],
                           'action_type': action_type,
                           'action_id': action_id},
                       'events': []}
            for e in events:
                message['events'].append(e)
    return message
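
# Illustrative sketch of the value returned above for a single matching
# stream definition (field values are made up, not taken from real data):
#
#     {'tenant_id': '406904',
#      'stream_def': {'name': 'my_stream',
#                     'id': '1',
#                     'description': 'provisioning stream',
#                     'actions_enabled': True,
#                     'action_type': 'FIRE',
#                     'action_id': 'notification-method-id'},
#      'events': [{'_tenant_id': '406904'}]}

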
class EventProcessorBase(object):
    """EventProcessorBase

    The base class for the EventProcessor and PipelineProcessor.
    """

    dimensions = {
        'service': 'monitoring', 'component': 'monasca-events-engine'}

    def __init__(self, conf):
        self.conf = conf
        self._streams_repo = StreamsRepository()

    def stream_defs_from_database(self):
        """Return all stream definitions in winchester format."""
        slist = list()
        try:
            stream_definition_rows = \
                self._streams_repo.get_all_stream_definitions()
            for row in stream_definition_rows:
                # fire_criteria, select_by and group_by are stored as JSON
                # strings; decode them before converting the row.
                row['fire_criteria'] = json.loads(row['fire_criteria'])
                row['select_by'] = json.loads(row['select_by'])
                row['group_by'] = json.loads(row['group_by'])
                w_stream = stream_helpers.stream_def_to_winchester_format(
                    row)
                slist.append(w_stream)
        except exceptions.RepositoryException as e:
            log.error(e)

        return slist
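
    # For reference, a row decoded by stream_defs_from_database might look
    # like the sketch below (values are illustrative assumptions, not a
    # schema guarantee):
    #
    #     {'id': '1',
    #      'name': 'my_stream',
    #      'fire_criteria': [{'event_type': 'compute.instance.create.end'}],
    #      'select_by': [{'event_type': 'compute.instance.create.*'}],
    #      'group_by': ['instance_id']}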

    def stream_definition_consumer(self, conf, lock, group, manager):
        '''Stream Definition Consumer

        Runs as a thread in both the event processor and pipeline processor
        processes, applying stream definition created and deleted events to
        the manager.

        :param conf: The conf object (passed explicitly for clarity, even
            though CONF.cfg is global).
        :param lock: A lock used to serialize access to the manager.
        :param string group: The Kafka group for stream definitions.
        :param object manager: A trigger_manager or pipeline_manager object.
        '''
        kafka_url = conf.kafka.url
        group = conf.kafka.stream_def_group
        topic = conf.kafka.stream_def_topic
        kafka = KafkaClient(kafka_url)
        consumer = SimpleConsumer(
            kafka,
            group,
            topic,
            auto_commit=True)

        # Seek to the end of the topic (whence=2) so that only stream
        # definition changes published after startup are consumed.
        consumer.seek(0, 2)

        statsd = monascastatsd.Client(name='monasca',
                                      dimensions=self.dimensions)
        stream_definitions_created = \
            statsd.get_counter('stream_definitions_created')
        stream_definitions_deleted = \
            statsd.get_counter('stream_definitions_deleted')

        for s in consumer:
            # SimpleConsumer yields (offset, message) tuples; the message
            # payload is a JSON-encoded stream definition event.
            offset, message = s
            stream_def = json.loads(message.value)

            if 'stream-definition-created' in stream_def:
                log.debug('Received a stream definition created event')
                stream_create = stream_helpers.stream_def_to_winchester_format(
                    stream_def['stream-definition-created'])
                slist = list()
                slist.append(stream_create)
                lock.acquire()
                try:
                    manager.add_trigger_definition(slist)
                    stream_definitions_created.increment()
                except Exception as e:
                    log.exception(e)
                finally:
                    lock.release()
            elif 'stream-definition-deleted' in stream_def:
                log.debug('Received a stream-definition-deleted event')
                name = stream_helpers.stream_unique_name(
                    stream_def['stream-definition-deleted'])
                lock.acquire()
                try:
                    manager.delete_trigger_definition(name)
                    stream_definitions_deleted.increment()
                except Exception as e:
                    log.exception(e)
                finally:
                    lock.release()
            else:
                log.error('Unknown event received on stream_def_topic')
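
        # Sketch of the payloads handled in the loop above; the fields inside
        # each definition are assumptions based on the keys this consumer
        # reads, not a full schema:
        #
        #     {"stream-definition-created": {"name": "my_stream",
        #                                    "fire_criteria": [...],
        #                                    "select_by": [...],
        #                                    "group_by": [...]}}
        #
        #     {"stream-definition-deleted": {"name": "my_stream",
        #                                    "tenant_id": "406904"}}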