def _generate_models(self):
    """Record three fixture events: empty, admin-scoped and user-scoped."""
    self.s_time = datetime.datetime(2013, 12, 31, 5, 0)
    text = models.Trait.TEXT_TYPE
    event_models = [
        models.Event(message_id='1',
                     event_type='empty_ev',
                     generated=self.s_time,
                     traits=[models.Trait('random', text, 'blah')],
                     raw={}),
        models.Event(message_id='2',
                     event_type='admin_ev',
                     generated=self.s_time,
                     traits=[models.Trait('project_id', text,
                                          self.admin_proj_id),
                             models.Trait('user_id', text,
                                          self.admin_user_id)],
                     raw={}),
        models.Event(message_id='3',
                     event_type='user_ev',
                     generated=self.s_time,
                     traits=[models.Trait('project_id', text, self.proj_id),
                             models.Trait('user_id', text, self.user_id)],
                     raw={}),
    ]
    self.event_conn.record_events(event_models)
def test_bad_event(self):
    """Every event must be reported as a problem when storage raises."""
    now = datetime.datetime.utcnow()
    m = [models.Event("1", "Foo", now, []),
         models.Event("2", "Zoo", now, [])]
    with mock.patch.object(self.event_conn, "_record_event") as mock_save:
        mock_save.side_effect = MyException("Boom")
        problem_events = self.event_conn.record_events(m)
        # Both events should come back flagged as problems.
        self.assertEqual(2, len(problem_events))
        for bad, _event in problem_events:
            # assertEqual(expected, actual): the expected constant goes
            # first, matching the other assertion in this test.
            self.assertEqual(models.Event.UNKNOWN_PROBLEM, bad)
def record_events(self, events):
    """Verify, convert and persist incoming event dicts.

    Each event is signature-checked before conversion into a storage
    model; events failing verification are discarded with a warning,
    and events failing conversion are logged and dropped rather than
    aborting the batch.

    :param events: a single event dict or a list of event dicts.
    """
    if not isinstance(events, list):
        events = [events]
    event_list = []
    for ev in events:
        if publisher_utils.verify_signature(
                ev, self.conf.publisher.telemetry_secret):
            try:
                event_list.append(
                    models.Event(
                        message_id=ev['message_id'],
                        event_type=ev['event_type'],
                        generated=timeutils.normalize_time(
                            timeutils.parse_isotime(ev['generated'])),
                        traits=[models.Trait(
                            name, dtype,
                            models.Trait.convert_value(dtype, value))
                            for name, dtype, value in ev['traits']],
                        raw=ev.get('raw', {}))
                )
            except Exception:
                # Malformed payloads must not poison the whole batch.
                LOG.exception(_LE("Error processing event and it will be "
                                  "dropped: %s"), ev)
        else:
            LOG.warning(_LW(
                'event signature invalid, discarding event: %s'), ev)
    self.event_conn.record_events(event_list)
def _generate_models(self):
    """Record 20 'foo.bar' events, one second apart, four traits each."""
    self.s_time = datetime.datetime(2013, 12, 31, 5, 0)
    self.trait_time = datetime.datetime(2013, 12, 31, 5, 0)
    base = 0
    event_models = []
    for _ in range(20):
        traits = [
            models.Trait('trait_A', models.Trait.TEXT_TYPE, "my_text"),
            models.Trait('trait_B', models.Trait.INT_TYPE, base + 1),
            models.Trait('trait_C', models.Trait.FLOAT_TYPE,
                         float(base) + 0.123456),
            models.Trait('trait_D', models.Trait.DATETIME_TYPE,
                         self.trait_time),
        ]
        event_models.append(
            models.Event(message_id=str(uuid.uuid4()),
                         event_type='foo.bar',
                         generated=self.trait_time,
                         traits=traits,
                         raw={'status': {'nested': 'started'}}))
        self.trait_time += datetime.timedelta(seconds=1)
    self.event_conn.record_events(event_models)
def get_events(self, event_filter):
    """Return an iter of models.Event objects.

    :param event_filter: storage.EventFilter object, consists of filters
      for events that are stored in database.
    """
    q, start, stop = hbase_utils.make_events_query_from_filter(
        event_filter)
    with self.conn_pool.connection() as conn:
        events_table = conn.table(self.EVENT_TABLE)
        gen = events_table.scan(filter=q, row_start=start, row_stop=stop)
        for event_id, data in gen:
            traits = []
            events_dict = hbase_utils.deserialize_entry(data)[0]
            for key, value in events_dict.items():
                # Columns other than event_type/timestamp hold traits,
                # keyed as '<trait_name>+<trait_dtype>'.
                if (not key.startswith('event_type') and
                        not key.startswith('timestamp')):
                    trait_name, trait_dtype = key.rsplit('+', 1)
                    traits.append(
                        ev_models.Trait(name=trait_name,
                                        dtype=int(trait_dtype),
                                        value=value))
            # Row key layout is '<timestamp>_<message_id>'.
            ts, mess = event_id.split('_', 1)
            yield ev_models.Event(
                message_id=mess,
                event_type=events_dict['event_type'],
                generated=events_dict['timestamp'],
                traits=sorted(traits, key=operator.attrgetter('dtype')))
def sample(self, messages):
    """Publish verified events extracted from notification messages.

    Event dicts that fail signature verification are silently dropped.
    On publish failure the batch is requeued unless ack_on_event_error
    is set, in which case the exception is re-raised.

    :param messages: notification messages, each carrying a "payload"
                     list of event dicts.
    :returns: an oslo_messaging.NotificationResult value.
    """
    events = chain.from_iterable(m["payload"] for m in messages)
    events = [
        models.Event(message_id=ev['message_id'],
                     event_type=ev['event_type'],
                     generated=timeutils.normalize_time(
                         timeutils.parse_isotime(ev['generated'])),
                     traits=[
                         models.Trait(
                             name, dtype,
                             models.Trait.convert_value(dtype, value))
                         for name, dtype, value in ev['traits']
                     ],
                     raw=ev.get('raw', {}))
        for ev in events
        if publisher_utils.verify_signature(
            ev, self.conf.publisher.telemetry_secret)
    ]
    try:
        with self.publish_context as p:
            p(events)
    except Exception:
        if not self.conf.notification.ack_on_event_error:
            return oslo_messaging.NotificationResult.REQUEUE
        raise
    return oslo_messaging.NotificationResult.HANDLED
def get_events(self, event_filter, limit=None):
    """Return an iter of models.Event objects.

    :param event_filter: storage.EventFilter object, consists of filters
                         for events that are stored in database.
    :param limit: Maximum number of results to return.
    """
    # A zero limit means "nothing requested".
    if limit == 0:
        return
    query = pymongo_utils.make_events_query_from_filter(event_filter)
    if limit is None:
        results = self.db.event.find(query)
    else:
        results = self.db.event.find(query, limit=limit)
    for event in results:
        traits = [models.Trait(name=t['trait_name'],
                               dtype=int(t['trait_type']),
                               value=t['trait_value'])
                  for t in event['traits']]
        yield models.Event(message_id=event['_id'],
                           event_type=event['event_type'],
                           generated=event['timestamp'],
                           traits=traits,
                           raw=event.get('raw'))
def make_test_data(conn, start, end, interval, event_types):
    """Record synthetic events every `interval` minutes from start to end.

    :param conn: event storage connection used to record the batches.
    :param start: datetime or parseable time string for the first batch.
    :param end: datetime or parseable time string; generation stops after it.
    :param interval: minutes between batches.
    :param event_types: number of distinct event types per batch.
    """
    # Accept either datetime objects or parseable time strings.
    timestamp = (start if isinstance(start, datetime.datetime)
                 else timeutils.parse_strtime(start))
    if not isinstance(end, datetime.datetime):
        end = timeutils.parse_strtime(end)
    step = datetime.timedelta(minutes=interval)
    print('Adding new events')
    count = 0
    while timestamp <= end:
        batch = []
        for i in range(event_types):
            traits = [models.Trait('id1_%d' % i, 1, str(uuid.uuid4())),
                      models.Trait('id2_%d' % i, 2, random.randint(1, 10)),
                      models.Trait('id3_%d' % i, 3, random.random()),
                      models.Trait('id4_%d' % i, 4, timestamp)]
            batch.append(models.Event(str(uuid.uuid4()),
                                      'event_type%d' % i,
                                      timestamp, traits))
            count += 1
        conn.record_events(batch)
        timestamp += step
    print('Added %d new events' % count)
def get_events(self, event_filter, limit=None):
    """Return an iter of models.Event objects.

    :param event_filter: storage.EventFilter object, consists of filters
      for events that are stored in database.
    :param limit: Maximum number of results to return.
    """
    if limit == 0:
        # A zero limit means "no results"; skip the scan entirely.
        return
    q, start, stop = hbase_utils.make_events_query_from_filter(
        event_filter)
    with self.conn_pool.connection() as conn:
        events_table = conn.table(self.EVENT_TABLE)
        gen = events_table.scan(filter=q, row_start=start, row_stop=stop,
                                limit=limit)
        for event_id, data in gen:
            traits = []
            events_dict = hbase_utils.deserialize_entry(data)[0]
            for key, value in events_dict.items():
                # Tuple keys produced by deserialize_entry are
                # (trait_name, trait_dtype) pairs; scalar keys hold
                # event metadata.
                if isinstance(key, tuple):
                    trait_name, trait_dtype = key
                    traits.append(models.Trait(name=trait_name,
                                               dtype=int(trait_dtype),
                                               value=value))
            # Row key layout is '<timestamp>:<quoted message id>'.
            ts, mess = event_id.split(':')
            yield models.Event(
                message_id=hbase_utils.unquote(mess),
                event_type=events_dict['event_type'],
                generated=events_dict['timestamp'],
                traits=sorted(traits, key=operator.attrgetter('dtype')),
                raw=events_dict['raw']
            )
def _generate_models(self):
    """Record one Foo, Bar and Zoo event, one day apart.

    Message IDs are 'base' values: '0' for the first event, '100' for
    the second, and so on.  The first event's generated/trait_D time is
    2013-12-31 05:00; each following event adds one day.
    """
    self.trait_time = datetime.datetime(2013, 12, 31, 5, 0)
    base = 0
    event_models = []
    for event_type in ['Foo', 'Bar', 'Zoo']:
        traits = [
            models.Trait('trait_A', models.Trait.TEXT_TYPE,
                         "my_%s_text" % event_type),
            models.Trait('trait_B', models.Trait.INT_TYPE, base + 1),
            models.Trait('trait_C', models.Trait.FLOAT_TYPE,
                         float(base) + 0.123456),
            models.Trait('trait_D', models.Trait.DATETIME_TYPE,
                         self.trait_time),
        ]
        event_models.append(
            models.Event(message_id=str(base),
                         event_type=event_type,
                         generated=self.trait_time,
                         traits=traits))
        base += 100
        self.trait_time += datetime.timedelta(days=1)
    self.event_conn.record_events(event_models)
def record_events(self, events):
    """Convert incoming event dicts and persist them.

    Events failing model conversion are logged and dropped rather than
    failing the whole batch.

    :param events: a single event dict or a list of event dicts.
    """
    if not isinstance(events, list):
        events = [events]
    event_list = []
    for ev in events:
        try:
            event_list.append(
                models.Event(message_id=ev['message_id'],
                             event_type=ev['event_type'],
                             generated=timeutils.normalize_time(
                                 timeutils.parse_isotime(ev['generated'])),
                             traits=[
                                 models.Trait(
                                     name, dtype,
                                     models.Trait.convert_value(
                                         dtype, value))
                                 for name, dtype, value in ev['traits']
                             ],
                             raw=ev.get('raw', {})))
        except Exception:
            # A malformed payload must not poison the whole batch.
            LOG.exception(
                _LE("Error processing event and it will be "
                    "dropped: %s"), ev)
    self.event_conn.record_events(event_list)
def get_events(self, event_filter):
    """Return an iter of models.Event objects matching event_filter.

    Searches Elasticsearch across all indices matching this driver's
    index-name prefix, reconstructing trait types from each event
    type's stored trait mappings.
    """
    iclient = es.client.IndicesClient(self.conn)
    indices = iclient.get_mapping('%s_*' % self.index_name).keys()
    if indices:
        filter_args = self._make_dsl_from_filter(indices, event_filter)
        results = self.conn.search(
            fields=['_id', 'timestamp', '_type', '_source'],
            sort='timestamp:asc',
            **filter_args)
        # Cache trait-type mappings per event type so they are only
        # fetched once per search.
        trait_mappings = {}
        for record in results['hits']['hits']:
            trait_list = []
            if not record['_type'] in trait_mappings:
                trait_mappings[record['_type']] = list(
                    self.get_trait_types(record['_type']))
            for key in record['_source']['traits'].keys():
                value = record['_source']['traits'][key]
                # NOTE(review): if no mapping entry matches `key`,
                # `dtype` keeps its value from a previous iteration (or
                # is unbound on the first one) — confirm the stored
                # mappings always cover every trait.
                for t_map in trait_mappings[record['_type']]:
                    if t_map['name'] == key:
                        dtype = t_map['data_type']
                        break
                trait_list.append(
                    models.Trait(name=key, dtype=dtype,
                                 value=models.Trait.convert_value(
                                     dtype, value)))
            gen_ts = timeutils.normalize_time(
                timeutils.parse_isotime(record['_source']['timestamp']))
            yield models.Event(message_id=record['_id'],
                               event_type=record['_type'],
                               generated=gen_ts,
                               traits=sorted(
                                   trait_list,
                                   key=operator.attrgetter('dtype')))
def test_event_conn(self):
    """record_events() must delegate to the storage connection."""
    ev = event_models.Event(uuid.uuid4(), 'test',
                            datetime.datetime(2012, 7, 2, 13, 53, 40),
                            [])
    patcher = mock.patch.object(self.dispatcher.event_conn,
                                'record_events')
    with patcher as record_events:
        self.dispatcher.record_events(ev)
        self.assertTrue(record_events.called)
def test_event_conn(self):
    """A signed event message yields exactly one stored event."""
    ev = event_models.Event(uuid.uuid4(), 'test',
                            datetime.datetime(2012, 7, 2, 13, 53, 40),
                            [], {})
    signed = utils.message_from_event(
        ev, self.CONF.publisher.telemetry_secret)
    with mock.patch.object(self.dispatcher.event_conn,
                           'record_events') as record_events:
        self.dispatcher.record_events(signed)
        # First positional arg of the first call is the event list.
        self.assertEqual(1, len(record_events.call_args_list[0][0][0]))
def setUp(self):
    """Build two fixture events and patch publisher/pipeline plumbing."""
    super(EventPipelineTestCase, self).setUp()
    self.CONF = self.useFixture(fixture_config.Config()).conf
    self.CONF([])
    self.p_type = pipeline.EVENT_TYPE
    # Event pipelines carry no transformers.
    self.transformer_manager = None
    self.test_event = models.Event(
        message_id=uuid.uuid4(),
        event_type='a',
        generated=datetime.datetime.utcnow(),
        traits=[
            models.Trait('t_text', 1, 'text_trait'),
            models.Trait('t_int', 2, 'int_trait'),
            models.Trait('t_float', 3, 'float_trait'),
            models.Trait('t_datetime', 4, 'datetime_trait')
        ],
        raw={'status': 'started'})
    self.test_event2 = models.Event(
        message_id=uuid.uuid4(),
        event_type='b',
        generated=datetime.datetime.utcnow(),
        traits=[
            models.Trait('t_text', 1, 'text_trait'),
            models.Trait('t_int', 2, 'int_trait'),
            models.Trait('t_float', 3, 'float_trait'),
            models.Trait('t_datetime', 4, 'datetime_trait')
        ],
        raw={'status': 'stopped'})
    # Route publisher lookups through the test case's own factory.
    self.useFixture(
        mockpatch.PatchObject(publisher, 'get_publisher',
                              side_effect=self.get_publisher))
    self._setup_pipeline_cfg()
    # Re-raise exceptions logged by the pipeline so tests fail loudly.
    self._reraise_exception = True
    self.useFixture(
        mockpatch.Patch('ceilometer.pipeline.LOG.exception',
                        side_effect=self._handle_reraise_exception))
def setUp(self):
    """Store a single event owned by 'project-good'/'user-good'."""
    super(TestBaseApiEventRBAC, self).setUp()
    self.message_id = str(uuid.uuid4())
    ev = ev_model.Event(
        self.message_id, 'event_type', datetime.datetime.now(),
        [ev_model.Trait('project_id', 1, 'project-good'),
         ev_model.Trait('user_id', 1, 'user-good')],
        {})
    self.event_conn.record_events([ev])
def to_event(self, notification_body):
    """Convert a notification body into a models.Event (no raw payload)."""
    event_type = notification_body['event_type']
    message_id = notification_body['message_id']
    when = self._extract_when(notification_body)
    # Only accept non-None value traits ...
    candidates = (self.traits[t].to_trait(notification_body)
                  for t in self.traits)
    traits = [trait for trait in candidates if trait is not None]
    return models.Event(message_id, event_type, when, traits)
def test_http_dispatcher_bad(self):
    """An empty event_target must make record_events log an exception."""
    self.CONF.dispatcher_http.event_target = ''
    dispatcher = http.HttpDispatcher(self.CONF)
    serialized = event_models.Event(
        uuid.uuid4(), 'test',
        datetime.datetime(2012, 7, 2, 13, 53, 40), [], {}).serialize()
    with mock.patch('ceilometer.dispatcher.http.LOG',
                    mock.MagicMock()) as LOG:
        dispatcher.record_events(serialized)
        self.assertTrue(LOG.exception.called)
def setUp(self):
    """Prepare a signed fixture event for the HTTP dispatcher tests."""
    super(TestEventDispatcherHttp, self).setUp()
    self.CONF = service.prepare_service([], [])
    # repr(uuid.uuid4()) is used in test event creation to avoid an
    # exception being thrown when the uuid is serialized to JSON
    ev = event_models.Event(repr(uuid.uuid4()), 'test',
                            datetime.datetime(2012, 7, 2, 13, 53, 40),
                            [], {})
    self.event = utils.message_from_event(
        ev, self.CONF.publisher.telemetry_secret)
def test_http_dispatcher_share_target(self):
    """With only `target` set, events post to the shared target URL."""
    self.CONF.dispatcher_http.target = 'fake'
    dispatcher = http.HttpDispatcher(self.CONF)
    serialized = event_models.Event(
        uuid.uuid4(), 'test',
        datetime.datetime(2012, 7, 2, 13, 53, 40), [], {}).serialize()
    with mock.patch.object(requests, 'post') as post:
        dispatcher.record_events(serialized)
        self.assertEqual('fake', post.call_args[0][0])
def test_http_dispatcher_bad(self):
    """An empty event_target makes record_events log an exception."""
    self.CONF.dispatcher_http.event_target = ''
    dispatcher = http.HttpDispatcher(self.CONF)
    ev = event_models.Event(uuid.uuid4(), 'test',
                            datetime.datetime(2012, 7, 2, 13, 53, 40),
                            [], {})
    signed = utils.message_from_event(
        ev, self.CONF.publisher.telemetry_secret)
    with mock.patch('ceilometer.dispatcher.http.LOG',
                    mock.MagicMock()) as LOG:
        dispatcher.record_events(signed)
        self.assertTrue(LOG.exception.called)
def to_event(self, notification_body):
    """Convert a notification body into a models.Event.

    The full body is kept as the event's raw payload only when the
    notification priority is one of the configured raw levels.
    """
    event_type = notification_body['event_type']
    message_id = notification_body['message_id']
    when = self._extract_when(notification_body)
    # Only accept non-None value traits ...
    candidates = (self.traits[t].to_trait(notification_body)
                  for t in self.traits)
    traits = [trait for trait in candidates if trait is not None]
    if notification_body.get('priority') in self.raw_levels:
        raw = notification_body
    else:
        raw = {}
    return models.Event(message_id, event_type, when, traits, raw)
def test_http_dispatcher_share_target(self):
    """With only `target` set, signed events post to the shared URL."""
    self.CONF.dispatcher_http.target = 'fake'
    dispatcher = http.HttpDispatcher(self.CONF)
    ev = event_models.Event(uuid.uuid4(), 'test',
                            datetime.datetime(2012, 7, 2, 13, 53, 40),
                            [], {})
    signed = utils.message_from_event(
        ev, self.CONF.publisher.telemetry_secret)
    with mock.patch.object(requests, 'post') as post:
        dispatcher.record_events(signed)
        self.assertEqual('fake', post.call_args[0][0])
def _verify_data(self, trait, trait_table):
    """Record one event carrying `trait` and check that only the
    expected SQL trait table received a row.
    """
    now = datetime.datetime.utcnow()
    self.event_conn.record_events(
        [models.Event('1', 'name', now, [trait], {})])
    session = self.event_conn._engine_facade.get_session()
    for table in (sql_models.TraitText, sql_models.TraitFloat,
                  sql_models.TraitInt, sql_models.TraitDatetime):
        expected = 1 if table == trait_table else 0
        self.assertEqual(expected, session.query(table).count())
def to_event(self, priority, notification_body):
    """Convert a versioned notification into a models.Event.

    The full body becomes the event's raw payload only when `priority`
    is one of the configured raw levels.
    """
    event_type = notification_body['event_type']
    message_id = notification_body['metadata']['message_id']
    when = timeutils.normalize_time(
        timeutils.parse_isotime(
            notification_body['metadata']['timestamp']))
    # Only accept non-None value traits ...
    candidates = (self.traits[t].to_trait(notification_body)
                  for t in self.traits)
    traits = [trait for trait in candidates if trait is not None]
    raw = notification_body if priority in self.raw_levels else {}
    return models.Event(message_id, event_type, when, traits, raw)
def sample(self, ctxt, publisher_id, event_type, payload, metadata):
    """Publish every event dict in the payload as a models.Event."""
    events = []
    for ev in payload:
        generated = timeutils.normalize_time(
            timeutils.parse_isotime(ev['generated']))
        traits = [models.Trait(name, dtype,
                               models.Trait.convert_value(dtype, value))
                  for name, dtype, value in ev['traits']]
        events.append(models.Event(message_id=ev['message_id'],
                                   event_type=ev['event_type'],
                                   generated=generated,
                                   traits=traits))
    with self.publish_context as p:
        p(events)
class TestEventDirectPublisher(tests_db.TestBase):
    """Exercise the direct:// publisher against event storage."""

    # Five empty events with distinct message IDs and types.
    test_data = [event.Event(message_id=str(uuid.uuid4()),
                             event_type='event_%d' % i,
                             generated=datetime.datetime.utcnow(),
                             traits=[], raw={})
                 for i in range(0, 5)]

    def test_direct_publisher(self):
        publisher = direct.DirectPublisher(netutils.urlsplit('direct://'))
        publisher.publish_events(None, self.test_data)
        stored_types = sorted(self.event_conn.get_event_types())
        self.assertEqual(5, len(stored_types))
        self.assertEqual(['event_%d' % i for i in range(0, 5)],
                         stored_types)
def sample(self, ctxt, publisher_id, event_type, payload, metadata):
    """Publish signature-verified event dicts as models.Event objects."""
    secret = cfg.CONF.publisher.telemetry_secret
    events = []
    for ev in payload:
        # Drop anything that does not carry a valid signature.
        if not publisher_utils.verify_signature(ev, secret):
            continue
        events.append(models.Event(
            message_id=ev['message_id'],
            event_type=ev['event_type'],
            generated=timeutils.normalize_time(
                timeutils.parse_isotime(ev['generated'])),
            traits=[models.Trait(name, dtype,
                                 models.Trait.convert_value(dtype, value))
                    for name, dtype, value in ev['traits']],
            raw=ev.get('raw', {})))
    with self.publish_context as p:
        p(events)
def get_events(self, event_filter):
    """Return an iter of models.Event objects.

    :param event_filter: storage.EventFilter object, consists of filters
                         for events that are stored in database.
    """
    query = pymongo_utils.make_events_query_from_filter(event_filter)
    for event in self.db.event.find(query):
        traits = [models.Trait(name=t['trait_name'],
                               dtype=int(t['trait_type']),
                               value=t['trait_value'])
                  for t in event['traits']]
        yield models.Event(message_id=event['_id'],
                           event_type=event['event_type'],
                           generated=event['timestamp'],
                           traits=traits,
                           raw=event.get('raw'))
def start_fixture(self):
    """Create some events."""
    conf = fixture_config.Config().conf
    self.conn = storage.get_connection_from_config(conf, 'event')
    events = []
    for ix, name in enumerate(['chocolate.chip', 'peanut.butter', 'sugar']):
        timestamp = datetime.datetime.utcnow()
        # Deterministic message IDs: suffix is the item's index.
        message_id = 'fea1b15a-1d47-4175-85a5-a4bb2c72924{}'.format(ix)
        events.append(models.Event(
            message_id,
            'cookies_{}'.format(name),
            timestamp,
            [models.Trait('type', 1, name), models.Trait('ate', 2, ix)],
            {'nested': {'inside': 'value'}}))
    self.conn.record_events(events)