def create_event_producer(tenant, name, pattern, durable, encrypted, sinks):
    """Create an EventProducer, attach it to *tenant*, and persist the tenant.

    Also queues creation of a TTL mapping for the producer's pattern in the
    default sink, and returns the id of the newly created producer.
    """
    producer = EventProducer(
        _db_handler.next_sequence_value(tenant.tenant_id),
        name, pattern, durable, encrypted, sinks)

    # register the producer on the tenant and persist the change
    tenant.event_producers.append(producer)
    save_tenant(tenant)

    # create a new mapping for the producer in the default sink
    # so that time_to_live is enabled for its messages
    ttl_tasks.create_ttl_mapping.delay(
        tenant_id=tenant.tenant_id,
        producer_pattern=producer.pattern)

    return producer.get_id()
def before(self):
    """Build the mocked request/response pair and tenant fixtures."""
    self.db_handler = MagicMock()

    self.req = MagicMock()
    self.req.content_type = 'application/json'
    self.resp = MagicMock()

    self.producer_id = 432
    self.producer_name = 'producer1'
    # NOTE(review): same id as producer_id (432) — possibly intended to be
    # 433; confirm against the tests that rely on these fixtures.
    self.producer_id_2 = 432
    self.producer_name_2 = 'producer2'
    self.not_valid_producer_id = 777
    self.producers = [
        EventProducer(self.producer_id, self.producer_name, 'syslog'),
        EventProducer(self.producer_id_2, self.producer_name_2, 'syslog')
    ]

    self.token_original = 'ffe7104e-8d93-47dc-a49a-8fb0d39e5192'
    self.token_previous = 'bbd6302e-8d93-47dc-a49a-8fb0d39e5192'
    self.token_invalid = 'xxxyyy33-8d93-47dc-a49a-8fb0d39e5192'
    self.timestamp_original = "2013-03-19T18:16:48.411029Z"
    self.token = Token(self.token_original, self.token_previous,
                       self.timestamp_original)

    self.tenant_id = '1234'
    self.tenant_name = 'TenantName'
    self.tenant = Tenant(self.tenant_id, self.token,
                         event_producers=self.producers)
    self.tenant_not_found = MagicMock(return_value=None)
    self.tenant_found = MagicMock(return_value=self.tenant)

    self._set_resource()
def setUp(self):
    """Create the tenant, token, cache, and worker-config fixtures."""
    self.timestamp = "2013-03-19T18:16:48.411029Z"

    # one durable and one non-durable producer
    self.producers = [
        EventProducer(432, 'producer1', 'syslog', durable=True),
        EventProducer(433, 'producer2', 'syslog', durable=False)
    ]
    self.token = Token('ffe7104e-8d93-47dc-a49a-8fb0d39e5192',
                       'bbd6302e-8d93-47dc-a49a-8fb0d39e5192',
                       "2013-03-19T18:16:48.411029Z")
    self.tenant_id = '1234'
    self.tenant = Tenant(self.tenant_id, self.token,
                         event_producers=self.producers)
    self.tenant_found = MagicMock(return_value=self.tenant)

    self.cache = MagicMock()

    self.valid_message_token = 'ffe7104e-8d93-47dc-a49a-8fb0d39e5192'
    self.invalid_message_token = 'yyy7104e-8d93-47dc-a49a-8fb0d39e5192'
    self.get_token = MagicMock(return_value=self.token)
    self.get_tenant = MagicMock(return_value=self.tenant)
    self.get_none = MagicMock(return_value=None)

    self.config = WorkerConfiguration(
        personality='correlation',
        personality_module='meniscus.personas.worker.correlation.app',
        worker_id='fgc7104e-8d93-47dc-a49a-8fb0d39e5192',
        worker_token='bbd6307f-8d93-47dc-a49a-8fb0d39e5192',
        coordinator_uri='http://192.168.1.2/v1')
    self.get_config = MagicMock(return_value=self.config)
def _add_correlation_info_to_message(tenant, message):
    """Pack *message* with correlation data and queue it for processing.

    The message is updated with a dictionary named "meniscus" that carries
    tenant-specific information used while processing the message, then
    routed either through normalization or straight to the sinks.
    """
    # locate the producer that matches the message's pname; fall back to
    # a default producer when no match is found
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # per-sink dispatch bookkeeping, one entry for each configured sink
    destinations = {
        sink: {'transaction_id': None, 'transaction_time': None}
        for sink in producer.sinks
    }

    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # After successful correlation remove meniscus information from
    # structured data so the client's token is scrubbed from the message.
    message['native'].pop('meniscus', None)
    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    # route through normalization when rules apply; otherwise queue the
    # message directly for indexing/storage
    if normalizer.should_normalize(message):
        normalizer.normalize_message.delay(message)
    else:
        sinks.route_message(message)
def _add_correlation_info_to_message(tenant, message):
    """Pack *message* with correlation data and queue it for processing.

    The message is updated with a dictionary named "meniscus" that carries
    tenant-specific information used while processing the message, then
    routed either through normalization or straight to persistence.
    """
    # locate the producer that matches the message's pname; fall back to
    # a default producer when no match is found
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # per-sink dispatch bookkeeping, one entry for each configured sink
    destinations = {
        sink: {'transaction_id': None, 'transaction_time': None}
        for sink in producer.sinks
    }

    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # After successful correlation remove meniscus information from
    # structured data so the client's token is scrubbed from the message.
    message['native'].pop('meniscus', None)
    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    if normalizer.should_normalize(message):
        # TODO(stevendgonzales): examine whether persist_message should
        # remain a linked subtask (callback) of the normalization task, or
        # whether the task should instead be queued based on routing
        # determined at the end of the normalization process.
        # send the message to normalization then to the data dispatch
        normalizer.normalize_message.apply_async(
            (message,), link=dispatch.persist_message.subtask())
    else:
        # queue the message for indexing/storage
        dispatch.persist_message(message)
def on_post(self, req, resp, tenant_id, validated_body):
    """Create a new event producer for *tenant_id*.

    Responds 201 with a Location header pointing at the new producer.
    Aborts with 400 when a producer with the same name already exists,
    and signals tenant-not-found when the tenant cannot be loaded.
    """
    body = validated_body['event_producer']

    tenant = find_tenant(self.db, tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    event_producer_name = body['name']
    event_producer_pattern = body['pattern']

    # durable/encrypted default to False and sinks to None when omitted
    # (dict.get replaces the verbose "'k' in body.keys()" checks)
    event_producer_durable = body.get('durable', False)
    event_producer_encrypted = body.get('encrypted', False)
    event_producer_sinks = body.get('sinks')

    # Check if the tenant already has an event producer with this name
    producer = find_event_producer(tenant,
                                   producer_name=event_producer_name)
    if producer:
        abort(falcon.HTTP_400,
              'Event producer with name {0} already exists with id={1}.'
              .format(producer.name, producer.get_id()))

    # Create the new profile for the host and persist it on the tenant
    new_event_producer = EventProducer(
        self.db.next_sequence_value(tenant.tenant_id),
        event_producer_name,
        event_producer_pattern,
        event_producer_durable,
        event_producer_encrypted,
        event_producer_sinks)

    tenant.event_producers.append(new_event_producer)
    self.db.update('tenant', tenant.format_for_save())

    resp.status = falcon.HTTP_201
    resp.set_header('Location',
                    '/v1/{0}/producers/{1}'
                    .format(tenant_id, new_event_producer.get_id()))
def on_post(self, req, resp, tenant_id, validated_body):
    """Create a new event producer for *tenant_id*.

    Responds 201 with a Location header pointing at the new producer.
    Aborts with 400 when a producer with the same name already exists,
    and signals tenant-not-found when the tenant cannot be loaded.
    """
    body = validated_body['event_producer']

    tenant = find_tenant(self.db, tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    event_producer_name = body['name']
    event_producer_pattern = body['pattern']

    # durable/encrypted default to False and sinks to None when omitted
    # (dict.get replaces the verbose "'k' in body.keys()" checks)
    event_producer_durable = body.get('durable', False)
    event_producer_encrypted = body.get('encrypted', False)
    event_producer_sinks = body.get('sinks')

    # Check if the tenant already has an event producer with this name
    producer = find_event_producer(tenant,
                                   producer_name=event_producer_name)
    if producer:
        abort(
            falcon.HTTP_400,
            'Event producer with name {0} already exists with id={1}.'.
            format(producer.name, producer.get_id()))

    # Create the new profile for the host and persist it on the tenant
    new_event_producer = EventProducer(
        self.db.next_sequence_value(tenant.tenant_id),
        event_producer_name,
        event_producer_pattern,
        event_producer_durable,
        event_producer_encrypted,
        event_producer_sinks)

    tenant.event_producers.append(new_event_producer)
    self.db.update('tenant', tenant.format_for_save())

    resp.status = falcon.HTTP_201
    resp.set_header(
        'Location',
        '/v1/{0}/producers/{1}'.format(tenant_id,
                                       new_event_producer.get_id()))
class WhenTestingEventProducerObject(unittest.TestCase):
    """Exercises EventProducer id access and dict formatting."""

    def setUp(self):
        # patch DEFAULT_SINK so the producer is created with a known sink
        with patch('meniscus.data.model.tenant.DEFAULT_SINK',
                   'elasticsearch'):
            self.event_producer = EventProducer(
                'EVid', 'mybillingsapp', 'syslog', 'true', 'false')

    def test_event_producer_object_get_id(self):
        self.assertEqual('EVid', self.event_producer.get_id())

    def test_event_producer_object_format(self):
        ep_dict = self.event_producer.format()
        self.assertEqual('EVid', ep_dict['id'])
        self.assertEqual('mybillingsapp', ep_dict['name'])
        self.assertEqual('syslog', ep_dict['pattern'])
        self.assertEqual('true', ep_dict['durable'])
        self.assertEqual('false', ep_dict['encrypted'])
        self.assertListEqual(['elasticsearch'], ep_dict['sinks'])
def setUp(self):
    """Create the EventProducer under test with DEFAULT_SINK patched."""
    # patch DEFAULT_SINK so the producer is created with a known sink
    with patch('meniscus.data.model.tenant.DEFAULT_SINK', 'elasticsearch'):
        self.event_producer = EventProducer(
            'EVid', 'mybillingsapp', 'syslog', 'true', 'false')
class WhenTestingEventProducerObject(unittest.TestCase):
    """Exercises EventProducer id access and dict formatting."""

    def setUp(self):
        self.event_producer = EventProducer(
            'EVid', 'mybillingsapp', 'syslog', 'true', 'false')

    def test_event_producer_object_get_id(self):
        self.assertEqual('EVid', self.event_producer.get_id())

    def test_event_producer_object_format(self):
        ep_dict = self.event_producer.format()
        self.assertEqual('EVid', ep_dict['id'])
        self.assertEqual('mybillingsapp', ep_dict['name'])
        self.assertEqual('syslog', ep_dict['pattern'])
        self.assertEqual('true', ep_dict['durable'])
        self.assertEqual('false', ep_dict['encrypted'])
def add_correlation_info_to_message(tenant, message):
    """Pack *message* with tenant correlation data and return it.

    Adds a "meniscus" dictionary carrying producer/sink routing data; a
    durable producer additionally gets a generated 'job_id'.
    """
    # match the producer by the message pname
    producer = find_event_producer(
        tenant, producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": dict()
    }

    # configure sink dispatch
    # (removed dead local `destinations = dict()` — the loop writes
    # directly into correlation_dict["destinations"])
    for sink in producer.sinks:
        correlation_dict["destinations"][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # todo(sgonzales) persist message and create job
    if producer.durable:
        durable_job_id = str(uuid4())
        correlation_dict.update({'job_id': durable_job_id})

    message.update({
        "meniscus": {
            "tenant": tenant.tenant_id,
            "correlation": correlation_dict
        }
    })

    return message
def setUp(self):
    """Create a named tenant with two producers and a template destination."""
    # the first producer is durable and carries the valid sink list
    self.producers = [
        EventProducer(432, 'producer1', 'syslog', durable=True,
                      sinks=VALID_SINKS),
        EventProducer(433, 'producer2', 'syslog', durable=False)
    ]
    self.token = Token('ffe7104e-8d93-47dc-a49a-8fb0d39e5192',
                       'bbd6302e-8d93-47dc-a49a-8fb0d39e5192',
                       "2013-03-19T18:16:48.411029Z")
    self.tenant_id = '1234'
    self.tenant_name = 'TenantName'
    self.tenant = Tenant(self.tenant_id, self.token,
                         event_producers=self.producers,
                         tenant_name=self.tenant_name)
    # empty per-sink destination record as produced by correlation
    self.destination = {'transaction_id': None, 'transaction_time': None}
def add_correlation_info_to_message(tenant, message):
    """Pack *message* with tenant correlation data and return it.

    Adds a "meniscus" dictionary carrying producer/sink routing data; a
    durable producer additionally gets a generated 'job_id'.
    """
    # match the producer by the message pname
    producer = find_event_producer(tenant, producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": dict()
    }

    # configure sink dispatch
    # (removed dead local `destinations = dict()` — the loop writes
    # directly into correlation_dict["destinations"])
    for sink in producer.sinks:
        correlation_dict["destinations"][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # todo(sgonzales) persist message and create job
    if producer.durable:
        durable_job_id = str(uuid4())
        correlation_dict.update({'job_id': durable_job_id})

    message.update({
        "meniscus": {
            "tenant": tenant.tenant_id,
            "correlation": correlation_dict
        }
    })

    return message
def create_event_producer(tenant, name, pattern, durable, encrypted, sinks):
    """Create an EventProducer, attach it to *tenant*, and persist the tenant.

    Also queues creation of a TTL mapping for the producer's pattern in the
    default sink, and returns the id of the newly created producer.
    """
    producer = EventProducer(
        _db_handler.next_sequence_value(tenant.tenant_id),
        name, pattern, durable, encrypted, sinks)

    # register the producer on the tenant and persist the change
    tenant.event_producers.append(producer)
    save_tenant(tenant)

    # create a new mapping for the producer in the default sink
    # so that time_to_live is enabled for its messages
    mapping_tasks.create_ttl_mapping.delay(
        tenant_id=tenant.tenant_id,
        producer_pattern=producer.pattern)

    return producer.get_id()
def setUp(self):
    """Build a tenant dictionary plus mocked datastore and cache handlers."""
    self.tenant_id = "12673247623548752387452378"
    # raw persisted form of a tenant with two event producers
    self.tenant_dict = {
        "tenant_id": self.tenant_id,
        "tenant_name": "TenantName",
        "_id": "507f1f77bcf86cd799439011",
        "event_producers": [
            {
                "id": 123,
                "name": "apache",
                "pattern": "apache2.cee",
                "durable": False,
                "encrypted": False,
                "sinks": ["elasticsearch"]
            },
            {
                "id": 124,
                "name": "system.auth",
                "pattern": "auth_log.cee",
                "durable": False,
                "encrypted": False,
                "sinks": ["elasticsearch", "hdfs"]
            }
        ],
        "token": {
            "valid": "c8a4db32-635a-46b6-94ed-04b1bd533f41",
            "previous": None,
            "last_changed": "2013-03-19T18:16:48.411029Z"
        }
    }

    self.producer_id = "234"
    self.event_producer = EventProducer(_id=self.producer_id,
                                        name="nginx",
                                        pattern="nginx")

    # datastore handler that always finds the tenant dict
    self.ds_handler = MagicMock()
    self.ds_handler.find_one.return_value = self.tenant_dict
    self.tenant_obj = tenant_util.load_tenant_from_dict(self.tenant_dict)

    # cache primed with the tenant and its token
    self.tenant_cache = MagicMock()
    self.tenant_cache.cache_get.return_value = jsonutils.dumps(
        self.tenant_dict)
    self.tenant_cache.cache_exists.return_value = True
    self.tenant_cache.cache_update = MagicMock()

    self.token_cache = MagicMock()
    self.token_cache.cache_get.return_value = jsonutils.dumps(
        self.tenant_dict['token'])
    self.token_cache.cache_exists.return_value = True

    # cache that reports a miss for everything
    self.cache_empty = MagicMock()
    self.cache_empty.cache_exists.return_value = False
    self.cache_empty.cache_set = MagicMock()
def load_tenant_from_dict(tenant_dict):
    """Rehydrate a Tenant object from its dictionary representation.

    Builds EventProducer objects from the 'event_producers' entries and a
    Token from 'token'. The '_id' key is optional and defaults to None
    when the tenant has not yet been persisted.
    """
    #Create a list of EventProducer objects from the dictionary
    event_producers = [
        EventProducer(e['id'], e['name'], e['pattern'],
                      e['durable'], e['encrypted'], e['sinks'])
        for e in tenant_dict['event_producers']
    ]

    token = load_token_from_dict(tenant_dict['token'])

    # dict.get replaces the verbose "'_id' in tenant_dict.keys()" check;
    # absent keys yield None just as before
    _id = tenant_dict.get('_id')

    #Create and return the parent tenant object
    return Tenant(tenant_dict['tenant_id'], token,
                  event_producers=event_producers,
                  _id=_id,
                  tenant_name=tenant_dict['tenant_name'])
def setUp(self):
    """Build tenant fixtures plus sample syslog and CEE messages."""
    self.tenant_id = '5164b8f4-16fb-4376-9d29-8a6cbaa02fa9'
    self.message_token = 'ffe7104e-8d93-47dc-a49a-8fb0d39e5192'
    self.invalid_message_token = 'yyy7104e-8d93-47dc-a49a-8fb0d39e5192'

    # one durable and one non-durable producer on the tenant
    self.producers = [
        EventProducer(432, 'producer1', 'syslog', durable=True),
        EventProducer(433, 'producer2', 'syslog', durable=False)
    ]
    self.token = Token('ffe7104e-8d93-47dc-a49a-8fb0d39e5192',
                       'bbd6302e-8d93-47dc-a49a-8fb0d39e5192',
                       '2013-03-19T18:16:48.411029Z')
    self.tenant = Tenant(self.tenant_id, self.token,
                         event_producers=self.producers)

    self.get_token = MagicMock(return_value=self.token)
    self.get_tenant = MagicMock(return_value=self.tenant)
    self.get_none = MagicMock(return_value=None)

    # well-formed syslog message carrying meniscus structured data
    self.src_msg = {
        'HOST': 'tohru',
        '_SDATA': {
            'meniscus': {
                'token': self.message_token,
                'tenant': self.tenant_id
            }
        },
        'PRIORITY': 'info',
        'MESSAGE': '127.0.0.1 - - [12/Jul/2013:19:40:58 +0000] '
                   '\'GET /test.html HTTP/1.1\' 404 466 \'-\' '
                   '\'curl/7.29.0\'',
        'FACILITY': 'local1',
        'MSGID': '345',
        'ISODATE': '2013-07-12T14:17:00+00:00',
        'PROGRAM': 'apache',
        'DATE': '2013-07-12T14:17:00.134+00:00',
        'PID': '234'
    }
    # same message, but with empty token/tenant credentials
    self.malformed_sys_msg = {
        'HOST': 'tohru',
        '_SDATA': {
            'meniscus': {
                'token': '',
                'tenant': ''
            }
        },
        'PRIORITY': 'info',
        'MESSAGE': '127.0.0.1 - - [12/Jul/2013:19:40:58 +0000] '
                   '\'GET /test.html HTTP/1.1\' 404 466 \'-\' '
                   '\'curl/7.29.0\'',
        'FACILITY': 'local1',
        'MSGID': '345',
        'ISODATE': '2013-07-12T14:17:00+00:00',
        'PROGRAM': 'apache',
        'DATE': '2013-07-12T14:17:00.134+00:00',
        'PID': '234'
    }
    # CEE representation of the same message
    self.cee_msg = {
        'host': 'tohru',
        'pri': 'info',
        'msg': '127.0.0.1 - - [12/Jul/2013:19:40:58 +0000] '
               '\'GET /test.html HTTP/1.1\' 404 466 \'-\' '
               '\'curl/7.29.0\'',
        'msgid': '345',
        'time': '2013-07-12T14:17:00+00:00',
        'pname': 'apache',
        'pid': '234',
        'ver': '1',
        'native': {
            'meniscus': {
                'token': 'ffe7104e-8d93-47dc-a49a-8fb0d39e5192',
                'tenant': '5164b8f4-16fb-4376-9d29-8a6cbaa02fa9'
            }
        }
    }

    self.config = WorkerConfiguration(
        personality='worker',
        hostname='worker01',
        coordinator_uri='http://192.168.1.2/v1')
    self.get_config = MagicMock(return_value=self.config)
    self.tenant_found = MagicMock(return_value=self.tenant)
def setUp(self):
    """Create the EventProducer instance exercised by the tests."""
    self.event_producer = EventProducer(
        'EVid', 'mybillingsapp', 'syslog', 'true', 'false')