def create_event_producer(tenant, name, pattern, durable, encrypted, sinks):
    """
    Creates an Event Producer object, assigns it to a tenant,
    and updates the tenant in the datastore.
    """
    new_event_producer = EventProducer(
        _db_handler.next_sequence_value(tenant.tenant_id),
        name,
        pattern,
        durable,
        encrypted,
        sinks)

    # add the event_producer to the tenant
    tenant.event_producers.append(new_event_producer)

    # save the tenant's data
    save_tenant(tenant)

    # create a new mapping for the producer in the default
    # sink to enable time_to_live
    ttl_tasks.create_ttl_mapping.delay(
        tenant_id=tenant.tenant_id,
        producer_pattern=new_event_producer.pattern)

    # return the id of the newly created producer
    return new_event_producer.get_id()
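# A minimal usage sketch for create_event_producer above. The producer name,
# pattern, and sink values are illustrative assumptions, not part of the
# module; a real tenant object would come from the datastore.
def _example_create_producer(tenant):
    # hypothetical values for a producer that feeds an elasticsearch sink
    return create_event_producer(
        tenant,
        name='apache',
        pattern='apache.access',
        durable=False,
        encrypted=False,
        sinks=['elasticsearch'])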
def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data. The message will be updated
    by adding a dictionary named "meniscus" that contains tenant-specific
    information used in processing the message.
    """
    # match the producer by the message pname
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    # configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # After successful correlation remove meniscus information from
    # structured data so that the client's token is scrubbed from
    # the message.
    message['native'].pop('meniscus', None)
    message.update({'meniscus': {'tenant': tenant.tenant_id,
                                 'correlation': correlation_dict}})

    # If the message data indicates that the message has normalization
    # rules that apply, queue the message for normalization processing
    if normalizer.should_normalize(message):
        # TODO(stevendgonzales): Examine whether or not to remove
        # persist_message as a linked subtask (callback) of the
        # normalization task; instead queue the task based on routing
        # determined at the end of the normalization process.
        # send the message to normalization then to the data dispatch
        normalizer.normalize_message.apply_async(
            (message,),
            link=dispatch.persist_message.subtask())
    else:
        # queue the message for indexing/storage
        dispatch.persist_message(message)
def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data. The message will be updated
    by adding a dictionary named "meniscus" that contains tenant-specific
    information used in processing the message.
    """
    # match the producer by the message pname
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    # configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # After successful correlation remove meniscus information from
    # structured data so that the client's token is scrubbed from
    # the message.
    message['native'].pop('meniscus', None)
    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    # If the message data indicates that the message has normalization
    # rules that apply, queue the message for normalization processing
    if normalizer.should_normalize(message):
        # send the message to normalization, then route to the sink
        normalizer.normalize_message.delay(message)
    else:
        # queue the message for indexing/storage
        sinks.route_message(message)
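# For illustration only: the approximate shape of a message after
# _add_correlation_info_to_message has run, assuming a producer named
# 'apache' with a single 'elasticsearch' sink. All values are invented.
example_correlated_message = {
    'pname': 'apache',
    'native': {'app_field': 'app_value'},  # 'meniscus' key already popped
    'meniscus': {
        'tenant': 'tenant-id-001',
        'correlation': {
            'tenant_name': 'example-tenant',
            'ep_id': 1,
            'pattern': 'apache',
            'durable': False,
            'encrypted': False,
            '@timestamp': '2014-01-01T00:00:00Z',
            'sinks': ['elasticsearch'],
            'destinations': {
                'elasticsearch': {'transaction_id': None,
                                  'transaction_time': None}
            }
        }
    }
}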
def on_post(self, req, resp, tenant_id, validated_body):
    body = validated_body['event_producer']

    tenant = find_tenant(self.db, tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    event_producer_name = body['name']
    event_producer_pattern = body['pattern']

    # if durable or encrypted aren't specified, set to False;
    # sinks defaults to None
    event_producer_durable = body.get('durable', False)
    event_producer_encrypted = body.get('encrypted', False)
    event_producer_sinks = body.get('sinks', None)

    # Check if the tenant already has an event producer with this name
    producer = find_event_producer(
        tenant, producer_name=event_producer_name)
    if producer:
        abort(falcon.HTTP_400,
              'Event producer with name {0} already exists with id={1}.'
              .format(producer.name, producer.get_id()))

    # Create the new event producer for the tenant
    new_event_producer = EventProducer(
        self.db.next_sequence_value(tenant.tenant_id),
        event_producer_name,
        event_producer_pattern,
        event_producer_durable,
        event_producer_encrypted,
        event_producer_sinks)

    tenant.event_producers.append(new_event_producer)
    self.db.update('tenant', tenant.format_for_save())

    resp.status = falcon.HTTP_201
    resp.set_header('Location',
                    '/v1/{0}/producers/{1}'
                    .format(tenant_id, new_event_producer.get_id()))
def on_post(self, req, resp, tenant_id, validated_body):
    body = validated_body['event_producer']

    tenant = find_tenant(self.db, tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    event_producer_name = body['name']
    event_producer_pattern = body['pattern']

    # if durable or encrypted aren't specified, set to False;
    # sinks defaults to None
    event_producer_durable = body.get('durable', False)
    event_producer_encrypted = body.get('encrypted', False)
    event_producer_sinks = body.get('sinks', None)

    # Check if the tenant already has an event producer with this name
    producer = find_event_producer(
        tenant, producer_name=event_producer_name)
    if producer:
        abort(falcon.HTTP_400,
              'Event producer with name {0} already exists with id={1}.'
              .format(producer.name, producer.get_id()))

    # Create the new event producer for the tenant
    new_event_producer = EventProducer(
        self.db.next_sequence_value(tenant.tenant_id),
        event_producer_name,
        event_producer_pattern,
        event_producer_durable,
        event_producer_encrypted,
        event_producer_sinks)

    tenant.event_producers.append(new_event_producer)
    self.db.update('tenant', tenant.format_for_save())

    resp.status = falcon.HTTP_201
    resp.set_header(
        'Location',
        '/v1/{0}/producers/{1}'.format(tenant_id,
                                       new_event_producer.get_id()))
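# An illustrative validated_body for the on_post handler above. Only 'name'
# and 'pattern' are required; the other fields show the defaults applied
# when they are omitted. Values are invented examples.
example_validated_body = {
    'event_producer': {
        'name': 'apache',
        'pattern': 'apache.access',
        'durable': False,            # optional; defaults to False
        'encrypted': False,          # optional; defaults to False
        'sinks': ['elasticsearch']   # optional; defaults to None
    }
}
# On success the handler responds 201 Created with a Location header of
# the form /v1/{tenant_id}/producers/{producer_id}.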
import unittest

from mock import patch

from meniscus.data.model.tenant import EventProducer


class WhenTestingEventProducerObject(unittest.TestCase):

    def setUp(self):
        with patch('meniscus.data.model.tenant.DEFAULT_SINK',
                   'elasticsearch'):
            self.event_producer = EventProducer(
                'EVid', 'mybillingsapp', 'syslog', 'true', 'false')

    def test_event_producer_object_get_id(self):
        self.assertEqual(self.event_producer.get_id(), 'EVid')

    def test_event_producer_object_format(self):
        ep_dict = self.event_producer.format()
        self.assertEqual(ep_dict['id'], 'EVid')
        self.assertEqual(ep_dict['name'], 'mybillingsapp')
        self.assertEqual(ep_dict['pattern'], 'syslog')
        self.assertEqual(ep_dict['durable'], 'true')
        self.assertEqual(ep_dict['encrypted'], 'false')
        self.assertListEqual(ep_dict['sinks'], ['elasticsearch'])
import unittest

# import path assumed to match the sibling test module above
from meniscus.data.model.tenant import EventProducer


class WhenTestingEventProducerObject(unittest.TestCase):

    def setUp(self):
        self.event_producer = EventProducer(
            'EVid', 'mybillingsapp', 'syslog', 'true', 'false')

    def test_event_producer_object_get_id(self):
        self.assertEqual(self.event_producer.get_id(), 'EVid')

    def test_event_producer_object_format(self):
        ep_dict = self.event_producer.format()
        self.assertEqual(ep_dict['id'], 'EVid')
        self.assertEqual(ep_dict['name'], 'mybillingsapp')
        self.assertEqual(ep_dict['pattern'], 'syslog')
        self.assertEqual(ep_dict['durable'], 'true')
        self.assertEqual(ep_dict['encrypted'], 'false')
def add_correlation_info_to_message(tenant, message):
    # match the producer by the message pname
    producer = find_event_producer(
        tenant, producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    # configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # TODO(sgonzales): persist message and create job
    if producer.durable:
        durable_job_id = str(uuid4())
        correlation_dict.update({'job_id': durable_job_id})

    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    return message
def add_correlation_info_to_message(tenant, message):
    # match the producer by the message pname
    producer = find_event_producer(tenant,
                                   producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    # configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # TODO(sgonzales): persist message and create job
    if producer.durable:
        durable_job_id = str(uuid4())
        correlation_dict.update({'job_id': durable_job_id})

    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    return message
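# A self-contained sketch of the durable branch above: uuid4 supplies the
# job id attached to the correlation dictionary. Illustrative only.
from uuid import uuid4

example_correlation = {'durable': True}
if example_correlation['durable']:
    example_correlation.update({'job_id': str(uuid4())})
# example_correlation now carries a 'job_id' such as
# '3f2504e0-4f89-11d3-9a0c-0305e82c3301'; non-durable producers skip this.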
def create_event_producer(tenant, name, pattern, durable, encrypted, sinks):
    """
    Creates an Event Producer object, assigns it to a tenant,
    and updates the tenant in the datastore.
    """
    new_event_producer = EventProducer(
        _db_handler.next_sequence_value(tenant.tenant_id),
        name,
        pattern,
        durable,
        encrypted,
        sinks)

    # add the event_producer to the tenant
    tenant.event_producers.append(new_event_producer)

    # save the tenant's data
    save_tenant(tenant)

    # create a new mapping for the producer in the default
    # sink to enable time_to_live
    mapping_tasks.create_ttl_mapping.delay(
        tenant_id=tenant.tenant_id,
        producer_pattern=new_event_producer.pattern)

    # return the id of the newly created producer
    return new_event_producer.get_id()