def on_put(self, req, resp, tenant_id, event_producer_id, validated_body):
    """
    Update a specified Event Producer's configuration when an HTTP PUT
    is received.

    :param req: falcon request object
    :param resp: falcon response object
    :param tenant_id: id of the tenant that owns the producer
    :param event_producer_id: id of the producer to update
    :param validated_body: request body already validated against the
        event_producer schema; only keys present in the body are applied
    """
    body = validated_body['event_producer']

    # verify the tenant exists
    tenant = tenant_util.find_tenant(tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    # verify the event_producer exists and belongs to the tenant
    event_producer = tenant_util.find_event_producer(
        tenant, producer_id=event_producer_id)
    if not event_producer:
        _producer_not_found()

    # if a key is present, update the event_producer with the value
    if 'name' in body and event_producer.name != body['name']:
        # abort if the tenant already has a producer with this name
        duplicate_producer = tenant_util.find_event_producer(
            tenant, producer_name=body['name'])
        if duplicate_producer:
            api.abort(
                falcon.HTTP_400,
                'EventProducer with name {0} already exists with id={1}.'
                .format(duplicate_producer.name,
                        duplicate_producer.get_id()))
        event_producer.name = body['name']

    if 'pattern' in body:
        event_producer.pattern = str(body['pattern'])

    if 'durable' in body:
        event_producer.durable = body['durable']

    if 'encrypted' in body:
        event_producer.encrypted = body['encrypted']

    if 'sinks' in body:
        event_producer.sinks = body['sinks']

    # save the tenant document
    tenant_util.save_tenant(tenant)

    resp.status = falcon.HTTP_200
def on_put(self, req, resp, tenant_id, event_producer_id, validated_body):
    """
    Update a specified Event Producer's configuration when an HTTP PUT
    is received.

    :param req: falcon request object
    :param resp: falcon response object
    :param tenant_id: id of the tenant that owns the producer
    :param event_producer_id: id of the producer to update
    :param validated_body: request body already validated against the
        event_producer schema; only keys present in the body are applied
    """
    body = validated_body['event_producer']

    # verify the tenant exists
    tenant = tenant_util.find_tenant(tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    # verify the event_producer exists and belongs to the tenant
    event_producer = tenant_util.find_event_producer(
        tenant, producer_id=event_producer_id)
    if not event_producer:
        _producer_not_found()

    # if a key is present, update the event_producer with the value
    if 'name' in body and event_producer.name != body['name']:
        # abort if the tenant already has a producer with this name
        duplicate_producer = tenant_util.find_event_producer(
            tenant, producer_name=body['name'])
        if duplicate_producer:
            api.abort(
                falcon.HTTP_400,
                'EventProducer with name {0} already exists with id={1}.'
                .format(duplicate_producer.name,
                        duplicate_producer.get_id()))
        event_producer.name = body['name']

    if 'pattern' in body:
        event_producer.pattern = str(body['pattern'])

    if 'durable' in body:
        event_producer.durable = body['durable']

    if 'encrypted' in body:
        event_producer.encrypted = body['encrypted']

    if 'sinks' in body:
        event_producer.sinks = body['sinks']

    # save the tenant document
    tenant_util.save_tenant(tenant)

    resp.status = falcon.HTTP_200
def test_find_event_producer_by_name_returns_none(self):
    """find_event_producer should return None for an unknown producer name."""
    with patch('meniscus.data.model.tenant_util._db_handler',
               self.ds_handler):
        tenant = tenant_util.find_tenant('12345')
        producer = tenant_util.find_event_producer(
            tenant, producer_name='not_name')
        # assertIsNone replaces the deprecated assertEquals alias and
        # states the intent (identity with None) directly
        self.assertIsNone(producer)
def test_find_event_producer_by_name_returns_instance(self):
    """Looking up a known producer name yields an EventProducer instance."""
    with patch('meniscus.data.model.tenant_util._db_handler',
               self.ds_handler):
        found_tenant = tenant_util.find_tenant('12345')
        found_producer = tenant_util.find_event_producer(
            found_tenant, producer_name='system.auth')
        self.assertIsInstance(found_producer, EventProducer)
def on_post(self, req, resp, tenant_id, validated_body):
    """
    Create a new Event Producer for a specified Tenant when an HTTP
    POST is received.

    :param req: falcon request object
    :param resp: falcon response object
    :param tenant_id: id of the tenant that will own the new producer
    :param validated_body: request body already validated against the
        event_producer schema; 'name' and 'pattern' are required,
        'durable'/'encrypted' default to False and 'sinks' to None
    """
    body = validated_body['event_producer']

    tenant = tenant_util.find_tenant(tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    event_producer_name = body['name']
    event_producer_pattern = body['pattern']

    # if durable or encrypted aren't specified, set to False;
    # sinks defaults to None
    event_producer_durable = body.get('durable', False)
    event_producer_encrypted = body.get('encrypted', False)
    event_producer_sinks = body.get('sinks')

    # Check if the tenant already has an event producer with this name
    producer = tenant_util.find_event_producer(
        tenant, producer_name=event_producer_name)
    if producer:
        api.abort(
            falcon.HTTP_400,
            'Event producer with name {0} already exists with id={1}.'
            .format(producer.name, producer.get_id()))

    # Create the new producer for the tenant
    producer_id = tenant_util.create_event_producer(
        tenant,
        event_producer_name,
        event_producer_pattern,
        event_producer_durable,
        event_producer_encrypted,
        event_producer_sinks)

    resp.status = falcon.HTTP_201
    resp.set_header('Location',
                    '/v1/{0}/producers/{1}'.format(tenant_id, producer_id))
def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data.

    A dictionary named "meniscus" containing tenant-specific processing
    information is added to the message, any client-supplied meniscus
    data is scrubbed, and the message is queued for normalization or
    routed directly to the configured sinks.
    """
    # match the producer by the message pname
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])

    # fall back to a default producer when no match is found
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # build the per-sink dispatch table
    destinations = {}
    for sink in producer.sinks:
        destinations[sink] = {'transaction_id': None,
                              'transaction_time': None}

    # assemble the correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # After successful correlation remove meniscus information from
    # structured data so that the client's token is scrubbed from the
    # message, then attach the server-side correlation data.
    message['native'].pop('meniscus', None)
    message['meniscus'] = {
        'tenant': tenant.tenant_id,
        'correlation': correlation_dict
    }

    # If the message data indicates that the message has normalization
    # rules that apply, queue the message for normalization processing;
    # otherwise queue it for indexing/storage
    if normalizer.should_normalize(message):
        # send the message to normalization then route to sink
        normalizer.normalize_message.delay(message)
    else:
        sinks.route_message(message)
def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data.

    A dictionary named "meniscus" containing tenant-specific processing
    information is added to the message, any client-supplied meniscus
    data is scrubbed, and the message is queued for normalization or
    handed directly to the persistence dispatch.
    """
    # match the producer by the message pname
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])

    # fall back to a default producer when no match is found
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # build the per-sink dispatch table
    destinations = {}
    for sink in producer.sinks:
        destinations[sink] = {'transaction_id': None,
                              'transaction_time': None}

    # assemble the correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # After successful correlation remove meniscus information from
    # structured data so that the client's token is scrubbed from the
    # message, then attach the server-side correlation data.
    message['native'].pop('meniscus', None)
    message['meniscus'] = {
        'tenant': tenant.tenant_id,
        'correlation': correlation_dict
    }

    # If the message data indicates that the message has normalization
    # rules that apply, queue the message for normalization processing
    if normalizer.should_normalize(message):
        # Todo: (stevendgonzales) Examine whether or not to remove
        # Todo: persist_message as a linked subtask(callback) of the
        # Todo: normalization task instead Queue the task based on routing
        # Todo: determined at the end of the normalization process.
        # send the message to normalization then to the data dispatch
        normalizer.normalize_message.apply_async(
            (message,),
            link=dispatch.persist_message.subtask())
    else:
        # Queue the message for indexing/storage
        dispatch.persist_message(message)
def on_post(self, req, resp, tenant_id, validated_body):
    """
    Create a new Event Producer for a specified Tenant when an HTTP
    POST is received.

    :param req: falcon request object
    :param resp: falcon response object
    :param tenant_id: id of the tenant that will own the new producer
    :param validated_body: request body already validated against the
        event_producer schema; 'name' and 'pattern' are required,
        'durable'/'encrypted' default to False and 'sinks' to None
    """
    body = validated_body['event_producer']

    tenant = tenant_util.find_tenant(tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    event_producer_name = body['name']
    event_producer_pattern = body['pattern']

    # if durable or encrypted aren't specified, set to False;
    # sinks defaults to None
    event_producer_durable = body.get('durable', False)
    event_producer_encrypted = body.get('encrypted', False)
    event_producer_sinks = body.get('sinks')

    # Check if the tenant already has an event producer with this name
    producer = tenant_util.find_event_producer(
        tenant, producer_name=event_producer_name)
    if producer:
        api.abort(
            falcon.HTTP_400,
            'Event producer with name {0} already exists with id={1}.'
            .format(producer.name, producer.get_id()))

    # Create the new producer for the tenant
    producer_id = tenant_util.create_event_producer(
        tenant,
        event_producer_name,
        event_producer_pattern,
        event_producer_durable,
        event_producer_encrypted,
        event_producer_sinks)

    resp.status = falcon.HTTP_201
    resp.set_header('Location',
                    '/v1/{0}/producers/{1}'.format(tenant_id, producer_id))
def on_delete(self, req, resp, tenant_id, event_producer_id):
    """
    Remove a specified Event Producer from a Tenant's configuration
    when an HTTP DELETE is received.

    :param req: falcon request object
    :param resp: falcon response object
    :param tenant_id: id of the tenant that owns the producer
    :param event_producer_id: id of the producer to delete
    """
    # the tenant must exist
    tenant = tenant_util.find_tenant(tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    # the producer must exist and belong to that tenant
    producer_to_delete = tenant_util.find_event_producer(
        tenant, producer_id=event_producer_id)
    if not producer_to_delete:
        _producer_not_found()

    tenant_util.delete_event_producer(tenant, producer_to_delete)

    resp.status = falcon.HTTP_200
def on_get(self, req, resp, tenant_id, event_producer_id):
    """
    Retrieve a specified Event Producer from a Tenant when an HTTP GET
    is received.

    :param req: falcon request object
    :param resp: falcon response object
    :param tenant_id: id of the tenant that owns the producer
    :param event_producer_id: id of the producer to retrieve
    """
    # the tenant must exist
    tenant = tenant_util.find_tenant(tenant_id=tenant_id)
    if not tenant:
        _tenant_not_found()

    # the producer must exist and belong to that tenant
    requested_producer = tenant_util.find_event_producer(
        tenant, producer_id=event_producer_id)
    if not requested_producer:
        _producer_not_found()

    resp.status = falcon.HTTP_200
    resp.body = api.format_response_body(
        {'event_producer': requested_producer.format()})
def add_correlation_info_to_message(tenant, message):
    """
    Attach a "meniscus" correlation dictionary to the message and
    return the updated message.
    """
    # match the producer by the message pname
    producer = find_event_producer(
        tenant, producer_name=message['pname'])

    # fall back to a default producer when no match is found
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # per-sink dispatch table, one empty transaction entry per sink
    destinations = {
        sink: {'transaction_id': None, 'transaction_time': None}
        for sink in producer.sinks
    }

    # assemble the correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # todo(sgonzales) persist message and create job
    if producer.durable:
        correlation_dict['job_id'] = str(uuid4())

    message["meniscus"] = {
        "tenant": tenant.tenant_id,
        "correlation": correlation_dict
    }

    return message