def _add_correlation_info_to_message(tenant, message):
    """Attach tenant correlation data to *message*.

    Adds a dictionary named "meniscus" containing tenant-specific
    information used while processing the message, then queues the
    message for normalization or for direct persistence.
    """
    # Resolve the event producer named by the message's pname; fall
    # back to a default producer when no match is configured.
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # Per-sink dispatch bookkeeping, one entry per configured sink.
    destinations = {
        sink: {'transaction_id': None, 'transaction_time': None}
        for sink in producer.sinks
    }

    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # Correlation succeeded: drop any meniscus entry from the client's
    # structured data so the client's token is scrubbed from the
    # message, then record our own meniscus envelope.
    message['native'].pop('meniscus', None)
    message.update({'meniscus': {'tenant': tenant.tenant_id,
                                 'correlation': correlation_dict}})

    # Route through normalization when rules apply; otherwise persist
    # the message directly for indexing/storage.
    if normalizer.should_normalize(message):
        # TODO(stevendgonzales): Examine whether or not to remove
        # persist_message as a linked subtask (callback) of the
        # normalization task; instead queue the task based on routing
        # determined at the end of the normalization process.
        normalizer.normalize_message.apply_async(
            (message,), link=dispatch.persist_message.subtask())
    else:
        dispatch.persist_message(message)
def _add_correlation_info_to_message(tenant, message):
    """Attach tenant correlation data to *message*.

    Adds a dictionary named "meniscus" containing tenant-specific
    information used while processing the message, then queues the
    message either for normalization or for sink routing.
    """
    # Resolve the event producer named by the message's pname; fall
    # back to a default producer when no match is configured.
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # Per-sink dispatch bookkeeping, one entry per configured sink.
    destinations = {
        sink: {'transaction_id': None, 'transaction_time': None}
        for sink in producer.sinks
    }

    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # Correlation succeeded: drop any meniscus entry from the client's
    # structured data so the client's token is scrubbed from the
    # message, then record our own meniscus envelope.
    message['native'].pop('meniscus', None)
    message.update({'meniscus': {'tenant': tenant.tenant_id,
                                 'correlation': correlation_dict}})

    # Route through normalization when rules apply; otherwise hand the
    # message straight to the configured sinks.
    if normalizer.should_normalize(message):
        # Send the message to normalization, then route to its sink.
        normalizer.normalize_message.delay(message)
    else:
        sinks.route_message(message)
def test_normalize_message(self):
    """A message matched by the loaded rules should be normalized."""
    rules_target = (
        'meniscus.normalization.normalizer.loaded_normalizer_rules')
    # Substitute the fixture rules so should_normalize sees them.
    with patch(rules_target, self.loaded_rules):
        result = should_normalize(self.good_message)
    self.assertTrue(result)