def _add_correlation_info_to_message(tenant, message):
    """Attach tenant correlation data to a message and dispatch it.

    Adds a "meniscus" dictionary to the message containing tenant-specific
    information used in later processing, then queues the message either
    for normalization (which routes to sinks afterwards) or directly for
    sink routing.

    :param tenant: tenant object providing tenant_name / tenant_id and
        the configured event producers
    :param message: syslog message dict; must contain 'pname' and 'native'
    """
    # Resolve the event producer named by the message's pname field;
    # when no matching producer is configured, fall back to a default one.
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # One dispatch entry per configured sink; transaction fields are
    # filled in later by the sink layer.
    destinations = {
        sink: {'transaction_id': None, 'transaction_time': None}
        for sink in producer.sinks
    }

    correlation = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": destinations
    }

    # After successful correlation, drop any meniscus block from the
    # structured data so the client's token is scrubbed from the message.
    message['native'].pop('meniscus', None)
    message['meniscus'] = {
        'tenant': tenant.tenant_id,
        'correlation': correlation
    }

    if normalizer.should_normalize(message):
        # Normalize first; the normalization task routes to sinks itself.
        normalizer.normalize_message.delay(message)
    else:
        # No normalization rules apply; route straight to indexing/storage.
        sinks.route_message(message)
def normalize_message(message):
    """Normalize a message and route it to its sinks.

    The message body is normalized into a dictionary which is stored
    under message['normalized'], keyed by the pattern name taken from
    the message's correlation data.

    :param message: correlated message dict containing
        ['meniscus']['correlation']['pattern'] and ['msg']
    """
    pattern_name = message['meniscus']['correlation']['pattern']
    normalized_json = _normalizer.normalize(message['msg']).as_json()
    message['normalized'] = {pattern_name: json.loads(normalized_json)}
    sinks.route_message(message)
def _add_correlation_info_to_message(tenant, message):
    """Attach tenant correlation data to a message and dispatch it.

    The message is updated with a "meniscus" dictionary holding
    tenant-specific information used while processing the message, then
    queued for normalization or routed directly to its sinks.

    :param tenant: tenant object providing tenant_name / tenant_id and
        the configured event producers
    :param message: syslog message dict; must contain 'pname' and 'native'
    """
    # Match the producer by the message pname; use a default producer
    # when the tenant has none configured for that name.
    matched = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])
    producer = matched or EventProducer(
        _id=None, name="default", pattern="default")

    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": dict()
    }

    # Configure sink dispatch: one placeholder entry per sink, to be
    # filled in with transaction details by the sink layer.
    for sink_name in producer.sinks:
        correlation_dict["destinations"][sink_name] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # Remove any client-supplied meniscus block from the structured data
    # so the client's token is scrubbed from the message, then install
    # our own correlation info.
    message['native'].pop('meniscus', None)
    message.update({'meniscus': {'tenant': tenant.tenant_id,
                                 'correlation': correlation_dict}})

    if normalizer.should_normalize(message):
        # Normalization rules apply: the normalization task will route
        # the message to its sinks after processing.
        normalizer.normalize_message.delay(message)
    else:
        # Queue the message directly for indexing/storage.
        sinks.route_message(message)