def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data. The message will be updated
    by adding a dictionary named "meniscus" that contains tenant specific
    information used in processing the message.
    """
    #match the producer by the message pname
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])

    #if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    #create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    #configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # After successful correlation remove meniscus information from
    # structured data so that the client's token is scrubbed from
    # the message.
    message['native'].pop('meniscus', None)
    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    # If the message data indicates that the message has normalization
    # rules that apply, queue the message for normalization processing
    if normalizer.should_normalize(message):
        # send the message to normalization then route to sink
        normalizer.normalize_message.delay(message)
    else:
        # Queue the message for indexing/storage
        sinks.route_message(message)
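# A minimal, self-contained sketch (hypothetical values throughout) of the
# scrub-then-stamp step above: any client-supplied 'meniscus' block in the
# structured data is dropped so the tenant's token never echoes back, and
# the envelope built from the matched producer is attached in its place.
# datetime stands in for timeutils so the sketch runs on its own.
from datetime import datetime

message = {
    'pname': 'apache',
    'native': {'meniscus': {'token': 'client-supplied, must not leak'}}
}

message['native'].pop('meniscus', None)
message.update({
    'meniscus': {
        'tenant': '12345',
        'correlation': {
            'tenant_name': 'example-tenant',
            'ep_id': 104,
            'pattern': 'apache.cee',
            'durable': False,
            'encrypted': False,
            '@timestamp': datetime.utcnow(),
            'sinks': ['elasticsearch'],
            'destinations': {'elasticsearch': {'transaction_id': None,
                                               'transaction_time': None}}
        }
    }
})

assert 'meniscus' not in message['native']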
def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data. The message will be updated
    by adding a dictionary named "meniscus" that contains tenant specific
    information used in processing the message.
    """
    #match the producer by the message pname
    producer = tenant_util.find_event_producer(
        tenant, producer_name=message['pname'])

    #if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    #create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    #configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    # After successful correlation remove meniscus information from
    # structured data so that the client's token is scrubbed from
    # the message.
    message['native'].pop('meniscus', None)
    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    # If the message data indicates that the message has normalization
    # rules that apply, queue the message for normalization processing
    if normalizer.should_normalize(message):
        #Todo: (stevendgonzales) Examine whether or not to remove
        #Todo: persist_message as a linked subtask (callback) of the
        #Todo: normalization task and instead queue the task based on
        #Todo: routing determined at the end of the normalization process.
        # send the message to normalization then to the data dispatch
        normalizer.normalize_message.apply_async(
            (message,), link=dispatch.persist_message.subtask())
    else:
        # Queue the message for indexing/storage
        dispatch.persist_message(message)
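# The apply_async(..., link=...) call above uses Celery's callback linking:
# the value returned by the first task is handed to the linked subtask once
# the first task completes. A minimal sketch of that pattern, with
# hypothetical task names and an in-memory broker; the real meniscus tasks
# are not shown in this excerpt.
from celery import Celery

app = Celery('sketch', broker='memory://')


@app.task
def normalize(message):
    # normalization would happen here
    return message


@app.task
def persist(message):
    # indexing/storage would happen here
    return message

# Enqueue normalize; once a worker finishes it, the normalized message
# is passed on to persist via the linked subtask.
normalize.apply_async(({'pname': 'default'},), link=persist.subtask())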
def __init__(self, **kwargs):
    if kwargs:
        self.cpu_cores = kwargs['cpu_cores']
        self.os_type = kwargs['os_type']
        self.memory_mb = kwargs['memory_mb']
        self.architecture = kwargs['architecture']
        self.load_average = kwargs['load_average']
        self.disk_usage = kwargs['disk_usage']
        self.timestamp = kwargs['timestamp']
    else:
        self.cpu_cores = sys_assist.get_cpu_core_count()
        self.os_type = platform.platform()
        self.memory_mb = sys_assist.get_sys_mem_total_MB()
        self.architecture = platform.machine()
        self.load_average = sys_assist.get_load_average()
        self.disk_usage = sys_assist.get_disk_usage()
        self.timestamp = timeutils.utcnow()
def add_correlation_info_to_message(tenant, message):
    #match the producer by the message pname
    producer = find_event_producer(
        tenant, producer_name=message['pname'])

    #if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    #create correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict()
    }

    #configure sink dispatch
    for sink in producer.sinks:
        correlation_dict['destinations'][sink] = {
            'transaction_id': None,
            'transaction_time': None
        }

    #todo(sgonzales) persist message and create job
    if producer.durable:
        durable_job_id = str(uuid4())
        correlation_dict.update({'job_id': durable_job_id})

    message.update({
        'meniscus': {
            'tenant': tenant.tenant_id,
            'correlation': correlation_dict
        }
    })

    return message
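# A hedged, self-contained usage sketch of the function above. StubTenant
# and StubProducer are invented stand-ins modelling only the attributes the
# real function reads, and correlate() is a condensed copy (producer passed
# in directly, datetime in place of timeutils) so the sketch runs on its
# own with only the standard library.
from datetime import datetime
from uuid import uuid4


class StubTenant(object):
    def __init__(self, tenant_id, tenant_name):
        self.tenant_id = tenant_id
        self.tenant_name = tenant_name


class StubProducer(object):
    def __init__(self, _id, pattern, durable=False, encrypted=False,
                 sinks=None):
        self._id = _id
        self.pattern = pattern
        self.durable = durable
        self.encrypted = encrypted
        self.sinks = sinks or []

    def get_id(self):
        return self._id


def correlate(tenant, producer, message):
    # Condensed from add_correlation_info_to_message above.
    correlation = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': datetime.utcnow(),
        'sinks': producer.sinks,
        'destinations': dict((s, {'transaction_id': None,
                                  'transaction_time': None})
                             for s in producer.sinks)
    }
    # durable producers get a job id so the message can be tracked
    if producer.durable:
        correlation['job_id'] = str(uuid4())
    message['meniscus'] = {'tenant': tenant.tenant_id,
                           'correlation': correlation}
    return message


msg = correlate(StubTenant('12345', 'example-tenant'),
                StubProducer(104, 'apache.cee', durable=True,
                             sinks=['elasticsearch', 'hdfs']),
                {'pname': 'apache'})
assert 'job_id' in msg['meniscus']['correlation']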
def __init__(self, **kwargs): """ An object can be initialized by passing in a dictionary representation of the data as **kwargs. Otherwise the constructor will retrieve system stats from the machine it is executing on. """ if kwargs: self.cpu_cores = kwargs['cpu_cores'] self.os_type = kwargs['os_type'] self.memory_mb = kwargs['memory_mb'] self.architecture = kwargs['architecture'] self.load_average = kwargs['load_average'] self.disk_usage = kwargs['disk_usage'] self.timestamp = kwargs['timestamp'] else: self.cpu_cores = sys_assist.get_cpu_core_count() self.os_type = platform.platform() self.memory_mb = sys_assist.get_sys_mem_total_MB() self.architecture = platform.machine() self.load_average = sys_assist.get_load_average() self.disk_usage = sys_assist.get_disk_usage() self.timestamp = str(timeutils.utcnow())
def __init__(self, **kwargs): """ An object can be initialized by passing in a dictionary representation of the data as **kwargs. Otherwise the constructor will retrieve system stats from the machine it is executing on. """ if kwargs: self.cpu_cores = kwargs['cpu_cores'] self.os_type = kwargs['os_type'] self.memory_mb = kwargs['memory_mb'] self.architecture = kwargs['architecture'] self.load_average = kwargs['load_average'] self.disk_usage = kwargs['disk_usage'] self.timestamp = kwargs['timestamp'] else: self.cpu_cores = sys_assist.get_cpu_core_count() self.os_type = platform.platform() self.memory_mb = sys_assist.get_sys_mem_total_MB() self.architecture = platform.machine() self.load_average = sys_assist.get_load_average() self.disk_usage = sys_assist.get_disk_usage() self.timestamp = timeutils.utcnow()