Example #1
    def test_message_sent_to_secondary_sink_only(self):
        self.message['meniscus']['correlation']['sinks'] = SECONDARY_SINKS
        with patch('meniscus.storage.dispatch.'
                   'short_term_store.persist_message.delay',
                   self.secondary_persist):
            dispatch.persist_message(self.message)
            self.secondary_persist.assert_called_once_with(self.message)
Example #2
    def test_message_sent_to_default_store_only(self):
        self.message['meniscus']['correlation']['sinks'] = [DEFAULT_SINK]
        with patch('meniscus.storage.dispatch.'
                   'default_store.persist_message.delay',
                   self.default_persist):
            dispatch.persist_message(self.message)
            self.default_persist.assert_called_once_with(self.message)
Example #3
    def test_message_sent_to_default_and_secondary_sinks(self):
        self.message['meniscus']['correlation']['sinks'] = VALID_SINKS
        with patch('meniscus.storage.dispatch.'
                   'default_store.persist_message.delay',
                   self.default_persist), \
            patch('meniscus.storage.dispatch.'
                  'short_term_store.persist_message.delay',
                  self.secondary_persist):
            dispatch.persist_message(self.message)
            self.default_persist.assert_called_once_with(self.message)
            self.secondary_persist.assert_called_once_with(self.message)
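The three tests above patch default_store.persist_message.delay and short_term_store.persist_message.delay inside meniscus.storage.dispatch, which suggests a dispatcher that fans a message out to whichever sinks the correlator recorded. A minimal sketch of such a dispatcher is shown below; the sink names and the sink-to-store mapping are hypothetical, inferred only from the patch targets, and may not match the project's real routing:

    from meniscus.storage import default_store, short_term_store

    # hypothetical sink names; the tests only show that one default sink and
    # one or more secondary sinks exist
    DEFAULT_SINK = 'elasticsearch'
    SECONDARY_SINK = 'hdfs'

    _SINK_TO_STORE = {
        DEFAULT_SINK: default_store,
        SECONDARY_SINK: short_term_store,
    }


    def persist_message(message):
        # queue the message on the celery task of every store whose sink the
        # correlator listed under message['meniscus']['correlation']['sinks']
        for sink in message['meniscus']['correlation']['sinks']:
            store = _SINK_TO_STORE.get(sink)
            if store is not None:
                store.persist_message.delay(message)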
Example #4
def _add_correlation_info_to_message(tenant, message):
    """
    Pack the message with correlation data. The message will be updated by
    adding a dictionary named "meniscus" that contains tenant specific
    information used in processing the message.
    """
    # match the producer by the message pname
    producer = tenant_util.find_event_producer(tenant,
                                               producer_name=message['pname'])

    # if the producer is not found, create a default producer
    if not producer:
        producer = EventProducer(_id=None, name="default", pattern="default")

    # create the correlation dictionary
    correlation_dict = {
        'tenant_name': tenant.tenant_name,
        'ep_id': producer.get_id(),
        'pattern': producer.pattern,
        'durable': producer.durable,
        'encrypted': producer.encrypted,
        '@timestamp': timeutils.utcnow(),
        'sinks': producer.sinks,
        "destinations": dict()
    }

    # configure sink dispatch
    for sink in producer.sinks:
        correlation_dict["destinations"][sink] = {'transaction_id': None,
                                                  'transaction_time': None}

    # After successful correlation, remove meniscus information from structured
    # data so that the client's token is scrubbed from the message.
    message['native'].pop('meniscus', None)
    message.update({'meniscus': {'tenant': tenant.tenant_id,
                                 'correlation': correlation_dict}})

    # If the message data indicates that the message has normalization rules
    # that apply, queue the message for normalization processing
    if normalizer.should_normalize(message):
        # TODO(stevendgonzales): Examine whether to remove persist_message as
        # a linked subtask (callback) of the normalization task and instead
        # queue the task based on routing determined at the end of the
        # normalization process.
        # send the message to normalization then to the data dispatch
        normalizer.normalize_message.apply_async(
            (message,),
            link=dispatch.persist_message.subtask())
    else:
        # Queue the message for indexing/storage
        dispatch.persist_message(message)
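For illustration, after _add_correlation_info_to_message runs, the message carries an envelope shaped roughly like the dictionary below. Only the key layout comes from the code above; every value is a made-up placeholder:

    correlated_message = {
        'pname': 'apache',
        'native': {},  # any client-supplied 'meniscus' key has been popped
        'meniscus': {
            'tenant': 'tenant-1234',
            'correlation': {
                'tenant_name': 'example-tenant',
                'ep_id': 42,
                'pattern': 'apache',
                'durable': False,
                'encrypted': False,
                '@timestamp': '2013-04-01T12:00:00.000000Z',
                'sinks': ['elasticsearch'],
                'destinations': {
                    'elasticsearch': {'transaction_id': None,
                                      'transaction_time': None}
                }
            }
        }
    }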
Example #5
    def on_post(self, req, resp, tenant_id, validated_body):
        """
        This method is passed log event data by a tenant. The request will
        have a message token and a tenant id which must be validated either
        by the local cache or by a call to this worker's coordinator.
        """

        # read the message token from the header
        message_token = req.get_header(MESSAGE_TOKEN, required=True)

        # extract the log message from the validated request body
        message = validated_body['log_message']

        tenant_identification = correlator.TenantIdentification(
            tenant_id, message_token)

        try:
            tenant = tenant_identification.get_validated_tenant()
            message = correlator.add_correlation_info_to_message(
                tenant, message)

        except errors.MessageAuthenticationError as ex:
            abort(falcon.HTTP_401, ex.message)
        except errors.ResourceNotFoundError as ex:
            abort(falcon.HTTP_404, ex.message)
        except errors.CoordinatorCommunicationError:
            abort(falcon.HTTP_500)

        dispatch.persist_message(message)

        # if the message is durable, return durable job info
        if message['meniscus']['correlation']['durable']:
            durable_job_id = message['meniscus']['correlation']['job_id']
            job_status_uri = "http://{0}/v1/job/{1}/status" \
                .format("meniscus_uri", durable_job_id)

            resp.status = falcon.HTTP_202
            resp.body = format_response_body(
                {
                    "job_id": durable_job_id,
                    "job_status_uri": job_status_uri
                }
            )

        else:
            resp.status = falcon.HTTP_204
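A hypothetical client call against this handler is sketched below; the host, URL path, header name, and token are illustrative assumptions rather than values taken from the project:

    import requests

    url = 'http://meniscus.example.com/v1/tenant-1234/publish'  # assumed route
    headers = {'MESSAGE-TOKEN': 'a-valid-message-token'}  # assumed header name
    body = {'log_message': {'pname': 'apache', 'native': {}}}

    resp = requests.post(url, json=body, headers=headers)
    # expect 202 plus job info when the producer is durable, otherwise 204
    print(resp.status_code, resp.text)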
Example #6
    def message_complete(self, last_message_part):
        full_message = self.msg + last_message_part
        syslog_message = self.msg_head.as_dict()
        syslog_message['message'] = full_message.decode('utf-8')
        cee_message = _correlate_syslog_message(syslog_message)

        try:
            if should_normalize(cee_message):
                # send the message to normalization, then to the data dispatch
                normalize_message.apply_async(
                    (cee_message,),
                    link=dispatch.persist_message.subtask())
            else:
                dispatch.persist_message(cee_message)
        except Exception:
            _LOG.exception('unable to place persist_message task on queue')

        # reset for the next message
        self.msg_head = None
        self.msg = b''
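Both the correlator in Example #4 and the handler above chain storage onto normalization with apply_async(..., link=task.subtask()). A standalone sketch of that Celery linking idiom, using made-up task names and an in-memory broker, shows how the linked callback receives the normalized message as its argument:

    from celery import Celery

    app = Celery('sketch', broker='memory://')


    @app.task
    def normalize(message):
        message['normalized'] = True
        return message


    @app.task
    def persist(message):
        # stands in for dispatch.persist_message in the examples above
        print('persisting', message)


    # persist() is invoked with normalize()'s return value once it succeeds
    normalize.apply_async(({'pname': 'apache'},), link=persist.subtask())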