    # NOTE: this snippet assumes the module-level imports of the surrounding
    # file: base64, io, json, GzipFile from gzip, plus (in the LocalStack code
    # base this method is patched into) aws_stack from localstack.utils.aws,
    # unix_time_millis from localstack.utils.common, and moto.logs.models
    # imported as logs_models.
    def put_log_events_model(self, log_group_name, log_stream_name, log_events,
                             sequence_token):
        # TODO: call/patch upstream method here, instead of duplicating the code!
        self.last_ingestion_time = int(unix_time_millis())
        self.stored_bytes += sum(
            len(log_event["message"]) for log_event in log_events)
        events = [
            logs_models.LogEvent(self.last_ingestion_time, log_event)
            for log_event in log_events
        ]
        self.events += events
        self.upload_sequence_token += 1

        log_events = [{
            "id": event.event_id,
            "timestamp": event.timestamp,
            "message": event.message,
        } for event in events]

        data = {
            "messageType": "DATA_MESSAGE",
            "owner": aws_stack.get_account_id(),
            "logGroup": log_group_name,
            "logStream": log_stream_name,
            "subscriptionFilters": [self.filter_name],
            "logEvents": log_events,
        }

        # Mirror the format CloudWatch Logs delivers to subscription
        # destinations: gzip-compress the JSON payload, then base64-encode it.
        output = io.BytesIO()
        with GzipFile(fileobj=output, mode="w") as f:
            f.write(json.dumps(data, separators=(",", ":")).encode("utf-8"))
        payload_gz_encoded = base64.b64encode(output.getvalue()).decode("utf-8")
        event = {"awslogs": {"data": payload_gz_encoded}}

        if self.destination_arn:
            if ":lambda:" in self.destination_arn:
                client = aws_stack.connect_to_service("lambda")
                lambda_name = aws_stack.lambda_function_name(
                    self.destination_arn)
                client.invoke(FunctionName=lambda_name,
                              Payload=json.dumps(event))
            if ":kinesis:" in self.destination_arn:
                client = aws_stack.connect_to_service("kinesis")
                stream_name = aws_stack.kinesis_stream_name(
                    self.destination_arn)
                # Pass the base64 payload through unchanged; json.dumps() on a
                # string would only wrap it in an extra pair of quotes.
                client.put_record(
                    StreamName=stream_name,
                    Data=payload_gz_encoded,
                    PartitionKey=log_group_name,
                )
            if ":firehose:" in self.destination_arn:
                client = aws_stack.connect_to_service("firehose")
                firehose_name = aws_stack.firehose_name(self.destination_arn)
                client.put_record(
                    DeliveryStreamName=firehose_name,
                    Record={"Data": payload_gz_encoded},
                )
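
A minimal sketch of the receiving side (not part of the example above; the handler name and print-based output are illustrative assumptions): a destination Lambda unpacks the "data" field with base64 + gzip.

import base64
import gzip
import json

def handler(event, context):
    # "data" is base64-encoded, gzip-compressed JSON, as built above
    compressed = base64.b64decode(event["awslogs"]["data"])
    payload = json.loads(gzip.decompress(compressed))
    for log_event in payload["logEvents"]:
        print(log_event["id"], log_event["timestamp"], log_event["message"])
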
Example #2
    # Variant of the same patch, apparently targeting a moto version whose
    # LogStream model uses camelCase attribute names (lastIngestionTime,
    # storedBytes, uploadSequenceToken).
    def put_log_events_model(self, log_group_name, log_stream_name, log_events,
                             sequence_token):
        self.lastIngestionTime = int(unix_time_millis())
        self.storedBytes += sum(
            len(log_event['message']) for log_event in log_events)
        events = [
            logs_models.LogEvent(self.lastIngestionTime, log_event)
            for log_event in log_events
        ]
        self.events += events
        self.uploadSequenceToken += 1

        log_events = [{
            'id': event.eventId,
            'timestamp': event.timestamp,
            'message': event.message,
        } for event in events]

        data = {
            'messageType': 'DATA_MESSAGE',
            'owner': aws_stack.get_account_id(),
            'logGroup': log_group_name,
            'logStream': log_stream_name,
            'subscriptionFilters': [self.filter_name],
            'logEvents': log_events,
        }

        # Same envelope as above: gzip-compressed JSON, base64-encoded.
        output = io.BytesIO()
        with GzipFile(fileobj=output, mode='w') as f:
            f.write(json.dumps(data, separators=(',', ':')).encode('utf-8'))
        payload_gz_encoded = base64.b64encode(output.getvalue()).decode('utf-8')
        event = {'awslogs': {'data': payload_gz_encoded}}

        if self.destination_arn:
            if ':lambda:' in self.destination_arn:
                client = aws_stack.connect_to_service('lambda')
                lambda_name = aws_stack.lambda_function_name(
                    self.destination_arn)
                client.invoke(FunctionName=lambda_name,
                              Payload=json.dumps(event))
            if ':kinesis:' in self.destination_arn:
                client = aws_stack.connect_to_service('kinesis')
                stream_name = aws_stack.kinesis_stream_name(
                    self.destination_arn)
                # As above, send the base64 payload itself rather than a
                # json.dumps()-quoted copy of it.
                client.put_record(StreamName=stream_name,
                                  Data=payload_gz_encoded,
                                  PartitionKey=log_group_name)
            if ':firehose:' in self.destination_arn:
                client = aws_stack.connect_to_service('firehose')
                firehose_name = aws_stack.firehose_name(self.destination_arn)
                client.put_record(
                    DeliveryStreamName=firehose_name,
                    Record={'Data': payload_gz_encoded})
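
A hypothetical end-to-end exercise of either variant against a LocalStack endpoint (the edge URL, region, resource names, and destination ARN below are assumptions): putting events on a stream whose group has a subscription filter triggers the forwarding logic above.

import boto3

logs = boto3.client('logs', endpoint_url='http://localhost:4566',
                    region_name='us-east-1')
logs.create_log_group(logGroupName='/test/group')
logs.create_log_stream(logGroupName='/test/group', logStreamName='stream-1')
logs.put_subscription_filter(
    logGroupName='/test/group',
    filterName='example-filter',
    filterPattern='',
    destinationArn='arn:aws:lambda:us-east-1:000000000000:function:log-consumer',
)
logs.put_log_events(
    logGroupName='/test/group',
    logStreamName='stream-1',
    logEvents=[{'timestamp': 1600000000000, 'message': 'hello'}],
)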