Example #1
        def send_log_event(*args, **kwargs):
            # defer to the original implementation when the backend resolves the target function
            if backend.get_function(args[0]):
                return send_log_event_orig(*args, **kwargs)

            # unpack the positional arguments of the patched call
            filter_name = args[1]
            log_group_name = args[2]
            log_stream_name = args[3]
            log_events = args[4]

            data = {
                'messageType': 'DATA_MESSAGE',
                'owner': aws_stack.get_account_id(),
                'logGroup': log_group_name,
                'logStream': log_stream_name,
                'subscriptionFilters': [filter_name],
                'logEvents': log_events,
            }

            # base64-encode the JSON payload (no gzip compression in this variant)
            payload = base64.b64encode(
                json.dumps(data, separators=(',', ':')).encode('utf-8')
            ).decode('utf-8')
            event = {'awslogs': {'data': payload}}
            client = aws_stack.connect_to_service('lambda')
            lambda_name = aws_stack.lambda_function_name(args[0])
            # the invoke payload must be a serialized JSON string, not a dict
            client.invoke(FunctionName=lambda_name, Payload=json.dumps(event))
Example #2
        def send_log_event(*args, **kwargs):

            filter_name = args[1]
            log_group_name = args[2]
            log_stream_name = args[3]
            log_events = args[4]

            data = {
                'messageType': 'DATA_MESSAGE',
                'owner': aws_stack.get_account_id(),
                'logGroup': log_group_name,
                'logStream': log_stream_name,
                'subscriptionFilters': [filter_name],
                'logEvents': log_events,
            }

            # gzip-compress the JSON payload and base64-encode it, matching the
            # 'awslogs' event format that CloudWatch Logs delivers to subscribers
            output = io.BytesIO()
            with GzipFile(fileobj=output, mode='w') as f:
                f.write(json.dumps(data, separators=(',', ':')).encode('utf-8'))
            payload_gz_encoded = base64.b64encode(
                output.getvalue()).decode('utf-8')
            event = {'awslogs': {'data': payload_gz_encoded}}

            client = aws_stack.connect_to_service('lambda')
            lambda_name = aws_stack.lambda_function_name(args[0])
            client.invoke(FunctionName=lambda_name, Payload=json.dumps(event))
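
For reference, a Lambda handler receiving the event built above would undo the encoding in reverse order: base64-decode 'awslogs.data', gunzip it, and parse the JSON. A minimal sketch using only the standard library; the handler name and the print-based processing are illustrative, not part of the original code:

import base64
import gzip
import json


def handler(event, context):
    # 'awslogs.data' carries base64-encoded, gzip-compressed JSON
    compressed = base64.b64decode(event['awslogs']['data'])
    payload = json.loads(gzip.decompress(compressed))
    for log_event in payload['logEvents']:
        print(payload['logGroup'], log_event.get('timestamp'), log_event['message'])
    return {'processed': len(payload['logEvents'])}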
Example #3
    def put_log_events_model(self, log_group_name, log_stream_name, log_events,
                             sequence_token):
        # TODO: call/patch upstream method here, instead of duplicating the code!
        self.last_ingestion_time = int(unix_time_millis())
        self.stored_bytes += sum(
            [len(log_event["message"]) for log_event in log_events])
        events = [
            logs_models.LogEvent(self.last_ingestion_time, log_event)
            for log_event in log_events
        ]
        self.events += events
        self.upload_sequence_token += 1

        log_events = [{
            "id": event.event_id,
            "timestamp": event.timestamp,
            "message": event.message,
        } for event in events]

        data = {
            "messageType": "DATA_MESSAGE",
            "owner": aws_stack.get_account_id(),
            "logGroup": log_group_name,
            "logStream": log_stream_name,
            "subscriptionFilters": [self.filter_name],
            "logEvents": log_events,
        }

        output = io.BytesIO()
        with GzipFile(fileobj=output, mode="w") as f:
            f.write(json.dumps(data, separators=(",", ":")).encode("utf-8"))
        payload_gz_encoded = base64.b64encode(
            output.getvalue()).decode("utf-8")
        event = {"awslogs": {"data": payload_gz_encoded}}

        # forward the event to the configured subscription destination
        if self.destination_arn:
            if ":lambda:" in self.destination_arn:
                client = aws_stack.connect_to_service("lambda")
                lambda_name = aws_stack.lambda_function_name(
                    self.destination_arn)
                client.invoke(FunctionName=lambda_name,
                              Payload=json.dumps(event))
            if ":kinesis:" in self.destination_arn:
                client = aws_stack.connect_to_service("kinesis")
                stream_name = aws_stack.kinesis_stream_name(
                    self.destination_arn)
                client.put_record(
                    StreamName=stream_name,
                    Data=json.dumps(payload_gz_encoded),
                    PartitionKey=log_group_name,
                )
            if ":firehose:" in self.destination_arn:
                client = aws_stack.connect_to_service("firehose")
                firehose_name = aws_stack.firehose_name(self.destination_arn)
                client.put_record(
                    DeliveryStreamName=firehose_name,
                    Record={"Data": json.dumps(payload_gz_encoded)},
                )
Example #4
    def put_log_events_model(self, log_group_name, log_stream_name, log_events,
                             sequence_token):
        self.lastIngestionTime = int(unix_time_millis())
        self.storedBytes += sum(
            [len(log_event['message']) for log_event in log_events])
        events = [
            logs_models.LogEvent(self.lastIngestionTime, log_event)
            for log_event in log_events
        ]
        self.events += events
        self.uploadSequenceToken += 1

        log_events = [{
            'id': event.eventId,
            'timestamp': event.timestamp,
            'message': event.message,
        } for event in events]

        data = {
            'messageType': 'DATA_MESSAGE',
            'owner': aws_stack.get_account_id(),
            'logGroup': log_group_name,
            'logStream': log_stream_name,
            'subscriptionFilters': [self.filter_name],
            'logEvents': log_events,
        }

        output = io.BytesIO()
        with GzipFile(fileobj=output, mode='w') as f:
            f.write(json.dumps(data, separators=(',', ':')).encode('utf-8'))
        payload_gz_encoded = base64.b64encode(
            output.getvalue()).decode('utf-8')
        event = {'awslogs': {'data': payload_gz_encoded}}

        if self.destination_arn:
            if ':lambda:' in self.destination_arn:
                client = aws_stack.connect_to_service('lambda')
                lambda_name = aws_stack.lambda_function_name(
                    self.destination_arn)
                client.invoke(FunctionName=lambda_name,
                              Payload=json.dumps(event))
            if ':kinesis:' in self.destination_arn:
                client = aws_stack.connect_to_service('kinesis')
                stream_name = aws_stack.kinesis_stream_name(
                    self.destination_arn)
                client.put_record(StreamName=stream_name,
                                  Data=json.dumps(payload_gz_encoded),
                                  PartitionKey=log_group_name)
            if ':firehose:' in self.destination_arn:
                client = aws_stack.connect_to_service('firehose')
                firehose_name = aws_stack.firehose_name(self.destination_arn)
                client.put_record(
                    DeliveryStreamName=firehose_name,
                    Record={'Data': json.dumps(payload_gz_encoded)})
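
Records forwarded to Kinesis or Firehose in the two snippets above are wrapped twice: the base64 string of the gzipped JSON is additionally passed through json.dumps before being written as record data. A consumer would therefore peel off both layers; a minimal sketch, not taken from the original code:

import base64
import gzip
import json


def decode_forwarded_record(record_data):
    # the record data is a JSON string wrapping the base64 of the gzipped payload
    payload_gz_encoded = json.loads(record_data)
    return json.loads(gzip.decompress(base64.b64decode(payload_gz_encoded)))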
Example #5
    def create_from_cloudformation_json(cls, resource_name,
                                        cloudformation_json, region_name):
        props = cloudformation_json['Properties']
        name = props.get('StateMachineName') or resource_name
        definition = props.get('DefinitionString')
        role_arn = props.get('RoleArn')
        arn = 'arn:aws:states:%s:%s:stateMachine:%s' % (
            region_name, aws_stack.get_account_id(), name)

        state_machine = StateMachine(arn,
                                     name,
                                     definition=definition,
                                     role_arn=role_arn)
        stepfunction_backends[region_name].state_machines.append(state_machine)

        return state_machine
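
The handler above reads only three keys from the resource properties. A minimal sketch of the input it expects; the Pass-state definition is illustrative, and the call assumes the method lives on StateMachine and is registered as a classmethod (as in moto), with the surrounding LocalStack/moto setup already in place:

import json

cloudformation_json = {
    'Properties': {
        'StateMachineName': 'my-state-machine',
        'RoleArn': 'arn:aws:iam::000000000000:role/sfn-role',
        'DefinitionString': json.dumps({
            'StartAt': 'Hello',
            'States': {'Hello': {'Type': 'Pass', 'End': True}},
        }),
    },
}

# hypothetical call; the enclosing class is assumed here
state_machine = StateMachine.create_from_cloudformation_json(
    'MyStateMachine', cloudformation_json, 'us-east-1')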