def create_stream(stream_name,
                  delivery_stream_type='DirectPut',
                  delivery_stream_type_configuration=None,
                  s3_destination=None,
                  elasticsearch_destination=None):
    stream = {
        'DeliveryStreamType': delivery_stream_type,
        'KinesisStreamSourceConfiguration': delivery_stream_type_configuration,
        'HasMoreDestinations': False,
        'VersionId': '1',
        'CreateTimestamp': time.time(),
        'DeliveryStreamARN': firehose_stream_arn(stream_name),
        'DeliveryStreamStatus': 'ACTIVE',
        'DeliveryStreamName': stream_name,
        'Destinations': []
    }
    # register the new stream in the in-memory registry
    DELIVERY_STREAMS[stream_name] = stream
    if elasticsearch_destination:
        update_destination(stream_name=stream_name,
                           destination_id=short_uid(),
                           elasticsearch_update=elasticsearch_destination)
    if s3_destination:
        update_destination(stream_name=stream_name,
                           destination_id=short_uid(),
                           s3_update=s3_destination)

    if delivery_stream_type == 'KinesisStreamAsSource':
        # derive the source stream name from its ARN and forward its
        # records into this delivery stream
        kinesis_stream_name = delivery_stream_type_configuration.get('KinesisStreamARN').split('/')[1]
        kinesis_connector.listen_to_kinesis(stream_name=kinesis_stream_name,
                                            fh_d_stream=stream_name,
                                            listener_func=process_records,
                                            wait_until_started=True,
                                            ddb_lease_table_suffix='-firehose')
    return stream
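A minimal usage sketch for the variant above, assuming the module-level helpers (DELIVERY_STREAMS, update_destination, etc.) are in scope; the ARNs and stream name are illustrative placeholders, and the S3 dict follows the AWS S3DestinationConfiguration shape:

s3_config = {
    'RoleARN': 'arn:aws:iam::000000000000:role/firehose-role',  # placeholder
    'BucketARN': 'arn:aws:s3:::my-target-bucket',               # placeholder
    'Prefix': 'firehose-data/',
}
stream = create_stream('my-delivery-stream', s3_destination=s3_config)
assert stream['DeliveryStreamStatus'] == 'ACTIVE'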
Example #2
def create_stream(stream_name, delivery_stream_type='DirectPut', delivery_stream_type_configuration=None,
                  s3_destination=None, elasticsearch_destination=None, tags=None, region_name=None):
    tags = tags or {}
    stream = {
        'DeliveryStreamType': delivery_stream_type,
        'KinesisStreamSourceConfiguration': delivery_stream_type_configuration,
        'HasMoreDestinations': False,
        'VersionId': '1',
        'CreateTimestamp': time.time(),
        'DeliveryStreamARN': firehose_stream_arn(stream_name),
        'DeliveryStreamStatus': 'ACTIVE',
        'DeliveryStreamName': stream_name,
        'Destinations': [],
        'Tags': tags
    }
    DELIVERY_STREAMS[stream_name] = stream
    if elasticsearch_destination:
        update_destination(stream_name=stream_name, destination_id=short_uid(),
                           elasticsearch_update=elasticsearch_destination)
    if s3_destination:
        update_destination(stream_name=stream_name, destination_id=short_uid(), s3_update=s3_destination)

    # record event
    event_publisher.fire_event(event_publisher.EVENT_FIREHOSE_CREATE_STREAM,
        payload={'n': event_publisher.get_hash(stream_name)})

    if delivery_stream_type == 'KinesisStreamAsSource':
        kinesis_stream_name = delivery_stream_type_configuration.get('KinesisStreamARN').split('/')[1]
        kinesis_connector.listen_to_kinesis(
            stream_name=kinesis_stream_name, fh_d_stream=stream_name,
            listener_func=process_records, wait_until_started=True,
            ddb_lease_table_suffix='-firehose', region_name=region_name)
    return stream
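A hedged sketch of the 'KinesisStreamAsSource' path in this variant; note that the source stream name is parsed out of the KinesisStreamARN (all ARNs below are placeholders):

source_config = {
    'KinesisStreamARN': 'arn:aws:kinesis:us-east-1:000000000000:stream/my-source-stream',
    'RoleARN': 'arn:aws:iam::000000000000:role/firehose-role',
}
stream = create_stream(
    'my-delivery-stream',
    delivery_stream_type='KinesisStreamAsSource',
    delivery_stream_type_configuration=source_config,
    tags={'env': 'test'},
    region_name='us-east-1',
)
# listen_to_kinesis() is now forwarding records from 'my-source-stream'
# to process_records for this delivery stream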
Example #3
def create_stream(stream_name, s3_destination=None):
    stream = {
        'HasMoreDestinations': False,
        'VersionId': '1',
        'CreateTimestamp': time.time(),
        'DeliveryStreamARN': firehose_stream_arn(stream_name),
        'DeliveryStreamStatus': 'ACTIVE',
        'DeliveryStreamName': stream_name,
        'Destinations': []
    }
    DELIVERY_STREAMS[stream_name] = stream
    if s3_destination:
        update_destination(stream_name=stream_name, destination_id=short_uid(), s3_update=s3_destination)
    return stream
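This minimal variant only supports an S3 destination; called without one, it simply registers an empty stream:

stream = create_stream('my-delivery-stream')
assert stream['Destinations'] == []
assert DELIVERY_STREAMS['my-delivery-stream'] is stream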
Example #4
def create_stream(stream_name, s3_destination=None, elasticsearch_destination=None):
    stream = {
        'HasMoreDestinations': False,
        'VersionId': '1',
        'CreateTimestamp': time.time(),
        'DeliveryStreamARN': firehose_stream_arn(stream_name),
        'DeliveryStreamStatus': 'ACTIVE',
        'DeliveryStreamName': stream_name,
        'Destinations': []
    }
    DELIVERY_STREAMS[stream_name] = stream
    if elasticsearch_destination:
        update_destination(stream_name=stream_name,
            destination_id=short_uid(),
            elasticsearch_update=elasticsearch_destination)
    if s3_destination:
        update_destination(stream_name=stream_name, destination_id=short_uid(), s3_update=s3_destination)
    return stream
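A sketch of the Elasticsearch path, using the AWS-style ElasticsearchDestinationConfiguration shape (the domain ARN and index name are illustrative):

es_config = {
    'RoleARN': 'arn:aws:iam::000000000000:role/firehose-role',
    'DomainARN': 'arn:aws:es:us-east-1:000000000000:domain/my-domain',
    'IndexName': 'firehose-index',
}
stream = create_stream('my-delivery-stream', elasticsearch_destination=es_config)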
Example #5
def Firehose_get_cfn_attribute(self, attribute_name):
    # CloudFormation GetAtt handler: only the 'Arn' attribute is supported
    if attribute_name == 'Arn':
        return aws_stack.firehose_stream_arn(self.params.get('DeliveryStreamName'))
    raise UnformattedGetAttTemplateException()
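A minimal sketch of how this hook is exercised; the resource class and its params attribute are assumptions made for illustration only:

class FirehoseStreamResource:
    # CloudFormation resource properties, as referenced via self.params above
    params = {'DeliveryStreamName': 'my-delivery-stream'}
    get_cfn_attribute = Firehose_get_cfn_attribute

arn = FirehoseStreamResource().get_cfn_attribute('Arn')
# -> 'arn:aws:firehose:<region>:<account>:deliverystream/my-delivery-stream'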
Example #6
def create_stream(
    stream_name: str,
    delivery_stream_type: str = "DirectPut",
    delivery_stream_type_configuration: Dict = None,
    s3_destination: Dict = None,
    elasticsearch_destination: Dict = None,
    http_destination: Dict = None,
    tags: Dict[str, str] = None,
):
    """Create a firehose stream with destination configurations. In case 'KinesisStreamAsSource' is set,
    creates a listener to process records from the underlying kinesis stream."""
    region = FirehoseBackend.get()
    tags = tags or {}
    stream = {
        "DeliveryStreamType": delivery_stream_type,
        "KinesisStreamSourceConfiguration": delivery_stream_type_configuration,
        "HasMoreDestinations": False,
        "VersionId": "1",
        "CreateTimestamp": time.time(),
        "DeliveryStreamARN": firehose_stream_arn(stream_name),
        "DeliveryStreamStatus": "ACTIVE",
        "DeliveryStreamName": stream_name,
        "Destinations": [],
        "Tags": tags,
    }
    region.delivery_streams[stream_name] = stream
    if elasticsearch_destination:
        update_destination(
            stream_name=stream_name,
            destination_id=short_uid(),
            elasticsearch_update=elasticsearch_destination,
        )
    if s3_destination:
        update_destination(
            stream_name=stream_name,
            destination_id=short_uid(),
            s3_update=s3_destination,
        )
    if http_destination:
        update_destination(
            stream_name=stream_name,
            destination_id=short_uid(),
            http_update=http_destination,
        )

    # record event
    event_publisher.fire_event(
        event_publisher.EVENT_FIREHOSE_CREATE_STREAM,
        payload={"n": event_publisher.get_hash(stream_name)},
    )

    if delivery_stream_type == "KinesisStreamAsSource":
        kinesis_stream_name = delivery_stream_type_configuration.get(
            "KinesisStreamARN").split("/")[1]
        kinesis_connector.listen_to_kinesis(
            stream_name=kinesis_stream_name,
            fh_d_stream=stream_name,
            listener_func=process_records,
            wait_until_started=True,
            ddb_lease_table_suffix="-firehose",
        )
    return stream
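A usage sketch for the HTTP endpoint destination added in this variant; the endpoint URL and the configuration shape (modeled on AWS's HttpEndpointDestinationConfiguration) are assumptions:

http_config = {
    'EndpointConfiguration': {
        'Url': 'http://localhost:4566/firehose-target',  # placeholder endpoint
        'Name': 'local-endpoint',
    },
}
stream = create_stream(
    stream_name='my-delivery-stream',
    http_destination=http_config,
    tags={'env': 'test'},
)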
Example #7
    def create_delivery_stream(
        self,
        context: RequestContext,
        delivery_stream_name: DeliveryStreamName,
        delivery_stream_type: DeliveryStreamType = DeliveryStreamType.DirectPut,
        kinesis_stream_source_configuration: KinesisStreamSourceConfiguration = None,
        delivery_stream_encryption_configuration_input: DeliveryStreamEncryptionConfigurationInput = None,
        s3_destination_configuration: S3DestinationConfiguration = None,
        extended_s3_destination_configuration: ExtendedS3DestinationConfiguration = None,
        redshift_destination_configuration: RedshiftDestinationConfiguration = None,
        elasticsearch_destination_configuration: ElasticsearchDestinationConfiguration = None,
        amazonopensearchservice_destination_configuration: AmazonopensearchserviceDestinationConfiguration = None,
        splunk_destination_configuration: SplunkDestinationConfiguration = None,
        http_endpoint_destination_configuration: HttpEndpointDestinationConfiguration = None,
        tags: TagDeliveryStreamInputTagList = None,
    ) -> CreateDeliveryStreamOutput:
        region = FirehoseBackend.get()

        destinations: DestinationDescriptionList = []
        if elasticsearch_destination_configuration:
            destinations.append(
                DestinationDescription(
                    DestinationId=short_uid(),
                    ElasticsearchDestinationDescription=convert_es_config_to_desc(
                        elasticsearch_destination_configuration
                    ),
                )
            )
        if amazonopensearchservice_destination_configuration:
            destinations.append(
                DestinationDescription(
                    DestinationId=short_uid(),
                    AmazonopensearchserviceDestinationDescription=convert_opensearch_config_to_desc(
                        amazonopensearchservice_destination_configuration
                    ),
                )
            )
        if s3_destination_configuration or extended_s3_destination_configuration:
            destinations.append(
                DestinationDescription(
                    DestinationId=short_uid(),
                    S3DestinationDescription=convert_s3_config_to_desc(
                        s3_destination_configuration
                    ),
                    ExtendedS3DestinationDescription=convert_extended_s3_config_to_desc(
                        extended_s3_destination_configuration
                    ),
                )
            )
        if http_endpoint_destination_configuration:
            destinations.append(
                DestinationDescription(
                    DestinationId=short_uid(),
                    HttpEndpointDestinationDescription=convert_http_config_to_desc(
                        http_endpoint_destination_configuration
                    ),
                )
            )
        if splunk_destination_configuration:
            LOG.warning(
                "Delivery stream contains a splunk destination (which is currently not supported)."
            )
        if redshift_destination_configuration:
            LOG.warning(
                "Delivery stream contains a redshift destination (which is currently not supported)."
            )

        stream = DeliveryStreamDescription(
            DeliveryStreamName=delivery_stream_name,
            DeliveryStreamARN=firehose_stream_arn(
                stream_name=delivery_stream_name,
                account_id=context.account_id,
                region_name=context.region,
            ),
            DeliveryStreamStatus=DeliveryStreamStatus.ACTIVE,
            DeliveryStreamType=delivery_stream_type,
            HasMoreDestinations=False,
            VersionId="1",
            CreateTimestamp=datetime.now(),
            LastUpdateTimestamp=datetime.now(),
            Destinations=destinations,
            Source=convert_source_config_to_desc(
                kinesis_stream_source_configuration),
        )
        FirehoseBackend.TAGS.tag_resource(stream["DeliveryStreamARN"], tags)
        region.delivery_streams[delivery_stream_name] = stream

        # record event
        event_publisher.fire_event(
            event_publisher.EVENT_FIREHOSE_CREATE_STREAM,
            payload={"n": event_publisher.get_hash(delivery_stream_name)},
        )

        if delivery_stream_type == DeliveryStreamType.KinesisStreamAsSource:
            if not kinesis_stream_source_configuration:
                raise InvalidArgumentException(
                    "Missing delivery stream configuration")
            kinesis_stream_name = kinesis_stream_source_configuration[
                "KinesisStreamARN"].split("/")[1]
            kinesis_connector.listen_to_kinesis(
                stream_name=kinesis_stream_name,
                fh_d_stream=delivery_stream_name,
                listener_func=self._process_records,
                wait_until_started=True,
                ddb_lease_table_suffix="-firehose",
            )
        return CreateDeliveryStreamOutput(
            DeliveryStreamARN=stream["DeliveryStreamARN"])