Example No. 1
0
def main(logger):
    """Upload a local file to S3 through Stream Manager's S3 export feature.

    Creates a status stream and a message stream configured with an S3
    export task executor, appends an S3 export task definition pointing at
    a local file, then polls the status stream until the upload reports
    Success, Failure, or Canceled.

    Args:
        logger: logging.Logger used for progress and error reporting.
    """
    # Bind client before the try block so the finally clause cannot raise
    # NameError when StreamManagerClient() itself fails to construct.
    client = None
    try:
        stream_name = "SomeStream"
        status_stream_name = "SomeStatusStreamName"
        bucket_name = "SomeBucket"
        key_name = "SomeKey"
        file_url = "file:/path/to/some/file.someExtension"
        client = StreamManagerClient()

        # Try deleting the status stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=status_stream_name)
        except ResourceNotFoundException:
            pass

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(s3_task_executor=[
            S3ExportTaskExecutorConfig(
                identifier="S3TaskExecutor" + stream_name,  # Required
                # Optional. Add an export status stream to receive statuses
                # for all S3 upload tasks.
                status_config=StatusConfig(
                    status_level=StatusLevel.INFO,  # Default is INFO level statuses.
                    # Status stream must exist before it is referenced in
                    # the S3 export config, hence it is created below first.
                    status_stream_name=status_stream_name,
                ),
            )
        ])

        # Create the status stream first (see note above).
        client.create_message_stream(
            MessageStreamDefinition(
                name=status_stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData))

        # Create the message stream with the S3 export definition.
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                export_definition=exports))

        # Append an S3 task definition and log the assigned sequence number.
        s3_export_task_definition = S3ExportTaskDefinition(input_url=file_url,
                                                           bucket=bucket_name,
                                                           key=key_name)
        logger.info(
            "Successfully appended S3 Task Definition to stream with sequence number %d",
            client.append_message(
                stream_name,
                Util.validate_and_serialize_to_json_bytes(
                    s3_export_task_definition)),
        )

        # Poll the status stream until the upload reaches a terminal state.
        is_file_uploaded_to_s3 = False
        while not is_file_uploaded_to_s3:
            try:
                messages_list = client.read_messages(
                    status_stream_name,
                    ReadMessagesOptions(min_message_count=1,
                                        read_timeout_millis=1000))
                for message in messages_list:
                    # Deserialize the status message first.
                    status_message = Util.deserialize_json_bytes_to_obj(
                        message.payload, StatusMessage)

                    # "Success" means the file was uploaded to S3.
                    # "Failure"/"Canceled" mean the server could not upload it;
                    # the reason is carried in status_message.message.
                    # "InProgress" means the server has started the S3 task,
                    # so we keep polling.
                    if status_message.status == Status.Success:
                        logger.info("Successfully uploaded file at path " +
                                    file_url + " to S3.")
                        is_file_uploaded_to_s3 = True
                    elif status_message.status == Status.Failure or status_message.status == Status.Canceled:
                        logger.info("Unable to upload file at path " +
                                    file_url + " to S3. Message: " +
                                    status_message.message)
                        is_file_uploaded_to_s3 = True
                # Wait before polling again to avoid a tight retry loop.
                time.sleep(5)
            except StreamManagerException:
                logger.exception("Exception while running")
    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        # Always close the client to avoid resource leaks.
        if client:
            client.close()
def main(logger):
    """Stream messages to Kinesis through Stream Manager's Kinesis export.

    Creates a message stream with a Kinesis export definition, appends two
    demo messages and reads them back, then loops forever appending a
    random integer each second to emulate device sensor input.

    Args:
        logger: logging.Logger used for progress and error reporting.
    """
    # Bind client before the try block so the finally clause cannot raise
    # NameError when StreamManagerClient() itself fails to construct.
    client = None
    try:
        stream_name = "SomeStream"
        kinesis_stream_name = "MyKinesisStream"

        # Create a client for the StreamManager
        client = StreamManagerClient()

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(kinesis=[
            KinesisConfig(identifier="KinesisExport" + stream_name,
                          kinesis_stream_name=kinesis_stream_name)
        ])
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                export_definition=exports))

        # Append 2 messages and log their sequence numbers.
        logger.info(
            "Successfully appended message to stream with sequence number %d",
            client.append_message(stream_name,
                                  "ABCDEFGHIJKLMNO".encode("utf-8")),
        )
        logger.info(
            "Successfully appended message to stream with sequence number %d",
            client.append_message(stream_name, "PQRSTUVWXYZ".encode("utf-8")),
        )

        # Read back the 2 messages we just appended and log them.
        logger.info(
            "Successfully read 2 messages: %s",
            client.read_messages(
                stream_name,
                ReadMessagesOptions(min_message_count=2,
                                    read_timeout_millis=1000)),
        )

        logger.info(
            "Now going to start writing random integers between 0 and 1000 to the stream"
        )
        # Now start putting in random data between 0 and 1000 to emulate device sensor input
        while True:
            logger.debug("Appending new random integer to stream")
            client.append_message(
                stream_name,
                random.randint(0, 1000).to_bytes(length=4,
                                                 signed=True,
                                                 byteorder="big"))
            time.sleep(1)

    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        # Always close the client to avoid resource leaks
        if client:
            client.close()