Example 1
def send_to_gg_stream_manager(s3_stream_client: StreamManagerClient,
                              file_url: str, s3_key_name: str):
    print("In Send to GG Stream Manager Function", flush=True)
    #s3_key_name="processed_video_frames/"+file_prefix+"/"
    print("Input URL, bucket and key are ::::  {} - {} - {} ".format(
        "file://" + file_url, s3_bucket_name, s3_key_name),
          flush=True)
    try:
        s3_export_task_definition = S3ExportTaskDefinition(
            input_url="file://" + file_url,
            bucket=s3_bucket_name,
            key=s3_key_name)
        print("Task definition created successfully....", flush=True)
        sequence_number = s3_stream_client.append_message(
            stream_name,
            Util.validate_and_serialize_to_json_bytes(
                s3_export_task_definition))
        print("Successfully appended to stream with sequence number {}".format(
            sequence_number),
              flush=True)
        is_upload_success = False
        while not is_upload_success:
            try:
                messages_list = s3_stream_client.read_messages(
                    status_stream_name,
                    ReadMessagesOptions(min_message_count=1,
                                        read_timeout_millis=10000))
                for message in messages_list:
                    # Deserialize the status message first.
                    status_message = Util.deserialize_json_bytes_to_obj(
                        message.payload, StatusMessage)
                    if status_message.status == Status.Success:
                        print(
                            "Successfully uploaded file: {} to S3 bucket: {} and the location is: {}"
                            .format("file://" + file_url, s3_bucket_name,
                                    s3_key_name),
                            flush=True)
                        is_upload_success = True
                    elif status_message.status == Status.Failure or status_message.status == Status.Canceled:
                        print(
                            "Unable to upload file:{} to S3 bucket:{}".format(
                                "file://" + file_url, s3_bucket_name),
                            flush=True)
                        is_upload_success = True
            except StreamManagerException:
                print("Exception occurred while sending message to S3.. {} ",
                      sys.exc_info()[0],
                      flush=True)
    except asyncio.TimeoutError:
        print("Timed out while executing.. {} ", sys.exc_info()[0], flush=True)
    except Exception:
        print("Exception while running.. {} ", sys.exc_info()[0], flush=True)
Example 2
def init_gg_stream_manager():
    print("Initializing Stream manager.....", flush=True)
    s3_stream_client = StreamManagerClient()
    try:
        s3_stream_client.delete_message_stream(stream_name=stream_name)
    except ResourceNotFoundException:
        pass
    try:
        s3_stream_client.delete_message_stream(stream_name=status_stream_name)
    except ResourceNotFoundException:
        pass

    try:
        # Create the Status Stream.
        s3_stream_client.create_message_stream(
            MessageStreamDefinition(
                name=status_stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                persistence=Persistence.Memory))
    except StreamManagerException:
        pass

    my_s3_export_definition = ExportDefinition(s3_task_executor=[
        S3ExportTaskExecutorConfig(
            identifier="s3_task_exe_" + stream_name,
            status_config=StatusConfig(
                status_level=StatusLevel.TRACE,  # Default is INFO level statuses.
                # Status Stream should be created before specifying in S3 Export Config.
                status_stream_name=status_stream_name,
            ),
        )
    ])

    try:
        s3_stream_client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                max_size=268435456,  # Default is 256 MB.
                stream_segment_size=16777216,  # Default is 16 MB.
                time_to_live_millis=None,  # By default, no TTL is enabled.
                strategy_on_full=StrategyOnFull.OverwriteOldestData,  # Required.
                persistence=Persistence.File,  # Default is File.
                flush_on_write=False,  # Default is false.
                export_definition=my_s3_export_definition))
    except StreamManagerException:
        pass
    return s3_stream_client
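
Taken together, Examples 1 and 2 suggest a simple flow: create the client and both streams once at startup, then queue each processed file for upload. A hypothetical caller (the file path and key below are placeholders) could look like this; note that send_to_gg_stream_manager prepends "file://" itself, so it expects a bare path:

# Hypothetical wiring of the two functions above.
s3_stream_client = init_gg_stream_manager()
send_to_gg_stream_manager(s3_stream_client,
                          file_url="/tmp/processed_video_frames/frame_0001.jpg",
                          s3_key_name="processed_video_frames/frame_0001.jpg")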
Example 3
def main(logger):
    # Start with no client so the finally block can check it even if the
    # StreamManagerClient constructor raises.
    client = None
    try:
        stream_name = "SomeStream"
        client = StreamManagerClient()

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(iot_sitewise=[
            IoTSiteWiseConfig(identifier="IoTSiteWiseExport" + stream_name,
                              batch_size=5)
        ])
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                export_definition=exports))

        logger.info(
            "Now going to start writing random IoTSiteWiseEntry to the stream")
        # Now start putting in random site wise entries.
        while True:
            logger.debug("Appending new random IoTSiteWiseEntry to stream")
            client.append_message(
                stream_name,
                Util.validate_and_serialize_to_json_bytes(
                    get_random_site_wise_entry()))
            time.sleep(1)
    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        if client:
            client.close()
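
Example 3 calls get_random_site_wise_entry(), which is not shown here. Based on the SDK types visible in Example 7, a sketch of such a helper (the property alias and reading are made up) might be:

import calendar
import random
import time
import uuid

from stream_manager import (AssetPropertyValue, PutAssetPropertyValueEntry,
                            Quality, TimeInNanos, Variant)


def get_random_site_wise_entry():
    # One asset property value with the current timestamp and a random reading.
    value = AssetPropertyValue(
        value=Variant(double_value=random.random()),
        quality=Quality.GOOD,
        timestamp=TimeInNanos(time_in_seconds=calendar.timegm(time.gmtime()),
                              offset_in_nanos=0))
    return PutAssetPropertyValueEntry(
        entry_id=str(uuid.uuid4()),
        property_alias="/some/property/alias",  # Placeholder alias.
        property_values=[value])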
Example 4
def main(logger):
    # Start with no client so the finally block can check it even if the
    # StreamManagerClient constructor raises.
    client = None
    try:
        stream_name = "SomeStream"
        status_stream_name = "SomeStatusStreamName"
        bucket_name = "SomeBucket"
        key_name = "SomeKey"
        file_url = "file:/path/to/some/file.someExtension"
        client = StreamManagerClient()

        # Try deleting the status stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=status_stream_name)
        except ResourceNotFoundException:
            pass

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(s3_task_executor=[
            S3ExportTaskExecutorConfig(
                identifier="S3TaskExecutor" + stream_name,  # Required
                # Optional. Add an export status stream to add statuses for all S3 upload tasks.
                status_config=StatusConfig(
                    status_level=StatusLevel.INFO,  # Default is INFO level statuses.
                    # Status Stream should be created before specifying in S3 Export Config.
                    status_stream_name=status_stream_name,
                ),
            )
        ])

        # Create the Status Stream.
        client.create_message_stream(
            MessageStreamDefinition(
                name=status_stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData))

        # Create the message stream with the S3 Export definition.
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                export_definition=exports))

        # Append an S3 task definition and print the sequence number
        s3_export_task_definition = S3ExportTaskDefinition(input_url=file_url,
                                                           bucket=bucket_name,
                                                           key=key_name)
        logger.info(
            "Successfully appended S3 Task Definition to stream with sequence number %d",
            client.append_message(
                stream_name,
                Util.validate_and_serialize_to_json_bytes(
                    s3_export_task_definition)),
        )

        # Read the statuses from the export status stream
        is_file_uploaded_to_s3 = False
        while not is_file_uploaded_to_s3:
            try:
                messages_list = client.read_messages(
                    status_stream_name,
                    ReadMessagesOptions(min_message_count=1,
                                        read_timeout_millis=1000))
                for message in messages_list:
                    # Deserialize the status message first.
                    status_message = Util.deserialize_json_bytes_to_obj(
                        message.payload, StatusMessage)

                    # Check the status of the status message. If the status is "Success",
                    # the file was successfully uploaded to S3.
                    # If the status is "Failure" or "Canceled", the server was unable to
                    # upload the file to S3; the reason is logged from the status message.
                    # If the status is "InProgress", the server has started uploading the S3 task.
                    if status_message.status == Status.Success:
                        logger.info("Successfully uploaded file at path " +
                                    file_url + " to S3.")
                        is_file_uploaded_to_s3 = True
                    elif status_message.status == Status.Failure or status_message.status == Status.Canceled:
                        logger.info("Unable to upload file at path " +
                                    file_url + " to S3. Message: " +
                                    status_message.message)
                        is_file_uploaded_to_s3 = True
                time.sleep(5)
            except StreamManagerException:
                logger.exception("Exception while running")
    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        if client:
            client.close()
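
The main(logger) samples above take a logger argument but never show how the script is started. A minimal entry point, assuming the standard logging module, could be:

import logging

logging.basicConfig(level=logging.INFO)
# Start the sample with the root logger.
main(logger=logging.getLogger())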
Example 5
        connection = Connection(
            host_name=hostname,
            port=8033,
            bootstrap=bootstrap,
            socket_options=socket_options,
            connect_message_amender=amender,
        )
        self.lifecycle_handler = LifecycleHandler()
        connect_future = connection.connect(self.lifecycle_handler)
        connect_future.result(TIMEOUT)
        return connection


try:
    stream_name = "OPCUAStream"
    streamClient = StreamManagerClient()

    try:
        streamClient.delete_message_stream(stream_name=stream_name)
    except ResourceNotFoundException:
        pass
    exports = ExportDefinition(iot_sitewise=[
        IoTSiteWiseConfig(identifier="IoTSiteWiseExport" + stream_name,
                          batch_size=5)
    ])
    streamClient.create_message_stream(
        MessageStreamDefinition(
            name=stream_name,
            strategy_on_full=StrategyOnFull.OverwriteOldestData,
            export_definition=exports))
    print("Now going to start writing IoTSiteWiseEntry to the stream")
Example 6
def main(logger):
    # Start with no client so the finally block can check it even if the
    # StreamManagerClient constructor raises.
    client = None
    try:
        stream_name = "SomeStream"
        kinesis_stream_name = "MyKinesisStream"

        # Create a client for the StreamManager
        client = StreamManagerClient()

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(
            kinesis=[KinesisConfig(identifier="KinesisExport" + stream_name, kinesis_stream_name=kinesis_stream_name)]
        )
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name, strategy_on_full=StrategyOnFull.OverwriteOldestData, export_definition=exports
            )
        )

        # Append 2 messages and print their sequence numbers
        logger.info(
            "Successfully appended message to stream with sequence number %d",
            client.append_message(stream_name, "ABCDEFGHIJKLMNO".encode("utf-8")),
        )
        logger.info(
            "Successfully appended message to stream with sequence number %d",
            client.append_message(stream_name, "PQRSTUVWXYZ".encode("utf-8")),
        )

        # Try reading the 2 messages we just appended and print them out
        logger.info(
            "Successfully read 2 messages: %s",
            client.read_messages(stream_name, ReadMessagesOptions(min_message_count=2, read_timeout_millis=1000)),
        )

        logger.info("Now going to start writing random integers between 0 and 1000 to the stream")
        # Now start putting in random data between 0 and 1000 to emulate device sensor input
        while True:
            logger.debug("Appending new random integer to stream")
            client.append_message(stream_name, random.randint(0, 1000).to_bytes(length=4, signed=True, byteorder="big"))
            time.sleep(1)

    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        # Always close the client to avoid resource leaks
        if client:
            client.close()
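
Example 6 encodes each random integer as a 4-byte, big-endian, signed value. A consumer reading the same stream would reverse that encoding, for example:

# Read back a few messages and decode the 4-byte big-endian integers.
messages = client.read_messages(
    stream_name,
    ReadMessagesOptions(min_message_count=1, read_timeout_millis=1000))
for message in messages:
    value = int.from_bytes(message.payload, byteorder="big", signed=True)
    print("Decoded value:", value)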
Example 7
TIMEOUT = 10



def createSWEntry(propertyAlias, variant):
    time_in_nanos = TimeInNanos(
        time_in_seconds=calendar.timegm(time.gmtime()) - random.randint(0, 60), offset_in_nanos=random.randint(0, 10000)
    )
    asset = [AssetPropertyValue(value=variant, quality=Quality.GOOD, timestamp=time_in_nanos)]
    return PutAssetPropertyValueEntry(entry_id=str(uuid.uuid4()), property_alias=propertyAlias, property_values=asset)


try:
    stream_name = "TemperatureHumidityStream"
    streamClient = StreamManagerClient()

    try:
        streamClient.delete_message_stream(stream_name=stream_name)
    except ResourceNotFoundException:
        pass
    exports = ExportDefinition(
        iot_sitewise=[IoTSiteWiseConfig(identifier="IoTSiteWiseExport" + stream_name, batch_size=5)]
    )
    streamClient.create_message_stream(
        MessageStreamDefinition(
            name=stream_name, strategy_on_full=StrategyOnFull.OverwriteOldestData, export_definition=exports
        )
    )
    print("Now going to start writing IoTSiteWiseEntry to the stream")
except StreamManagerException as e:
    print("Exception while running: {}".format(e), flush=True)
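
Example 7 defines createSWEntry but the write loop that uses it is cut off. Given the SDK's Variant type, and assuming Util from stream_manager.util as in the earlier examples, a sketch of the missing loop (the property alias and readings are placeholders) might be:

# Hypothetical write loop for the temperature/humidity stream.
while True:
    temperature_entry = createSWEntry(
        "/device/temperature", Variant(double_value=random.uniform(20.0, 30.0)))
    streamClient.append_message(
        stream_name, Util.validate_and_serialize_to_json_bytes(temperature_entry))
    time.sleep(1)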