Example 1
def main(logger):
    """Demonstrate Stream Manager with a Kinesis export.

    Creates (after deleting any stale copy) a message stream that exports to
    the Kinesis stream "MyKinesisStream", appends two demo messages, reads
    them back, then appends a random 4-byte integer every second forever.

    :param logger: logger used for progress and error reporting.
    """
    # Initialize before the try block so the finally clause can safely
    # reference `client` even when StreamManagerClient() itself raises
    # (previously that path raised NameError and masked the real error).
    client = None
    try:
        stream_name = "SomeStream"
        kinesis_stream_name = "MyKinesisStream"

        # Create a client for the StreamManager
        client = StreamManagerClient()

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(
            kinesis=[KinesisConfig(identifier="KinesisExport" + stream_name, kinesis_stream_name=kinesis_stream_name)]
        )
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name, strategy_on_full=StrategyOnFull.OverwriteOldestData, export_definition=exports
            )
        )

        # Append 2 messages and log their sequence numbers
        logger.info(
            "Successfully appended message to stream with sequence number %d",
            client.append_message(stream_name, "ABCDEFGHIJKLMNO".encode("utf-8")),
        )
        logger.info(
            "Successfully appended message to stream with sequence number %d",
            client.append_message(stream_name, "PQRSTUVWXYZ".encode("utf-8")),
        )

        # Try reading the 2 messages we just appended and log them
        logger.info(
            "Successfully read 2 messages: %s",
            client.read_messages(stream_name, ReadMessagesOptions(min_message_count=2, read_timeout_millis=1000)),
        )

        logger.info("Now going to start writing random integers between 0 and 1000 to the stream")
        # Now start putting in random data between 0 and 1000 to emulate device sensor input
        while True:
            logger.debug("Appending new random integer to stream")
            # 4-byte big-endian signed encoding; range 0..1000 always fits.
            client.append_message(stream_name, random.randint(0, 1000).to_bytes(length=4, signed=True, byteorder="big"))
            time.sleep(1)

    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        # Always close the client to avoid resource leaks
        if client:
            client.close()
Example 2
def send_to_gg_stream_manager(s3_stream_client: StreamManagerClient,
                              file_url: str, s3_key_name: str):
    """Queue a local file for S3 upload via Stream Manager and wait for the result.

    Appends an S3 export task (file://<file_url> -> s3_bucket_name/s3_key_name)
    to the module-level `stream_name`, then polls `status_stream_name` until a
    terminal status (Success, Failure, or Canceled) is observed.

    NOTE(review): relies on module-level globals `stream_name`,
    `status_stream_name` and `s3_bucket_name` being defined — confirm at call site.

    :param s3_stream_client: connected StreamManagerClient.
    :param file_url: local filesystem path of the file to upload (no scheme).
    :param s3_key_name: destination key within the S3 bucket.
    """
    print("In Send to GG Stream Manager Function", flush=True)
    print("Input URL, bucket and key are ::::  {} - {} - {} ".format(
        "file://" + file_url, s3_bucket_name, s3_key_name),
          flush=True)
    try:
        s3_export_task_definition = S3ExportTaskDefinition(
            input_url="file://" + file_url,
            bucket=s3_bucket_name,
            key=s3_key_name)
        print("Task definition created successfully....", flush=True)
        sequence_number = s3_stream_client.append_message(
            stream_name,
            Util.validate_and_serialize_to_json_bytes(
                s3_export_task_definition))
        print("Successfully appended to stream with sequence number {}".format(
            sequence_number),
              flush=True)
        is_upload_success = False
        while not is_upload_success:
            try:
                messages_list = s3_stream_client.read_messages(
                    status_stream_name,
                    ReadMessagesOptions(min_message_count=1,
                                        read_timeout_millis=10000))
                for message in messages_list:
                    # Deserialize the status message first.
                    status_message = Util.deserialize_json_bytes_to_obj(
                        message.payload, StatusMessage)
                    if status_message.status == Status.Success:
                        print(
                            "Successfully uploaded file: {} to S3 bucket: {} and the location is: {}"
                            .format("file://" + file_url, s3_bucket_name,
                                    s3_key_name),
                            flush=True)
                        is_upload_success = True
                    elif status_message.status == Status.Failure or status_message.status == Status.Canceled:
                        # Failure/Canceled are terminal too: stop polling.
                        print(
                            "Unable to upload file:{} to S3 bucket:{}".format(
                                "file://" + file_url, s3_bucket_name),
                            flush=True)
                        is_upload_success = True
            except StreamManagerException:
                # Fixed: the "{}" placeholder was previously printed verbatim
                # because sys.exc_info()[0] was passed as a separate print arg
                # instead of through .format().
                print("Exception occurred while sending message to S3.. {}".format(
                    sys.exc_info()[0]),
                      flush=True)
    except asyncio.TimeoutError:
        print("Timed out while executing.. {}".format(sys.exc_info()[0]),
              flush=True)
    except Exception:
        print("Exception while running.. {}".format(sys.exc_info()[0]),
              flush=True)
Example 3
def main(logger):
    """Demonstrate Stream Manager with an IoT SiteWise export.

    Creates (after deleting any stale copy) a message stream that exports to
    IoT SiteWise in batches of 5, then appends one random IoTSiteWiseEntry per
    second forever.

    :param logger: logger used for progress and error reporting.
    """
    # Initialize before the try block so the finally clause can safely
    # reference `client` even when StreamManagerClient() itself raises
    # (previously that path raised NameError and masked the real error).
    client = None
    try:
        stream_name = "SomeStream"
        client = StreamManagerClient()

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(iot_sitewise=[
            IoTSiteWiseConfig(identifier="IoTSiteWiseExport" + stream_name,
                              batch_size=5)
        ])
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                export_definition=exports))

        logger.info(
            "Now going to start writing random IoTSiteWiseEntry to the stream")
        # Now start putting in random site wise entries.
        while True:
            logger.debug("Appending new random IoTSiteWiseEntry to stream")
            client.append_message(
                stream_name,
                Util.validate_and_serialize_to_json_bytes(
                    get_random_site_wise_entry()))
            time.sleep(1)
    except asyncio.TimeoutError:
        # Use the provided logger (with traceback) instead of bare print,
        # consistent with the error handling in the sibling examples.
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        if client:
            client.close()
Example 4
def main(logger):
    """Demonstrate Stream Manager with an S3 export task.

    Creates a status stream and a message stream with an S3 task-executor
    export, appends one S3 upload task definition, then polls the status
    stream until the upload reaches a terminal status (Success, Failure,
    or Canceled).

    :param logger: logger used for progress and error reporting.
    """
    # Initialize before the try block so the finally clause can safely
    # reference `client` even when StreamManagerClient() itself raises
    # (previously that path raised NameError and masked the real error).
    client = None
    try:
        stream_name = "SomeStream"
        status_stream_name = "SomeStatusStreamName"
        bucket_name = "SomeBucket"
        key_name = "SomeKey"
        file_url = "file:/path/to/some/file.someExtension"
        client = StreamManagerClient()

        # Try deleting the status stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=status_stream_name)
        except ResourceNotFoundException:
            pass

        # Try deleting the stream (if it exists) so that we have a fresh start
        try:
            client.delete_message_stream(stream_name=stream_name)
        except ResourceNotFoundException:
            pass

        exports = ExportDefinition(s3_task_executor=[
            S3ExportTaskExecutorConfig(
                identifier="S3TaskExecutor" + stream_name,  # Required
                # Optional. Add an export status stream to add statuses for all S3 upload tasks.
                status_config=StatusConfig(
                    status_level=StatusLevel.
                    INFO,  # Default is INFO level statuses.
                    # Status Stream should be created before specifying in S3 Export Config.
                    status_stream_name=status_stream_name,
                ),
            )
        ])

        # Create the Status Stream.
        client.create_message_stream(
            MessageStreamDefinition(
                name=status_stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData))

        # Create the message stream with the S3 Export definition.
        client.create_message_stream(
            MessageStreamDefinition(
                name=stream_name,
                strategy_on_full=StrategyOnFull.OverwriteOldestData,
                export_definition=exports))

        # Append a S3 Task definition and log the sequence number
        s3_export_task_definition = S3ExportTaskDefinition(input_url=file_url,
                                                           bucket=bucket_name,
                                                           key=key_name)
        logger.info(
            "Successfully appended S3 Task Definition to stream with sequence number %d",
            client.append_message(
                stream_name,
                Util.validate_and_serialize_to_json_bytes(
                    s3_export_task_definition)),
        )

        # Read the statuses from the export status stream
        is_file_uploaded_to_s3 = False
        while not is_file_uploaded_to_s3:
            try:
                messages_list = client.read_messages(
                    status_stream_name,
                    ReadMessagesOptions(min_message_count=1,
                                        read_timeout_millis=1000))
                for message in messages_list:
                    # Deserialize the status message first.
                    status_message = Util.deserialize_json_bytes_to_obj(
                        message.payload, StatusMessage)

                    # Check the status of the status message. If the status is "Success",
                    # the file was successfully uploaded to S3.
                    # If the status was either "Failure" or "Cancelled", the server was unable to upload the file to S3.
                    # We will print the message for why the upload to S3 failed from the status message.
                    # If the status was "InProgress", the status indicates that the server has started uploading
                    # the S3 task.
                    if status_message.status == Status.Success:
                        logger.info("Successfully uploaded file at path " +
                                    file_url + " to S3.")
                        is_file_uploaded_to_s3 = True
                    elif status_message.status == Status.Failure or status_message.status == Status.Canceled:
                        logger.info("Unable to upload file at path " +
                                    file_url + " to S3. Message: " +
                                    status_message.message)
                        is_file_uploaded_to_s3 = True
                time.sleep(5)
            except StreamManagerException:
                # Best-effort polling: log and retry the read.
                logger.exception("Exception while running")
    except asyncio.TimeoutError:
        logger.exception("Timed out while executing")
    except Exception:
        logger.exception("Exception while running")
    finally:
        if client:
            client.close()
Example 5
        value = float(dataValue.Value)
        topic = "OPCUAServer1/test/myVariable"
        qos = QOS.AT_LEAST_ONCE

        request = PublishToIoTCoreRequest()
        request.topic_name = topic
        request.payload = bytes('{"value": "' + str(value) + '"}', "utf-8")
        request.qos = qos
        operation = ipc_client.new_publish_to_iot_core()
        operation.activate(request)
        future = operation.get_response()
        future.result(TIMEOUT)

        variant = Variant(double_value=value)
        siteWiseTopic = "/testvariable/opcua"

        try:
            print("Appending IoTSiteWiseEntry to stream")
            streamClient.append_message(
                stream_name,
                Util.validate_and_serialize_to_json_bytes(
                    createSWEntry(siteWiseTopic, variant)))
        except StreamManagerException as e:
            print(e)
            print(type(e))

        time.sleep(1)

finally:
    opcUAclient.disconnect()