def send_message_to_output(self, message, output_name):
    """Sends an event/message to the given module output.

    These are outgoing events and are meant to be "output events".

    This is a synchronous event, meaning that this function will not return
    until the event has been sent to the service and the service has
    acknowledged receipt of the event.

    If the connection to the service has not previously been opened by a call
    to connect, this function will open the connection before sending the event.

    :param message: message to send to the given output. Anything passed that
        is not an instance of the Message class will be converted to Message object.
    :param output_name: Name of the output to send the event to.
    """
    # Coerce raw payloads into a Message wrapper before routing.
    outgoing = message if isinstance(message, Message) else Message(message)
    outgoing.output_name = output_name

    logger.info("Sending message to output:" + output_name + "...")

    # Block until the pipeline acknowledges the send.
    send_complete = EventedCallback()
    self._iothub_pipeline.send_output_event(outgoing, callback=send_complete)
    send_complete.wait_for_completion()

    logger.info("Successfully sent message to output: " + output_name)
def __init__(self, pipeline_configuration):
    """
    Constructor for instantiating a pipeline adapter object.

    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # NOTE: This pipeline DOES NOT handle SasToken management!
    # (i.e. using a SasTokenStage)
    # It instead relies on the parallel MQTT pipeline to handle that.
    #
    # Because they share a pipeline configuration, and MQTT has renewal logic we can be sure
    # that the SasToken in the pipeline configuration is valid.
    #
    # Furthermore, because HTTP doesn't require constant connections or long running tokens,
    # there's no need to reauthorize connections, so we can just pass the token from the config
    # when needed for auth.
    #
    # This is not an ideal solution, but it's the simplest one for the time being.

    # Contains data and information shared globally within the pipeline
    self._nucleus = pipeline_nucleus.PipelineNucleus(pipeline_configuration)

    self._pipeline = (
        pipeline_stages_base.PipelineRootStage(self._nucleus)
        .append_stage(pipeline_stages_iothub_http.IoTHubHTTPTranslationStage())
        .append_stage(pipeline_stages_http.HTTPTransportStage())
    )

    # Run the initialization op synchronously; wait_for_completion raises if setup failed.
    callback = EventedCallback()
    op = pipeline_ops_base.InitializePipelineOperation(callback=callback)
    self._pipeline.run_op(op)
    callback.wait_for_completion()
def test_calling_object_sets_event(self):
    """Invoking the callback object should set its completion event without error."""
    cb = EventedCallback()
    assert not cb.completion_event.isSet()

    cb()
    sleep(0.1)  # allow the callback time to run to completion

    assert cb.completion_event.isSet()
    assert not cb.exception
    cb.wait_for_completion()
def test_raises_error_without_return_arg_name(self, fake_error):
    """An error passed to the callback should be re-raised by wait_for_completion."""
    cb = EventedCallback()
    assert not cb.completion_event.isSet()

    cb(error=fake_error)
    sleep(0.1)  # allow the callback time to run to completion

    assert cb.completion_event.isSet()
    assert cb.exception == fake_error

    with pytest.raises(fake_error.__class__) as e_info:
        cb.wait_for_completion()
    # The exact same exception instance must propagate, not a copy.
    assert e_info.value is fake_error
def test_raises_error_with_return_arg_name(self, arbitrary_exception):
    """Even with a return_arg_name set, an error should be re-raised by wait_for_completion."""
    cb = EventedCallback(return_arg_name="arg_name")
    assert not cb.completion_event.isSet()

    cb(error=arbitrary_exception)
    sleep(0.1)  # allow the callback time to run to completion

    assert cb.completion_event.isSet()
    assert cb.exception == arbitrary_exception

    with pytest.raises(arbitrary_exception.__class__) as e_info:
        cb.wait_for_completion()
    # The exact same exception instance must propagate, not a copy.
    assert e_info.value is arbitrary_exception
def disconnect(self):
    """Disconnect the client from the Azure IoT Hub or Azure IoT Edge Hub instance.

    This is a synchronous call, meaning that this function will not return until
    the connection to the service has been completely closed.
    """
    logger.info("Disconnecting from Hub...")

    # Block until the pipeline reports the disconnect has finished.
    disconnect_complete = EventedCallback()
    self._iothub_pipeline.disconnect(callback=disconnect_complete)
    disconnect_complete.wait_for_completion()

    logger.info("Successfully disconnected from Hub")
def connect(self):
    """Connects the client to an Azure IoT Hub or Azure IoT Edge Hub instance.

    The destination is chosen based on the credentials passed via the auth_provider
    parameter that was provided when this object was initialized.

    This is a synchronous call, meaning that this function will not return until
    the connection to the service has been completely established.
    """
    logger.info("Connecting to Hub...")

    # Block until the pipeline reports the connection is established.
    connect_complete = EventedCallback()
    self._iothub_pipeline.connect(callback=connect_complete)
    connect_complete.wait_for_completion()

    logger.info("Successfully connected to Hub")
def cancel(self):
    """
    This is a synchronous call, meaning that this function will not return until the
    cancellation process has completed successfully or the attempt has resulted in a
    failure. Before returning the client will also disconnect from the provisioning
    service.

    In case there is no registration in process it will throw an error as there is no
    registration process to cancel.
    """
    logger.info("Cancelling the current registration process")

    # Block until the polling machine reports the cancellation is done.
    callback = EventedCallback()
    self._polling_machine.cancel(callback=callback)
    callback.wait_for_completion()

    logger.info("Successfully cancelled the current registration process")
def _enable_feature(self, feature_name):
    """Enable an Azure IoT Hub feature.

    This is a synchronous call, meaning that this function will not return until the
    feature has been enabled.

    :param feature_name: The name of the feature to enable.
        See azure.iot.device.common.pipeline.constant for possible values
    """
    logger.info("Enabling feature:" + feature_name + "...")

    # Block until the pipeline acknowledges the feature subscription.
    enable_complete = EventedCallback()
    self._mqtt_pipeline.enable_feature(feature_name, callback=enable_complete)
    enable_complete.wait_for_completion()

    logger.info("Successfully enabled feature:" + feature_name)
def test_calling_object_sets_event_with_return_arg_name(self, fake_return_arg_value):
    """The value passed under return_arg_name should be returned by wait_for_completion."""
    cb = EventedCallback(return_arg_name="arg_name")
    assert not cb.completion_event.isSet()

    cb(arg_name=fake_return_arg_value)
    sleep(0.1)  # allow the callback time to run to completion

    assert cb.completion_event.isSet()
    assert not cb.exception
    assert cb.wait_for_completion() == fake_return_arg_value
def _enable_feature(self, feature_name):
    """Enable an Azure IoT Hub feature.

    This is a synchronous call, meaning that this function will not return until the
    feature has been enabled.

    :param feature_name: The name of the feature to enable.
        See azure.iot.device.common.pipeline.constant for possible values
    """
    logger.info("Enabling feature:" + feature_name + "...")

    if not self._mqtt_pipeline.feature_enabled[feature_name]:
        # Block until the pipeline acknowledges the feature subscription.
        callback = EventedCallback()
        self._mqtt_pipeline.enable_feature(feature_name, callback=callback)
        callback.wait_for_completion()

        logger.info("Successfully enabled feature:" + feature_name)
    else:
        # This branch shouldn't be reached, but in case it is, log it.
        # BUGFIX: the feature is already *enabled* here; the previous message
        # incorrectly said "already disabled".
        logger.info("Feature ({}) already enabled - skipping".format(feature_name))
def __init__(self, pipeline_configuration):
    """
    Constructor for instantiating a pipeline adapter object.

    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # HTTP pipeline: root -> SAS token renewal -> IoTHub/HTTP translation -> HTTP transport
    self._pipeline = (
        pipeline_stages_base.PipelineRootStage(pipeline_configuration)
        .append_stage(pipeline_stages_base.SasTokenRenewalStage())
        .append_stage(pipeline_stages_iothub_http.IoTHubHTTPTranslationStage())
        .append_stage(pipeline_stages_http.HTTPTransportStage())
    )

    # Run the initialization op synchronously; wait_for_completion raises if setup failed.
    callback = EventedCallback()
    op = pipeline_ops_base.InitializePipelineOperation(callback=callback)
    self._pipeline.run_op(op)
    callback.wait_for_completion()
def send_method_response(self, method_response):
    """Send a response to a method request via the Azure IoT Hub or Azure IoT Edge Hub.

    This is a synchronous event, meaning that this function will not return until the
    event has been sent to the service and the service has acknowledged receipt of the
    event.

    If the connection to the service has not previously been opened by a call to
    connect, this function will open the connection before sending the event.

    :param method_response: The MethodResponse to send.
    :type method_response: MethodResponse
    """
    logger.info("Sending method response to Hub...")

    # Block until the pipeline acknowledges the send.
    send_complete = EventedCallback()
    self._iothub_pipeline.send_method_response(method_response, callback=send_complete)
    send_complete.wait_for_completion()

    logger.info("Successfully sent method response to Hub")
def send_message(self, message):
    """Sends a message to the default events endpoint on the Azure IoT Hub or Azure IoT
    Edge Hub instance.

    This is a synchronous event, meaning that this function will not return until the
    event has been sent to the service and the service has acknowledged receipt of the
    event.

    If the connection to the service has not previously been opened by a call to
    connect, this function will open the connection before sending the event.

    :param message: The actual message to send. Anything passed that is not an instance
        of the Message class will be converted to Message object.
    """
    # Coerce raw payloads into a Message wrapper before sending.
    if not isinstance(message, Message):
        message = Message(message)

    logger.info("Sending message to Hub...")

    # Block until the pipeline acknowledges the send.
    send_complete = EventedCallback()
    self._iothub_pipeline.send_message(message, callback=send_complete)
    send_complete.wait_for_completion()

    logger.info("Successfully sent message to Hub")
def patch_twin_reported_properties(self, reported_properties_patch):
    """
    Update reported properties with the Azure IoT Hub or Azure IoT Edge Hub service.

    This is a synchronous call, meaning that this function will not return until the
    patch has been sent to the service and acknowledged.

    If the service returns an error on the patch operation, this function will raise
    the appropriate error.

    :param reported_properties_patch: The patch to apply to reported properties.
    :type reported_properties_patch: dict, str, int, float, bool, or None
        (JSON compatible values)
    """
    # The twin feature must be enabled before a patch can be sent.
    if not self._iothub_pipeline.feature_enabled[constant.TWIN]:
        self._enable_feature(constant.TWIN)

    # Block until the service acknowledges the patch.
    patch_complete = EventedCallback()
    self._iothub_pipeline.patch_twin_reported_properties(
        patch=reported_properties_patch, callback=patch_complete
    )
    patch_complete.wait_for_completion()

    logger.info("Successfully patched twin")
def __init__(self, auth_provider, pipeline_configuration):
    """
    Constructor for instantiating a pipeline adapter object.

    :param auth_provider: The authentication provider
    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # HTTP pipeline: root -> auth provider -> IoTHub/HTTP translation -> HTTP transport
    self._pipeline = (
        pipeline_stages_base.PipelineRootStage(pipeline_configuration=pipeline_configuration)
        .append_stage(pipeline_stages_iothub.UseAuthProviderStage())
        .append_stage(pipeline_stages_iothub_http.IoTHubHTTPTranslationStage())
        .append_stage(pipeline_stages_http.HTTPTransportStage())
    )

    setup_complete = EventedCallback()
    if isinstance(auth_provider, X509AuthenticationProvider):
        op = pipeline_ops_iothub.SetX509AuthProviderOperation(
            auth_provider=auth_provider, callback=setup_complete
        )
    else:
        # Currently everything else goes via this block.
        op = pipeline_ops_iothub.SetAuthProviderOperation(
            auth_provider=auth_provider, callback=setup_complete
        )

    # Run the auth setup op synchronously; raises if setup failed.
    self._pipeline.run_op(op)
    setup_complete.wait_for_completion()
def register(self):
    """
    Register the device with the provisioning service.

    This is a synchronous call, meaning that this function will not return until the
    registration process has completed successfully or the attempt has resulted in a
    failure. Before returning the client will also disconnect from the provisioning
    service.
    If a registration attempt is made while a previous registration is in progress it
    may throw an error.

    :returns: The registration result received from the provisioning service.
    """
    logger.info("Registering with Provisioning Service...")

    # The completed operation passes its outcome via the "result" kwarg.
    register_complete = EventedCallback(return_arg_name="result")
    self._polling_machine.register(callback=register_complete)
    result = register_complete.wait_for_completion()

    log_on_register_complete(result)
    return result
def get_twin(self):
    """
    Gets the device or module twin from the Azure IoT Hub or Azure IoT Edge Hub service.

    This is a synchronous call, meaning that this function will not return until the
    twin has been retrieved from the service.

    :returns: Twin object which was retrieved from the hub
    """
    # The twin feature must be enabled before the twin can be fetched.
    if not self._iothub_pipeline.feature_enabled[constant.TWIN]:
        self._enable_feature(constant.TWIN)

    # The completed operation passes its outcome via the "twin" kwarg.
    request_complete = EventedCallback(return_arg_name="twin")
    self._iothub_pipeline.get_twin(callback=request_complete)
    retrieved_twin = request_complete.wait_for_completion()

    logger.info("Successfully retrieved twin")
    return retrieved_twin
def __init__(self, security_client):
    """
    Constructor for instantiating a pipeline

    :param security_client: The security client which stores credentials
    :raises ValueError: if the security client is of an unsupported type
    """
    # Event Handlers - Will be set by Client after instantiation of pipeline
    self.on_connected = None
    self.on_disconnected = None
    self.on_message_received = None

    self._pipeline = (
        pipeline_stages_base.PipelineRootStage()
        .append_stage(pipeline_stages_provisioning.UseSecurityClientStage())
        .append_stage(pipeline_stages_provisioning_mqtt.ProvisioningMQTTConverterStage())
        .append_stage(pipeline_stages_base.EnsureConnectionStage())
        .append_stage(pipeline_stages_base.SerializeConnectOpsStage())
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage())
    )

    def _on_pipeline_event(event):
        # Only registration responses are expected; anything else is dropped.
        if isinstance(event, pipeline_events_provisioning.RegistrationResponseEvent):
            if self.on_message_received:
                self.on_message_received(
                    event.request_id,
                    event.status_code,
                    event.key_values,
                    event.response_payload,
                )
            else:
                logger.warning("Provisioning event received with no handler. dropping.")
        else:
            logger.warning("Dropping unknown pipeline event {}".format(event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected("connected")

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected("disconnected")

    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected

    callback = EventedCallback()
    if isinstance(security_client, X509SecurityClient):
        op = pipeline_ops_provisioning.SetX509SecurityClientOperation(
            security_client=security_client, callback=callback
        )
    elif isinstance(security_client, SymmetricKeySecurityClient):
        op = pipeline_ops_provisioning.SetSymmetricKeySecurityClientOperation(
            security_client=security_client, callback=callback
        )
    else:
        # BUGFIX: previously this branch only logged the error and then fell
        # through to run_op(op) with `op` unbound, crashing with an opaque
        # UnboundLocalError. Fail fast with a clear error instead.
        logger.error("Provisioning not equipped to handle other security client.")
        raise ValueError(
            "Unsupported security client type: {}".format(type(security_client))
        )

    self._pipeline.run_op(op)
    callback.wait_for_completion()

    if op.error:
        logger.error("{} failed: {}".format(op.name, op.error))
        raise op.error
def __init__(self, security_client, pipeline_configuration):
    """
    Constructor for instantiating a pipeline

    :param security_client: The security client which stores credentials
    :param pipeline_configuration: The configuration generated based on user inputs
    :raises ValueError: if the security client is of an unsupported type
    """
    # Tracks whether responses have been enabled for each request type.
    self.responses_enabled = {provisioning_constants.REGISTER: False}

    # Event Handlers - Will be set by Client after instantiation of pipeline
    self.on_connected = None
    self.on_disconnected = None
    self.on_message_received = None
    self._registration_id = security_client.registration_id

    self._pipeline = (
        pipeline_stages_base.PipelineRootStage(pipeline_configuration=pipeline_configuration)
        .append_stage(pipeline_stages_provisioning.UseSecurityClientStage())
        .append_stage(pipeline_stages_provisioning.RegistrationStage())
        .append_stage(pipeline_stages_provisioning.PollingStatusStage())
        .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
        .append_stage(pipeline_stages_provisioning_mqtt.ProvisioningMQTTTranslationStage())
        .append_stage(pipeline_stages_base.ReconnectStage())
        .append_stage(pipeline_stages_base.AutoConnectStage())
        .append_stage(pipeline_stages_base.ConnectionLockStage())
        .append_stage(pipeline_stages_base.RetryStage())
        .append_stage(pipeline_stages_base.OpTimeoutStage())
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage())
    )

    def _on_pipeline_event(event):
        # No provisioning-specific events are expected to surface here.
        logger.warning("Dropping unknown pipeline event {}".format(event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected("connected")

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected("disconnected")

    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected

    callback = EventedCallback()
    if isinstance(security_client, X509SecurityClient):
        op = pipeline_ops_provisioning.SetX509SecurityClientOperation(
            security_client=security_client, callback=callback
        )
    elif isinstance(security_client, SymmetricKeySecurityClient):
        op = pipeline_ops_provisioning.SetSymmetricKeySecurityClientOperation(
            security_client=security_client, callback=callback
        )
    else:
        # BUGFIX: previously this branch only logged the error and then fell
        # through to run_op(op) with `op` unbound, crashing with an opaque
        # UnboundLocalError. Fail fast with a clear error instead.
        logger.error("Provisioning not equipped to handle other security client.")
        raise ValueError(
            "Unsupported security client type: {}".format(type(security_client))
        )

    self._pipeline.run_op(op)
    callback.wait_for_completion()
def __init__(self, auth_provider, pipeline_configuration):
    """
    Constructor for instantiating a pipeline adapter object

    :param auth_provider: The authentication provider
    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # Tracks which hub features have been enabled; flipped by enable_feature.
    self.feature_enabled = {
        constant.C2D_MSG: False,
        constant.INPUT_MSG: False,
        constant.METHODS: False,
        constant.TWIN: False,
        constant.TWIN_PATCHES: False,
    }

    # Event Handlers - Will be set by Client after instantiation of this object
    self.on_connected = None
    self.on_disconnected = None
    self.on_c2d_message_received = None
    self.on_input_message_received = None
    self.on_method_request_received = None
    self.on_twin_patch_received = None

    # Currently a single timeout stage and a single retry stage for MQTT retry only.
    # Later, a higher level timeout and a higher level retry stage.
    self._pipeline = (
        #
        # The root is always the root. By definition, it's the first stage in the pipeline.
        #
        pipeline_stages_base.PipelineRootStage(
            pipeline_configuration=pipeline_configuration)
        #
        # UseAuthProviderStage comes near the root by default because it doesn't need to be
        # after anything, but it does need to be before IoTHubMQTTTranslationStage.
        #
        .append_stage(pipeline_stages_iothub.UseAuthProviderStage())
        #
        # TwinRequestResponseStage comes near the root by default because it doesn't need to
        # be after anything
        #
        .append_stage(pipeline_stages_iothub.TwinRequestResponseStage())
        #
        # CoordinateRequestAndResponseStage needs to be after TwinRequestResponseStage because
        # TwinRequestResponseStage creates the request ops that CoordinateRequestAndResponseStage
        # is coordinating. It needs to be before IoTHubMQTTTranslationStage because that stage
        # operates on ops that CoordinateRequestAndResponseStage produces
        #
        .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
        #
        # IoTHubMQTTTranslationStage comes here because this is the point where we can translate
        # all operations directly into MQTT. After this stage, only pipeline_stages_base stages
        # are allowed because IoTHubMQTTTranslationStage removes all the IoTHub-ness from the ops
        #
        .append_stage(pipeline_stages_iothub_mqtt.IoTHubMQTTTranslationStage())
        #
        # AutoConnectStage comes here because only MQTT ops have the need_connection flag set
        # and this is the first place in the pipeline where we can guarantee that all network
        # ops are MQTT ops.
        #
        .append_stage(pipeline_stages_base.AutoConnectStage())
        #
        # ReconnectStage needs to be after AutoConnectStage because ReconnectStage sets/clears
        # the virtually_connected flag and we want an automatic connection op to set this flag
        # so we can reconnect autoconnect operations. This is important, for example, if a
        # send_message causes the transport to automatically connect, but that connection fails.
        # When that happens, the ReconnectStage will hold onto the ConnectOperation until it
        # succeeds, and only then will return success to the AutoConnectStage which will
        # allow the publish to continue.
        #
        .append_stage(pipeline_stages_base.ReconnectStage())
        #
        # ConnectionLockStage needs to be after ReconnectStage because we want any ops that
        # ReconnectStage creates to go through the ConnectionLockStage gate
        #
        .append_stage(pipeline_stages_base.ConnectionLockStage())
        #
        # RetryStage needs to be near the end because it's retrying low-level MQTT operations.
        #
        .append_stage(pipeline_stages_base.RetryStage())
        #
        # OpTimeoutStage needs to be after RetryStage because OpTimeoutStage returns the timeout
        # errors that RetryStage is watching for.
        #
        .append_stage(pipeline_stages_base.OpTimeoutStage())
        #
        # MQTTTransportStage needs to be at the very end of the pipeline because this is where
        # operations turn into network traffic
        #
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage()))

    def _on_pipeline_event(event):
        # Dispatch pipeline events to the client-provided handlers (if set).
        if isinstance(event, pipeline_events_iothub.C2DMessageEvent):
            if self.on_c2d_message_received:
                self.on_c2d_message_received(event.message)
            else:
                logger.warning(
                    "C2D message event received with no handler. dropping."
                )
        elif isinstance(event, pipeline_events_iothub.InputMessageEvent):
            if self.on_input_message_received:
                self.on_input_message_received(event.input_name,
                                               event.message)
            else:
                logger.warning(
                    "input message event received with no handler. dropping."
                )
        elif isinstance(event, pipeline_events_iothub.MethodRequestEvent):
            if self.on_method_request_received:
                self.on_method_request_received(event.method_request)
            else:
                logger.warning(
                    "Method request event received with no handler. Dropping."
                )
        elif isinstance(
                event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent):
            if self.on_twin_patch_received:
                self.on_twin_patch_received(event.patch)
            else:
                logger.warning(
                    "Twin patch event received with no handler. Dropping.")
        else:
            logger.warning("Dropping unknown pipeline event {}".format(
                event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected()

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected()

    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected

    # Run the auth setup op synchronously; wait_for_completion raises on failure.
    callback = EventedCallback()
    if isinstance(auth_provider, X509AuthenticationProvider):
        op = pipeline_ops_iothub.SetX509AuthProviderOperation(
            auth_provider=auth_provider, callback=callback)
    else:  # Currently everything else goes via this block.
        op = pipeline_ops_iothub.SetAuthProviderOperation(
            auth_provider=auth_provider, callback=callback)
    self._pipeline.run_op(op)
    callback.wait_for_completion()
def __init__(self, pipeline_configuration):
    """
    Constructor for instantiating a pipeline

    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # Tracks whether responses have been enabled for each request type.
    self.responses_enabled = {provisioning_constants.REGISTER: False}

    # Event Handlers - Will be set by Client after instantiation of pipeline
    self.on_connected = None
    self.on_disconnected = None
    self.on_message_received = None
    self._registration_id = pipeline_configuration.registration_id

    self._pipeline = (
        #
        # The root is always the root. By definition, it's the first stage in the pipeline.
        #
        pipeline_stages_base.PipelineRootStage(
            pipeline_configuration=pipeline_configuration)
        #
        # SasTokenRenewalStage comes near the root by default because it should be as close
        # to the top of the pipeline as possible, and does not need to be after anything.
        #
        .append_stage(pipeline_stages_base.SasTokenRenewalStage())
        #
        # RegistrationStage needs to come early because this is the stage that converts
        # registration or query requests into request and response objects which are used
        # by later stages
        #
        .append_stage(pipeline_stages_provisioning.RegistrationStage())
        #
        # PollingStatusStage needs to come after RegistrationStage because RegistrationStage
        # counts on PollingStatusStage to poll until the registration is complete.
        #
        .append_stage(pipeline_stages_provisioning.PollingStatusStage())
        #
        # CoordinateRequestAndResponseStage needs to be after RegistrationStage and
        # PollingStatusStage because these 2 stages create the request ops that
        # CoordinateRequestAndResponseStage is coordinating. It needs to be before
        # ProvisioningMQTTTranslationStage because that stage operates on ops that
        # CoordinateRequestAndResponseStage produces
        #
        .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
        #
        # ProvisioningMQTTTranslationStage comes here because this is the point where we can
        # translate all operations directly into MQTT. After this stage, only
        # pipeline_stages_base stages are allowed because ProvisioningMQTTTranslationStage
        # removes all the provisioning-ness from the ops
        #
        .append_stage(pipeline_stages_provisioning_mqtt.
                      ProvisioningMQTTTranslationStage())
        #
        # AutoConnectStage comes here because only MQTT ops have the need_connection flag set
        # and this is the first place in the pipeline where we can guarantee that all network
        # ops are MQTT ops.
        #
        .append_stage(pipeline_stages_base.AutoConnectStage())
        #
        # ReconnectStage needs to be after AutoConnectStage because ReconnectStage sets/clears
        # the virtually_connected flag and we want an automatic connection op to set this flag
        # so we can reconnect autoconnect operations.
        #
        .append_stage(pipeline_stages_base.ReconnectStage())
        #
        # ConnectionLockStage needs to be after ReconnectStage because we want any ops that
        # ReconnectStage creates to go through the ConnectionLockStage gate
        #
        .append_stage(pipeline_stages_base.ConnectionLockStage())
        #
        # RetryStage needs to be near the end because it's retrying low-level MQTT operations.
        #
        .append_stage(pipeline_stages_base.RetryStage())
        #
        # OpTimeoutStage needs to be after RetryStage because OpTimeoutStage returns the timeout
        # errors that RetryStage is watching for.
        #
        .append_stage(pipeline_stages_base.OpTimeoutStage())
        #
        # MQTTTransportStage needs to be at the very end of the pipeline because this is where
        # operations turn into network traffic
        #
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage()))

    def _on_pipeline_event(event):
        # No provisioning-specific events are expected to surface here.
        logger.warning("Dropping unknown pipeline event {}".format(
            event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected("connected")

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected("disconnected")

    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected

    # Run the initialization op synchronously; wait_for_completion raises on failure.
    callback = EventedCallback()
    op = pipeline_ops_base.InitializePipelineOperation(callback=callback)
    self._pipeline.run_op(op)
    callback.wait_for_completion()
def __init__(self, pipeline_configuration):
    """
    Constructor for instantiating a pipeline adapter object

    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # Tracks which hub features have been enabled; flipped by enable_feature.
    self.feature_enabled = {
        constant.C2D_MSG: False,
        constant.INPUT_MSG: False,
        constant.METHODS: False,
        constant.TWIN: False,
        constant.TWIN_PATCHES: False,
    }

    # Handlers - Will be set by Client after instantiation of this object
    self.on_connected = None
    self.on_disconnected = None
    self.on_new_sastoken_required = None
    self.on_background_exception = None
    self.on_c2d_message_received = None
    self.on_input_message_received = None
    self.on_method_request_received = None
    self.on_twin_patch_received = None

    # Contains data and information shared globally within the pipeline
    self._nucleus = pipeline_nucleus.PipelineNucleus(pipeline_configuration)

    self._pipeline = (
        #
        # The root is always the root. By definition, it's the first stage in the pipeline.
        #
        pipeline_stages_base.PipelineRootStage(self._nucleus)
        #
        # SasTokenStage comes near the root by default because it should be as close
        # to the top of the pipeline as possible, and does not need to be after anything.
        #
        .append_stage(pipeline_stages_base.SasTokenStage())
        #
        # EnsureDesiredPropertiesStage needs to be above TwinRequestResponseStage because it
        # sends GetTwinOperation ops and that stage handles those ops.
        #
        .append_stage(pipeline_stages_iothub.EnsureDesiredPropertiesStage())
        #
        # TwinRequestResponseStage comes near the root by default because it doesn't need to be
        # after anything
        #
        .append_stage(pipeline_stages_iothub.TwinRequestResponseStage())
        #
        # CoordinateRequestAndResponseStage needs to be after TwinRequestResponseStage because
        # TwinRequestResponseStage creates the request ops that CoordinateRequestAndResponseStage
        # is coordinating. It needs to be before IoTHubMQTTTranslationStage because that stage
        # operates on ops that CoordinateRequestAndResponseStage produces
        #
        .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
        #
        # IoTHubMQTTTranslationStage comes here because this is the point where we can translate
        # all operations directly into MQTT. After this stage, only pipeline_stages_base stages
        # are allowed because IoTHubMQTTTranslationStage removes all the IoTHub-ness from the ops
        #
        .append_stage(pipeline_stages_iothub_mqtt.IoTHubMQTTTranslationStage())
        #
        # AutoConnectStage comes here because only MQTT ops have the need_connection flag set
        # and this is the first place in the pipeline where we can guarantee that all network
        # ops are MQTT ops.
        #
        .append_stage(pipeline_stages_base.AutoConnectStage())
        #
        # ConnectionStateStage needs to be after AutoConnectStage because the AutoConnectStage
        # can create ConnectOperations and we (may) want to queue connection related operations
        # in the ConnectionStateStage
        #
        .append_stage(pipeline_stages_base.ConnectionStateStage())
        #
        # ConnectionLockStage needs to be after ConnectionStateStage because we want any ops that
        # ConnectionStateStage creates to go through the ConnectionLockStage gate
        #
        .append_stage(pipeline_stages_base.ConnectionLockStage())
        #
        # RetryStage needs to be near the end because it's retrying low-level MQTT operations.
        #
        .append_stage(pipeline_stages_base.RetryStage())
        #
        # OpTimeoutStage needs to be after RetryStage because OpTimeoutStage returns the timeout
        # errors that RetryStage is watching for.
        #
        .append_stage(pipeline_stages_base.OpTimeoutStage())
        #
        # MQTTTransportStage needs to be at the very end of the pipeline because this is where
        # operations turn into network traffic
        #
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage())
    )

    # Define behavior for domain-specific events
    def _on_pipeline_event(event):
        # Dispatch pipeline events to the client-provided handlers (if set).
        if isinstance(event, pipeline_events_iothub.C2DMessageEvent):
            if self.on_c2d_message_received:
                self.on_c2d_message_received(event.message)
            else:
                logger.error("C2D message event received with no handler. dropping.")
        elif isinstance(event, pipeline_events_iothub.InputMessageEvent):
            if self.on_input_message_received:
                # NOTE(review): unlike older variants, the input name is not passed
                # separately here — presumably it travels on the message. Confirm.
                self.on_input_message_received(event.message)
            else:
                logger.error("input message event received with no handler. dropping.")
        elif isinstance(event, pipeline_events_iothub.MethodRequestEvent):
            if self.on_method_request_received:
                self.on_method_request_received(event.method_request)
            else:
                logger.error("Method request event received with no handler. Dropping.")
        elif isinstance(event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent):
            if self.on_twin_patch_received:
                self.on_twin_patch_received(event.patch)
            else:
                logger.error("Twin patch event received with no handler. Dropping.")
        else:
            logger.error("Dropping unknown pipeline event {}".format(event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected()
        else:
            logger.debug("IoTHub Pipeline was connected, but no handler was set")

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected()
        else:
            logger.debug("IoTHub Pipeline was disconnected, but no handler was set")

    def _on_new_sastoken_required():
        if self.on_new_sastoken_required:
            self.on_new_sastoken_required()
        else:
            logger.debug("IoTHub Pipeline requires new SASToken, but no handler was set")

    def _on_background_exception(e):
        if self.on_background_exception:
            self.on_background_exception(e)
        else:
            logger.debug(
                "IoTHub Pipeline experienced background exception, but no handler was set"
            )

    # Set internal event handlers
    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected
    self._pipeline.on_new_sastoken_required_handler = _on_new_sastoken_required
    self._pipeline.on_background_exception_handler = _on_background_exception

    # Initialize the pipeline
    callback = EventedCallback()
    op = pipeline_ops_base.InitializePipelineOperation(callback=callback)
    self._pipeline.run_op(op)
    callback.wait_for_completion()

    # Set the running flag
    self._running = True
def __init__(self, auth_provider, pipeline_configuration):
    """
    Constructor for instantiating a pipeline adapter object

    :param auth_provider: The authentication provider
    :param pipeline_configuration: The configuration generated based on user inputs
    """
    # Every feature starts disabled; the client enables them individually later.
    self.feature_enabled = {
        constant.C2D_MSG: False,
        constant.INPUT_MSG: False,
        constant.METHODS: False,
        constant.TWIN: False,
        constant.TWIN_PATCHES: False,
    }

    # Event Handlers - Will be set by Client after instantiation of this object
    self.on_connected = None
    self.on_disconnected = None
    self.on_c2d_message_received = None
    self.on_input_message_received = None
    self.on_method_request_received = None
    self.on_twin_patch_received = None

    # Currently a single timeout stage and a single retry stage for MQTT retry only.
    # Later, a higher level timeout and a higher level retry stage.
    root_stage = pipeline_stages_base.PipelineRootStage(
        pipeline_configuration=pipeline_configuration
    )
    self._pipeline = (
        root_stage.append_stage(pipeline_stages_iothub.UseAuthProviderStage())
        .append_stage(pipeline_stages_iothub.TwinRequestResponseStage())
        .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
        .append_stage(pipeline_stages_iothub_mqtt.IoTHubMQTTTranslationStage())
        .append_stage(pipeline_stages_base.ReconnectStage())
        .append_stage(pipeline_stages_base.AutoConnectStage())
        .append_stage(pipeline_stages_base.ConnectionLockStage())
        .append_stage(pipeline_stages_base.RetryStage())
        .append_stage(pipeline_stages_base.OpTimeoutStage())
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage())
    )

    def _on_pipeline_event(event):
        # Route each domain event to its user handler; events with no handler
        # set are dropped with a warning. Early returns instead of an elif chain.
        if isinstance(event, pipeline_events_iothub.C2DMessageEvent):
            if self.on_c2d_message_received:
                self.on_c2d_message_received(event.message)
            else:
                logger.warning("C2D message event received with no handler. dropping.")
            return
        if isinstance(event, pipeline_events_iothub.InputMessageEvent):
            if self.on_input_message_received:
                self.on_input_message_received(event.input_name, event.message)
            else:
                logger.warning("input message event received with no handler. dropping.")
            return
        if isinstance(event, pipeline_events_iothub.MethodRequestEvent):
            if self.on_method_request_received:
                self.on_method_request_received(event.method_request)
            else:
                logger.warning("Method request event received with no handler. Dropping.")
            return
        if isinstance(event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent):
            if self.on_twin_patch_received:
                self.on_twin_patch_received(event.patch)
            else:
                logger.warning("Twin patch event received with no handler. Dropping.")
            return
        logger.warning("Dropping unknown pipeline event {}".format(event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected()

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected()

    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected

    # Synchronously install the auth provider; the op class depends on auth type.
    callback = EventedCallback()
    if isinstance(auth_provider, X509AuthenticationProvider):
        op = pipeline_ops_iothub.SetX509AuthProviderOperation(
            auth_provider=auth_provider, callback=callback
        )
    else:
        # Currently everything else goes via this block.
        op = pipeline_ops_iothub.SetAuthProviderOperation(
            auth_provider=auth_provider, callback=callback
        )
    self._pipeline.run_op(op)
    callback.wait_for_completion()
def __init__(self, auth_provider):
    """
    Constructor for instantiating a pipeline adapter object

    :param auth_provider: The authentication provider
    """
    # Every feature starts disabled; the client enables them individually later.
    self.feature_enabled = {
        constant.C2D_MSG: False,
        constant.INPUT_MSG: False,
        constant.METHODS: False,
        constant.TWIN: False,
        constant.TWIN_PATCHES: False,
    }

    # Event Handlers - Will be set by Client after instantiation of this object
    self.on_connected = None
    self.on_disconnected = None
    self.on_c2d_message_received = None
    self.on_input_message_received = None
    self.on_method_request_received = None
    self.on_twin_patch_received = None

    # Assemble the stage chain; the transport stage sits at the very end,
    # where operations become network traffic.
    root_stage = pipeline_stages_base.PipelineRootStage()
    self._pipeline = (
        root_stage.append_stage(pipeline_stages_iothub.UseAuthProviderStage())
        .append_stage(pipeline_stages_iothub.HandleTwinOperationsStage())
        .append_stage(pipeline_stages_base.CoordinateRequestAndResponseStage())
        .append_stage(pipeline_stages_iothub_mqtt.IoTHubMQTTConverterStage())
        .append_stage(pipeline_stages_base.EnsureConnectionStage())
        .append_stage(pipeline_stages_base.SerializeConnectOpsStage())
        .append_stage(pipeline_stages_mqtt.MQTTTransportStage())
    )

    def _on_pipeline_event(event):
        # Route each domain event to its user handler; events with no handler
        # set are dropped with a warning. Early returns instead of an elif chain.
        if isinstance(event, pipeline_events_iothub.C2DMessageEvent):
            if self.on_c2d_message_received:
                self.on_c2d_message_received(event.message)
            else:
                logger.warning("C2D message event received with no handler. dropping.")
            return
        if isinstance(event, pipeline_events_iothub.InputMessageEvent):
            if self.on_input_message_received:
                self.on_input_message_received(event.input_name, event.message)
            else:
                logger.warning("input message event received with no handler. dropping.")
            return
        if isinstance(event, pipeline_events_iothub.MethodRequestEvent):
            if self.on_method_request_received:
                self.on_method_request_received(event.method_request)
            else:
                logger.warning("Method request event received with no handler. Dropping.")
            return
        if isinstance(event, pipeline_events_iothub.TwinDesiredPropertiesPatchEvent):
            if self.on_twin_patch_received:
                self.on_twin_patch_received(event.patch)
            else:
                logger.warning("Twin patch event received with no handler. Dropping.")
            return
        logger.warning("Dropping unknown pipeline event {}".format(event.name))

    def _on_connected():
        if self.on_connected:
            self.on_connected()

    def _on_disconnected():
        if self.on_disconnected:
            self.on_disconnected()

    self._pipeline.on_pipeline_event_handler = _on_pipeline_event
    self._pipeline.on_connected_handler = _on_connected
    self._pipeline.on_disconnected_handler = _on_disconnected

    # Synchronously install the auth provider; the op class depends on auth type.
    callback = EventedCallback()
    if isinstance(auth_provider, X509AuthenticationProvider):
        op = pipeline_ops_iothub.SetX509AuthProviderOperation(
            auth_provider=auth_provider, callback=callback
        )
    else:
        # Currently everything else goes via this block.
        op = pipeline_ops_iothub.SetAuthProviderOperation(
            auth_provider=auth_provider, callback=callback
        )
    self._pipeline.run_op(op)
    callback.wait_for_completion()

    # Surface any failure from the setup op to the caller.
    if op.error:
        logger.error("{} failed: {}".format(op.name, op.error))
        raise op.error