def test_requests_data_signing(self, mocker, hsm):
    """Verify that sign() issues exactly one HTTP POST to the workload API
    with the expected url, query params, headers, and JSON request body."""
    post_mock = mocker.patch.object(requests, "post")
    post_mock.return_value.json.return_value = {"digest": "somedigest"}

    payload = "somedata"
    payload_b64 = "c29tZWRhdGE="  # base64("somedata")

    want_url = "{workload_uri}modules/{module_id}/genid/{module_generation_id}/sign".format(
        workload_uri=hsm.workload_uri,
        module_id=hsm.module_id,
        module_generation_id=hsm.module_generation_id,
    )
    want_params = {"api-version": hsm.api_version}
    want_headers = {
        "User-Agent": urllib.parse.quote_plus(ProductInfo.get_iothub_user_agent())
    }
    want_body = json.dumps(
        {"keyId": "primary", "algo": "HMACSHA256", "data": payload_b64}
    )

    hsm.sign(payload)

    assert post_mock.call_count == 1
    assert post_mock.call_args == mocker.call(
        url=want_url, params=want_params, headers=want_headers, data=want_body
    )
def test_get_iothub_user_agent(self):
    """Verify the IoTHub user agent string contains every expected component:
    identifier, version, Python runtime, OS type/release, and architecture."""
    agent = ProductInfo.get_iothub_user_agent()
    for fragment in (
        IOTHUB_IDENTIFIER,
        VERSION,
        platform.python_version(),
        platform.system(),
        platform.version(),
        platform.machine(),
    ):
        assert fragment in agent
def test_checks_format_iothub_agent(self):
    """Verify the user agent string embeds the canonical agent format
    rendered with the current platform's details."""
    parts = dict(
        identifier=IOTHUB_IDENTIFIER,
        version=VERSION,
        python_runtime=platform.python_version(),
        os_type=platform.system(),
        os_release=platform.version(),
        architecture=platform.machine(),
    )
    expected = check_agent_format.format(**parts)
    assert expected in ProductInfo.get_iothub_user_agent()
def test_requests_trust_bundle(self, mocker, hsm):
    """Verify get_trust_bundle() performs a single GET against the workload
    API's trust-bundle endpoint with the expected url, params, and headers."""
    get_mock = mocker.patch.object(requests, "get")

    want_url = hsm.workload_uri + "trust-bundle"
    want_params = {"api-version": hsm.api_version}
    want_headers = {
        "User-Agent": urllib.parse.quote_plus(ProductInfo.get_iothub_user_agent())
    }

    hsm.get_trust_bundle()

    assert get_mock.call_count == 1
    assert get_mock.call_args == mocker.call(
        want_url, params=want_params, headers=want_headers
    )
def sign(self, data_str):
    """
    Use the IoTEdge HSM to sign a piece of string data.

    The caller should then insert the returned value (the signature) into the
    'sig' field of a SharedAccessSignature string.

    :param str data_str: The data string to sign
    :return: The signature, as a URI-encoded and base64-encoded value that is
        ready to directly insert into the SharedAccessSignature string.
    :raises: IoTEdgeError if unable to sign the data.
    """
    # The workload API expects the data pre-encoded as base64
    b64_data = base64.b64encode(data_str.encode("utf-8")).decode()
    url = (self.workload_uri + "modules/" + self.module_id +
           "/genid/" + self.module_generation_id + "/sign")
    request_body = {
        "keyId": "primary",
        "algo": "HMACSHA256",
        "data": b64_data,
    }
    # TODO: can we use json field instead of data?
    response = requests.post(
        url=url,
        params={"api-version": self.api_version},
        headers={
            "User-Agent": urllib.parse.quote_plus(ProductInfo.get_iothub_user_agent())
        },
        data=json.dumps(request_body),
    )
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        raise IoTEdgeError(message="Unable to sign data", cause=e)
    try:
        decoded = response.json()
    except ValueError as e:
        raise IoTEdgeError(message="Unable to decode signed data", cause=e)
    try:
        digest = decoded["digest"]
    except KeyError as e:
        raise IoTEdgeError(message="No signed data received", cause=e)
    # URI-encode so the value can be dropped straight into a SAS string
    return urllib.parse.quote(digest)
def test_new_op_headers(self, mocker, stage, op, custom_user_agent):
    """Verify the stage sends down an HTTPRequestAndResponseOperation whose
    headers carry the host, content metadata, and URL-encoded user agent."""
    stage.pipeline_root.pipeline_configuration.product_info = custom_user_agent
    stage.run_op(op)

    # Exactly one op travels down the pipeline
    assert stage.send_op_down.call_count == 1
    sent_op = stage.send_op_down.call_args[0][0]
    assert isinstance(sent_op, pipeline_ops_http.HTTPRequestAndResponseOperation)

    # Validate headers
    want_agent = urllib.parse.quote_plus(
        ProductInfo.get_iothub_user_agent() + str(custom_user_agent)
    )
    assert sent_op.headers["Host"] == stage.hostname
    assert sent_op.headers["Content-Type"] == "application/json; charset=utf-8"
    assert sent_op.headers["Content-Length"] == len(sent_op.body)
    assert sent_op.headers["User-Agent"] == want_agent
def get_trust_bundle(self):
    """
    Return the trust bundle that can be used to validate the server-side SSL
    TLS connection that we use to talk to edgeHub.

    :return: The server verification certificate to use for connections to the
        Azure IoT Edge instance, as a PEM certificate in string form.
    :raises: IoTEdgeError if unable to retrieve the certificate.
    """
    response = requests.get(
        self.workload_uri + "trust-bundle",
        params={"api-version": self.api_version},
        headers={
            "User-Agent": urllib.parse.quote_plus(ProductInfo.get_iothub_user_agent())
        },
    )
    # Surface HTTP-level failures as IoTEdgeError
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as e:
        raise IoTEdgeError(message="Unable to get trust bundle from EdgeHub", cause=e)
    # Body must be valid JSON...
    try:
        decoded = response.json()
    except ValueError as e:
        raise IoTEdgeError(message="Unable to decode trust bundle", cause=e)
    # ...and must carry the certificate
    try:
        return decoded["certificate"]
    except KeyError as e:
        raise IoTEdgeError(message="No certificate in trust bundle", cause=e)
def _run_op(self, op):
    """Translate IoTHub-level operations into MQTT pipeline operations.

    Connection args become SetMQTTConnectionArgsOperation; telemetry,
    method responses and twin requests become MQTT publishes; feature
    enable/disable become subscribe/unsubscribe; anything unhandled is
    passed down to the next stage via super()._run_op().
    """
    if isinstance(op, pipeline_ops_iothub.SetIoTHubConnectionArgsOperation):
        self.device_id = op.device_id
        self.module_id = op.module_id
        # if we get auth provider args from above, we save some, use some to build topic names,
        # and always pass it down because we know that the MQTT protocol stage will also want
        # to receive these args.
        self._set_topic_names(device_id=op.device_id, module_id=op.module_id)
        # Modules identify as "<device_id>/<module_id>"; plain devices as "<device_id>"
        if op.module_id:
            client_id = "{}/{}".format(op.device_id, op.module_id)
        else:
            client_id = op.device_id
        # For MQTT, the entire user agent string should be appended to the username field in the connect packet
        # For example, the username may look like this without custom parameters:
        # yosephsandboxhub.azure-devices.net/alpha/?api-version=2018-06-30&DeviceClientType=py-azure-iot-device%2F2.0.0-preview.12
        # The customer user agent string would simply be appended to the end of this username, in URL Encoded format.
        query_param_seq = [
            ("api-version", pkg_constant.IOTHUB_API_VERSION),
            ("DeviceClientType", ProductInfo.get_iothub_user_agent()),
        ]
        # NOTE(review): this format-string literal ("******") appears redacted or
        # placeholder text; its keyword arguments suggest it should interpolate
        # hostname/client_id/query_params/optional_product_info — confirm against
        # the upstream source before relying on the username value.
        username = "******".format(
            hostname=op.hostname,
            client_id=client_id,
            query_params=urllib.parse.urlencode(query_param_seq),
            optional_product_info=urllib.parse.quote(
                str(self.pipeline_root.pipeline_configuration.product_info)
            ),
        )
        # Prefer the gateway (Edge) hostname when one was provided
        if op.gateway_hostname:
            hostname = op.gateway_hostname
        else:
            hostname = op.hostname
        # TODO: test to make sure client_cert and sas_token travel down correctly
        worker_op = op.spawn_worker_op(
            worker_op_type=pipeline_ops_mqtt.SetMQTTConnectionArgsOperation,
            client_id=client_id,
            hostname=hostname,
            username=username,
            server_verification_cert=op.server_verification_cert,
            client_cert=op.client_cert,
            sas_token=op.sas_token,
        )
        self.send_op_down(worker_op)
    elif (isinstance(op, pipeline_ops_base.UpdateSasTokenOperation)
          and self.pipeline_root.connected):
        logger.debug(
            "{}({}): Connected. Passing op down and reauthorizing after token is updated."
            .format(self.name, op.name))

        # make a callback that either fails the UpdateSasTokenOperation (if the lower level failed it),
        # or issues a ReauthorizeConnectionOperation (if the lower level returned success for the UpdateSasTokenOperation)
        def on_token_update_complete(op, error):
            if error:
                logger.error(
                    "{}({}) token update failed. returning failure {}".format(
                        self.name, op.name, error))
            else:
                logger.debug(
                    "{}({}) token update succeeded. reauthorizing".format(
                        self.name, op.name))
                # Stop completion of Token Update op, and only continue upon completion of ReauthorizeConnectionOperation
                op.halt_completion()
                worker_op = op.spawn_worker_op(
                    worker_op_type=pipeline_ops_base.ReauthorizeConnectionOperation)
                self.send_op_down(worker_op)

        # now, pass the UpdateSasTokenOperation down with our new callback.
        op.add_callback(on_token_update_complete)
        self.send_op_down(op)
    elif isinstance(op, pipeline_ops_iothub.SendD2CMessageOperation) or isinstance(
            op, pipeline_ops_iothub.SendOutputEventOperation):
        # Convert SendD2CMessageOperation and SendOutputEventOperation operations into MQTT Publish operations
        topic = mqtt_topic_iothub.encode_properties(op.message, self.telemetry_topic)
        worker_op = op.spawn_worker_op(
            worker_op_type=pipeline_ops_mqtt.MQTTPublishOperation,
            topic=topic,
            payload=op.message.data,
        )
        self.send_op_down(worker_op)
    elif isinstance(op, pipeline_ops_iothub.SendMethodResponseOperation):
        # Sending a Method Response gets translated into an MQTT Publish operation
        topic = mqtt_topic_iothub.get_method_topic_for_publish(
            op.method_response.request_id, str(op.method_response.status))
        payload = json.dumps(op.method_response.payload)
        worker_op = op.spawn_worker_op(
            worker_op_type=pipeline_ops_mqtt.MQTTPublishOperation,
            topic=topic,
            payload=payload)
        self.send_op_down(worker_op)
    elif isinstance(op, pipeline_ops_base.EnableFeatureOperation):
        # Enabling a feature gets translated into an MQTT subscribe operation
        topic = self.feature_to_topic[op.feature_name]
        worker_op = op.spawn_worker_op(
            worker_op_type=pipeline_ops_mqtt.MQTTSubscribeOperation,
            topic=topic)
        self.send_op_down(worker_op)
    elif isinstance(op, pipeline_ops_base.DisableFeatureOperation):
        # Disabling a feature gets turned into an MQTT unsubscribe operation
        topic = self.feature_to_topic[op.feature_name]
        worker_op = op.spawn_worker_op(
            worker_op_type=pipeline_ops_mqtt.MQTTUnsubscribeOperation,
            topic=topic)
        self.send_op_down(worker_op)
    elif isinstance(op, pipeline_ops_base.RequestOperation):
        # Only twin requests are supported; they become MQTT publishes
        if op.request_type == pipeline_constant.TWIN:
            topic = mqtt_topic_iothub.get_twin_topic_for_publish(
                method=op.method,
                resource_location=op.resource_location,
                request_id=op.request_id,
            )
            worker_op = op.spawn_worker_op(
                worker_op_type=pipeline_ops_mqtt.MQTTPublishOperation,
                topic=topic,
                payload=op.request_body,
            )
            self.send_op_down(worker_op)
        else:
            raise pipeline_exceptions.OperationError(
                "RequestOperation request_type {} not supported".format(
                    op.request_type))
    else:
        # All other operations get passed down
        super(IoTHubMQTTTranslationStage, self)._run_op(op)
def _run_op(self, op):
    """Translate IoTHub-level operations into generic HTTP pipeline operations.

    SetIoTHubConnectionArgsOperation is mapped to SetHTTPConnectionArgsOperation;
    MethodInvokeOperation, GetStorageInfoOperation and
    NotifyBlobUploadStatusOperation each become an HTTP POST via
    HTTPRequestAndResponseOperation. Anything else is passed down unchanged.

    Fixes in this revision:
    - The "Gateway Hostname not present" branch previously logged
      ``op.gateway_hostname`` (falsy in that branch) instead of the
      ``op.hostname`` value actually being set.
    - The NotifyBlobUploadStatusOperation branch previously reused the
      "Get Storage Info" log messages (copy-paste).
    """
    if isinstance(op, pipeline_ops_iothub.SetIoTHubConnectionArgsOperation):
        self.device_id = op.device_id
        self.module_id = op.module_id
        # Prefer the gateway (Edge) hostname when one was provided
        if op.gateway_hostname:
            logger.debug(
                "Gateway Hostname Present. Setting Hostname to: {}".format(op.gateway_hostname)
            )
            self.hostname = op.gateway_hostname
        else:
            logger.debug(
                "Gateway Hostname not present. Setting Hostname to: {}".format(op.hostname)
            )
            self.hostname = op.hostname
        worker_op = op.spawn_worker_op(
            worker_op_type=pipeline_ops_http.SetHTTPConnectionArgsOperation,
            hostname=self.hostname,
            server_verification_cert=op.server_verification_cert,
            client_cert=op.client_cert,
            sas_token=op.sas_token,
        )
        self.send_op_down(worker_op)
    elif isinstance(op, pipeline_ops_iothub_http.MethodInvokeOperation):
        logger.debug(
            "{}({}): Translating Method Invoke Operation for HTTP.".format(self.name, op.name)
        )
        query_params = "api-version={apiVersion}".format(
            apiVersion=pkg_constant.IOTHUB_API_VERSION
        )
        # if the target is a module.
        body = json.dumps(op.method_params)
        path = http_path_iothub.get_method_invoke_path(op.target_device_id, op.target_module_id)
        # Note we do not add the sas Authorization header here. Instead we add it later on in the stage above
        # the transport layer, since that stage stores the updated SAS and also X509 certs if that is what is
        # being used.
        x_ms_edge_string = "{deviceId}/{moduleId}".format(
            deviceId=self.device_id, moduleId=self.module_id
        )  # these are the identifiers of the current module
        user_agent = urllib.parse.quote_plus(
            ProductInfo.get_iothub_user_agent()
            + str(self.pipeline_root.pipeline_configuration.product_info)
        )
        headers = {
            "Host": self.hostname,
            "Content-Type": "application/json",
            "Content-Length": len(str(body)),
            "x-ms-edge-moduleId": x_ms_edge_string,
            "User-Agent": user_agent,
        }
        op_waiting_for_response = op

        def on_request_response(op, error):
            logger.debug(
                "{}({}): Got response for MethodInvokeOperation".format(self.name, op.name)
            )
            error = map_http_error(error=error, http_op=op)
            if not error:
                op_waiting_for_response.method_response = json.loads(
                    op.response_body.decode("utf-8")
                )
            op_waiting_for_response.complete(error=error)

        self.send_op_down(
            pipeline_ops_http.HTTPRequestAndResponseOperation(
                method="POST",
                path=path,
                headers=headers,
                body=body,
                query_params=query_params,
                callback=on_request_response,
            )
        )
    elif isinstance(op, pipeline_ops_iothub_http.GetStorageInfoOperation):
        logger.debug(
            "{}({}): Translating Get Storage Info Operation to HTTP.".format(self.name, op.name)
        )
        query_params = "api-version={apiVersion}".format(
            apiVersion=pkg_constant.IOTHUB_API_VERSION
        )
        path = http_path_iothub.get_storage_info_for_blob_path(self.device_id)
        body = json.dumps({"blobName": op.blob_name})
        user_agent = urllib.parse.quote_plus(
            ProductInfo.get_iothub_user_agent()
            + str(self.pipeline_root.pipeline_configuration.product_info)
        )
        headers = {
            "Host": self.hostname,
            "Accept": "application/json",
            "Content-Type": "application/json",
            "Content-Length": len(str(body)),
            "User-Agent": user_agent,
        }
        op_waiting_for_response = op

        def on_request_response(op, error):
            logger.debug(
                "{}({}): Got response for GetStorageInfoOperation".format(self.name, op.name)
            )
            error = map_http_error(error=error, http_op=op)
            if not error:
                op_waiting_for_response.storage_info = json.loads(
                    op.response_body.decode("utf-8")
                )
            op_waiting_for_response.complete(error=error)

        self.send_op_down(
            pipeline_ops_http.HTTPRequestAndResponseOperation(
                method="POST",
                path=path,
                headers=headers,
                body=body,
                query_params=query_params,
                callback=on_request_response,
            )
        )
    elif isinstance(op, pipeline_ops_iothub_http.NotifyBlobUploadStatusOperation):
        # FIX: log message previously said "Get Storage Info Operation" (copy-paste)
        logger.debug(
            "{}({}): Translating Notify Blob Upload Status Operation to HTTP.".format(
                self.name, op.name
            )
        )
        query_params = "api-version={apiVersion}".format(
            apiVersion=pkg_constant.IOTHUB_API_VERSION
        )
        path = http_path_iothub.get_notify_blob_upload_status_path(self.device_id)
        body = json.dumps(
            {
                "correlationId": op.correlation_id,
                "isSuccess": op.is_success,
                "statusCode": op.request_status_code,
                "statusDescription": op.status_description,
            }
        )
        user_agent = urllib.parse.quote_plus(
            ProductInfo.get_iothub_user_agent()
            + str(self.pipeline_root.pipeline_configuration.product_info)
        )
        # Note we do not add the sas Authorization header here. Instead we add it later on in the stage above
        # the transport layer, since that stage stores the updated SAS and also X509 certs if that is what is
        # being used.
        headers = {
            "Host": self.hostname,
            "Content-Type": "application/json; charset=utf-8",
            "Content-Length": len(str(body)),
            "User-Agent": user_agent,
        }
        op_waiting_for_response = op

        def on_request_response(op, error):
            # FIX: log message previously said "GetStorageInfoOperation" (copy-paste)
            logger.debug(
                "{}({}): Got response for NotifyBlobUploadStatusOperation".format(
                    self.name, op.name
                )
            )
            error = map_http_error(error=error, http_op=op)
            op_waiting_for_response.complete(error=error)

        self.send_op_down(
            pipeline_ops_http.HTTPRequestAndResponseOperation(
                method="POST",
                path=path,
                headers=headers,
                body=body,
                query_params=query_params,
                callback=on_request_response,
            )
        )
    else:
        # All other operations get passed down
        self.send_op_down(op)