def test_inline_json_fail(self, content, argname):
    """Malformed inline json raises CLIError with the inline-parse error prefix."""
    with pytest.raises(CLIError) as exc:
        process_json_arg(content, argument_name=argname)
    expected_prefix = "Failed to parse json for argument '{}' with exception:\n".format(argname)
    assert str(exc.value).startswith(expected_prefix)
def test_file_json_fail_invalidcontent(self, content, argname, set_cwd, mocker):
    """A file path whose content is invalid json raises CLIError and emits no warning."""
    warning_mock = mocker.patch.object(logger, "warning", autospec=True)
    with pytest.raises(CLIError) as exc:
        process_json_arg(content, argument_name=argname)
    expected_prefix = "Failed to parse json from file: '{}' for argument '{}' with exception:\n".format(
        content, argname
    )
    assert str(exc.value).startswith(expected_prefix)
    # file existed, so no "looks like a file path" warning should fire
    assert warning_mock.call_count == 0
def test_file_json_fail_invalidpath(self, content, argname, set_cwd, mocker):
    """A json-looking value that is a bad file path raises CLIError and warns once."""
    warning_mock = mocker.patch.object(logger, "warning", autospec=True)
    with pytest.raises(CLIError) as exc:
        process_json_arg(content, argument_name=argname)
    assert str(exc.value).startswith(
        "Failed to parse json for argument '{}' with exception:\n".format(argname)
    )
    assert warning_mock.call_count == 1
    warn_args = warning_mock.call_args[0]
    assert warn_args[0] == (
        "The json payload for argument '%s' looks like its intended from a file. Please ensure the file path is correct."
    )
    assert warn_args[1] == argname
def update_relationship(self, twin_id, relationship_id, json_patch, etag=None):
    """Apply a json-patch document to a digital twin relationship.

    :param twin_id: Source twin id.
    :param relationship_id: Relationship id to patch.
    :param json_patch: Inline json or file path; a json object or array of
        patch operations.
    :param etag: Optional etag for optimistic concurrency; defaults to '*'.
    :returns: The updated relationship (re-fetched after the patch).
    :raises CLIError: On invalid patch content or service error.
    """
    json_patch = process_json_arg(content=json_patch, argument_name="json-patch")
    json_patch_collection = []
    if isinstance(json_patch, dict):
        json_patch_collection.append(json_patch)
    elif isinstance(json_patch, list):
        json_patch_collection.extend(json_patch)
    else:
        # Fix: previously a scalar json value (e.g. bare string/number) was
        # silently ignored and an empty patch document was sent. Mirror the
        # validation used by the twin `update` method.
        raise CLIError(
            f"--json-patch content must be an object or array. Actual type was: {type(json_patch).__name__}"
        )
    logger.info("Patch payload %s", json.dumps(json_patch_collection))
    try:
        options = TwinOptions(if_match=(etag if etag else "*"))
        self.twins_sdk.update_relationship(
            id=twin_id,
            relationship_id=relationship_id,
            patch_document=json_patch_collection,
            digital_twins_update_relationship_options=options,
        )
        return self.get_relationship(twin_id=twin_id, relationship_id=relationship_id)
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def add_relationship(
    self,
    twin_id,
    target_twin_id,
    relationship_id,
    relationship,
    replace=False,
    properties=None,
):
    """Create a relationship between two digital twins.

    `properties` may be inline json or a file path; when provided it is
    merged into the relationship body. `replace=True` drops the If-None-Match
    guard so an existing relationship is overwritten.
    """
    body = {
        "$targetId": target_twin_id,
        "$relationshipName": relationship,
    }
    if properties:
        parsed_properties = process_json_arg(content=properties, argument_name="properties")
        body.update(parsed_properties)
    logger.info("Relationship payload %s", json.dumps(body))
    try:
        options = TwinOptions(if_none_match=(None if replace else "*"))
        raw_result = self.twins_sdk.add_relationship(
            id=twin_id,
            relationship_id=relationship_id,
            relationship=body,
            digital_twins_add_relationship_options=options,
            raw=True,
        )
        return raw_result.response.json()
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def add(self, models=None, from_directory=None):
    """Add one or more models, from inline/file json or from a directory.

    :param models: Inline json or file path containing a model object or a
        list of models. Takes precedence over from_directory.
    :param from_directory: Directory to scan for model documents.
    :returns: Parsed json response from the model service.
    :raises CLIError: If neither argument is given or the service responds
        with a non-200/201 status.
    """
    if not any([models, from_directory]):
        raise CLIError("Provide either --models or --from-directory.")
    # If both arguments are provided. --models wins.
    payload = []
    if models:
        models_result = process_json_arg(content=models, argument_name="models")
        # TODO:
        if isinstance(models_result, list):
            payload.extend(models_result)
        elif isinstance(models_result, dict):
            payload.append(models_result)
    elif from_directory:
        payload = self._process_directory(from_directory=from_directory)
    logger.info("Models payload %s", json.dumps(payload))
    # TODO: Not standard - have to revisit.
    response = self.model_sdk.add(payload)
    if response.status_code not in [200, 201]:
        # Prefer the raw text; special-case an empty 403 body with a
        # friendlier RBAC hint, otherwise try to surface the json error body.
        error_text = response.text
        if response.status_code == 403 and not error_text:
            error_text = "Current principal access is forbidden. Please validate rbac role assignments."
        else:
            try:
                error_text = response.json()
            except Exception:
                # fall back to the raw text already captured above
                pass
        raise CLIError(error_text)
    return response.json()
def send_telemetry(self, twin_id, telemetry=None, dt_id=None, component_path=None):
    """Send a telemetry message on behalf of a digital twin.

    :param twin_id: Target twin id.
    :param telemetry: Inline json or file path of the telemetry payload;
        defaults to an empty object.
    :param dt_id: Optional message id; a uuid4 is generated when omitted.
    :param component_path: When set, telemetry is sent on the component
        endpoint instead of the root twin endpoint.
    """
    from uuid import uuid4
    from datetime import datetime, timezone

    local_time = datetime.now(timezone.utc).astimezone()
    dt_timestamp = local_time.isoformat()
    telemetry_request = {}
    if telemetry:
        telemetry = process_json_arg(content=telemetry, argument_name="telemetry")
    else:
        telemetry = {}
    telemetry_request.update(telemetry)
    logger.info("Telemetry payload: {}".format(json.dumps(telemetry_request)))
    if not dt_id:
        dt_id = str(uuid4())
    if component_path:
        self.twins_sdk.send_component_telemetry(
            id=twin_id,
            message_id=dt_id,
            dt_timestamp=dt_timestamp,
            component_path=component_path,
            telemetry=telemetry_request,
        )
        # Bug fix: without this return the code fell through and ALSO sent
        # root-level telemetry, delivering the message twice.
        return
    self.twins_sdk.send_telemetry(
        id=twin_id,
        message_id=dt_id,
        dt_timestamp=dt_timestamp,
        telemetry=telemetry_request,
    )
def update(self, twin_id, json_patch, etag=None):
    """Apply a json-patch document to a digital twin and return the result.

    `json_patch` may be inline json or a file path, and must resolve to a
    json object or array of patch operations. `etag` defaults to '*'.
    """
    parsed = process_json_arg(content=json_patch, argument_name="json-patch")
    if isinstance(parsed, dict):
        patch_document = [parsed]
    elif isinstance(parsed, list):
        patch_document = list(parsed)
    else:
        raise CLIError(
            f"--json-patch content must be an object or array. Actual type was: {type(parsed).__name__}"
        )
    logger.info("Patch payload %s", json.dumps(patch_document))
    try:
        options = TwinOptions(if_match=(etag or "*"))
        self.twins_sdk.update(
            id=twin_id,
            patch_document=patch_document,
            digital_twins_update_options=options,
            raw=True,
        )
        return self.get(twin_id=twin_id)
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def create_device_template(
    cmd,
    app_id: str,
    device_template_id: str,
    content: str,
    token=None,
    central_dns_suffix=CENTRAL_ENDPOINT,
    api_version=ApiVersion.v1.value,
):
    """Create an IoT Central device template from a json string or file path.

    Routes to the preview or v1 provider based on api_version and returns the
    created template's raw definition.
    """
    if not isinstance(content, str):
        raise CLIError("content must be a string: {}".format(content))
    payload = utility.process_json_arg(content, argument_name="content")
    provider_cls = (
        CentralDeviceTemplateProviderPreview
        if api_version == ApiVersion.preview.value
        else CentralDeviceTemplateProviderV1
    )
    provider = provider_cls(cmd=cmd, app_id=app_id, token=token)
    template = provider.create_device_template(
        device_template_id=device_template_id,
        payload=payload,
        central_dns_suffix=central_dns_suffix,
    )
    return template.raw_template
def run_command(
    cmd,
    app_id: str,
    device_id: str,
    command_name: str,
    content: str,
    interface_id=None,
    token=None,
    central_dns_suffix=CENTRAL_ENDPOINT,
    api_version=ApiVersion.v1.value,
):
    """Run a command on an IoT Central device with a json payload.

    `content` must be a json string (inline or file path); the preview or v1
    provider is selected from api_version.
    """
    if not isinstance(content, str):
        raise CLIError("content must be a string: {}".format(content))
    payload = utility.process_json_arg(content, argument_name="content")
    provider_cls = (
        CentralDeviceProviderPreview
        if api_version == ApiVersion.preview.value
        else CentralDeviceProviderV1
    )
    provider = provider_cls(cmd=cmd, app_id=app_id, token=token)
    return provider.run_command(
        device_id=device_id,
        interface_id=interface_id,
        command_name=command_name,
        payload=payload,
        central_dns_suffix=central_dns_suffix,
    )
def add_relationship(
    self,
    source_twin_id,
    target_twin_id,
    relationship_id,
    relationship,
    properties=None,
):
    """Create a relationship from one digital twin to another.

    `properties` (inline json or file path) is merged into the relationship
    body when provided. Uses If-None-Match '*' so an existing relationship is
    not overwritten.
    """
    body = {
        "$targetId": target_twin_id,
        "$relationshipName": relationship,
    }
    if properties:
        body.update(process_json_arg(content=properties, argument_name="properties"))
    logger.info("Relationship payload %s", json.dumps(body))
    raw_result = self.twins_sdk.add_relationship(
        id=source_twin_id,
        relationship_id=relationship_id,
        relationship=body,
        if_none_match="*",
        raw=True,
    )
    return raw_result.response.json()
def iot_pnp_model_create(cmd, model, pnp_dns_suffix=None):
    """Create a PnP model from inline json or a file path.

    The model definition must carry an '@id' attribute, which becomes the
    model id used for creation.
    """
    if not model:
        raise CLIError("Please provide a model definition [--model]")
    provider = ModelApiProvider(cmd, pnp_dns_suffix)
    parsed_model = process_json_arg(model, argument_name="model")
    model_id = parsed_model.get("@id")
    if not model_id:
        raise CLIError("Model is invalid - @id attribute required.")
    return provider.create_model(model_id, parsed_model)
def update(cmd, test_id, configuration_file, base_url=None):
    """Update test cases from a json configuration file.

    Raises CLIError when the configuration file path does not exist.
    """
    import os

    if not os.path.exists(configuration_file):
        raise CLIError("Specified configuration file does not exist")
    provider = AICSProvider(cmd, base_url)
    return provider.update_test_cases(
        test_id=test_id,
        patch=process_json_arg(configuration_file, "configuration_file"),
    )
def _get_models_from_directory(from_directory):
    """Parse every .json model file under a directory and return its metadata.

    Non-.json entries are skipped with a debug log.
    """
    payload = []
    for entry in scantree(from_directory):
        if entry.name.endswith(".json"):
            payload.append(
                process_json_arg(content=entry.path, argument_name=entry.name)
            )
        else:
            logger.debug(
                "Skipping {} - model file must end with .json".format(entry.path)
            )
    return _get_models_metadata(payload)
def _create_device_template(self):
    """Create a device template via the CLI and return (template_id, name).

    Reads the template definition from `device_template_path` (presumably a
    module-level path constant) and derives the template id from its
    displayName.
    """
    template = utility.process_json_arg(
        device_template_path, argument_name="device_template_path"
    )
    template_name = template["displayName"]
    template_id = "{}id".format(template_name)
    command = (
        "iot central device-template create --app-id {} --device-template-id {} -k '{}'"
        .format(APP_ID, template_id, device_template_path)
    )
    self.cmd(
        command,
        checks=[
            self.check("displayName", template_name),
            self.check("id", template_id),
        ],
    )
    return (template_id, template_name)
def _create_device_template(self):
    """Create a device template via the CLI and return (template_id, name).

    Reads the definition from DEVICE_TEMPLATE_PATH and derives the template
    id from its displayName.
    """
    template = utility.process_json_arg(
        DEVICE_TEMPLATE_PATH, argument_name="DEVICE_TEMPLATE_PATH"
    )
    template_name = template["displayName"]
    template_id = "{}id".format(template_name)
    command = (
        "iot central app device-template create --app-id {} --device-template-id {} -k {}"
        .format(APP_ID, template_id, DEVICE_TEMPLATE_PATH)
    )
    self.cmd(
        command,
        checks=[
            self.check("displayName", template_name),
            self.check("id", template_id),
        ],
    )
    return (template_id, template_name)
def invoke_device_command(
    self,
    device_id,
    command_name,
    timeout=30,
    payload="{}",
    component_path=None,
    connect_timeout=None,
    response_timeout=None,
):
    """Invoke a command on a device, optionally on a specific component.

    :param device_id: Target device id.
    :param command_name: Command to invoke.
    :param timeout: Applied to both connect and response timeout kwargs.
    :param payload: Inline json or file path; parsed when truthy.
    :param component_path: When set, routes to the component command endpoint.
    :returns: dict with the response json under 'payload' and the
        'x-ms-command-statuscode' header under 'status'.
    :raises CLIError: On service error (CloudError unpacked).

    NOTE(review): connect_timeout and response_timeout are accepted but never
    used — `timeout` populates both API timeout kwargs. Confirm whether they
    should override `timeout` when provided.
    """
    # Prevent msrest locking up shell
    self.runtime_sdk.config.retry_policy.retries = 1
    try:
        if payload:
            payload = process_json_arg(payload, argument_name="payload")
        api_timeout_kwargs = {
            "connect_timeout_in_seconds": timeout,
            "response_timeout_in_seconds": timeout,
        }
        # Component path selects the component command endpoint; otherwise the
        # root-level command endpoint is used.
        response = (self.runtime_sdk.invoke_component_command(
            id=device_id,
            command_name=command_name,
            payload=payload,
            timeout=timeout,
            component_path=component_path,
            raw=True,
            **api_timeout_kwargs,
        ).response if component_path else self.runtime_sdk.invoke_root_level_command(
            id=device_id,
            command_name=command_name,
            payload=payload,
            timeout=timeout,
            raw=True,
            **api_timeout_kwargs,
        ).response)
        return {
            "payload": response.json(),
            "status": response.headers.get("x-ms-command-statuscode"),
        }
    except CloudError as e:
        raise CLIError(unpack_msrest_error(e))
def update(self, twin_id, json_patch):
    """Apply a json-patch document to a digital twin and return the result.

    :param twin_id: Target twin id.
    :param json_patch: Inline json or file path; must resolve to a json
        object or array of patch operations.
    :raises CLIError: On invalid patch content or service error.
    """
    json_patch = process_json_arg(content=json_patch, argument_name="json-patch")
    json_patch_collection = []
    if isinstance(json_patch, dict):
        json_patch_collection.append(json_patch)
    elif isinstance(json_patch, list):
        json_patch_collection.extend(json_patch)
    else:
        # Fix: previously a scalar json value silently resulted in an empty
        # patch document being sent to the service.
        raise CLIError(
            "--json-patch content must be an object or array. Actual type was: {}".format(
                type(json_patch).__name__
            )
        )
    logger.info("Patch payload %s", json.dumps(json_patch_collection))
    try:
        self.twins_sdk.update(
            id=twin_id, patch_document=json_patch_collection, if_match="*", raw=True
        )
        return self.get(twin_id=twin_id)
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def validate(self, content):
    """Validate content against this instance's json schema validator.

    Accumulates schema violations via _add_error and returns self.errors.
    Unsupported schema types and invalid schemas are logged and skipped
    rather than raised.
    """
    if isinstance(content, str):
        content = process_json_arg(content, argument_name="content")
    validator = self._get_validator()
    if validator:
        try:
            for schema_error in sorted(validator.iter_errors(content), key=str):
                self._add_error(
                    schema_error.message,
                    schema_error.path,
                    schema_error.schema_path,
                )
        except Exception:
            logger.info("Invalid json schema, skipping validation...")
    else:
        logger.info(
            "Json schema type not supported, skipping validation...")
    return self.errors
def create_device_template(
    cmd,
    app_id: str,
    device_template_id: str,
    content: str,
    token=None,
    central_dns_suffix="azureiotcentral.com",
):
    """Create an IoT Central device template from a json string or file path."""
    if not isinstance(content, str):
        raise CLIError("content must be a string: {}".format(content))
    template_payload = utility.process_json_arg(content, argument_name="content")
    provider = CentralDeviceTemplateProvider(cmd=cmd, app_id=app_id, token=token)
    return provider.create_device_template(
        device_template_id=device_template_id,
        payload=template_payload,
        central_dns_suffix=central_dns_suffix,
    )
def _process_directory(self, from_directory):
    """Parse every .json/.dtdl model document under a directory.

    Other file types are skipped with a debug log; returns the list of
    parsed documents.
    """
    logger.debug(
        "Documents contained in directory: {}, processing...".format(from_directory)
    )
    payload = []
    for entry in scantree(from_directory):
        if entry.name.endswith((".json", ".dtdl")):
            payload.append(
                process_json_arg(content=entry.path, argument_name=entry.name)
            )
        else:
            logger.debug(
                "Skipping {} - model file must end with .json or .dtdl".format(entry.path)
            )
    return payload
def create(self, twin_id, model_id, properties=None):
    """Create a digital twin from a model id, with optional extra properties.

    The model is resolved through the model provider first, so a bad
    model_id fails before the twin request is built. Uses If-None-Match '*'
    so an existing twin is not overwritten.
    """
    target_model = self.model_provider.get(id=model_id)
    twin_body = {
        "$dtId": twin_id,
        "$metadata": {"$model": target_model["id"]},
    }
    if properties:
        parsed_properties = process_json_arg(
            content=properties, argument_name="properties"
        )
        twin_body.update(parsed_properties)
    logger.info("Twin payload %s", json.dumps(twin_body))
    try:
        return self.twins_sdk.add(id=twin_id, twin=twin_body, if_none_match="*")
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def _process_models_directory(from_directory):
    """Collect double-encoded model json strings from a file or directory.

    Accepts either a single .json/.dtdl file path or a directory to scan;
    each parsed model is re-serialized with json.dumps so the result is a
    list of json-encoded strings.
    """
    from azext_iot.common.utility import scantree, process_json_arg, read_file_content
    # we need to double-encode the JSON string
    from json import dumps

    models = []
    if os.path.isfile(from_directory) and from_directory.endswith((".json", ".dtdl")):
        models.append(dumps(read_file_content(file_path=from_directory)))
        return models
    for entry in scantree(from_directory):
        if entry.name.endswith((".json", ".dtdl")):
            parsed_entry = process_json_arg(content=entry.path, argument_name=entry.name)
            models.append(dumps(parsed_entry))
        else:
            logger.debug(
                "Skipping {} - model file must end with .json or .dtdl".format(
                    entry.path
                )
            )
    return models
def update_component(self, twin_id, component_path, json_patch):
    """Apply a json-patch document to a component of a digital twin.

    :param twin_id: Target twin id.
    :param component_path: Component path to patch.
    :param json_patch: Inline json or file path; must resolve to a json
        object or array of patch operations.
    :raises CLIError: On invalid patch content or service error.
    """
    json_patch = process_json_arg(content=json_patch, argument_name="json-patch")
    json_patch_collection = []
    if isinstance(json_patch, dict):
        json_patch_collection.append(json_patch)
    elif isinstance(json_patch, list):
        json_patch_collection.extend(json_patch)
    else:
        # Fix: previously a scalar json value silently resulted in an empty
        # patch document being sent to the service.
        raise CLIError(
            "--json-patch content must be an object or array. Actual type was: {}".format(
                type(json_patch).__name__
            )
        )
    logger.info("Patch payload %s", json.dumps(json_patch_collection))
    try:
        # TODO: API does not return response
        self.twins_sdk.update_component(
            id=twin_id,
            component_path=component_path,
            patch_document=json_patch_collection,
            if_match="*",
        )
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def run_command(
    cmd,
    app_id: str,
    device_id: str,
    interface_id: str,
    command_name: str,
    content: str,
    token=None,
    central_dns_suffix=CENTRAL_ENDPOINT,
):
    """Run a component command on an IoT Central device with a json payload."""
    if not isinstance(content, str):
        raise CLIError("content must be a string: {}".format(content))
    command_payload = utility.process_json_arg(content, argument_name="content")
    provider = CentralDeviceProvider(cmd=cmd, app_id=app_id, token=token)
    return provider.run_component_command(
        device_id=device_id,
        interface_id=interface_id,
        command_name=command_name,
        payload=command_payload,
    )
def create(self, twin_id, model_id, replace=False, properties=None):
    """Create a digital twin from a model id.

    `properties` (inline json or file path) is merged into the twin body.
    `replace=True` drops the If-None-Match guard so an existing twin is
    overwritten.
    """
    twin_body = {
        "$dtId": twin_id,
        "$metadata": {"$model": model_id},
    }
    if properties:
        twin_body.update(
            process_json_arg(content=properties, argument_name="properties")
        )
    logger.info("Twin payload %s", json.dumps(twin_body))
    try:
        options = TwinOptions(if_none_match=(None if replace else "*"))
        return self.twins_sdk.add(
            id=twin_id, twin=twin_body, digital_twins_add_options=options
        )
    except ErrorResponseException as e:
        raise CLIError(unpack_msrest_error(e))
def patch_digital_twin(self, device_id, json_patch):
    """Apply a json-patch document to a device's digital twin.

    :param device_id: Target device id.
    :param json_patch: Inline json or file path; must resolve to a json
        object or array of patch operations.
    :returns: The digital twin re-fetched after the patch.
    :raises CLIError: On invalid patch content or service error.
    """
    json_patch = process_json_arg(content=json_patch, argument_name="json-patch")
    json_patch_collection = []
    if isinstance(json_patch, dict):
        json_patch_collection.append(json_patch)
    elif isinstance(json_patch, list):
        json_patch_collection.extend(json_patch)
    else:
        # Fix: previously a scalar json value silently resulted in an empty
        # patch document being sent to the service.
        raise CLIError(
            "--json-patch content must be an object or array. Actual type was: {}".format(
                type(json_patch).__name__
            )
        )
    logger.info("Patch payload %s", json.dumps(json_patch_collection))
    try:
        # Currently no response text is returned from the update
        self.runtime_sdk.update_digital_twin(
            id=device_id,
            digital_twin_patch=json_patch_collection,
            if_match="*",
            raw=True).response
        return self.get_digital_twin(device_id=device_id)
    except CloudError as e:
        raise CLIError(unpack_msrest_error(e))
def test_file_json(self, content, argname, set_cwd):
    """Loading json via a file path matches parsing the file's content directly."""
    parsed = process_json_arg(content, argument_name=argname)
    expected = json.loads(read_file_content(content))
    assert parsed == expected
def test_inline_json(self, content, argname):
    """Inline json strings parse to the same value as json.loads."""
    parsed = process_json_arg(content, argument_name=argname)
    assert parsed == json.loads(content)
def test_dt_models(self):
    """End-to-end scenario for 'dt model' commands.

    Creates an instance, uploads models from a directory and inline,
    exercises list/show (with and without --definition), dependency listing,
    decommissioning via update, and finally deletes all models.
    """
    self.wait_for_capacity()
    instance_name = generate_resource_id()
    models_directory = "./models"
    inline_model = "./models/Floor.json"
    component_dtmi = "dtmi:com:example:Thermostat;1"
    room_dtmi = "dtmi:com:example:Room;1"
    create_output = self.cmd(
        "dt create -n {} -g {} -l {}".format(instance_name, self.rg, self.region)
    ).get_output_in_json()
    self.track_instance(create_output)
    self.wait_for_hostname(create_output)
    # Grant the current user data-plane access before model operations.
    self.cmd(
        "dt role-assignment create -n {} -g {} --assignee {} --role '{}'".format(
            instance_name, self.rg, self.current_user, self.role_map["owner"]
        )
    )
    # Wait for RBAC to catch-up
    sleep(60)
    create_models_output = self.cmd(
        "dt model create -n {} --from-directory '{}'".format(
            instance_name, models_directory
        )
    ).get_output_in_json()
    assert_create_models_attributes(
        create_models_output, directory_path=models_directory
    )
    # Basic list should return one entry per created model.
    list_models_output = self.cmd(
        "dt model list -n {}".format(instance_name)
    ).get_output_in_json()
    assert len(list_models_output) == len(create_models_output)
    for model in list_models_output:
        assert model["id"]
    # --definition additionally includes each model's full document.
    list_models_output = self.cmd(
        "dt model list -n {} -g {} --definition".format(instance_name, self.rg)
    ).get_output_in_json()
    assert len(list_models_output) == len(create_models_output)
    for model in list_models_output:
        assert model["id"]
        assert model["model"]
    # Room depends on one other model, so dependency listing returns 2 entries.
    model_dependencies_output = self.cmd(
        "dt model list -n {} -g {} --dependencies-for '{}'".format(
            instance_name,
            self.rg,
            room_dtmi,
        )
    ).get_output_in_json()
    assert len(model_dependencies_output) == 2
    for model in create_models_output:
        model_show_output = self.cmd(
            "dt model show -n {} --dtmi '{}'".format(instance_name, model["id"])
        ).get_output_in_json()
        assert model_show_output["id"] == model["id"]
        model_show_def_output = self.cmd(
            "dt model show -n {} -g {} --dtmi '{}' --definition".format(
                instance_name, self.rg, model["id"]
            )
        ).get_output_in_json()
        assert model_show_def_output["id"] == model["id"]
        assert model_show_def_output["model"]
        assert model_show_def_output["model"]["@id"] == model["id"]
    # Inline creation requires a fresh @id since the original already exists.
    model_json = process_json_arg(inline_model, "models")
    model_id = model_json["@id"]
    inc_model_id = _increment_model_id(model_id)
    model_json["@id"] = inc_model_id
    self.kwargs["modelJson"] = json.dumps(model_json)
    create_models_inline_output = self.cmd(
        "dt model create -n {} --models '{}'".format(instance_name, "{modelJson}")
    ).get_output_in_json()
    assert create_models_inline_output[0]["id"] == inc_model_id
    update_model_output = self.cmd(
        "dt model update -n {} --dtmi '{}' --decommission".format(
            instance_name, inc_model_id
        )
    ).get_output_in_json()
    assert update_model_output["id"] == inc_model_id
    assert update_model_output["decommissioned"] is True
    list_models_output = self.cmd(
        "dt model list -n {}".format(instance_name)
    ).get_output_in_json()
    # Delete non-referenced models first
    for model in list_models_output:
        if model["id"] != component_dtmi:
            self.cmd(
                "dt model delete -n {} --dtmi {}".format(instance_name, model["id"])
            )
    # Now referenced component
    self.cmd(
        "dt model delete -n {} --dtmi {}".format(instance_name, component_dtmi)
    )
    assert (
        len(
            self.cmd(
                "dt model list -n {}".format(instance_name)
            ).get_output_in_json()
        )
        == 0
    )