Example #1
0
    def _load_file(path):
        if path == '-':
            content = sys.stdin.read()
        else:
            content = read_file_content(os.path.expanduser(path), allow_binary=True)

        return content.rstrip(os.linesep)
Example #2
0
    def _load_file(path):
        """Load file contents (stdin when path is '-') and strip trailing OS line separators."""
        from azure.cli.core.util import read_file_content
        if path != '-':
            text = read_file_content(os.path.expanduser(path), allow_binary=True)
        else:
            text = sys.stdin.read()
        return text.rstrip(os.linesep)
Example #3
0
    def _load_file(path):
        if path == '-':
            content = sys.stdin.read()
        else:
            content = read_file_content(os.path.expanduser(path),
                                        allow_binary=True)

        return content[0:-1] if content and content[-1] == '\n' else content
    def test_interface_create_invalid_payload(self, serviceclient):
        """Creating an interface whose definition lacks '@id' must raise CLIError."""
        definition = json.loads(str(read_file_content(_pnp_create_interface_payload_file)))
        definition.pop('@id')
        with pytest.raises(CLIError):
            subject.iot_pnp_interface_create(fixture_cmd,
                                             login=mock_target['cs'],
                                             interface_definition=json.dumps(definition))
Example #5
0
    def _load_file(path):
        """Read *path* ('-' means stdin); trim one trailing newline when present."""
        from azure.cli.core.util import read_file_content
        if path != '-':
            import os
            text = read_file_content(os.path.expanduser(path),
                                     allow_binary=True)
        else:
            text = sys.stdin.read()
        return text[:-1] if (text and text.endswith('\n')) else text
Example #6
0
def update_spark_pool(cmd, client, resource_group_name, workspace_name, spark_pool_name,
                      node_size=None, node_count=None, enable_auto_scale=None,
                      min_node_count=None, max_node_count=None,
                      enable_auto_pause=None, delay=None,
                      library_requirements=None,
                      package_action=None, package=None,
                      tags=None, force=False, no_wait=False):
    """Patch an existing Synapse Spark pool with only the provided settings.

    Fetches the current pool, mutates the requested fields in place and
    submits it back via begin_create_or_update; returns the LRO poller
    (or raw result when no_wait is set).
    """
    existing_spark_pool = client.get(resource_group_name, workspace_name, spark_pool_name)

    if node_size:
        existing_spark_pool.node_size = node_size
    if node_count:
        existing_spark_pool.node_count = node_count

    if library_requirements:
        library_requirements_content = read_file_content(library_requirements)
        existing_spark_pool.library_requirements = LibraryRequirements(filename=library_requirements,
                                                                       content=library_requirements_content)
    if tags:
        existing_spark_pool.tags = tags

    # Update auto-scale in place when the pool already has one; otherwise create it fresh.
    if existing_spark_pool.auto_scale is not None:
        if enable_auto_scale is not None:
            existing_spark_pool.auto_scale.enabled = enable_auto_scale
        if min_node_count:
            existing_spark_pool.auto_scale.min_node_count = min_node_count
        if max_node_count:
            existing_spark_pool.auto_scale.max_node_count = max_node_count
    else:
        existing_spark_pool.auto_scale = AutoScaleProperties(enabled=enable_auto_scale, min_node_count=min_node_count,
                                                             max_node_count=max_node_count)

    if existing_spark_pool.auto_pause is not None:
        if enable_auto_pause is not None:
            existing_spark_pool.auto_pause.enabled = enable_auto_pause
        if delay:
            existing_spark_pool.auto_pause.delay_in_minutes = delay
    else:
        existing_spark_pool.auto_pause = AutoPauseProperties(enabled=enable_auto_pause,
                                                             delay_in_minutes=delay)

    if package_action and package:
        if package_action == "Add":
            if existing_spark_pool.custom_libraries is None:
                existing_spark_pool.custom_libraries = []
            for item in package:
                package_get = get_workspace_package(cmd, workspace_name, item)
                library = LibraryInfo(name=package_get.name,
                                      type=package_get.properties.type,
                                      path=package_get.properties.path,
                                      container_name=package_get.properties.container_name)
                existing_spark_pool.custom_libraries.append(library)
        # 'elif' — the two actions are mutually exclusive; also guard against a pool
        # with no custom libraries yet (attribute may be None, which is not iterable).
        elif package_action == "Remove" and existing_spark_pool.custom_libraries:
            existing_spark_pool.custom_libraries = [library for library in existing_spark_pool.custom_libraries
                                                    if library.name not in package]

    return sdk_no_wait(no_wait, client.begin_create_or_update, resource_group_name, workspace_name, spark_pool_name,
                       existing_spark_pool, force=force)
    def test_model_update_invalid_payload(self, serviceclient):
        """Updating a model with a mismatched '@id' must raise CLIError."""
        definition = json.loads(str(read_file_content(_pnp_create_model_payload_file)))
        definition['@id'] = 'fake_invalid_id'
        with pytest.raises(CLIError):
            subject.iot_pnp_model_update(fixture_cmd,
                                         model_definition=json.dumps(definition),
                                         repo_endpoint=mock_target['entity'],
                                         repo_id=mock_target['repository_id'])
 def test_model_update_error(self, fixture_cmd, serviceclient_generic_error,
                             payload_scenario):
     """A generic service error during model update surfaces as CLIError."""
     # payload_scenario[0] flags whether the payload comes from the sample file.
     use_file = payload_scenario[0]
     payload = (str(read_file_content(_pnp_create_model_payload_file))
                if use_file else payload_scenario[1])
     with pytest.raises(CLIError):
         subject.iot_pnp_model_update(fixture_cmd,
                                      model_definition=payload,
                                      repo_endpoint=mock_target['entity'],
                                      repo_id=mock_target['repository_id'])
Example #9
0
def pack(cmd, template_file):
    """
    Packs the specified template and its referenced artifacts for use in a Template Spec.

    :param template_file: The path to the template spec .json file.
    :type template_file: str
    """
    root_template_file_path = os.path.abspath(template_file)
    context = PackingContext(os.path.dirname(root_template_file_path))
    template_content = read_file_content(template_file)
    # Round-trip through dumps/loads to normalize the processed template into plain JSON types.
    template_json = json.loads(json.dumps(process_template(template_content)))
    # Presumably collects linked artifacts onto the packing context — confirm in _pack_artifacts.
    _pack_artifacts(cmd, root_template_file_path, context)
    return PackagedTemplate(template_json, getattr(context, 'Artifact'))
Example #10
0
def create_spark_pool(cmd,
                      client,
                      resource_group_name,
                      workspace_name,
                      spark_pool_name,
                      spark_version,
                      node_size,
                      node_count,
                      node_size_family=NodeSizeFamily.memory_optimized.value,
                      enable_auto_scale=None,
                      min_node_count=None,
                      max_node_count=None,
                      enable_auto_pause=None,
                      delay=None,
                      spark_events_folder="/events",
                      library_requirements=None,
                      spark_log_folder="/logs",
                      tags=None,
                      no_wait=False):
    """Create a new Synapse Spark pool, inheriting its location from the parent workspace."""
    ws_client = cf_synapse_client_workspace_factory(cmd.cli_ctx)
    workspace = ws_client.get(resource_group_name, workspace_name)

    pool_info = BigDataPoolResourceInfo(
        location=workspace.location,
        spark_version=spark_version,
        node_size=node_size,
        node_count=node_count,
        node_size_family=node_size_family,
        spark_events_folder=spark_events_folder,
        spark_log_folder=spark_log_folder,
        tags=tags)

    pool_info.auto_scale = AutoScaleProperties(
        enabled=enable_auto_scale,
        min_node_count=min_node_count,
        max_node_count=max_node_count)
    pool_info.auto_pause = AutoPauseProperties(
        enabled=enable_auto_pause, delay_in_minutes=delay)

    if library_requirements:
        pool_info.library_requirements = LibraryRequirements(
            filename=library_requirements,
            content=read_file_content(library_requirements))

    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name, workspace_name, spark_pool_name,
                       pool_info)
    def setUp(self):
        """Prepare per-test payload files with a uniquely suffixed '@id'.

        For the life-cycle tests, rewrites the checked-in payload so its
        '@id' carries the class-level random suffix, stores the new id in
        self.kwargs and writes the rewritten definition to the target file.
        """
        change_dir()

        def _materialize(source_path, id_key, file_key):
            # Re-id the payload, strip newlines and write it to the path stored in self.kwargs.
            original = str(read_file_content(source_path))
            base_id = json.loads(original)['@id']
            new_id = '{}{}'.format(base_id, TestPnPModel.rand_val)
            self.kwargs.update({id_key: new_id})
            updated = original.replace(base_id, new_id).replace('\n', '')
            # Context manager guarantees the handle is closed even if the write fails.
            with open(self.kwargs[file_key], "w+", encoding='utf-8') as fo:
                fo.write(updated)

        if self._testMethodName == 'test_interface_life_cycle':
            _materialize(_interface_payload, 'interface_id', 'interface')

        if self._testMethodName == 'test_model_life_cycle':
            _materialize(_capability_model_payload, 'model_id', 'model')
def get_certificate(cert):
    """Return certificate content: read it from *cert* when it is a file path, else pass it through."""
    from azure.cli.core.util import read_file_content
    import os
    if cert is None:
        raise InvalidArgumentValueError(
            """One of the value provided for the certificates is empty.
    Please verify there aren't any spaces.""")
    if os.path.exists(cert):
        return read_file_content(cert)
    return cert
Example #13
0
def iot_digitaltwin_invoke_command(cmd,
                                   interface,
                                   device_id,
                                   command_name,
                                   command_payload=None,
                                   timeout=10,
                                   hub_name=None,
                                   resource_group_name=None,
                                   login=None):
    """Invoke a command on a device interface, resolving the payload from a file path or inline JSON."""
    device_interfaces = _iot_digitaltwin_interface_list(
        cmd, device_id, hub_name, resource_group_name, login)
    interface_list = _get_device_default_interface_dict(device_interfaces)

    matched = next(
        (entry for entry in interface_list if entry['name'] == interface), None)
    if not matched:
        raise CLIError('Target interface is not implemented by the device!')

    if command_payload:
        # Accept a file path: read its contents when it exists on disk.
        if exists(command_payload):
            command_payload = str(read_file_content(command_payload))

        try:
            parsed = shell_safe_json_parse(command_payload)
        except ValueError:
            parsed = None

        # Keep the raw string unless it parsed into something truthy (or an explicit bool).
        if parsed or isinstance(parsed, bool):
            command_payload = parsed

    target = get_iot_hub_connection_string(cmd,
                                           hub_name,
                                           resource_group_name,
                                           login=login)
    service_sdk, errors = _bind_sdk(target, SdkType.service_sdk)
    try:
        return service_sdk.invoke_interface_command(
            device_id,
            interface,
            command_name,
            command_payload,
            connect_timeout_in_seconds=timeout,
            response_timeout_in_seconds=timeout)
    except errors.CloudError as e:
        raise CLIError(unpack_msrest_error(e))
 def test_iot_digitaltwin_property_update_error(self, fixture_cmd,
                                                serviceclient_error,
                                                command_payload, interface,
                                                command):
     """Service-side errors while invoking a command must surface as CLIError."""
     # command_payload[0] flags whether the payload comes from the sample file.
     if command_payload[0]:
         payload = str(
             read_file_content(_device_digitaltwin_invoke_command_payload))
     else:
         payload = command_payload[1]
     with pytest.raises(CLIError):
         subject.iot_digitaltwin_invoke_command(fixture_cmd,
                                                device_id=device_id,
                                                interface=interface,
                                                command_name=command,
                                                command_payload=payload,
                                                login=mock_target['cs'])
def _validate_model_definition(model_def):
    """Parse a model definition given either as a file path or as inline JSON.

    :param model_def: Path to a definition file, or the definition content itself.
    :returns: The parsed JSON document.
    :raises CLIError: When the content cannot be parsed as JSON.
    """
    if exists(model_def):
        model_def = str(read_file_content(model_def))
    else:
        logger.info('Definition not from file path or incorrect path given.')

    try:
        return shell_safe_json_parse(model_def)
    except ValueError as e:
        logger.debug('Received definition: %s', model_def)
        if _looks_like_file(model_def):
            # Fixed typo in user-facing message: "its" -> "it's".
            raise CLIError(
                "The definition content looks like it's from a file. Please ensure the path is correct."
            )
        raise CLIError(
            'Malformed capability model definition. '
            'Use --debug to see what was received. Error details: {}'.format(
                e))
    def test_model_life_cycle(self):
        """End-to-end create/show/update/publish/delete for a capability model."""
        # Repo starts with no capability models.
        self.cmd('iot pnp capability-model list -e {endpoint} -r {repo}',
                 checks=[self.check('length([*])', 0)])

        # Failure cases: missing repo/login, bad definition content, bad path.
        self.cmd('iot pnp capability-model create -e {endpoint} --def {model}', expect_failure=True)
        self.cmd('iot pnp capability-model create -e {endpoint} -r {repo} --def model', expect_failure=True)
        self.cmd('iot pnp capability-model create -e {endpoint} -r {repo} --def model.json', expect_failure=True)

        # Create succeeds and the model becomes listable.
        self.cmd('iot pnp capability-model create -e {endpoint} -r {repo} --def {model}', checks=self.is_empty())
        self.cmd('iot pnp capability-model list -e {endpoint} -r {repo}',
                 checks=[self.check('length([*])', 1)])

        # Show returns the model we just created.
        shown = self.cmd('iot pnp capability-model show -e {endpoint} -r {repo} -m {model_id}').get_output_in_json()
        assert json.dumps(shown)
        assert shown['@id'] == self.kwargs['model_id']
        assert len(shown['implements']) > 0

        # Update: rewrite the definition with a tweaked displayName.
        original = str(read_file_content(self.kwargs['model']))
        name = json.loads(original)['displayName']
        updated = original.replace(name, '{}-Updated'.format(name)).replace('\n', '')
        with open(self.kwargs['model-updated'], "w+", encoding='utf-8') as fo:
            fo.write(updated)
        self.cmd('iot pnp capability-model update -e {endpoint} -r {repo} --def {model-updated}', checks=self.is_empty())

        # Publish, then delete.
        self.cmd('iot pnp capability-model publish -e {endpoint} -r {repo} -m {model_id}', checks=self.is_empty())
        self.cmd('iot pnp capability-model delete -e {endpoint} -r {repo} -m {model_id}', checks=self.is_empty())
def update_spark_pool(cmd, client, resource_group_name, workspace_name, spark_pool_name,
                      node_size=None, node_count=None, enable_auto_scale=None,
                      min_node_count=None, max_node_count=None,
                      enable_auto_pause=None, delay=None,
                      library_requirements_file=None, tags=None, force=False, no_wait=False):
    """Apply the provided settings to an existing Spark pool and push the update."""
    pool = client.get(resource_group_name, workspace_name, spark_pool_name)

    if node_size:
        pool.node_size = node_size
    if node_count:
        pool.node_count = node_count

    if library_requirements_file:
        pool.library_requirements = LibraryRequirements(
            filename=library_requirements_file,
            content=read_file_content(library_requirements_file))
    if tags:
        pool.tags = tags

    # Mutate an existing auto-scale config in place; build a fresh one otherwise.
    if pool.auto_scale is None:
        pool.auto_scale = AutoScaleProperties(enabled=enable_auto_scale, min_node_count=min_node_count,
                                              max_node_count=max_node_count)
    else:
        if enable_auto_scale is not None:
            pool.auto_scale.enabled = enable_auto_scale
        if min_node_count:
            pool.auto_scale.min_node_count = min_node_count
        if max_node_count:
            pool.auto_scale.max_node_count = max_node_count

    if pool.auto_pause is None:
        pool.auto_pause = AutoPauseProperties(enabled=enable_auto_pause,
                                              delay_in_minutes=delay)
    else:
        if enable_auto_pause is not None:
            pool.auto_pause.enabled = enable_auto_pause
        if delay:
            pool.auto_pause.delay_in_minutes = delay

    return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, workspace_name, spark_pool_name,
                       pool, force=force)
    def test_interface_life_cycle(self):
        """End-to-end create/show/update/publish/delete for an interface."""
        # Failure cases: missing repo/login, bad definition content, bad path.
        self.cmd('iot pnp interface create -e {endpoint} --def {interface}', expect_failure=True)
        self.cmd('iot pnp interface create -e {endpoint} -r {repo} --def interface', expect_failure=True)
        self.cmd('iot pnp interface create -e {endpoint} -r {repo} --def interface.json', expect_failure=True)

        # Create succeeds and the interface appears in the repo listing.
        self.cmd('iot pnp interface create -e {endpoint} -r {repo} --def {interface}', checks=self.is_empty())
        self.cmd('iot pnp interface list -e {endpoint} -r {repo}',
                 checks=[self.greater_than('length([*])', 0)])

        # Show returns the interface we just created.
        shown = self.cmd('iot pnp interface show -e {endpoint} -r {repo} -i {interface_id}').get_output_in_json()
        assert json.dumps(shown)
        assert shown['@id'] == self.kwargs['interface_id']
        assert shown['displayName'] == 'MXChip1'
        assert len(shown['contents']) > 0

        # Update: rewrite the definition with a tweaked displayName.
        original = str(read_file_content(self.kwargs['interface']))
        name = json.loads(original)['displayName']
        updated = original.replace(name, '{}-Updated'.format(name)).replace('\n', '')
        with open(self.kwargs['interface-updated'], "w+", encoding='utf-8') as fo:
            fo.write(updated)
        self.cmd('iot pnp interface update -e {endpoint} -r {repo} --def {interface-updated}', checks=self.is_empty())

        # Publish, then delete.
        self.cmd('iot pnp interface publish -e {endpoint} -r {repo} -i {interface_id}', checks=self.is_empty())
        self.cmd('iot pnp interface delete -e {endpoint} -r {repo} -i {interface_id}', checks=self.is_empty())
Example #19
0
def synapse_kusto_script_create(cmd, client,
                                resource_group_name,
                                workspace_name,
                                script_name,
                                definition_file,
                                kusto_pool_name=None,
                                kusto_database_name=None,
                                no_wait=False):
    """Create (or update) a KQL script in the workspace from a definition file."""
    # NOTE: the passed-in client is replaced by a workspace-scoped kusto-script client.
    client = cf_kusto_script(cmd.cli_ctx, workspace_name)
    content = KqlScriptContent(
        query=read_file_content(definition_file),
        metadata=KqlScriptContentMetadata(language="kql"),
        current_connection=KqlScriptContentCurrentConnection(
            pool_name=kusto_pool_name,
            database_name=kusto_database_name))
    resource = KqlScriptResource(name=script_name,
                                 properties=KqlScript(content=content))
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       kql_script_name=script_name,
                       kql_script=resource)
    def test_iot_digitaltwin_property_update(self, fixture_cmd, serviceclient,
                                             payload_scenario):
        """Property update issues a PATCH against the device's interfaces endpoint."""
        # payload_scenario[0] flags whether to load the payload from the sample file.
        if payload_scenario[0]:
            payload = str(
                read_file_content(
                    _device_digitaltwin_property_update_payload_file))
        else:
            payload = payload_scenario[1]

        subject.iot_digitaltwin_property_update(fixture_cmd,
                                                device_id=device_id,
                                                interface_payload=payload,
                                                login=mock_target['cs'])

        request = serviceclient.call_args[0][0]
        assert request.method == 'PATCH'
        assert '{}/digitalTwins/{}/interfaces?'.format(mock_target['entity'],
                                                       device_id) in request.url
    def test_hub_module_twins(self):
        """Live test: module-twin show/update/replace, both via hub name/RG and via connection string."""
        self.kwargs['generic_dict'] = {'key': 'value'}
        edge_device_count = 1
        module_count = 1

        # Generate unique device/module identity names for this run.
        names = self._create_entity_names(edge_devices=edge_device_count,
                                          modules=module_count)
        edge_device_ids = names['edge_device_ids']
        module_ids = names['module_ids']

        # Create an edge-enabled device, then a module identity on it.
        self.cmd('iot hub device-identity create -d {} -n {} -g {} -ee'.format(
            edge_device_ids[0], LIVE_HUB, LIVE_RG),
                 checks=[self.check('deviceId', edge_device_ids[0])])

        self.cmd(
            'iot hub module-identity create -d {} -n {} -g {} -m {}'.format(
                edge_device_ids[0], LIVE_HUB, LIVE_RG, module_ids[0]),
            checks=[
                self.check('deviceId', edge_device_ids[0]),
                self.check('moduleId', module_ids[0]),
                self.check('managedBy', 'iotEdge'),
                self.exists('authentication.symmetricKey.primaryKey'),
                self.exists('authentication.symmetricKey.secondaryKey')
            ])

        # Twin show via hub name + resource group.
        self.cmd('iot hub module-twin show -d {} -n {} -g {} -m {}'.format(
            edge_device_ids[0], LIVE_HUB, LIVE_RG, module_ids[0]),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('moduleId', module_ids[0]),
                     self.exists('properties.desired'),
                     self.exists('properties.reported')
                 ])

        # With connection string
        self.cmd('iot hub module-twin show -d {} --login {} -m {}'.format(
            edge_device_ids[0], LIVE_HUB_CS, module_ids[0]),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('moduleId', module_ids[0]),
                     self.exists('properties.desired'),
                     self.exists('properties.reported')
                 ])

        # Partial twin update via --set of a desired property.
        self.cmd(
            'iot hub module-twin update -d {} -n {} -g {} -m {} --set properties.desired.special={}'
            .format(edge_device_ids[0], LIVE_HUB, LIVE_RG, module_ids[0],
                    '"{generic_dict}"'),
            checks=[
                self.check('deviceId', edge_device_ids[0]),
                self.check('moduleId', module_ids[0]),
                self.check('properties.desired.special.key', 'value')
            ])

        # With connection string
        self.cmd(
            'iot hub module-twin update -d {} --login {} -m {} --set properties.desired.special={}'
            .format(edge_device_ids[0], LIVE_HUB_CS, module_ids[0],
                    '"{generic_dict}"'),
            checks=[
                self.check('deviceId', edge_device_ids[0]),
                self.check('moduleId', module_ids[0]),
                self.check('properties.desired.special.key', 'value')
            ])

        # Full twin replace from a JSON file on disk.
        content_path = os.path.join(CWD, 'test_generic_replace.json')
        self.cmd("iot hub module-twin replace -d {} -n {} -g {} -m {} -j '{}'".
                 format(edge_device_ids[0], LIVE_HUB, LIVE_RG, module_ids[0],
                        content_path),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('moduleId', module_ids[0]),
                     self.check('properties.desired.awesome', 9001),
                     self.check('properties.desired.temperature.min', 10),
                     self.check('properties.desired.temperature.max', 100),
                     self.check('tags.location.region', 'US')
                 ])

        # With connection string
        self.cmd("iot hub module-twin replace -d {} --login {} -m {} -j '{}'".
                 format(edge_device_ids[0], LIVE_HUB_CS, module_ids[0],
                        content_path),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('moduleId', module_ids[0]),
                     self.check('properties.desired.awesome', 9001),
                     self.check('properties.desired.temperature.min', 10),
                     self.check('properties.desired.temperature.max', 100),
                     self.check('tags.location.region', 'US')
                 ])

        # Replace again, this time passing the JSON content inline via kwargs.
        self.kwargs['twin_payload'] = read_file_content(content_path)
        self.cmd("iot hub module-twin replace -d {} -n {} -g {} -m {} -j '{}'".
                 format(edge_device_ids[0], LIVE_HUB, LIVE_RG, module_ids[0],
                        '{twin_payload}'),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('moduleId', module_ids[0]),
                     self.check('properties.desired.awesome', 9001),
                     self.check('properties.desired.temperature.min', 10),
                     self.check('properties.desired.temperature.max', 100),
                     self.check('tags.location.region', 'US')
                 ])

        # Cleanup: remove the module identities created above.
        for i in module_ids:
            self.cmd(
                'iot hub module-identity delete -d {} -n {} -g {} --module-id {}'
                .format(edge_device_ids[0], LIVE_HUB, LIVE_RG, i),
                checks=self.is_empty())
 def _get_zone_object(self, file_name, zone_name):  # pylint: disable=no-self-use
     """Load a zone file from the test fixtures directory and parse it into a zone object."""
     from azure.cli.core.util import read_file_content
     file_path = os.path.join(TEST_DIR, 'zone_files', file_name)
     # Removed dead 'file_text = None' pre-assignment; the value is set unconditionally below.
     file_text = read_file_content(file_path)
     return parse_zone_file(file_text, zone_name)
Example #23
0
def import_zone(cmd, resource_group_name, private_zone_name, file_name):
    """Import a BIND zone file into an Azure private DNS zone.

    Parses *file_name*, groups its entries into record sets relative to the
    zone origin, creates (or updates) the private zone, then pushes each
    record set, reporting progress to stderr.
    """
    from azure.cli.core.util import read_file_content
    import sys
    from azure.mgmt.privatedns.models import RecordSet

    file_text = read_file_content(file_name)
    zone_obj = parse_zone_file(file_text, private_zone_name)
    origin = private_zone_name
    record_sets = {}

    for record_set_name in zone_obj:
        for record_set_type in zone_obj[record_set_name]:
            record_set_obj = zone_obj[record_set_name][record_set_type]

            # The SOA entry establishes the effective origin for relative names.
            if record_set_type == 'soa':
                origin = record_set_name.rstrip('.')

            if not isinstance(record_set_obj, list):
                record_set_obj = [record_set_obj]

            for entry in record_set_obj:

                record_set_ttl = entry['ttl']
                record_set_key = '{}{}'.format(record_set_name.lower(),
                                               record_set_type)

                record = _build_record(cmd, entry)
                if not record:
                    logger.warning(
                        'Cannot import %s. RecordType is not found. Skipping...',
                        entry['delim'].lower())
                    continue

                record_set = record_sets.get(record_set_key, None)
                if not record_set:

                    # Workaround for issue #2824
                    relative_record_set_name = record_set_name.rstrip('.')
                    if not relative_record_set_name.endswith(origin):
                        logger.warning(
                            'Cannot import %s. Only records relative to origin may be '
                            'imported at this time. Skipping...',
                            relative_record_set_name)
                        continue

                    record_set = RecordSet(ttl=record_set_ttl)
                    record_sets[record_set_key] = record_set
                _privatedns_add_record(record_set,
                                       record,
                                       record_set_type,
                                       is_list=record_set_type.lower()
                                       not in ['soa', 'cname'])

    # Pre-count records so the progress output can show a running total.
    total_records = 0
    for key, rs in record_sets.items():
        rs_name, rs_type = key.lower().rsplit('.', 1)
        rs_name = rs_name[:-(len(origin) + 1)] if rs_name != origin else '@'
        try:
            record_count = len(
                getattr(rs, _privatedns_type_to_property_name(rs_type)))
        except TypeError:
            record_count = 1
        total_records += record_count
    cum_records = 0

    from azure.mgmt.privatedns import PrivateDnsManagementClient
    from azure.mgmt.privatedns.models import PrivateZone
    client = get_mgmt_service_client(cmd.cli_ctx, PrivateDnsManagementClient)

    print('== BEGINNING ZONE IMPORT: {} ==\n'.format(private_zone_name),
          file=sys.stderr)

    if private_zone_name.endswith(".local"):
        logger.warning((
            "Please be aware that DNS names ending with .local are reserved for use with multicast DNS "
            "and may not work as expected with some operating systems. For details refer to your operating systems documentation."
        ))
    zone = PrivateZone(location='global')
    result = LongRunningOperation(cmd.cli_ctx)(
        client.private_zones.create_or_update(resource_group_name,
                                              private_zone_name, zone))
    if result.provisioning_state != 'Succeeded':
        # Fixed typo in user-facing message: "occured" -> "occurred".
        raise CLIError(
            'Error occurred while creating or updating private dns zone.')

    for key, rs in record_sets.items():

        rs_name, rs_type = key.lower().rsplit('.', 1)
        rs_name = '@' if rs_name == origin else rs_name
        if rs_name.endswith(origin):
            rs_name = rs_name[:-(len(origin) + 1)]

        try:
            record_count = len(
                getattr(rs, _privatedns_type_to_property_name(rs_type)))
        except TypeError:
            record_count = 1
        if rs_name == '@' and rs_type == 'soa':
            # The root SOA host must match what the service generated at zone creation.
            # (Removed redundant re-assignment of rs_name: the branch condition
            # already guarantees rs_name == '@'.)
            root_soa = client.record_sets.get(resource_group_name,
                                              private_zone_name, 'soa', '@')
            rs.soa_record.host = root_soa.soa_record.host
        try:
            client.record_sets.create_or_update(resource_group_name,
                                                private_zone_name, rs_type,
                                                rs_name, rs)
            cum_records += record_count
            print("({}/{}) Imported {} records of type '{}' and name '{}'".
                  format(cum_records, total_records, record_count, rs_type,
                         rs_name),
                  file=sys.stderr)
        except CloudError as ex:
            logger.error(ex)
    print("\n== {}/{} RECORDS IMPORTED SUCCESSFULLY: '{}' ==".format(
        cum_records, total_records, private_zone_name),
          file=sys.stderr)
Example #24
0
 def _get_zone_object(self, file_name, zone_name):  # pylint: disable=no-self-use
     """Load a zone file from the test fixtures directory and parse it into a zone object."""
     from azure.cli.core.util import read_file_content
     file_path = os.path.join(TEST_DIR, 'zone_files', file_name)
     # Removed dead 'file_text = None' pre-assignment; the value is set unconditionally below.
     file_text = read_file_content(file_path)
     return parse_zone_file(file_text, zone_name)
def generate_pnp_interface_list_payload():
    """Load and return the PnP interface-list fixture as a parsed JSON object."""
    change_dir()
    raw_payload = read_file_content(_pnp_list_interface_file)
    return json.loads(raw_payload)
def generate_device_interfaces_payload():
    """Load and return the device digital-twin fixture as a parsed JSON object."""
    change_dir()
    raw_payload = read_file_content(_device_digitaltwin_payload_file)
    return json.loads(raw_payload)
Exemple #27
0
def update_spark_pool(cmd,
                      client,
                      resource_group_name,
                      workspace_name,
                      spark_pool_name,
                      node_size=None,
                      node_count=None,
                      enable_auto_scale=None,
                      min_node_count=None,
                      max_node_count=None,
                      enable_auto_pause=None,
                      delay=None,
                      library_requirements=None,
                      spark_config_file_path=None,
                      package_action=None,
                      package=None,
                      tags=None,
                      force=False,
                      no_wait=False):
    """Update an existing Synapse Spark pool in place.

    Fetches the current pool definition, overlays only the arguments the
    caller supplied, then issues a create-or-update. Parameters mirror the
    CLI command options; ``None`` (or falsy) means "leave unchanged".

    :raises InvalidArgumentValueError: if ``spark_config_file_path`` cannot
        be read.
    :return: result of ``sdk_no_wait`` (LRO poller when ``no_wait``).
    """
    existing_spark_pool = client.get(resource_group_name, workspace_name,
                                     spark_pool_name)

    if node_size:
        existing_spark_pool.node_size = node_size
    if node_count:
        existing_spark_pool.node_count = node_count

    if library_requirements:
        library_requirements_content = read_file_content(library_requirements)
        existing_spark_pool.library_requirements = LibraryRequirements(
            filename=library_requirements,
            content=library_requirements_content)
    if tags:
        existing_spark_pool.tags = tags

    # Patch auto-scale in place when the pool already has one; otherwise
    # build a fresh AutoScaleProperties from whatever values were supplied.
    if existing_spark_pool.auto_scale is not None:
        if enable_auto_scale is not None:
            existing_spark_pool.auto_scale.enabled = enable_auto_scale
        if min_node_count:
            existing_spark_pool.auto_scale.min_node_count = min_node_count
        if max_node_count:
            existing_spark_pool.auto_scale.max_node_count = max_node_count
    else:
        existing_spark_pool.auto_scale = AutoScaleProperties(
            enabled=enable_auto_scale,
            min_node_count=min_node_count,
            max_node_count=max_node_count)

    # Same patch-or-create pattern for auto-pause.
    if existing_spark_pool.auto_pause is not None:
        if enable_auto_pause is not None:
            existing_spark_pool.auto_pause.enabled = enable_auto_pause
        if delay:
            existing_spark_pool.auto_pause.delay_in_minutes = delay
    else:
        existing_spark_pool.auto_pause = AutoPauseProperties(
            enabled=enable_auto_pause, delay_in_minutes=delay)

    if package_action and package:
        if package_action == "Add":
            if existing_spark_pool.custom_libraries is None:
                existing_spark_pool.custom_libraries = []
            for item in package:
                package_get = get_workspace_package(cmd, workspace_name, item)
                library = LibraryInfo(
                    name=package_get.name,
                    type=package_get.properties.type,
                    path=package_get.properties.path,
                    container_name=package_get.properties.container_name,
                    uploaded_timestamp=package_get.properties.
                    uploaded_timestamp)
                existing_spark_pool.custom_libraries.append(library)
        elif package_action == "Remove":  # actions are mutually exclusive
            existing_spark_pool.custom_libraries = [
                library for library in existing_spark_pool.custom_libraries
                if library.name not in package
            ]

    if spark_config_file_path:
        filename = Path(spark_config_file_path).stem
        try:
            with open(spark_config_file_path, 'r') as stream:
                content = stream.read()
        except OSError as ex:
            # Was a bare ``except:`` which also swallowed SystemExit and
            # KeyboardInterrupt; only I/O failures mean a bad path. Chain
            # the cause so the original error is not lost.
            from azure.cli.core.azclierror import InvalidArgumentValueError
            err_msg = 'Spark config file path is invalid'
            raise InvalidArgumentValueError(err_msg) from ex
        existing_spark_pool.spark_config_properties = SparkConfigProperties(
            content=content, filename=filename)
    return sdk_no_wait(no_wait,
                       client.begin_create_or_update,
                       resource_group_name,
                       workspace_name,
                       spark_pool_name,
                       existing_spark_pool,
                       force=force)
Exemple #28
0
def _pack_artifacts(cmd, template_abs_file_path, context):
    """
    Recursively packs the specified template and its referenced artifacts and
     adds the artifacts to the current packing context.

    :param template_abs_file_path: The path to the template spec .json file to pack.
    :type template_abs_file_path : str
    :param context : The packing context of the current packing operation
    :type context : PackingContext
    """
    original_directory = getattr(context, 'CurrentDirectory')
    try:
        context.CurrentDirectory = os.path.dirname(template_abs_file_path)
        template_content = read_file_content(template_abs_file_path)
        artifactable_template_obj = _remove_comments_from_json(
            template_content)
        template_link_to_artifact_objs = _get_template_links_to_artifacts(
            cmd, artifactable_template_obj, includeNested=True)

        for template_link_obj in template_link_to_artifact_objs:
            relative_path = str(template_link_obj['relativePath'])
            if not relative_path:
                continue
            # This is a templateLink to a local template... Get the absolute path of the
            # template based on its relative path from the current template directory and
            # make sure it exists:

            abs_local_path = os.path.join(getattr(context, 'CurrentDirectory'),
                                          relative_path)
            if not os.path.isfile(abs_local_path):
                # NOTE: message previously read "...{path}not found." (missing space)
                raise CLIError('File ' + abs_local_path + ' not found.')

            # Let's make sure we're not referencing a file outside of our root directory
            # hierarchy. We won't allow such references for security purposes:

            root_directory = getattr(context, 'RootTemplateDirectory')
            if (os.path.commonpath([root_directory]) != os.path.commonpath(
                    [root_directory, abs_local_path])):
                raise CLIError(
                    'Unable to handle the reference to file ' +
                    abs_local_path + ' from ' + template_abs_file_path +
                    ' because it exists outside of the root template directory of '
                    + root_directory)

            # Convert the template relative path to one that is relative to our root
            # directory path, and then if we haven't already processed that template into
            # an artifact elsewhere, we'll do so here...

            as_relative_path = _absolute_to_relative_path(
                root_directory, abs_local_path)
            # Fixed: the original ``continue`` only advanced the inner loop, so
            # duplicates were always re-packed (and circular references recursed
            # forever). Track a flag and skip the whole template when found.
            already_packed = False
            for prev_added_artifact in getattr(context, 'Artifact'):
                prev_added_path = os.path.join(
                    root_directory, getattr(prev_added_artifact, 'path'))
                if os.path.samefile(prev_added_path, abs_local_path):
                    already_packed = True
                    break
            if already_packed:
                continue
            _pack_artifacts(cmd, abs_local_path, context)
            LinkedTemplateArtifact = get_sdk(
                cmd.cli_ctx,
                ResourceType.MGMT_RESOURCE_TEMPLATESPECS,
                'LinkedTemplateArtifact',
                mod='models')
            template_content = read_file_content(abs_local_path)
            template_json = json.loads(
                json.dumps(process_template(template_content)))
            artifact = LinkedTemplateArtifact(path=as_relative_path,
                                              template=template_json)
            context.Artifact.append(artifact)
    finally:
        # Always restore the caller's working directory, even on error.
        context.CurrentDirectory = original_directory
    def test_hub_devices(self):
        """Live end-to-end device-identity lifecycle against an IoT hub.

        Exercises create (symmetric key, x509 thumbprint, x509 CA), query,
        show, list, update, apply-configuration, connection strings and SAS
        tokens. Most operations are run twice: once addressed by hub name +
        resource group, once by hub connection string (--login).

        NOTE(review): relies on module-level LIVE_HUB / LIVE_RG / LIVE_HUB_CS
        and thumbprint constants defined elsewhere in this file.
        """
        device_count = 3
        edge_device_count = 2

        names = self._create_entity_names(devices=device_count,
                                          edge_devices=edge_device_count)
        device_ids = names['device_ids']
        edge_device_ids = names['edge_device_ids']

        # Create edge-enabled (-ee) devices; symmetric-key auth is the default,
        # so both keys must be populated on the returned identity.
        for i in range(edge_device_count):
            self.cmd(
                'iot hub device-identity create -d {} -n {} -g {} -ee'.format(
                    edge_device_ids[i], LIVE_HUB, LIVE_RG),
                checks=[
                    self.check('deviceId', edge_device_ids[i]),
                    self.check('status', 'enabled'),
                    self.check('statusReason', None),
                    self.check('connectionState', 'Disconnected'),
                    self.check('capabilities.iotEdge', True),
                    self.exists('authentication.symmetricKey.primaryKey'),
                    self.exists('authentication.symmetricKey.secondaryKey')
                ])

        # A device query must surface exactly the edge devices just created.
        query_checks = [self.check('length([*])', edge_device_count)]
        for i in range(edge_device_count):
            query_checks.append(
                self.exists('[?deviceId==`{}`]'.format(edge_device_ids[i])))

        self.cmd('iot hub query --hub-name {} -g {} -q "{}"'.format(
            LIVE_HUB, LIVE_RG, "select * from devices"),
                 checks=query_checks)

        # With connection string
        self.cmd('iot hub query -q "{}" --login {}'.format(
            "select * from devices", LIVE_HUB_CS),
                 checks=query_checks)

        # x509 thumbprint auth with caller-supplied thumbprints: symmetric
        # keys must be absent, both thumbprints echoed back.
        self.cmd(
            '''iot hub device-identity create --device-id {} --hub-name {} --resource-group {}
                    --auth-method x509_thumbprint --primary-thumbprint {} --secondary-thumbprint {}'''
            .format(device_ids[0], LIVE_HUB, LIVE_RG, PRIMARY_THUMBPRINT,
                    SECONDARY_THUMBPRINT),
            checks=[
                self.check('deviceId', device_ids[0]),
                self.check('status', 'enabled'),
                self.check('statusReason', None),
                self.check('capabilities.iotEdge', False),
                self.check('connectionState', 'Disconnected'),
                self.check('authentication.symmetricKey.primaryKey', None),
                self.check('authentication.symmetricKey.secondaryKey', None),
                self.check('authentication.x509Thumbprint.primaryThumbprint',
                           PRIMARY_THUMBPRINT),
                self.check('authentication.x509Thumbprint.secondaryThumbprint',
                           SECONDARY_THUMBPRINT)
            ])

        # x509 thumbprint auth with --valid-days: a primary thumbprint is
        # generated, no secondary.
        self.cmd(
            '''iot hub device-identity create --device-id {} --hub-name {} --resource-group {}
                    --auth-method x509_thumbprint --valid-days {}'''.format(
                device_ids[1], LIVE_HUB, LIVE_RG, 10),
            checks=[
                self.check('deviceId', device_ids[1]),
                self.check('status', 'enabled'),
                self.check('statusReason', None),
                self.check('capabilities.iotEdge', False),
                self.check('connectionState', 'Disconnected'),
                self.check('authentication.symmetricKey.primaryKey', None),
                self.check('authentication.symmetricKey.secondaryKey', None),
                self.exists('authentication.x509Thumbprint.primaryThumbprint'),
                self.check('authentication.x509Thumbprint.secondaryThumbprint',
                           None)
            ])

        # With connection string
        # x509 CA auth, created disabled with an explicit status reason:
        # neither keys nor thumbprints are stored on the identity.
        status_reason = "Test Status Reason"
        self.cmd(
            '''iot hub device-identity create --device-id {} --login {}
                    --auth-method x509_ca --status disabled --status-reason "{}"'''
            .format(device_ids[2], LIVE_HUB_CS, status_reason),
            checks=[
                self.check('deviceId', device_ids[2]),
                self.check('status', 'disabled'),
                self.check('statusReason', status_reason),
                self.check('capabilities.iotEdge', False),
                self.check('connectionState', 'Disconnected'),
                self.check('authentication.symmetricKey.primaryKey', None),
                self.check('authentication.symmetricKey.secondaryKey', None),
                self.check('authentication.x509Thumbprint.primaryThumbprint',
                           None),
                self.check('authentication.x509Thumbprint.secondaryThumbprint',
                           None)
            ])

        # show / list round-trips for the identities created above.
        self.cmd('iot hub device-identity show -d {} -n {} -g {}'.format(
            edge_device_ids[0], LIVE_HUB, LIVE_RG),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('status', 'enabled'),
                     self.check('statusReason', None),
                     self.check('connectionState', 'Disconnected'),
                     self.check('capabilities.iotEdge', True),
                     self.exists('authentication.symmetricKey.primaryKey'),
                     self.exists('authentication.symmetricKey.secondaryKey')
                 ])

        # With connection string
        self.cmd('iot hub device-identity show -d {} --login {}'.format(
            edge_device_ids[0], LIVE_HUB_CS),
                 checks=[
                     self.check('deviceId', edge_device_ids[0]),
                     self.check('status', 'enabled'),
                     self.check('statusReason', None),
                     self.check('connectionState', 'Disconnected'),
                     self.check('capabilities.iotEdge', True),
                     self.exists('authentication.symmetricKey.primaryKey'),
                     self.exists('authentication.symmetricKey.secondaryKey')
                 ])

        self.cmd(
            'iot hub device-identity list --hub-name {} --resource-group {}'.
            format(LIVE_HUB, LIVE_RG),
            checks=[
                self.check('length([*])', device_count + edge_device_count)
            ])

        # -ee filters the listing to edge-enabled devices only.
        self.cmd('iot hub device-identity list -n {} -g {} -ee'.format(
            LIVE_HUB, LIVE_RG),
                 checks=[self.check('length([*])', edge_device_count)])

        # With connection string
        self.cmd(
            'iot hub device-identity list -ee --login {}'.format(LIVE_HUB_CS),
            checks=[self.check('length([*])', edge_device_count)])

        # Flip the iotEdge capability on the existing x509-thumbprint device;
        # its auth material must be untouched by the update.
        self.cmd(
            'iot hub device-identity update -d {} -n {} -g {} --set capabilities.iotEdge={}'
            .format(device_ids[0], LIVE_HUB, LIVE_RG, True),
            checks=[
                self.check('deviceId', device_ids[0]),
                self.check('status', 'enabled'),
                self.check('capabilities.iotEdge', True),
                self.check('authentication.symmetricKey.primaryKey', None),
                self.check('authentication.symmetricKey.secondaryKey', None),
                self.check('authentication.x509Thumbprint.primaryThumbprint',
                           PRIMARY_THUMBPRINT),
                self.check('authentication.x509Thumbprint.secondaryThumbprint',
                           SECONDARY_THUMBPRINT)
            ])

        # Set the symmetric keys to empty strings; the checks confirm keys
        # are still populated afterwards.
        self.cmd(
            '''iot hub device-identity update -d {} -n {} -g {} --set authentication.symmetricKey.primaryKey=""
                    authentication.symmetricKey.secondaryKey=""'''.format(
                edge_device_ids[1], LIVE_HUB, LIVE_RG),
            checks=[
                self.check('deviceId', edge_device_ids[1]),
                self.check('status', 'enabled'),
                self.exists('authentication.symmetricKey.primaryKey'),
                self.exists('authentication.symmetricKey.secondaryKey')
            ])

        # With connection string
        self.cmd(
            '''iot hub device-identity update -d {} --login {} --set authentication.symmetricKey.primaryKey=""
                 authentication.symmetricKey.secondaryKey=""'''.format(
                edge_device_ids[1], LIVE_HUB_CS),
            checks=[
                self.check('deviceId', edge_device_ids[1]),
                self.check('status', 'enabled'),
                self.exists('authentication.symmetricKey.primaryKey'),
                self.exists('authentication.symmetricKey.secondaryKey')
            ])

        # Apply an edge configuration from a file path (-k)...
        content_path = os.path.join(CWD, 'test_config_content.json')
        self.cmd(
            "iot hub apply-configuration -d {} -n {} -g {} -k '{}'".format(
                edge_device_ids[1], LIVE_HUB, LIVE_RG, content_path),
            checks=[self.check('length([*])', 3)])

        # With connection string
        content_path = os.path.join(CWD, 'test_config_content.json')
        self.cmd("iot hub apply-configuration -d {} --login {} -k '{}'".format(
            edge_device_ids[1], LIVE_HUB_CS, content_path),
                 checks=[self.check('length([*])', 3)])

        # ...and as inline content via self.kwargs '{generic_content}'
        # substitution.
        self.kwargs['generic_content'] = read_file_content(content_path)
        self.cmd(
            "iot hub apply-configuration -d {} -n {} -g {} --content '{}'".
            format(edge_device_ids[1], LIVE_HUB, LIVE_RG, '{generic_content}'),
            self.check('length([*])', 3))

        # Connection-string shape depends on auth method: SharedAccessKey for
        # symmetric-key devices vs x509=true for certificate devices.
        sym_conn_str_pattern = r'^HostName={}\.azure-devices\.net;DeviceId={};SharedAccessKey='.format(
            LIVE_HUB, edge_device_ids[0])
        cer_conn_str_pattern = r'^HostName={}\.azure-devices\.net;DeviceId={};x509=true'.format(
            LIVE_HUB, device_ids[2])

        self.cmd(
            'iot hub device-identity show-connection-string -d {} -n {} -g {}'.
            format(edge_device_ids[0], LIVE_HUB, LIVE_RG),
            checks=[self.check_pattern('cs', sym_conn_str_pattern)])

        self.cmd(
            'iot hub device-identity show-connection-string -d {} -n {} -g {} -kt {}'
            .format(edge_device_ids[0], LIVE_HUB, LIVE_RG, 'secondary'),
            checks=[self.check_pattern('cs', sym_conn_str_pattern)])

        self.cmd(
            'iot hub device-identity show-connection-string -d {} -n {} -g {}'.
            format(device_ids[2], LIVE_HUB, LIVE_RG),
            checks=[self.check_pattern('cs', cer_conn_str_pattern)])

        # SAS token generation works for symmetric-key devices...
        self.cmd('iot hub generate-sas-token -n {} -g {} -d {}'.format(
            LIVE_HUB, LIVE_RG, edge_device_ids[0]),
                 checks=[self.exists('sas')])

        # None SAS device auth
        # ...and fails for a device without symmetric keys (x509 auth).
        self.cmd('iot hub generate-sas-token -n {} -g {} -d {}'.format(
            LIVE_HUB, LIVE_RG, device_ids[1]),
                 expect_failure=True)

        self.cmd(
            'iot hub generate-sas-token -n {} -g {} -d {} -kt "secondary"'.
            format(LIVE_HUB, LIVE_RG, edge_device_ids[1]),
            checks=[self.exists('sas')])

        # With connection string
        self.cmd('iot hub generate-sas-token -d {} --login {}'.format(
            edge_device_ids[0], LIVE_HUB_CS),
                 checks=[self.exists('sas')])

        self.cmd('iot hub generate-sas-token -d {} --login {} -kt "secondary"'.
                 format(edge_device_ids[1], LIVE_HUB_CS),
                 checks=[self.exists('sas')])

        # Combining a device id with a named policy (-pn) is rejected.
        self.cmd('iot hub generate-sas-token -d {} --login {} -pn "mypolicy"'.
                 format(edge_device_ids[1], LIVE_HUB_CS),
                 expect_failure=True)
    def test_hub_device_twins(self):
        """Live device-twin scenarios: show, incremental update, and replace.

        Each operation is exercised by hub name + resource group and, where
        marked, by hub connection string (--login).
        """
        self.kwargs['generic_dict'] = {'key': 'value'}
        device_count = 2

        names = self._create_entity_names(devices=device_count)
        device_ids = names['device_ids']

        for i in range(device_count):
            self.cmd('iot hub device-identity create -d {} -n {} -g {}'.format(
                device_ids[i], LIVE_HUB, LIVE_RG),
                     checks=[self.check('deviceId', device_ids[i])])

        # A fresh twin exposes both desired and reported property sections.
        self.cmd('iot hub device-twin show -d {} -n {} -g {}'.format(
            device_ids[0], LIVE_HUB, LIVE_RG),
                 checks=[
                     self.check('deviceId', device_ids[0]),
                     self.check('status', 'enabled'),
                     self.exists('properties.desired'),
                     self.exists('properties.reported')
                 ])

        # With connection string
        self.cmd('iot hub device-twin show -d {} --login {}'.format(
            device_ids[0], LIVE_HUB_CS),
                 checks=[
                     self.check('deviceId', device_ids[0]),
                     self.check('status', 'enabled'),
                     self.exists('properties.desired'),
                     self.exists('properties.reported')
                 ])

        # --set with a JSON dict merges the dict into desired properties.
        result = self.cmd(
            'iot hub device-twin update -d {} -n {} -g {} --set properties.desired.special={}'
            .format(device_ids[0], LIVE_HUB, LIVE_RG,
                    '"{generic_dict}"')).get_output_in_json()
        assert result['deviceId'] == device_ids[0]
        assert result['properties']['desired']['special']['key'] == 'value'

        # Setting a desired property to "null" removes it from the twin.
        result = self.cmd(
            'iot hub device-twin update -d {} -n {} -g {} --set properties.desired.special="null"'
            .format(device_ids[0], LIVE_HUB, LIVE_RG)).get_output_in_json()
        assert result['deviceId'] == device_ids[0]
        assert result['properties']['desired'].get('special') is None

        # With connection string
        result = self.cmd(
            'iot hub device-twin update -d {} --login {} --set properties.desired.special={}'
            .format(device_ids[0], LIVE_HUB_CS,
                    '"{generic_dict}"')).get_output_in_json()
        assert result['deviceId'] == device_ids[0]
        assert result['properties']['desired']['special']['key'] == 'value'

        # Full twin replace (-j) from a fixture file path...
        content_path = os.path.join(CWD, 'test_generic_replace.json')
        self.cmd(
            "iot hub device-twin replace -d {} -n {} -g {} -j '{}'".format(
                device_ids[0], LIVE_HUB, LIVE_RG, content_path),
            checks=[
                self.check('deviceId', device_ids[0]),
                self.check('properties.desired.awesome', 9001),
                self.check('properties.desired.temperature.min', 10),
                self.check('properties.desired.temperature.max', 100),
                self.check('tags.location.region', 'US')
            ])

        # ...and from inline JSON via self.kwargs '{twin_payload}'
        # substitution.
        self.kwargs['twin_payload'] = read_file_content(content_path)
        self.cmd(
            "iot hub device-twin replace -d {} -n {} -g {} -j '{}'".format(
                device_ids[1], LIVE_HUB, LIVE_RG, '{twin_payload}'),
            checks=[
                self.check('deviceId', device_ids[1]),
                self.check('properties.desired.awesome', 9001),
                self.check('properties.desired.temperature.min', 10),
                self.check('properties.desired.temperature.max', 100),
                self.check('tags.location.region', 'US')
            ])

        # With connection string
        self.cmd("iot hub device-twin replace -d {} --login {} -j '{}'".format(
            device_ids[1], LIVE_HUB_CS, '{twin_payload}'),
                 checks=[
                     self.check('deviceId', device_ids[1]),
                     self.check('properties.desired.awesome', 9001),
                     self.check('properties.desired.temperature.min', 10),
                     self.check('properties.desired.temperature.max', 100),
                     self.check('tags.location.region', 'US')
                 ])
    def test_edge_deployments(self):
        self.kwargs['generic_dict'] = {'key': 'value'}
        config_count = 2
        names = self._create_entity_names(configs=config_count)
        config_ids = names['config_ids']

        content_path = os.path.join(CWD, 'test_config_content.json')
        priority = random.randint(1, 10)
        condition = 'tags.building=9 and tags.environment=\'test\''

        # With connection string
        self.cmd(
            "iot edge deployment create -c {} --login {} -pri {} -tc \"{}\" -lab {} -k '{}'"
            .format(config_ids[0], LIVE_HUB_CS, priority, condition,
                    '"{generic_dict}"', content_path),
            checks=[
                self.check('id', config_ids[0]),
                self.check('priority', priority),
                self.check('targetCondition', condition),
                self.check('contentType', 'assignments'),
                self.check('labels', self.kwargs['generic_dict'])
            ])

        self.kwargs['deployment_payload'] = read_file_content(content_path)
        self.cmd(
            """iot edge deployment create --config-id {} --hub-name {} --resource-group {} --priority {}
                    --target-condition \"{}\" --labels {} --content '{}'""".
            format(config_ids[1], LIVE_HUB, LIVE_RG, priority, condition,
                   '"{generic_dict}"', '{deployment_payload}'),
            checks=[
                self.check('id', config_ids[1]),
                self.check('priority', priority),
                self.check('targetCondition', condition),
                self.check('contentType', 'assignments'),
                self.check('labels', self.kwargs['generic_dict'])
            ])

        self.cmd('iot edge deployment show -c {} -n {} -g {}'.format(
            config_ids[0], LIVE_HUB, LIVE_RG),
                 checks=[
                     self.check('id', config_ids[0]),
                     self.check('priority', priority),
                     self.check('targetCondition', condition),
                     self.check('contentType', 'assignments'),
                     self.check('labels', self.kwargs['generic_dict'])
                 ])

        # With connection string
        self.cmd('iot edge deployment show -c {} --login {}'.format(
            config_ids[1], LIVE_HUB_CS),
                 checks=[
                     self.check('id', config_ids[1]),
                     self.check('priority', priority),
                     self.check('targetCondition', condition),
                     self.check('contentType', 'assignments'),
                     self.check('labels', self.kwargs['generic_dict'])
                 ])

        priority = random.randint(1, 10)
        condition = "tags.building=43 and tags.environment='dev'"
        self.kwargs['generic_dict_updated'] = {'key': 'super_value'}
        self.cmd(
            'iot edge deployment update -c {} -n {} -g {} --set priority={} targetCondition="{}" labels={}'
            .format(config_ids[0], LIVE_HUB, LIVE_RG, priority, condition,
                    '"{generic_dict_updated}"'),
            checks=[
                self.check('id', config_ids[0]),
                self.check('priority', priority),
                self.check('targetCondition', condition),
                self.check('labels', self.kwargs['generic_dict_updated'])
            ])

        # With connection string
        self.cmd(
            'iot edge deployment update -c {} --login {} --set priority={} targetCondition="{}" labels={}'
            .format(config_ids[0], LIVE_HUB_CS, priority, condition,
                    '"{generic_dict_updated}"'),
            checks=[
                self.check('id', config_ids[0]),
                self.check('priority', priority),
                self.check('targetCondition', condition),
                self.check('labels', self.kwargs['generic_dict_updated'])
            ])

        self.cmd("iot edge deployment list -n {} -g {}".format(
            LIVE_HUB, LIVE_RG),
                 checks=[
                     self.check('length([*])', 2),
                     self.exists("[?id=='{}']".format(config_ids[0])),
                     self.exists("[?id=='{}']".format(config_ids[1]))
                 ])

        # With connection string
        self.cmd("iot edge deployment list --login {}".format(LIVE_HUB_CS),
                 checks=[
                     self.check('length([*])', 2),
                     self.exists("[?id=='{}']".format(config_ids[0])),
                     self.exists("[?id=='{}']".format(config_ids[1]))
                 ])