def test_shell_safe_json_parse(self):
    # Verify shell_safe_json_parse accepts both strict-JSON and Python-literal
    # quoting styles for dicts and lists whose values contain shell
    # metacharacters ('&').
    dict_obj = {'a': 'b & c'}
    list_obj = [{'a': 'b & c'}]
    failed_strings = []
    valid_dict_strings = [
        '{"a": "b & c"}',
        "{'a': 'b & c'}",
        "{\"a\": \"b & c\"}"
    ]
    for string in valid_dict_strings:
        actual = shell_safe_json_parse(string)
        try:
            self.assertEqual(actual, dict_obj)
        except AssertionError:
            failed_strings.append(string)
    valid_list_strings = [
        '[{"a": "b & c"}]',
        "[{'a': 'b & c'}]",
        "[{\"a\": \"b & c\"}]"
    ]
    for string in valid_list_strings:
        actual = shell_safe_json_parse(string)
        try:
            self.assertEqual(actual, list_obj)
        except AssertionError:
            failed_strings.append(string)
    # Collect all failures before asserting so a single report lists every
    # pattern that did not round-trip.
    self.assertEqual(
        len(failed_strings), 0,
        'The following patterns failed: {}'.format(failed_strings))
def create_task(client, resource_group_name, service_name, project_name, task_name, source_connection_json,
                target_connection_json, database_options_json, enable_schema_validation=False,
                enable_data_integrity_validation=False, enable_query_analysis_validation=False):
    """Create a SQL Server -> Azure SQL DB migration task.

    Each *_json argument may be either a path to a JSON file or an inline
    JSON string; an existing file path takes precedence over inline JSON.
    """
    if os.path.exists(source_connection_json):
        source_connection_json = get_file_json(source_connection_json)
    else:
        source_connection_json = shell_safe_json_parse(source_connection_json)
    source_connection_info = create_sql_connection_info(source_connection_json, 'Source Database ')
    if os.path.exists(target_connection_json):
        target_connection_json = get_file_json(target_connection_json)
    else:
        target_connection_json = shell_safe_json_parse(target_connection_json)
    target_connection_info = create_sql_connection_info(target_connection_json, 'Target Database ')
    if os.path.exists(database_options_json):
        database_options_json = get_file_json(database_options_json)
    else:
        database_options_json = shell_safe_json_parse(database_options_json)
    # Map each JSON database entry to its SDK input model.
    database_options = []
    for d in database_options_json:
        database_options.append(
            MigrateSqlServerSqlDbDatabaseInput(name=d.get('name', None),
                                               target_database_name=d.get('target_database_name', None),
                                               make_source_db_read_only=d.get('make_source_db_read_only', None),
                                               table_map=d.get('table_map', None)))
    validation_options = MigrationValidationOptions(enable_schema_validation=enable_schema_validation,
                                                    enable_data_integrity_validation=enable_data_integrity_validation,
                                                    enable_query_analysis_validation=enable_query_analysis_validation)
    task_input = MigrateSqlServerSqlDbTaskInput(source_connection_info=source_connection_info,
                                                target_connection_info=target_connection_info,
                                                selected_databases=database_options,
                                                validation_options=validation_options)
    migration_properties = MigrateSqlServerSqlDbTaskProperties(input=task_input)
    return client.create_or_update(group_name=resource_group_name,
                                   service_name=service_name,
                                   project_name=project_name,
                                   task_name=task_name,
                                   properties=migration_properties)
def create_management_policies(client, resource_group_name, account_name, policy=None):
    """Create or update the management policies of a storage account.

    ``policy`` may be a path to a JSON file or an inline JSON string; when
    omitted, None is forwarded to the service call unchanged.
    """
    if policy:
        policy = get_file_json(policy) if os.path.exists(policy) else shell_safe_json_parse(policy)
    return client.create_or_update_management_policies(resource_group_name, account_name, policy=policy)
def _try_parse_key_value_object(template_param_defs, parameters, value): try: key, value = value.split('=', 1) except ValueError: return False param = template_param_defs.get(key, None) if param is None: raise CLIError("unrecognized template parameter '{}'. Allowed parameters: {}" .format(key, ', '.join(sorted(template_param_defs.keys())))) param_type = param.get('type', None) if param_type: param_type = param_type.lower() if param_type in ['object', 'array']: parameters[key] = {'value': shell_safe_json_parse(value)} elif param_type in ['string', 'securestring']: parameters[key] = {'value': value} elif param_type == 'bool': parameters[key] = {'value': value.lower() == 'true'} elif param_type == 'int': parameters[key] = {'value': int(value)} else: logger.warning("Unrecognized type '%s' for parameter '%s'. Interpretting as string.", param_type, key) parameters[key] = {'value': value} return True
def __init__(self, value):
    # Parse a (possibly shell-quoted) JSON object string into this dict subclass.
    super(JsonString, self).__init__()
    if value[0] in ("'", '"') and value[-1] == value[0]:
        # Remove leading and trailing quotes for dos/cmd.exe users
        value = value[1:-1]
    dictval = shell_safe_json_parse(value)
    self.update(dictval)
def _deploy_arm_template_core(resource_group_name,  # pylint: disable=too-many-arguments
                              template_file=None, template_uri=None, deployment_name=None,
                              parameters=None, mode='incremental', validate_only=False,
                              no_wait=False):
    # Validate or deploy an ARM template sourced from a local file or a URI.
    DeploymentProperties, TemplateLink = get_sdk(ResourceType.MGMT_RESOURCE_RESOURCES,
                                                 'DeploymentProperties', 'TemplateLink', mod='models')
    parameters = parameters or {}
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        # Remote template: deploy by link, but fetch the content so missing
        # parameters can still be detected locally.
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'))
    else:
        template = get_file_json(template_file)
        template_obj = template
    # Prompt interactively for any required parameters that were not supplied.
    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters)
    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)
    smc = get_mgmt_service_client(ResourceType.MGMT_RESOURCE_RESOURCES)
    if validate_only:
        return smc.deployments.validate(resource_group_name, deployment_name, properties, raw=no_wait)
    return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=no_wait)
def __init__(self, value):
    # Parse a (possibly shell-quoted) JSON array of schedule entries into this
    # list subclass.
    super(ScheduleEntryList, self).__init__()
    if value[0] in ("'", '"') and value[-1] == value[0]:
        # Remove leading and trailing quotes for dos/cmd.exe users
        value = value[1:-1]
    dictval = shell_safe_json_parse(value)
    self.extend([ScheduleEntry(row['dayOfWeek'],
                               int(row['startHourUtc']),
                               row.get('maintenanceWindow', None))
                 for row in dictval])
def load_acs_service_principals(config_path):
    """Load ACS service principals from *config_path*.

    Returns the parsed JSON content, or None when the file does not exist or
    cannot be read/parsed (deliberate best-effort semantics).
    """
    if not os.path.exists(config_path):
        return None
    descriptor = os.open(config_path, os.O_RDONLY)
    try:
        with os.fdopen(descriptor) as config_file:
            return shell_safe_json_parse(config_file.read())
    except:  # pylint: disable=bare-except
        return None
def validate_file_or_dict(string):
    """Interpret *string* as either a path to a JSON file or an inline JSON value."""
    import os
    expanded = os.path.expanduser(string)
    if not os.path.exists(expanded):
        # Not a file on disk: treat the (expanded) text as inline JSON.
        from azure.cli.core.util import shell_safe_json_parse
        return shell_safe_json_parse(expanded)
    from azure.cli.core.util import get_file_json
    return get_file_json(expanded)
def get_file_or_parse_json(value, value_type):
    """Return JSON content from *value*, which is either an existing file path
    or an inline JSON string.

    Raises CLIError (mentioning *value_type*) when *value* is neither an
    existing file nor valid JSON.
    """
    if os.path.exists(value):
        return get_file_json(value)
    # Test if provided value is a valid json
    try:
        json_parse = shell_safe_json_parse(value)
    except Exception:  # Fix: narrowed from a bare 'except' so that
        # SystemExit/KeyboardInterrupt are no longer swallowed here.
        raise CLIError("The supplied input for '" + value_type +
                       "' is not a valid file path or a valid json object.")
    else:
        return json_parse
def create_policy_definition(name, rules, display_name=None, description=None):
    """Create a policy definition from a rules file path or inline JSON rules."""
    rules = get_file_json(rules) if os.path.exists(rules) else shell_safe_json_parse(rules)
    policy_client = _resource_policy_client_factory()
    PolicyDefinition = get_sdk(ResourceType.MGMT_RESOURCE_POLICY, 'PolicyDefinition', mod='models')
    parameters = PolicyDefinition(policy_rule=rules,
                                  description=description,
                                  display_name=display_name)
    return policy_client.policy_definitions.create_or_update(name, parameters)
def __init__(self, value):
    # Parse a (possibly shell-quoted) JSON array of Redis maintenance-schedule
    # entries into this list subclass.
    super(ScheduleEntryList, self).__init__()
    from azure.mgmt.redis.models import ScheduleEntry
    if value[0] in ("'", '"') and value[-1] == value[0]:
        # Remove leading and trailing quotes for dos/cmd.exe users
        value = value[1:-1]
    dictval = shell_safe_json_parse(value)
    self.extend([ScheduleEntry(day_of_week=row['dayOfWeek'],
                               start_hour_utc=int(row['startHourUtc']),
                               maintenance_window=row.get('maintenanceWindow', None))
                 for row in dictval])
def _try_parse_key_value_object(parameters, value):
    """Try to store *value* ('KEY=VALUE') into *parameters*.

    The value is JSON-decoded when possible, otherwise kept as a raw string.
    Returns False when *value* contains no '=' so the caller can try another
    parameter format.
    """
    if '=' not in value:
        return False
    key, raw_value = value.split('=', 1)
    try:
        parsed_value = shell_safe_json_parse(raw_value)
    except (ValueError, CLIError):
        parsed_value = raw_value
    parameters[key] = {'value': parsed_value}
    return True
def _update_instance(instance, part, path):  # pylint: disable=too-many-return-statements, inconsistent-return-statements
    # Resolve one path segment ('part') against 'instance', which may be a list
    # (indexed as [N] or filtered as [key=value]), a dict, or an SDK model object.
    try:
        index = index_or_filter_regex.match(part)
        if index and not isinstance(instance, list):
            # Index/filter syntax is only valid on lists.
            throw_and_show_options(instance, part, path)

        if index and '=' in index.group(1):
            # [key=value] filter: find the unique list element whose key matches.
            key, value = index.group(1).split('=', 1)
            try:
                # JSON-decode the value so typed comparisons (int, bool, ...) work.
                value = shell_safe_json_parse(value)
            except:  # pylint: disable=bare-except
                pass
            matches = []
            for x in instance:
                if isinstance(x, dict) and x.get(key, None) == value:
                    matches.append(x)
                elif not isinstance(x, dict):
                    # SDK model element: compare against the snake_case attribute.
                    snake_key = make_snake_case(key)
                    if hasattr(x, snake_key) and getattr(x, snake_key, None) == value:
                        matches.append(x)

            if len(matches) == 1:
                return matches[0]
            elif len(matches) > 1:
                raise CLIError("non-unique key '{}' found multiple matches on {}. Key must be unique."
                               .format(key, path[-2]))
            else:
                # No element matched; fall back to additional_properties on the
                # list holder before reporting failure.
                if key in getattr(instance, 'additional_properties', {}):
                    instance.enable_additional_properties_sending()
                    return instance.additional_properties[key]
                raise CLIError("item with value '{}' doesn\'t exist for key '{}' on {}".format(value, key, path[-2]))

        if index:
            # [N] numeric index form.
            try:
                index_value = int(index.group(1))
                return instance[index_value]
            except IndexError:
                raise CLIError('index {} doesn\'t exist on {}'.format(index_value, path[-2]))

        if isinstance(instance, dict):
            return instance[part]

        if hasattr(instance, make_snake_case(part)):
            return getattr(instance, make_snake_case(part), None)
        elif part in getattr(instance, 'additional_properties', {}):
            instance.enable_additional_properties_sending()
            return instance.additional_properties[part]
        raise AttributeError()
    except (AttributeError, KeyError):
        # Unknown segment: report the valid options to the user.
        throw_and_show_options(instance, part, path)
def create_volume(client, resource_group_name, name, location, template_file=None, template_uri=None):
    """Create a volume from a template file or a template URI."""
    if template_uri:
        volume_properties = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'),
                                                  preserve_order=True)
    elif template_file:
        volume_properties = get_file_json(template_file, preserve_order=True)
        # Round-trip through json to normalize the OrderedDict into plain types.
        volume_properties = json.loads(json.dumps(volume_properties))
    else:
        raise CLIError('One of --template-file or --template-uri has to be specified')
    volume_properties['location'] = location
    return client.create(resource_group_name, name, volume_properties)
def create_appliance(resource_group_name,
                     appliance_name, managedby_resource_group_id,
                     location, kind, managedapp_definition_id=None,
                     plan_name=None, plan_publisher=None, plan_product=None,
                     plan_version=None, tags=None, parameters=None):
    """ Create a new managed application.
    :param str resource_group_name:the desired resource group name
    :param str appliance_name:the managed application name
    :param str kind:the managed application kind. can be marketplace or servicecatalog
    :param str plan_name:the managed application package plan name
    :param str plan_publisher:the managed application package plan publisher
    :param str plan_product:the managed application package plan product
    :param str plan_version:the managed application package plan version
    :param str tags:tags in 'a=b c' format
    """
    racf = _resource_managedapps_client_factory()
    appliance = Appliance(
        location=location,
        managed_resource_group_id=managedby_resource_group_id,
        kind=kind,
        tags=tags
    )
    if kind.lower() == 'servicecatalog':
        # ServiceCatalog appliances reference an existing managed application definition.
        if managedapp_definition_id:
            appliance.appliance_definition_id = managedapp_definition_id
        else:
            raise CLIError('--managedapp-definition-id is required if kind is ServiceCatalog')
    elif kind.lower() == 'marketplace':
        # Marketplace appliances require the full plan identity.
        if (plan_name is None and plan_product is None and
                plan_publisher is None and plan_version is None):
            raise CLIError('--plan-name, --plan-product, --plan-publisher and \
--plan-version are all required if kind is MarketPlace')
        else:
            appliance.plan = Plan(plan_name, plan_publisher, plan_product, plan_version)
    applianceParameters = None
    if parameters:
        # Accept either a JSON file path or an inline JSON string.
        if os.path.exists(parameters):
            applianceParameters = get_file_json(parameters)
        else:
            applianceParameters = shell_safe_json_parse(parameters)
    appliance.parameters = applianceParameters
    return racf.appliances.create_or_update(resource_group_name, appliance_name, appliance)
def _create_update_role_definition(role_definition, for_update):
    # Shared implementation for role-definition create and update.
    # 'role_definition' may be a JSON file path or an inline JSON string.
    definitions_client = _auth_client_factory().role_definitions
    if os.path.exists(role_definition):
        role_definition = get_file_json(role_definition)
    else:
        role_definition = shell_safe_json_parse(role_definition)

    # to workaround service defects, ensure property names are camel case
    names = [p for p in role_definition if p[:1].isupper()]
    for n in names:
        new_name = n[:1].lower() + n[1:]
        role_definition[new_name] = role_definition.pop(n)

    role_name = role_definition.get('name', None)
    if not role_name:
        raise CLIError("please provide role name")
    if for_update:  # for update, we need to use guid style unique name
        scopes_in_definition = role_definition.get('assignableScopes', None)
        scope = (scopes_in_definition[0] if scopes_in_definition else
                 '/subscriptions/' + definitions_client.config.subscription_id)
        matched = _search_role_definitions(definitions_client, role_name, scope)
        if len(matched) != 1:
            raise CLIError('Please provide the unique logic name of an existing role')
        role_definition['name'] = matched[0].name
        # ensure correct logical name and guid name. For update we accept both
        role_name = matched[0].properties.role_name
        role_id = matched[0].name
    else:
        # New definitions get a fresh GUID-style unique name.
        role_id = uuid.uuid4()

    if not for_update and 'assignableScopes' not in role_definition:
        raise CLIError("please provide 'assignableScopes'")

    permission = Permission(actions=role_definition.get('actions', None),
                            not_actions=role_definition.get('notActions', None))
    properties = RoleDefinitionProperties(role_name=role_name,
                                          description=role_definition.get('description', None),
                                          type=_CUSTOM_RULE,
                                          assignable_scopes=role_definition['assignableScopes'],
                                          permissions=[permission])

    definition = RoleDefinition(name=role_id, properties=properties)
    return definitions_client.create_or_update(role_definition_id=role_id,
                                               scope=properties.assignable_scopes[0],
                                               role_definition=definition)
def update_policy_definition(policy_definition_name, rules=None, display_name=None, description=None):
    """Update rules, display name, and/or description of a policy definition.

    Fields that are not supplied keep the values currently on the service.
    """
    if rules is not None:
        rules = get_file_json(rules) if os.path.exists(rules) else shell_safe_json_parse(rules)
    policy_client = _resource_policy_client_factory()
    definition = policy_client.policy_definitions.get(policy_definition_name)
    # pylint: disable=line-too-long,no-member
    PolicyDefinition = get_sdk(ResourceType.MGMT_RESOURCE_POLICY, 'PolicyDefinition', mod='models')
    updated_rule = definition.policy_rule if rules is None else rules
    updated_description = definition.description if description is None else description
    updated_display_name = definition.display_name if display_name is None else display_name
    parameters = PolicyDefinition(policy_rule=updated_rule,
                                  description=updated_description,
                                  display_name=updated_display_name)
    return policy_client.policy_definitions.create_or_update(policy_definition_name, parameters)
def _update_instance(instance, part, path):
    try:  # pylint: disable=too-many-nested-blocks
        # Resolve one path segment against 'instance': a list index/filter,
        # a dict key, or a model attribute.
        index = index_or_filter_regex.match(part)
        if index:
            # indexing on anything but a list is not allowed
            if not isinstance(instance, list):
                show_options(instance, part, path)

            if '=' in index.group(1):
                # [key=value] filter: locate the unique matching element.
                key, value = index.group(1).split('=', 1)
                try:
                    # JSON-decode so typed comparisons (int, bool) work.
                    value = shell_safe_json_parse(value)
                except:  # pylint: disable=bare-except
                    pass
                matches = []
                for x in instance:
                    if isinstance(x, dict) and x.get(key, None) == value:
                        matches.append(x)
                    elif not isinstance(x, dict):
                        # Model element: compare on the snake_case attribute.
                        key = make_snake_case(key)
                        if hasattr(x, key) and getattr(x, key, None) == value:
                            matches.append(x)
                if len(matches) == 1:
                    instance = matches[0]
                elif len(matches) > 1:
                    raise CLIError("non-unique key '{}' found multiple matches on {}. "
                                   "Key must be unique.".format(key, path[-2]))
                else:
                    raise CLIError("item with value '{}' doesn\'t exist for key '{}' on {}".format(
                        value, key, path[-2]))
            else:
                # [N] numeric index form.
                try:
                    index_value = int(index.group(1))
                    instance = instance[index_value]
                except IndexError:
                    raise CLIError('index {} doesn\'t exist on {}'.format(
                        index_value, path[-2]))
        elif isinstance(instance, dict):
            instance = instance[part]
        else:
            instance = getattr(instance, make_snake_case(part))
    except (AttributeError, KeyError):
        # Unknown segment: report the valid options to the user.
        show_options(instance, part, path)
    return instance
def _deploy_arm_template_core(cli_ctx, resource_group_name,  # pylint: disable=too-many-arguments
                              template_file=None, template_uri=None, input_yaml_files=None,
                              deployment_name=None, parameters=None, mode=None,
                              validate_only=False, no_wait=False):
    """Validate or deploy an ARM template from a URI, a local file, or merged
    yaml inputs.

    When yaml inputs are used, `parameters` is consumed by the merge step and
    must not be sent to the deployment again.
    """
    DeploymentProperties, TemplateLink = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                                 'DeploymentProperties', 'TemplateLink', mod='models')
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template
    else:
        output_file_path = _invoke_mergeutil(input_yaml_files, parameters)
        parameters = None
        template = get_file_json(output_file_path, preserve_order=True)
        template_obj = template
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    # Round-trip to normalize OrderedDicts into plain JSON-serializable types.
    template = json.loads(json.dumps(template))
    if parameters is not None:
        parameters = _process_parameters(template_param_defs, parameters) or {}
        parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters)
        parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)
    # workaround
    properties.mode = 'incremental'
    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    logger.warning("Deploying . . .")
    logger.warning("You can get the state of the deployment with the cmd")
    # Fix: use lazy %-style logging arguments instead of eager str.format
    # (pylint W1202); the rendered message text is unchanged.
    logger.warning("az group deployment show --name %s --resource-group %s",
                   deployment_name, resource_group_name)
    if validate_only:
        return sdk_no_wait(no_wait, smc.deployments.validate, resource_group_name, deployment_name, properties)
    return sdk_no_wait(no_wait, smc.deployments.create_or_update, resource_group_name, deployment_name, properties)
def set_properties(instance, expression, force_string):
    # Apply one 'path.to.prop=value' expression to 'instance'.
    # When force_string is falsy, the value is best-effort JSON-decoded so
    # 'x=5' assigns an int, 'x={}' a dict, etc.
    key, value = _split_key_value_pair(expression)

    if not force_string:
        try:
            value = shell_safe_json_parse(value)
        except:  # pylint:disable=bare-except
            pass

    # name should be the raw casing as it could refer to a property OR a dictionary key
    name, path = _get_name_path(key)
    parent_name = path[-1] if path else 'root'
    root = instance
    instance = _find_property(instance, path)
    if instance is None:
        # Parent container is missing: create it as an empty dict, then retry.
        parent = _find_property(root, path[:-1])
        set_properties(parent, '{}={{}}'.format(parent_name), force_string)
        instance = _find_property(root, path)

    match = index_or_filter_regex.match(name)
    index_value = int(match.group(1)) if match else None
    try:
        if index_value is not None:
            instance[index_value] = value
        elif isinstance(instance, dict):
            instance[name] = value
        elif isinstance(instance, list):
            throw_and_show_options(instance, name, key.split('.'))
        else:
            # must be a property name
            if hasattr(instance, make_snake_case(name)):
                setattr(instance, make_snake_case(name), value)
            else:
                # Unknown property: send via additional_properties instead of failing.
                if instance.additional_properties is None:
                    instance.additional_properties = {}
                instance.additional_properties[name] = value
                instance.enable_additional_properties_sending()
                logger.warning(
                    "Property '%s' not found on %s. Send it as an additional property .", name, parent_name)
    except IndexError:
        raise CLIError('index {} doesn\'t exist on {}'.format(index_value, name))
    except (AttributeError, KeyError, TypeError):
        throw_and_show_options(instance, name, key.split('.'))
def test_file_string_or_uri(self):
    # _load_file_string_or_uri must accept a file path, a file:// URI, or an
    # inline JSON string, and honor the 'required' flag for None input.
    data = '{ "some": "data here"}'
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(data.encode('utf-8'))
        tmp.close()

    # Plain file path.
    output = _load_file_string_or_uri(tmp.name, 'test')
    self.assertEqual(get_file_json(tmp.name), output)

    # file:// URI to the same file.
    uri = urljoin('file:', pathname2url(tmp.name))
    output = _load_file_string_or_uri(uri, 'test')
    self.assertEqual(get_file_json(tmp.name), output)

    os.unlink(tmp.name)

    # Inline JSON string.
    output = _load_file_string_or_uri(data, 'test')
    self.assertEqual(shell_safe_json_parse(data), output)

    # None is allowed only when required=False.
    self.assertEqual(None, _load_file_string_or_uri(None, 'test', required=False))
    self.assertRaises(CLIError, _load_file_string_or_uri, None, 'test')
def test_file_string_or_uri(self):
    # _load_file_string_or_uri must accept a file path, a file:// URI, or an
    # inline JSON string, and honor the 'required' flag for None input.
    data = '{ "some": "data here"}'
    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(data.encode('utf-8'))
        tmp.close()

    # Plain file path.
    output = _load_file_string_or_uri(tmp.name, 'test')
    self.assertEqual(get_file_json(tmp.name), output)

    # file:// URI to the same file.
    uri = urljoin('file:', pathname2url(tmp.name))
    output = _load_file_string_or_uri(uri, 'test')
    self.assertEqual(get_file_json(tmp.name), output)

    os.unlink(tmp.name)

    # Inline JSON string.
    output = _load_file_string_or_uri(data, 'test')
    self.assertEqual(shell_safe_json_parse(data), output)

    # None is allowed only when required=False.
    self.assertEqual(
        None, _load_file_string_or_uri(None, 'test', required=False))
    self.assertRaises(CLIError, _load_file_string_or_uri, None, 'test')
def create_volume(client, resource_group_name, name, location,
                  template_file=None, template_uri=None):
    """Create a volume.
    """
    volume_properties = None
    if template_uri:
        volume_properties = shell_safe_json_parse(
            _urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        volume_properties = get_file_json(template_file, preserve_order=True)
        # Round-trip through json to normalize the OrderedDict into plain types.
        volume_properties = json.loads(json.dumps(volume_properties))
    else:
        raise CLIError(
            'One of --template-file or --template-uri has to be specified')
    volume_properties['location'] = location
    return client.create(resource_group_name, name, volume_properties)
def validate_api_properties(ns):
    """ Extracts JSON format or 'a=b c=d' format as api properties """
    api_properties = ns.api_properties
    if api_properties is None:
        return
    if len(api_properties) > 1:
        # Multiple tokens can only be key=value pairs.
        ns.api_properties = extract_key_values_pairs(api_properties)
        return
    string = api_properties[0]
    try:
        # A single token is tried as JSON first.
        ns.api_properties = shell_safe_json_parse(string)
    except CLIError:
        result = extract_key_values_pairs([string])
        if _is_suspected_json(string):
            logger.warning('Api properties looks like a JSON format but not valid, interpreted as key=value pairs:'
                           ' %s', str(result))
        ns.api_properties = result
def add_properties(instance, argument_values, force_string):
    # Append one or more values to a list property on 'instance'.
    # The first argument indicates the path to the collection to add to.
    argument_values = list(argument_values)
    list_attribute_path = _get_internal_path(argument_values.pop(0))

    list_to_add_to = _find_property(instance, list_attribute_path)
    if list_to_add_to is None:
        # Create the missing list on the parent, then re-resolve it.
        parent = _find_property(instance, list_attribute_path[:-1])
        set_properties(parent, '{}=[]'.format(list_attribute_path[-1]), force_string)
        list_to_add_to = _find_property(instance, list_attribute_path)

    if not isinstance(list_to_add_to, list):
        raise ValueError

    dict_entry = {}
    for argument in argument_values:
        if '=' in argument:
            # consecutive key=value entries get added to the same dictionary
            split_arg = argument.split('=', 1)
            dict_entry[split_arg[0]] = split_arg[1]
        else:
            if dict_entry:
                # if an argument is supplied that is not key=value, append any dictionary entry
                # to the list and reset. A subsequent key=value pair will be added to another
                # dictionary.
                list_to_add_to.append(dict_entry)
                dict_entry = {}

            if not force_string:
                # attempt to convert anything else to JSON and fallback to string if error
                try:
                    argument = shell_safe_json_parse(argument)
                except (ValueError, CLIError):
                    pass
            list_to_add_to.append(argument)

    # if only key=value pairs used, must check at the end to append the dictionary
    if dict_entry:
        list_to_add_to.append(dict_entry)
def set_properties(instance, expression):
    # Apply one 'path.to.prop=value' expression to 'instance'; the value is
    # best-effort JSON-decoded so 'x=5' assigns an int, 'x={}' a dict, etc.
    key, value = _split_key_value_pair(expression)
    try:
        value = shell_safe_json_parse(value)
    except:  # pylint:disable=bare-except
        pass

    # name should be the raw casing as it could refer to a property OR a dictionary key
    name, path = _get_name_path(key)
    parent_name = path[-1] if path else 'root'
    root = instance
    instance = _find_property(instance, path)
    if instance is None:
        # Parent container is missing: create it as an empty dict, then retry.
        parent = _find_property(root, path[:-1])
        set_properties(parent, '{}={{}}'.format(parent_name))
        instance = _find_property(root, path)

    match = index_or_filter_regex.match(name)
    index_value = int(match.group(1)) if match else None
    try:
        if index_value is not None:
            instance[index_value] = value
        elif isinstance(instance, dict):
            instance[name] = value
        elif isinstance(instance, list):
            throw_and_show_options(instance, name, key.split('.'))
        else:
            # must be a property name
            name = make_snake_case(name)
            if not hasattr(instance, name):
                # Unknown property: warn, but still set it so updates are not lost silently.
                logger.warning(
                    "Property '%s' not found on %s. Update may be ignored.", name, parent_name)
            setattr(instance, name, value)
    except IndexError:
        raise CLIError('index {} doesn\'t exist on {}'.format(
            index_value, name))
    except (AttributeError, KeyError, TypeError):
        throw_and_show_options(instance, name, key.split('.'))
def validate_api_properties(ns):
    """ Extracts JSON format or 'a=b c=d' format as api properties """
    api_properties = ns.api_properties
    if api_properties is None:
        return
    if len(api_properties) > 1:
        # Multiple tokens can only be key=value pairs.
        ns.api_properties = extract_key_values_pairs(api_properties)
    else:
        string = api_properties[0]
        try:
            # A single token is tried as JSON first.
            ns.api_properties = shell_safe_json_parse(string)
            return
        except CLIError:
            # Not valid JSON: fall back to key=value parsing, warning when the
            # input merely looked like JSON.
            result = extract_key_values_pairs([string])
            if _is_suspected_json(string):
                logger.warning(
                    'Api properties looks like a JSON format but not valid, interpreted as key=value pairs:'
                    ' %s', str(result))
            ns.api_properties = result
            return
def add_properties(instance, argument_values, force_string):
    # Append one or more values to a list property on 'instance'.
    # The first argument indicates the path to the collection to add to.
    argument_values = list(argument_values)
    list_attribute_path = _get_internal_path(argument_values.pop(0))

    list_to_add_to = _find_property(instance, list_attribute_path)
    if list_to_add_to is None:
        # Create the missing list on the parent, then re-resolve it.
        parent = _find_property(instance, list_attribute_path[:-1])
        set_properties(parent, '{}=[]'.format(list_attribute_path[-1]), force_string)
        list_to_add_to = _find_property(instance, list_attribute_path)

    if not isinstance(list_to_add_to, list):
        raise ValueError

    dict_entry = {}
    for argument in argument_values:
        if '=' in argument:
            # consecutive key=value entries get added to the same dictionary
            split_arg = argument.split('=', 1)
            dict_entry[split_arg[0]] = split_arg[1]
        else:
            if dict_entry:
                # if an argument is supplied that is not key=value, append any dictionary entry
                # to the list and reset. A subsequent key=value pair will be added to another
                # dictionary.
                list_to_add_to.append(dict_entry)
                dict_entry = {}

            if not force_string:
                # attempt to convert anything else to JSON and fallback to string if error
                try:
                    argument = shell_safe_json_parse(argument)
                except (ValueError, CLIError):
                    pass
            list_to_add_to.append(argument)

    # if only key=value pairs used, must check at the end to append the dictionary
    if dict_entry:
        list_to_add_to.append(dict_entry)
def create_blob_inventory_policy(cmd, client, resource_group_name, account_name, policy):
    """Create or update the default blob inventory policy of a storage account.

    ``policy`` is either a path to a JSON file or an inline JSON string; when
    the parsed policy has no 'type', InventoryRuleType.INVENTORY is assumed.
    """
    # BlobInventoryPolicy = cmd.get_models('BlobInventoryPolicy')
    # TODO: add again with rule management if bandwidth is allowed
    # BlobInventoryPolicy, BlobInventoryPolicySchema, BlobInventoryPolicyRule, BlobInventoryPolicyDefinition, \
    # BlobInventoryPolicyFilter = cmd.get_models('BlobInventoryPolicy', 'BlobInventoryPolicySchema',
    #                                            'BlobInventoryPolicyRule', 'BlobInventoryPolicyDefinition',
    #                                            'BlobInventoryPolicyFilter')
    # filters = BlobInventoryPolicyFilter(prefix_match=prefix_match, blob_types=blob_types,
    #                                     include_blob_versions=include_blob_versions,
    #                                     include_snapshots=include_snapshots)
    # rule = BlobInventoryPolicyRule(enabled=True, name=rule_name,
    #                                definition=BlobInventoryPolicyDefinition(filters=filters))
    # policy = BlobInventoryPolicySchema(enabled=enabled, destination=destination,
    #                                    type=type, rules=[rule])
    # blob_inventory_policy = BlobInventoryPolicy(policy=policy)
    #
    # return client.create_or_update(resource_group_name=resource_group_name, account_name=account_name,
    #                                blob_inventory_policy_name=blob_inventory_policy_name,
    #                                properties=blob_inventory_policy,
    #                                **kwargs)
    if os.path.exists(policy):
        policy = get_file_json(policy)
    else:
        policy = shell_safe_json_parse(policy)
    BlobInventoryPolicy, InventoryRuleType, BlobInventoryPolicyName = \
        cmd.get_models('BlobInventoryPolicy', 'InventoryRuleType', 'BlobInventoryPolicyName')
    properties = BlobInventoryPolicy()
    if 'type' not in policy:
        policy['type'] = InventoryRuleType.INVENTORY
    properties.policy = policy
    return client.create_or_update(resource_group_name=resource_group_name, account_name=account_name,
                                   blob_inventory_policy_name=BlobInventoryPolicyName.DEFAULT,
                                   properties=properties)
def update_policy_definition(policy_definition_name, rules=None, display_name=None, description=None):
    # Update rules/display name/description of an existing policy definition;
    # unspecified fields keep their current service-side values.
    if rules is not None:
        # 'rules' may be a JSON file path or an inline JSON string.
        if os.path.exists(rules):
            rules = get_file_json(rules)
        else:
            rules = shell_safe_json_parse(rules)

    policy_client = _resource_policy_client_factory()
    definition = policy_client.policy_definitions.get(policy_definition_name)
    # pylint: disable=line-too-long,no-member
    PolicyDefinition = get_sdk(ResourceType.MGMT_RESOURCE_POLICY, 'PolicyDefinition', mod='models')
    parameters = PolicyDefinition(
        policy_rule=rules if rules is not None else definition.policy_rule,
        description=description if description is not None else definition.description,
        display_name=display_name if display_name is not None else definition.display_name)
    return policy_client.policy_definitions.create_or_update(
        policy_definition_name, parameters)
def validate_mongo_role_definition_body(cmd, ns):
    """ Extracts role definition body """
    from azext_cosmosdb_preview.vendored_sdks.azure_mgmt_cosmosdb.models import RoleDefinitionType
    from azure.cli.core.util import get_file_json, shell_safe_json_parse
    import os

    if ns.mongo_role_definition_body is not None:
        # Accept either a JSON file path or an inline JSON string.
        if os.path.exists(ns.mongo_role_definition_body):
            mongo_role_definition = get_file_json(ns.mongo_role_definition_body)
        else:
            mongo_role_definition = shell_safe_json_parse(ns.mongo_role_definition_body)

        if not isinstance(mongo_role_definition, dict):
            raise InvalidArgumentValueError(
                'Role creation failed. Invalid Mongo role definition. A valid dictionary JSON representation is expected.')

        # 'Id' must be a non-empty '<DatabaseName>.<RoleName>' string.
        if 'Id' not in mongo_role_definition or not isinstance(
                mongo_role_definition['Id'], str) or len(mongo_role_definition['Id']) == 0:
            raise InvalidArgumentValueError(
                'Role creation failed. Invalid Mongo role id. A valid string <DatabaseName>.<RoleName> is expected.')

        mongo_role_definition['Id'] = _parse_resource_path(
            mongo_role_definition['Id'], False, "mongodbRoleDefinitions")

        if 'RoleName' not in mongo_role_definition or not isinstance(
                mongo_role_definition['RoleName'], str) or len(mongo_role_definition['RoleName']) == 0:
            raise InvalidArgumentValueError(
                'Role creation failed. Invalid Mongo role name. A valid string role name is expected.')

        if 'DatabaseName' not in mongo_role_definition or not isinstance(
                mongo_role_definition['DatabaseName'], str) or len(mongo_role_definition['DatabaseName']) == 0:
            raise InvalidArgumentValueError(
                'Role creation failed. Invalid Mongo database name. A valid string database name is expected.')

        # 'Privileges' must be a non-empty list of {Resource, Actions} entries.
        if 'Privileges' not in mongo_role_definition or not isinstance(
                mongo_role_definition['Privileges'], list) or len(mongo_role_definition['Privileges']) == 0:
            raise InvalidArgumentValueError(
                'Role creation failed. Invalid Mongo role Privileges. A valid List JSON representation is expected.')
        else:
            for privilege in mongo_role_definition['Privileges']:
                if 'Resource' not in privilege or not isinstance(privilege['Resource'], dict):
                    raise InvalidArgumentValueError(
                        'Role creation failed. Invalid Mongo role Resources for Privileges. A valid dictionary JSON representation is expected.')
                else:
                    # Fix: corrected 'Resoures' -> 'Resources' in the two
                    # error messages below.
                    if 'Db' not in privilege['Resource'] or not isinstance(privilege['Resource']['Db'], str):
                        raise InvalidArgumentValueError(
                            'Role creation failed. Invalid Mongo database name under Privileges->Resources. A valid string database name is expected.')
                    if 'Collection' in privilege['Resource'] and not isinstance(privilege['Resource']['Collection'], str):
                        raise InvalidArgumentValueError(
                            'Role creation failed. Invalid Mongo database Collection name under Privileges->Resources. A valid string database name is expected.')
                if 'Actions' not in privilege or not isinstance(
                        privilege['Actions'], list) or len(privilege['Actions']) == 0:
                    raise InvalidArgumentValueError(
                        'Role creation failed. Invalid Mongo role Actions for Privileges. A valid list of strings is expected.')

        # Optional inherited roles: a list of {'Role': <name>} dicts.
        if 'Roles' in mongo_role_definition:
            if not isinstance(mongo_role_definition['Roles'], list):
                raise InvalidArgumentValueError(
                    'Role creation failed. Invalid Mongo Roles. A valid dictionary JSON representation is expected')
            else:
                for Role in mongo_role_definition['Roles']:
                    if 'Role' not in Role or not isinstance(Role['Role'], str) or len(Role['Role']) == 0:
                        raise InvalidArgumentValueError(
                            'Role creation failed. Invalid Mongo Role. A valid string Role is expected.')

        if 'Type' not in mongo_role_definition:
            mongo_role_definition['Type'] = RoleDefinitionType.custom_role

        ns.mongo_role_definition_body = mongo_role_definition
def _deploy_arm_template_core(
        cli_ctx,
        resource_group_name,  # pylint: disable=too-many-arguments
        template_file=None,
        template_uri=None,
        input_yaml_files=None,
        deployment_name=None,
        parameters=None,
        mode=None,
        validate_only=False,
        no_wait=False):
    """Deploy (or validate) an ARM template to a resource group.

    The template comes from one of three sources, checked in order:
    a remote ``template_uri``, a local ``template_file``, or a set of yaml
    files merged into a single ARM template via ``SFMergeUtility``.

    :param validate_only: when True only run server-side validation.
    :param no_wait: when True return the poller without waiting.
    :return: result of ``sdk_no_wait`` over validate/create_or_update.
    """
    DeploymentProperties, TemplateLink = get_sdk(
        cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, 'DeploymentProperties',
        'TemplateLink', mod='models')
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        # Remote template: deploy by link, but also fetch a local copy so
        # parameters can be resolved/prompted against its definitions.
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(
            _urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template
    else:
        # call merge utility: collect yaml inputs (a directory is walked
        # recursively, otherwise a comma-separated list of files) and merge
        # them into one ARM template JSON in the current directory.
        file_path_list = []
        prefix = "merged-"
        output_file_path = os.path.join(os.getcwd(), prefix + 'arm_rp.json')
        if os.path.isdir(input_yaml_files):
            for root, _, files in os.walk(input_yaml_files):
                for filename in files:
                    if filename.endswith(".yaml"):
                        file_path_list.append(os.path.join(root, filename))
        else:
            file_path_list = input_yaml_files.split(',')
        if os.path.exists(output_file_path):
            os.remove(output_file_path)
        SFMergeUtility.sf_merge_utility(file_path_list,
                                        "SF_SBZ_RP_JSON",
                                        parameters=parameters,
                                        output_dir=None,
                                        prefix=prefix)
        # The merge consumes the parameters, so clear them for deployment.
        parameters = None
        template = get_file_json(output_file_path, preserve_order=True)
        template_obj = template
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    # Round-trip through JSON to shed any non-serializable wrapper types.
    template = json.loads(json.dumps(template))
    if parameters is not None:
        parameters = _process_parameters(template_param_defs, parameters) or {}
        parameters = _get_missing_parameters(parameters, template_obj,
                                             _prompt_for_parameters)
        parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template,
                                      template_link=template_link,
                                      parameters=parameters,
                                      mode=mode)
    # workaround
    properties.mode = 'incremental'
    smc = get_mgmt_service_client(cli_ctx,
                                  ResourceType.MGMT_RESOURCE_RESOURCES)
    logger.warning("Deploying . . .")
    logger.warning("You can get the state of the deployment with the cmd")
    # FIX: use lazy %-style logger arguments instead of eager str.format,
    # consistent with the other _deploy_arm_template_core variant in this
    # file; the message is only rendered if the record is emitted.
    logger.warning("az group deployment show --name %s --resource-group %s",
                   deployment_name, resource_group_name)
    if validate_only:
        return sdk_no_wait(no_wait, smc.deployments.validate,
                           resource_group_name, deployment_name, properties)
    return sdk_no_wait(no_wait, smc.deployments.create_or_update,
                       resource_group_name, deployment_name, properties)
def _deploy_arm_template_core(
        cli_ctx,
        resource_group_name,  # pylint: disable=too-many-arguments
        template_file=None,
        template_uri=None,
        deployment_name=None,
        parameters=None,
        mode=None,
        validate_only=False,
        no_wait=False):
    # Deploy (or validate) an ARM template from a local file or a URI, then
    # poll the deployment for up to 10 minutes and render its final status.
    DeploymentProperties, TemplateLink = get_sdk(
        cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, 'DeploymentProperties',
        'TemplateLink', mod='models')
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        # Remote template: deploy by link, but fetch a local copy so
        # parameters can be resolved against its definitions.
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(
            _urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    else:
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(template_param_defs, parameters) or {}
    parameters = _get_missing_parameters(parameters, template_obj,
                                         _prompt_for_parameters)
    # Round-trip through JSON to shed any non-serializable wrapper types.
    template = json.loads(json.dumps(template))
    parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template,
                                      template_link=template_link,
                                      parameters=parameters,
                                      mode=mode)
    # workaround
    properties.mode = 'incremental'
    smc = get_mgmt_service_client(cli_ctx,
                                  ResourceType.MGMT_RESOURCE_RESOURCES)
    if validate_only:
        return sdk_no_wait(no_wait, smc.deployments.validate,
                           resource_group_name, deployment_name, properties)
    # Pre-validate before deploying even when a full deployment was asked for.
    validation = smc.deployments.validate(resource_group_name,
                                          deployment_name, properties)
    if validation.error:
        # NOTE(review): on validation failure nothing is deployed and the
        # function falls through, implicitly returning None.
        logger.warning("deployment template validation failed:")
        logger.warning(validation.error)
    else:
        operation_status_poller = sdk_no_wait(no_wait,
                                              smc.deployments.create_or_update,
                                              resource_group_name,
                                              deployment_name, properties)
        if no_wait:
            return operation_status_poller
        # Poll the long-running operation for at most 600s in 5s steps.
        wait_time = 0
        timestep = 5
        while operation_status_poller.status() in ['Running', 'InProgress'
                                                   ] and wait_time < 600:
            sleep(timestep)
            wait_time += timestep
        # Re-validate to obtain the list of resources for status display.
        parsed_template = smc.deployments.validate(
            resource_group_name, deployment_name,
            properties).properties.additional_properties['validatedResources']
        return _display_deployment_status(cli_ctx,
                                          operation_status_poller.status(),
                                          resource_group_name,
                                          deployment_name, parsed_template)
def _try_parse_json_object(value):
    """Best-effort JSON parse of ``value``.

    Returns the document's 'parameters' member when present, otherwise the
    whole parsed document; returns None when ``value`` is not valid JSON.
    """
    try:
        document = shell_safe_json_parse(value)
    except CLIError:
        return None
    return document.get('parameters', document)
def create_management_policies(client, resource_group_name, account_name, policy):
    """Create or update a storage account management policy.

    ``policy`` is either a path to a JSON file on disk or an inline JSON
    string; it is parsed accordingly before being sent to the service.
    """
    parsed_policy = (get_file_json(policy)
                     if os.path.exists(policy)
                     else shell_safe_json_parse(policy))
    return client.create_or_update(resource_group_name, account_name,
                                   policy=parsed_policy)
def _try_parse_json_object(value):
    """Parse ``value`` as JSON, unwrapping a top-level 'parameters' key.

    Returns None instead of raising when the input is not valid JSON.
    """
    try:
        body = shell_safe_json_parse(value)
        return body.get('parameters', body)
    except CLIError:
        return None
def _deploy_arm_template_core(
        cmd,
        resource_group_name,  # pylint: disable=too-many-arguments
        template_file=None,
        template_uri=None,
        input_yaml_files=None,
        deployment_name=None,
        parameters=None,
        mode=None,
        validate_only=False,
        no_wait=False):
    # Deploy (or validate) an ARM template sourced from a URI, a local
    # template file, or a set of yaml files merged via _invoke_mergeutil.
    DeploymentProperties, TemplateLink, Deployment = get_sdk(
        cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
        'DeploymentProperties', 'TemplateLink', 'Deployment', mod='models')
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        # Remote template: deploy by link, but fetch a local copy so
        # parameters can be resolved against its definitions.
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(
            _urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template
    else:
        # Merge the yaml inputs into one ARM template; the merge consumes
        # the parameters, so they are cleared for the deployment call.
        output_file_path = _invoke_mergeutil(input_yaml_files, parameters)
        parameters = None
        template = get_file_json(output_file_path, preserve_order=True)
        template_obj = template
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    # Round-trip through JSON to shed any non-serializable wrapper types.
    template = json.loads(json.dumps(template))
    if parameters is not None:
        parameters = _process_parameters(template_param_defs, parameters) or {}
        parameters = _get_missing_parameters(parameters, template_obj,
                                             _prompt_for_parameters)
        parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template,
                                      template_link=template_link,
                                      parameters=parameters,
                                      mode=mode)
    # workaround
    properties.mode = 'incremental'
    smc = get_mgmt_service_client(
        cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES).deployments
    deployment = Deployment(properties=properties)
    logger.warning("Deploying . . .")
    logger.warning("You can get the state of the deployment with the cmd")
    logger.warning("az group deployment show --name %s --resource-group %s",
                   deployment_name, resource_group_name)
    if validate_only:
        # Newer API versions expose validation as a long-running operation.
        if cmd.supported_api_version(
                min_api='2019-10-01',
                resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
            from azure.cli.core.commands import LongRunningOperation
            validation_poller = smc.begin_validate(resource_group_name,
                                                   deployment_name,
                                                   deployment)
            return LongRunningOperation(cmd.cli_ctx)(validation_poller)
        else:
            return sdk_no_wait(no_wait, smc.validate, resource_group_name,
                               deployment_name, deployment)
    return sdk_no_wait(no_wait, smc.begin_create_or_update,
                       resource_group_name, deployment_name, deployment)
def create_task(client,
                resource_group_name,
                service_name,
                project_name,
                task_name,
                source_platform,
                target_platform,
                task_type,
                source_connection_json,
                target_connection_json,
                database_options_json,
                enable_schema_validation=False,
                enable_data_integrity_validation=False,
                enable_query_analysis_validation=False):
    """Create a DMS migration task.

    Dispatches to CLI core for the sql->sqldb offline scenario and handles
    every other (extension) scenario locally. The three *_json arguments
    each accept either a path to a JSON file or an inline JSON string.

    :raises ValueError: when the platform/task-type combination is invalid.
    """
    # Set inputs to lowercase
    task_type = task_type.lower()
    source_platform = source_platform.lower()
    target_platform = target_platform.lower()

    # Validation: Test scenario eligibility
    if not determine_scenario_eligibility(source_platform, target_platform, task_type):
        # If not an extension scenario, run CLI core method
        # TODO: We currently don't have any CLI core code to perform any validations
        # because of this we need to raise the error here.
        try:
            # CLI core doesnt currently support task types - it only supports offline migrations.
            # TODO: Remove this check after task types are supported
            if source_platform != "sql" or target_platform != "sqldb" or task_type != "offlinemigration":
                raise ValueError
            core_res = core_create_task(
                client,
                resource_group_name,
                service_name,
                project_name,
                task_name,
                source_connection_json,
                target_connection_json,
                database_options_json,
                enable_schema_validation,
                enable_data_integrity_validation,
                enable_query_analysis_validation)
        # FIX: was a bare 'except:', which also swallowed SystemExit and
        # KeyboardInterrupt; narrow it to Exception.
        except Exception:
            raise ValueError(
                "The provided source-platform, target-platform, and task-type \
                combination is not appropriate. \n\
                Please refer to the help file 'az dms project task create -h' for the supported scenarios.")
        else:
            return core_res

    # Run extension scenario

    # Source connection info
    if os.path.exists(source_connection_json):
        source_connection_json = get_file_json(source_connection_json)
    else:
        source_connection_json = shell_safe_json_parse(source_connection_json)
    source_connection_info = create_connection(source_connection_json,
                                               "Source Database",
                                               source_platform)

    # Target connection info
    if os.path.exists(target_connection_json):
        target_connection_json = get_file_json(target_connection_json)
    else:
        target_connection_json = shell_safe_json_parse(target_connection_json)
    target_connection_info = create_connection(target_connection_json,
                                               "Target Database",
                                               target_platform)

    # Database options
    if os.path.exists(database_options_json):
        database_options_json = get_file_json(database_options_json)
    else:
        database_options_json = shell_safe_json_parse(database_options_json)

    # Get the task properties
    task_properties = get_task_migration_properties(database_options_json,
                                                    source_platform,
                                                    target_platform,
                                                    task_type,
                                                    source_connection_info,
                                                    target_connection_info)

    return client.create_or_update(group_name=resource_group_name,
                                   service_name=service_name,
                                   project_name=project_name,
                                   task_name=task_name,
                                   properties=task_properties)
def process_key_release_policy(cmd, ns):
    """Normalize the key release policy arguments on the namespace.

    Converts either an explicit ``--policy`` value (inline JSON or a path to
    a JSON file) or the ``--default-cvm-policy`` flag into a
    ``KeyReleasePolicy`` model stored on ``ns.release_policy``. The
    transient ``default_cvm_policy`` and ``immutable`` namespace attributes
    are consumed (deleted) so they never reach the SDK call.

    :raises InvalidArgumentValueError: when ``--immutable`` is given without
        a policy, or when both ``--policy`` and ``--default-cvm-policy``
        are specified.
    """
    # Pop the auxiliary namespace attributes before building the model.
    default_cvm_policy = None
    if hasattr(ns, 'default_cvm_policy'):
        default_cvm_policy = ns.default_cvm_policy
        del ns.default_cvm_policy

    immutable = None
    if hasattr(ns, 'immutable'):
        immutable = ns.immutable
        del ns.immutable

    if not ns.release_policy and not default_cvm_policy:
        if immutable:
            raise InvalidArgumentValueError('Please provide policy when setting `--immutable`')
        return

    if ns.release_policy and default_cvm_policy:
        raise InvalidArgumentValueError('Can not specify both `--policy` and `--default-cvm-policy`')

    import json
    KeyReleasePolicy = cmd.loader.get_sdk('KeyReleasePolicy', mod='_models',
                                          resource_type=ResourceType.DATA_KEYVAULT_KEYS)
    if default_cvm_policy:
        # The default CVM policy accepts SEV-SNP attestation of an
        # azure-compliant CVM from any of the shared regional attestation
        # endpoints. Build one 'anyOf' clause per authority instead of
        # repeating the same literal four times (output JSON is identical).
        authorities = [
            'https://sharedeus.eus.attest.azure.net/',
            'https://sharedwus.wus.attest.azure.net/',
            'https://sharedneu.neu.attest.azure.net/',
            'https://sharedweu.weu.attest.azure.net/',
        ]
        policy = {
            'version': '1.0.0',
            'anyOf': [
                {
                    'authority': authority,
                    'allOf': [
                        {
                            'claim': 'x-ms-attestation-type',
                            'equals': 'sevsnpvm'
                        },
                        {
                            'claim': 'x-ms-compliance-status',
                            'equals': 'azure-compliant-cvm'
                        }
                    ]
                }
                for authority in authorities
            ]
        }
        ns.release_policy = KeyReleasePolicy(
            encoded_policy=json.dumps(policy).encode('utf-8'),
            immutable=immutable)
        return

    import os
    # ``--policy`` accepts a file path or an inline JSON string.
    if os.path.exists(ns.release_policy):
        data = get_file_json(ns.release_policy)
    else:
        data = shell_safe_json_parse(ns.release_policy)
    ns.release_policy = KeyReleasePolicy(
        encoded_policy=json.dumps(data).encode('utf-8'), immutable=immutable)
def map_keyvalue_to_featureflagvalue(keyvalue):
    '''
    Helper Function to convert value string to a valid FeatureFlagValue.
    Throws Exception if value is an invalid JSON.

    Args:
        keyvalue - KeyValue object

    Return:
        Valid FeatureFlagValue object
    '''
    try:
        # Make sure value string is a valid json
        feature_flag_dict = shell_safe_json_parse(keyvalue.value)
        feature_name = keyvalue.key[len(FeatureFlagConstants.
                                        FEATURE_FLAG_PREFIX):]

        # Make sure value json has all the fields we support in the backend
        valid_fields = {'id', 'description', 'enabled', 'conditions'}
        if valid_fields != feature_flag_dict.keys():
            # FIX: the second half of the sentence used to be concatenated
            # onto feature_name (the '+' sat on the argument, not the format
            # string), producing a garbled log line. Join the message parts
            # and pass feature_name as the lone %s argument.
            logger.debug(
                "'%s' feature flag is missing required values or it contains "
                "unsupported values. Setting missing value to defaults and ignoring unsupported values\n",
                feature_name)

        conditions = feature_flag_dict.get('conditions', None)
        if conditions:
            client_filters = conditions.get('client_filters', [])

            # Convert all filters to FeatureFilter objects
            client_filters_list = []
            for client_filter in client_filters:
                # If there is a filter, it should always have a name
                # In case it doesn't, ignore this filter
                name = client_filter.get('name')
                if name:
                    params = client_filter.get('parameters', {})
                    client_filters_list.append(FeatureFilter(name, params))
                else:
                    logger.warning(
                        "Ignoring this filter without the 'name' attribute:\n%s",
                        json.dumps(client_filter, indent=2, ensure_ascii=False))
            conditions['client_filters'] = client_filters_list

        feature_flag_value = FeatureFlagValue(
            id_=feature_name,
            description=feature_flag_dict.get('description', ''),
            enabled=feature_flag_dict.get('enabled', False),
            conditions=conditions)
    except ValueError as exception:
        error_msg = "Invalid value. Unable to decode the following JSON value: \n" +\
                    "{0}\nFull exception: \n{1}".format(keyvalue.value, str(exception))
        raise ValueError(error_msg)
    except:  # pylint: disable=bare-except
        # Deliberate best-effort logging before re-raising the original error.
        logger.debug("Exception while parsing value:\n%s\n", keyvalue.value)
        raise
    return feature_flag_value
def get_file_or_parse_json(value):
    """Load JSON from ``value``.

    Treats ``value`` as a path when it exists on disk; otherwise parses it
    as an inline (shell-quoted) JSON string.
    """
    if not os.path.exists(value):
        return shell_safe_json_parse(value)
    return get_file_json(value)
def validate_mongo_user_definition_body(cmd, ns):
    """ Extracts user definition body """
    # Validator for the Mongo user-definition argument: loads the definition
    # from a file path or an inline JSON string, validates the required
    # fields, and writes the normalized dict back onto the namespace.
    from azure.cli.core.util import get_file_json, shell_safe_json_parse
    import os

    if ns.mongo_user_definition_body is not None:
        # The argument may be a path to a JSON file on disk or inline JSON.
        if os.path.exists(ns.mongo_user_definition_body):
            mongo_user_definition = get_file_json(
                ns.mongo_user_definition_body)
        else:
            mongo_user_definition = shell_safe_json_parse(
                ns.mongo_user_definition_body)

        if not isinstance(mongo_user_definition, dict):
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo user definition. A valid dictionary JSON representation is expected.')

        # 'Id' is mandatory: a non-empty '<DatabaseName>.<Username>' string.
        if 'Id' not in mongo_user_definition or not isinstance(
                mongo_user_definition['Id'], str) or len(
                    mongo_user_definition['Id']) == 0:
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo User ID. A valid string of <DatabaseName>.<Username> is expected.')

        # Normalize a fully-qualified resource path down to the bare id.
        mongo_user_definition['Id'] = _parse_resource_path(
            mongo_user_definition['Id'], False, "mongodbUserDefinitions")

        if 'UserName' not in mongo_user_definition or not isinstance(
                mongo_user_definition['UserName'], str) or len(
                    mongo_user_definition['UserName']) == 0:
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo User definition user name. A valid string user name is expected.')

        if 'Password' not in mongo_user_definition or not isinstance(
                mongo_user_definition['Password'], str) or len(
                    mongo_user_definition['Password']) == 0:
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo User definition password. A valid string password is expected.')

        # FIX: de-duplicated the doubled "User creation failed." prefix in
        # this message.
        if 'DatabaseName' not in mongo_user_definition or not isinstance(
                mongo_user_definition['DatabaseName'], str) or len(
                    mongo_user_definition['DatabaseName']) == 0:
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo database name. A valid string database name is expected.')

        if 'CustomData' in mongo_user_definition and not isinstance(
                mongo_user_definition['CustomData'], str):
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo Custom Data parameter. A valid string custom data is expected.')

        # FIX: the original condition was
        #   'Mechanisms' in d and not isinstance(...) or len(d['Mechanisms']) == 0
        # Since 'and' binds tighter than 'or', the len() clause ran even when
        # 'Mechanisms' was absent and raised KeyError. Parenthesize so the
        # check only applies when the key is present.
        if 'Mechanisms' in mongo_user_definition and (
                not isinstance(mongo_user_definition['Mechanisms'], str) or
                len(mongo_user_definition['Mechanisms']) == 0):
            raise InvalidArgumentValueError(
                'User creation failed. Invalid Mongo Mechanisms parameter. A valid string Mechanisms is expected.')

        # 'Roles' is optional; when present it must be a non-empty list of
        # dicts each carrying a non-empty string 'Role' (and optional
        # string 'Db').
        if 'Roles' in mongo_user_definition:
            if not isinstance(mongo_user_definition['Roles'], list) or len(
                    mongo_user_definition['Roles']) == 0:
                raise InvalidArgumentValueError(
                    'User creation failed. Invalid Mongo Roles. A valid dictionary JSON representation is expected')
            for role in mongo_user_definition['Roles']:
                if 'Role' not in role or not isinstance(
                        role['Role'], str) or len(role['Role']) == 0:
                    raise InvalidArgumentValueError(
                        'User creation failed. Invalid Mongo Role. A valid string Role is expected.')
                if 'Db' in role and not isinstance(role['Db'], str):
                    raise InvalidArgumentValueError(
                        'User creation failed. Invalid Mongo Db. A valid string database name is expected.')

        ns.mongo_user_definition_body = mongo_user_definition
def _deploy_arm_template_core(
        cli_ctx,
        resource_group_name,  # pylint: disable=too-many-arguments
        template_file=None,
        template_uri=None,
        input_yaml_files=None,
        deployment_name=None,
        parameters=None,
        mode=None,
        validate_only=False,
        no_wait=False):
    """Deploy (or validate) an ARM template to a resource group.

    The template is sourced from a remote ``template_uri``, a local
    ``template_file``, or a set of yaml files merged into one ARM template
    via ``_invoke_mergeutil``.

    :param validate_only: when True only run server-side validation.
    :param no_wait: when True return the poller without waiting.
    :return: result of ``sdk_no_wait`` over validate/create_or_update.
    """
    DeploymentProperties, TemplateLink = get_sdk(
        cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES, 'DeploymentProperties',
        'TemplateLink', mod='models')
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        # Remote template: deploy by link, but fetch a local copy so
        # parameters can be resolved against its definitions.
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(
            _urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template
    else:
        # Merge the yaml inputs into one ARM template; the merge consumes
        # the parameters, so clear them for the deployment call.
        output_file_path = _invoke_mergeutil(input_yaml_files, parameters)
        parameters = None
        template = get_file_json(output_file_path, preserve_order=True)
        template_obj = template
    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])
    # Round-trip through JSON to shed any non-serializable wrapper types.
    template = json.loads(json.dumps(template))
    if parameters is not None:
        parameters = _process_parameters(template_param_defs, parameters) or {}
        parameters = _get_missing_parameters(parameters, template_obj,
                                             _prompt_for_parameters)
        parameters = json.loads(json.dumps(parameters))
    properties = DeploymentProperties(template=template,
                                      template_link=template_link,
                                      parameters=parameters,
                                      mode=mode)
    # workaround
    properties.mode = 'incremental'
    smc = get_mgmt_service_client(cli_ctx,
                                  ResourceType.MGMT_RESOURCE_RESOURCES)
    logger.warning("Deploying . . .")
    logger.warning("You can get the state of the deployment with the cmd")
    # FIX: use lazy %-style logger arguments instead of eager str.format,
    # consistent with the other _deploy_arm_template_core variant in this
    # file; the message is only rendered if the record is emitted.
    logger.warning("az group deployment show --name %s --resource-group %s",
                   deployment_name, resource_group_name)
    if validate_only:
        return sdk_no_wait(no_wait, smc.deployments.validate,
                           resource_group_name, deployment_name, properties)
    return sdk_no_wait(no_wait, smc.deployments.create_or_update,
                       resource_group_name, deployment_name, properties)
def _prompt_for_parameters(missing_parameters, fail_on_no_tty=True):  # pylint: disable=too-many-statements
    # Interactively prompt for each missing deployment parameter, using a
    # prompt style matching the parameter's declared ARM type. Without a
    # TTY every remaining value falls back to a type-specific default and,
    # unless fail_on_no_tty is False, NoTTYException is raised at the end.
    prompt_list = missing_parameters.keys() if isinstance(missing_parameters, OrderedDict) \
        else sorted(missing_parameters)
    result = OrderedDict()
    no_tty = False
    for param_name in prompt_list:
        param = missing_parameters[param_name]
        param_type = param.get('type', 'string')
        description = 'Missing description'
        metadata = param.get('metadata', None)
        if metadata is not None:
            description = metadata.get('description', description)
        allowed_values = param.get('allowedValues', None)
        prompt_str = "Please provide {} value for '{}' (? for help): ".format(
            param_type, param_name)
        while True:
            if allowed_values is not None:
                # Closed set of values: present a numbered choice list.
                try:
                    ix = prompt_choice_list(prompt_str, allowed_values,
                                            help_string=description)
                    result[param_name] = allowed_values[ix]
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
            elif param_type == 'securestring':
                # Hidden input for secrets.
                try:
                    value = prompt_pass(prompt_str, help_string=description)
                except NoTTYException:
                    value = None
                    no_tty = True
                result[param_name] = value
                break
            elif param_type == 'int':
                try:
                    int_value = prompt_int(prompt_str, help_string=description)
                    result[param_name] = int_value
                except NoTTYException:
                    result[param_name] = 0
                    no_tty = True
                break
            elif param_type == 'bool':
                try:
                    value = prompt_t_f(prompt_str, help_string=description)
                    result[param_name] = value
                except NoTTYException:
                    result[param_name] = False
                    no_tty = True
                break
            elif param_type in ['object', 'array']:
                # Free-form JSON: empty input maps to an empty object/array;
                # invalid JSON logs the error and re-prompts (loop continue).
                try:
                    value = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    value = ''
                    no_tty = True
                if value == '':
                    value = {} if param_type == 'object' else []
                else:
                    try:
                        value = shell_safe_json_parse(value)
                    except Exception as ex:  # pylint: disable=broad-except
                        logger.error(ex)
                        continue
                result[param_name] = value
                break
            else:
                # Any other type is treated as a plain string prompt.
                try:
                    result[param_name] = prompt(prompt_str,
                                                help_string=description)
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
    if no_tty and fail_on_no_tty:
        raise NoTTYException
    return result
def _prompt_for_parameters(missing_parameters, fail_on_no_tty=True):  # pylint: disable=too-many-statements
    # Prompt the user for every missing deployment parameter; the prompt
    # style depends on the parameter's declared ARM type. When stdin has no
    # TTY each value falls back to a type-specific default, and a
    # NoTTYException is raised at the end unless fail_on_no_tty is False.
    prompt_list = missing_parameters.keys() if isinstance(missing_parameters, OrderedDict) \
        else sorted(missing_parameters)
    result = OrderedDict()
    no_tty = False
    for param_name in prompt_list:
        param = missing_parameters[param_name]
        param_type = param.get('type', 'string')
        description = 'Missing description'
        metadata = param.get('metadata', None)
        if metadata is not None:
            description = metadata.get('description', description)
        allowed_values = param.get('allowedValues', None)
        prompt_str = "Please provide {} value for '{}' (? for help): ".format(param_type, param_name)
        while True:
            if allowed_values is not None:
                # Restricted to a closed set: show a numbered choice list.
                try:
                    ix = prompt_choice_list(prompt_str, allowed_values, help_string=description)
                    result[param_name] = allowed_values[ix]
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
            elif param_type == 'securestring':
                # Secrets use a hidden-input prompt.
                try:
                    value = prompt_pass(prompt_str, help_string=description)
                except NoTTYException:
                    value = None
                    no_tty = True
                result[param_name] = value
                break
            elif param_type == 'int':
                try:
                    int_value = prompt_int(prompt_str, help_string=description)
                    result[param_name] = int_value
                except NoTTYException:
                    result[param_name] = 0
                    no_tty = True
                break
            elif param_type == 'bool':
                try:
                    value = prompt_t_f(prompt_str, help_string=description)
                    result[param_name] = value
                except NoTTYException:
                    result[param_name] = False
                    no_tty = True
                break
            elif param_type in ['object', 'array']:
                # Free-form JSON: an empty answer becomes {} or []; invalid
                # JSON is logged and the prompt repeats (loop continue).
                try:
                    value = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    value = ''
                    no_tty = True
                if value == '':
                    value = {} if param_type == 'object' else []
                else:
                    try:
                        value = shell_safe_json_parse(value)
                    except Exception as ex:  # pylint: disable=broad-except
                        logger.error(ex)
                        continue
                result[param_name] = value
                break
            else:
                # Fallback: plain string prompt.
                try:
                    result[param_name] = prompt(prompt_str, help_string=description)
                except NoTTYException:
                    result[param_name] = None
                    no_tty = True
                break
    if no_tty and fail_on_no_tty:
        raise NoTTYException
    return result