Example #1
def validate_json_file(namespace):
    """Validate the give json file existing"""
    if namespace.json_file:
        try:
            get_file_json(namespace.json_file)
        except EnvironmentError:
            raise ValueError("Cannot access JSON request file: " + namespace.json_file)
        except ValueError as err:
            raise ValueError("Invalid JSON file: {}".format(err))
Example #3
def create_task(client,
                resource_group_name,
                service_name,
                project_name,
                task_name,
                source_platform,
                target_platform,
                source_connection_json,
                target_connection_json,
                database_options_json,
                enable_schema_validation=False,
                enable_data_integrity_validation=False,
                enable_query_analysis_validation=False):

    # Validation: Test scenario eligibility
    if not determine_scenario_eligibility(source_platform, target_platform):
        raise ValueError(
            'The provided source-platform and target-platform combination is not supported.\n'
            'The only supported scenarios are:\n'
            '    1) Sql -> SqlDb\n'
            '    2) PostgreSql -> AzureDbForPostgreSql\n'
            '    3) MySql -> AzureDbForMySql')

    if os.path.exists(source_connection_json):
        source_connection_json = get_file_json(source_connection_json)
    else:
        source_connection_json = shell_safe_json_parse(source_connection_json)
    source_connection_info = create_connection(source_connection_json,
                                               "Source Database",
                                               source_platform)

    if os.path.exists(target_connection_json):
        target_connection_json = get_file_json(target_connection_json)
    else:
        target_connection_json = shell_safe_json_parse(target_connection_json)
    target_connection_info = create_connection(target_connection_json,
                                               "Target Database",
                                               target_platform)

    if os.path.exists(database_options_json):
        database_options_json = get_file_json(database_options_json)
    else:
        database_options_json = shell_safe_json_parse(database_options_json)

    task_properties = get_task_migration_properties(
        database_options_json, source_platform, target_platform,
        source_connection_info, target_connection_info,
        enable_schema_validation, enable_data_integrity_validation,
        enable_query_analysis_validation)

    return client.create_or_update(group_name=resource_group_name,
                                   service_name=service_name,
                                   project_name=project_name,
                                   task_name=task_name,
                                   properties=task_properties)
Example #4
def create_task(client,
                resource_group_name,
                service_name,
                project_name,
                task_name,
                source_connection_json,
                target_connection_json,
                database_options_json,
                enable_schema_validation=False,
                enable_data_integrity_validation=False,
                enable_query_analysis_validation=False):
    if os.path.exists(source_connection_json):
        source_connection_json = get_file_json(source_connection_json)
    else:
        source_connection_json = shell_safe_json_parse(source_connection_json)

    source_connection_info = create_sql_connection_info(source_connection_json, 'Source Database ')

    if os.path.exists(target_connection_json):
        target_connection_json = get_file_json(target_connection_json)
    else:
        target_connection_json = shell_safe_json_parse(target_connection_json)

    target_connection_info = create_sql_connection_info(target_connection_json, 'Target Database ')

    if os.path.exists(database_options_json):
        database_options_json = get_file_json(database_options_json)
    else:
        database_options_json = shell_safe_json_parse(database_options_json)

    database_options = []
    for d in database_options_json:
        database_options.append(
            MigrateSqlServerSqlDbDatabaseInput(name=d.get('name', None),
                                               target_database_name=d.get('target_database_name', None),
                                               make_source_db_read_only=d.get('make_source_db_read_only', None),
                                               table_map=d.get('table_map', None)))

    validation_options = MigrationValidationOptions(enable_schema_validation=enable_schema_validation,
                                                    enable_data_integrity_validation=enable_data_integrity_validation,
                                                    enable_query_analysis_validation=enable_query_analysis_validation)

    task_input = MigrateSqlServerSqlDbTaskInput(source_connection_info=source_connection_info,
                                                target_connection_info=target_connection_info,
                                                selected_databases=database_options,
                                                validation_options=validation_options)

    migration_properties = MigrateSqlServerSqlDbTaskProperties(input=task_input)

    return client.create_or_update(group_name=resource_group_name,
                                   service_name=service_name,
                                   project_name=project_name,
                                   task_name=task_name,
                                   properties=migration_properties)
Example #5
def create_service(cmd,
                   client,
                   resource_group_name,
                   cluster_name,
                   application_name,
                   service_name,
                   service_type,
                   state,
                   instance_count=None,
                   target_replica_set_size=None,
                   min_replica_set_size=None,
                   default_move_cost=None,
                   partition_scheme='singleton'):
    parameter_file, template_file = _get_template_file_and_parameters_file()
    template = get_file_json(template_file)
    parameters = get_file_json(parameter_file)['parameters']

    # set params
    _set_parameters(parameters, "clusterName", cluster_name)
    _set_parameters(parameters, "applicationName", application_name)
    _set_parameters(parameters, "serviceName", service_name)

    _set_service_parameters(template, parameters, "serviceTypeName",
                            service_type, "string")

    if partition_scheme == 'singleton':
        _set_service_parameters(template, parameters, "partitionDescription",
                                {"partitionScheme": "Singleton"}, "object")
    elif partition_scheme == 'uniformInt64':
        _set_service_parameters(template, parameters, "partitionDescription",
                                {"partitionScheme": "UniformInt64Range"},
                                "object")
    elif partition_scheme == 'named':
        _set_service_parameters(template, parameters, "partitionDescription",
                                {"partitionScheme": "Named"}, "object")

    if state == 'stateless':
        _set_service_parameters(template, parameters, "instanceCount",
                                int(instance_count), "int")
    else:
        _set_service_parameters(template, parameters, "targetReplicaSetSize",
                                int(target_replica_set_size), "int")
        _set_service_parameters(template, parameters, "minReplicaSetSize",
                                int(min_replica_set_size), "int")

    if default_move_cost:
        _set_service_parameters(template, parameters, "defaultMoveCost",
                                default_move_cost, "string")

    validate_and_deploy_arm_template(cmd, resource_group_name, template,
                                     parameters)

    return client.services.get(resource_group_name, cluster_name,
                               application_name, service_name)
Example #6
    def generate_name(cmd):
        import uuid
        from random import choice

        noun = choice(get_file_json(GENERATE_RANDOM_APP_NAMES)['APP_NAME_NOUNS'])
        adjective = choice(get_file_json(GENERATE_RANDOM_APP_NAMES)['APP_NAME_ADJECTIVES'])
        random_uuid = str(uuid.uuid4().hex)

        name = '{}-{}-{}'.format(adjective, noun, random_uuid)
        name_available = get_site_availability(cmd, name).name_available

        if name_available:
            return name
        return ""
def create_scaler(nodes):
    dir_path = os.path.dirname(os.path.realpath(__file__))
    template = get_file_json(os.path.join(dir_path, './data/azuredeploy.cluster.json'))
    parameters = get_file_json(os.path.join(dir_path, './data/azuredeploy.cluster.parameters.json'))
    return EngineScaler(
        resource_group='my-rg',
        nodes=nodes,
        deployments=None,
        over_provision=0,
        spare_count=1,
        arm_parameters=parameters,
        arm_template=template,
        ignore_pools='',
        idle_threshold=0,
        notifier='')
Example #8
def get_arm_template(local_file_path, url):
    if local_file_path:
        return get_file_json(local_file_path)
        
    with urllib.request.urlopen(url) as response:
        raw = response.read()
        return json.loads(raw)
def create_management_policies(client, resource_group_name, account_name, policy=None):
    if policy:
        if os.path.exists(policy):
            policy = get_file_json(policy)
        else:
            policy = shell_safe_json_parse(policy)
    return client.create_or_update_management_policies(resource_group_name, account_name, policy=policy)
    def test_deployment_missing_values(self):

        curr_dir = os.path.dirname(os.path.realpath(__file__))
        template_path = os.path.join(curr_dir,
                                     'param-validation-template.json').replace(
                                         '\\', '\\\\')
        parameters_path = os.path.join(curr_dir,
                                       'param-validation-params.json').replace(
                                           '\\', '\\\\')
        parameters_with_reference_path = os.path.join(
            curr_dir,
            'param-validation-ref-params.json').replace('\\', '\\\\')

        template = get_file_json(template_path, preserve_order=True)
        template_param_defs = template.get('parameters', {})

        parameter_list = [[parameters_path], [parameters_with_reference_path]]
        result_parameters = _process_parameters(template_param_defs,
                                                parameter_list)
        missing_parameters = _find_missing_parameters(result_parameters,
                                                      template)

        # ensure that parameters with default values are not considered missing
        params_with_defaults = [
            x for x in template_param_defs
            if 'defaultValue' in template_param_defs[x]
        ]
        for item in params_with_defaults:
            self.assertTrue(item not in missing_parameters)

        # ensure that a parameter that specifies a reference does not prompt
        self.assertTrue('secretReference' not in missing_parameters)
        self.assertTrue('secretReference' in result_parameters)
Example #11
def deploy_arm_template(
        cli_ctx,
        resource_group_name,  # pylint: disable=too-many-arguments
        template_file=None,
        deployment_name=None,
        parameters=None,
        mode=None):
    DeploymentProperties, _ = get_sdk(cli_ctx,
                                      ResourceType.MGMT_RESOURCE_RESOURCES,
                                      'DeploymentProperties',
                                      'TemplateLink',
                                      mod='models')
    template = None
    template = get_file_json(template_file, preserve_order=True)
    template_obj = template

    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(parameters) or {}

    import json
    template = json.loads(json.dumps(template))
    parameters = json.loads(json.dumps(parameters))

    properties = DeploymentProperties(template=template,
                                      template_link=None,
                                      parameters=parameters,
                                      mode=mode)

    smc = get_mgmt_service_client(cli_ctx,
                                  ResourceType.MGMT_RESOURCE_RESOURCES)
    return smc.deployments.create_or_update(resource_group_name,
                                            deployment_name,
                                            properties,
                                            raw=False)
    def get_http_proxy_config(self) -> Union[dict, ManagedClusterHTTPProxyConfig, None]:
        """Obtain the value of http_proxy_config.

        :return: dict, ManagedClusterHTTPProxyConfig or None
        """
        # read the original value passed by the command
        http_proxy_config = None
        http_proxy_config_file_path = self.raw_param.get("http_proxy_config")
        # validate user input
        if http_proxy_config_file_path:
            if not os.path.isfile(http_proxy_config_file_path):
                raise InvalidArgumentValueError(
                    "{} is not valid file, or not accessable.".format(
                        http_proxy_config_file_path
                    )
                )
            http_proxy_config = get_file_json(http_proxy_config_file_path)
            if not isinstance(http_proxy_config, dict):
                raise InvalidArgumentValueError(
                    "Error reading Http Proxy Config from {}. "
                    "Please see https://aka.ms/HttpProxyConfig for correct format.".format(
                        http_proxy_config_file_path
                    )
                )

        # try to read the property value corresponding to the parameter from the `mc` object
        if self.mc and self.mc.http_proxy_config is not None:
            http_proxy_config = self.mc.http_proxy_config

        # this parameter does not need dynamic completion
        # this parameter does not need validation
        return http_proxy_config
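For reference, the file passed as http_proxy_config above is a small JSON document. The keys below follow the format referenced at https://aka.ms/HttpProxyConfig; the values are placeholders for illustration, not taken from the source:

# Illustrative http_proxy_config file contents (placeholder values only).
example_http_proxy_config = {
    "httpProxy": "http://proxy.example.com:3128",
    "httpsProxy": "https://proxy.example.com:3129",
    "noProxy": ["localhost", "127.0.0.1"],
    "trustedCa": "<base64-encoded CA certificate>",
}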
    def test_deployment_prompt_alphabetical_order(self):
        # check that params are prompted for in alphabetical order when the file is loaded with preserve_order=False
        curr_dir = os.path.dirname(os.path.realpath(__file__))
        template_path = os.path.join(curr_dir,
                                     'param-validation-template.json').replace(
                                         '\\', '\\\\')
        parameters_path = os.path.join(curr_dir,
                                       'param-validation-params.json').replace(
                                           '\\', '\\\\')
        parameters_with_reference_path = os.path.join(
            curr_dir,
            'param-validation-ref-params.json').replace('\\', '\\\\')

        template = get_file_json(template_path, preserve_order=False)
        template_param_defs = template.get('parameters', {})

        parameter_list = [[parameters_path], [parameters_with_reference_path]]
        result_parameters = _process_parameters(template_param_defs,
                                                parameter_list)
        missing_parameters = _find_missing_parameters(result_parameters,
                                                      template)

        param_alpha_order = [
            "[u'arrayParam', u'boolParam', u'enumParam', u'objectParam', u'secureParam']",
            "['arrayParam', 'boolParam', 'enumParam', 'objectParam', 'secureParam']"
        ]
        results = _prompt_for_parameters(dict(missing_parameters),
                                         fail_on_no_tty=False)
        self.assertTrue(str(list(results.keys())) in param_alpha_order)
Example #14
def _deploy_arm_template_core(resource_group_name,  # pylint: disable=too-many-arguments
                              template_file=None, template_uri=None, deployment_name=None,
                              parameters=None, mode='incremental', validate_only=False,
                              no_wait=False):
    DeploymentProperties, TemplateLink = get_sdk(ResourceType.MGMT_RESOURCE_RESOURCES,
                                                 'DeploymentProperties',
                                                 'TemplateLink',
                                                 mod='models')
    parameters = parameters or {}
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'))
    else:
        template = get_file_json(template_file)
        template_obj = template

    parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters)

    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)

    smc = get_mgmt_service_client(ResourceType.MGMT_RESOURCE_RESOURCES)
    if validate_only:
        return smc.deployments.validate(resource_group_name, deployment_name, properties, raw=no_wait)
    return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=no_wait)
Example #15
def _deploy_arm_template_core(resource_group_name, template_file=None, template_uri=None,
                              deployment_name=None, parameter_list=None, mode='incremental',
                              validate_only=False, no_wait=False):
    from azure.mgmt.resource.resources.models import DeploymentProperties, TemplateLink

    if bool(template_uri) == bool(template_file):
        raise CLIError('please provide either template file path or uri, but not both')

    parameters = _merge_parameters(parameter_list)

    template = None
    template_link = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
    else:
        template = get_file_json(template_file)

    missing = _find_missing_parameters(parameters, template)
    if len(missing) > 0:
        prompt_parameters = _prompt_for_parameters(missing)
        for param_name in prompt_parameters:
            parameters[param_name] = prompt_parameters[param_name]

    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)

    smc = get_mgmt_service_client(ResourceManagementClient)
    if validate_only:
        return smc.deployments.validate(resource_group_name, deployment_name,
                                        properties, raw=no_wait)
    else:
        return smc.deployments.create_or_update(resource_group_name, deployment_name,
                                                properties, raw=no_wait)
Example #16
def _deploy_arm_template_core(resource_group_name, template_file=None, template_uri=None, deployment_name=None,
                              parameter_list=None, mode='incremental', validate_only=False, no_wait=False):
    DeploymentProperties, TemplateLink = get_sdk(ResourceType.MGMT_RESOURCE_RESOURCES,
                                                 'DeploymentProperties',
                                                 'TemplateLink',
                                                 mod='models')

    if bool(template_uri) == bool(template_file):
        raise CLIError('please provide either template file path or uri, but not both')

    parameters = _merge_parameters(parameter_list)
    if parameters is None:
        parameters = {}
    template = None
    template_link = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
    else:
        template = get_file_json(template_file)

    parameters = _get_missing_parameters(parameters, template, _prompt_for_parameters)

    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)

    smc = get_mgmt_service_client(ResourceType.MGMT_RESOURCE_RESOURCES)
    if validate_only:
        return smc.deployments.validate(resource_group_name, deployment_name, properties, raw=no_wait)
    return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=no_wait)
Example #17
def create_role_definition(role_definition):
    role_id = uuid.uuid4()
    if os.path.exists(role_definition):
        role_definition = get_file_json(role_definition)
    else:
        role_definition = shell_safe_json_parse(role_definition)

    # to workaround service defects, ensure property names are camel case
    names = [p for p in role_definition if p[:1].isupper()]
    for n in names:
        new_name = n[:1].lower() + n[1:]
        role_definition[new_name] = role_definition.pop(n)

    if 'name' not in role_definition:
        raise CLIError("please provide 'name'")
    if 'assignableScopes' not in role_definition:
        raise CLIError("please provide 'assignableScopes'")

    permission = Permission(actions=role_definition.get('actions', None),
                            not_actions=role_definition.get(
                                'notActions', None))
    properties = RoleDefinitionProperties(
        role_name=role_definition['name'],
        description=role_definition.get('description', None),
        type=_CUSTOM_RULE,
        assignable_scopes=role_definition['assignableScopes'],
        permissions=[permission])

    definition = RoleDefinition(name=role_id, properties=properties)

    definitions_client = _auth_client_factory().role_definitions
    return definitions_client.create_or_update(
        role_definition_id=role_id,
        scope=properties.assignable_scopes[0],
        role_definition=definition)
def _get_maintenance_config(cmd, file_path):
    # get models
    MaintenanceConfiguration = cmd.get_models('MaintenanceConfiguration', resource_type=CUSTOM_MGMT_AKS_PREVIEW, operation_group='maintenance_configurations')
    TimeInWeek = cmd.get_models('TimeInWeek', resource_type=CUSTOM_MGMT_AKS_PREVIEW, operation_group='maintenance_configurations')
    TimeSpan = cmd.get_models('TimeSpan', resource_type=CUSTOM_MGMT_AKS_PREVIEW, operation_group='maintenance_configurations')

    maintenance_config = get_file_json(file_path)
    logger.info(maintenance_config)
    if not isinstance(maintenance_config, dict):
        raise CLIError("Error reading maintenance configuration at {}.".format(file_path))
    time_in_week = maintenance_config["timeInWeek"]
    not_allowed_time = maintenance_config["notAllowedTime"]
    week_schedule = []
    if time_in_week is not None:
        for item in time_in_week:
            w = TimeInWeek(**item)
            logger.info('day: %s, time slots: %s ', w.day, w.hour_slots)
            week_schedule.append(w)
    not_allowed = []
    if not_allowed_time is not None:
        for item in not_allowed_time:
            t = TimeSpan(**item)
            logger.info('start: %s, end: %s ', t.start, t.end)
            not_allowed.append(t)
    result = MaintenanceConfiguration()
    result.time_in_week = week_schedule
    result.not_allowed_time = not_allowed
    return result
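Based on the keys this loader reads (timeInWeek holding TimeInWeek items, notAllowedTime holding TimeSpan items), the configuration file is expected to look roughly like the dictionary below; the values are illustrative only:

# Illustrative shape of the maintenance configuration file consumed above.
example_maintenance_config = {
    "timeInWeek": [
        {"day": "Monday", "hour_slots": [1, 2]},
    ],
    "notAllowedTime": [
        {"start": "2022-05-26T03:00:00Z", "end": "2022-05-30T12:00:00Z"},
    ],
}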
Example #19
 def parse(self, namespace):
     """Parse all arguments in the namespace to validate whether all required
     arguments have been set.
     :param namespace: The namespace object.
     :raises: ValueError if a required argument was not set.
     """
     if self._custom_validator:
         try:
             self._custom_validator(namespace, self)
         except TypeError:
             raise ValueError("Custom validator must be a function that takes two arguments.")
     try:
         if namespace.json_file:
             try:
                 namespace.json_file = get_file_json(namespace.json_file)
             except EnvironmentError:
                 raise ValueError("Cannot access JSON request file: " + namespace.json_file)
             except ValueError as err:
                 raise ValueError("Invalid JSON file: {}".format(err))
             other_values = [arg_name(n) for n in self._arg_tree if getattr(namespace, n)]
             if other_values:
                 message = "--json-file cannot be combined with:\n"
                 raise ValueError(message + '\n'.join(other_values))
             self.done = True
             return
     except AttributeError:
         pass
     required_args = self._parse(namespace, self._request_param['name'], True)
     missing_args = [n for n in required_args if not getattr(namespace, n)]
     if missing_args:
         message = "The following additional arguments are required:\n"
         message += "\n".join([arg_name(m) for m in missing_args])
         raise ValueError(message)
     self.done = True
Example #20
def create_management_policies(client, resource_group_name, account_name, policy=None):
    if policy:
        if os.path.exists(policy):
            policy = get_file_json(policy)
        else:
            policy = shell_safe_json_parse(policy)
    return client.create_or_update(resource_group_name, account_name, policy=policy)
 def _load_public_certificate_file(self,
                                   client,
                                   resource_group,
                                   service,
                                   loaded_public_certificate_file=None,
                                   **_):
     if not loaded_public_certificate_file:
         return
     data = get_file_json(loaded_public_certificate_file)
     if not data:
         return
     if not data.get('loadedCertificates'):
         raise FileOperationError(
             "loadedCertificates must be provided in the json file")
     loaded_certificates = []
     for item in data['loadedCertificates']:
         if not item.get('certificateName') or not item.get(
                 'loadTrustStore'):
             raise FileOperationError(
                 "certificateName, loadTrustStore must be provided in the json file"
             )
         certificate_resource = client.certificates.get(
             resource_group, service, item['certificateName'])
         loaded_certificates.append(
             models.LoadedCertificate(
                 resource_id=certificate_resource.id,
                 load_trust_store=item['loadTrustStore']))
     return loaded_certificates
Example #22
def getMaintenanceConfiguration(cmd, config_file, weekday, start_hour):
    if config_file is not None and weekday is not None:
        raise CLIError('only one of config-file or weekday can be supplied.')
    if weekday is None and start_hour is not None:
        raise CLIError(
            'if maintenance-start-hour is supplied, maintenance-weekday must be supplied too.'
        )
    # get models
    MaintenanceConfiguration = cmd.get_models(
        'MaintenanceConfiguration',
        resource_type=CUSTOM_MGMT_AKS_PREVIEW,
        operation_group='maintenance_configurations')
    TimeInWeek = cmd.get_models('TimeInWeek',
                                resource_type=CUSTOM_MGMT_AKS_PREVIEW,
                                operation_group='maintenance_configurations')

    if weekday is not None:
        time_in_week_args = {"day": weekday}
        if start_hour is not None:
            time_in_week_args["hour_slots"] = [start_hour]
        timeInWeek = TimeInWeek(**time_in_week_args)
        result = MaintenanceConfiguration()
        result.time_in_week = [timeInWeek]
        result.not_allowed_time = []
        return result

    maintenance_config = get_file_json(config_file)
    logger.info(maintenance_config)
    return maintenance_config
Example #23
def arm_deploy_template_managed_storage(resource_group_name,
                                        registry_name,
                                        location,
                                        sku,
                                        admin_user_enabled,
                                        deployment_name=None):
    """Deploys ARM template to create a container registry with managed storage account.
    :param str resource_group_name: The name of resource group
    :param str registry_name: The name of container registry
    :param str location: The name of location
    :param str sku: The SKU of the container registry
    :param bool admin_user_enabled: Enable admin user
    :param str deployment_name: The name of the deployment
    """
    from azure.mgmt.resource.resources.models import DeploymentProperties
    from azure.cli.core.util import get_file_json
    import os

    parameters = _parameters(
        registry_name=registry_name,
        location=location,
        sku=sku,
        admin_user_enabled=admin_user_enabled)

    file_path = os.path.join(os.path.dirname(__file__), 'template.json')
    template = get_file_json(file_path)
    properties = DeploymentProperties(template=template, parameters=parameters, mode='incremental')

    return _arm_deploy_template(
        get_arm_service_client().deployments, resource_group_name, deployment_name, properties)
Example #24
def data_collection_rules_create(client,
                                 resource_group_name,
                                 data_collection_rule_name,
                                 rule_file,
                                 location=None,
                                 tags=None,
                                 description=None):
    from azure.cli.core.util import get_file_json
    from azure.cli.core.azclierror import FileOperationError, UnclassifiedUserFault
    body = {}
    body['location'] = location
    body['tags'] = tags
    body['description'] = description
    try:
        json_data = get_file_json(rule_file)
    except FileNotFoundError:
        raise FileOperationError("No such file: " + str(rule_file))
    except IsADirectoryError:
        raise FileOperationError("Is a directory: " + str(rule_file))
    except PermissionError:
        raise FileOperationError("Permission denied: " + str(rule_file))
    except OSError as e:
        raise UnclassifiedUserFault(e)
    for key_prop in json_data:
        if key_prop == 'properties':
            data = json_data['properties']
        else:
            data = json_data
    for key in data:
        if key == 'dataSources':
            body['data_sources'] = {}
            for key_ds in data['dataSources']:
                if key_ds == 'performanceCounters':
                    body['data_sources']['performance_counters'] = data[
                        'dataSources']['performanceCounters']
                if key_ds == 'windowsEventLogs':
                    body['data_sources']['windows_event_logs'] = data[
                        'dataSources']['windowsEventLogs']
                if key_ds == 'syslog':
                    body['data_sources']['syslog'] = data['dataSources'][
                        'syslog']
                if key_ds == 'extensions':
                    body['data_sources']['extensions'] = data['dataSources'][
                        'extensions']
        if key == 'destinations':
            body['destinations'] = {}
            for key_de in data['destinations']:
                if key_de == 'logAnalytics':
                    body['destinations']['log_analytics'] = data[
                        'destinations']['logAnalytics']
                if key_de == 'azureMonitorMetrics':
                    body['destinations']['azure_monitor_metrics'] = data[
                        'destinations']['azureMonitorMetrics']
        if key == 'dataFlows':
            body['data_flows'] = data['dataFlows']
    return _data_collection_rules_create(
        client,
        resource_group_name=resource_group_name,
        data_collection_rule_name=data_collection_rule_name,
        body=body)
Example #25
def arm_deploy_template_new_storage(cli_ctx,
                                    resource_group_name,
                                    registry_name,
                                    location,
                                    sku,
                                    storage_account_name,
                                    admin_user_enabled,
                                    deployment_name=None):
    """Deploys ARM template to create a container registry with a new storage account.
    :param str resource_group_name: The name of resource group
    :param str registry_name: The name of container registry
    :param str location: The name of location
    :param str sku: The SKU of the container registry
    :param str storage_account_name: The name of storage account
    :param bool admin_user_enabled: Enable admin user
    :param str deployment_name: The name of the deployment
    """
    from azure.mgmt.resource.resources.models import DeploymentProperties
    from azure.cli.core.util import get_file_json
    import os

    parameters = _parameters(
        registry_name=registry_name,
        location=location,
        sku=sku,
        admin_user_enabled=admin_user_enabled,
        storage_account_name=storage_account_name)

    file_path = os.path.join(os.path.dirname(__file__), 'template_new_storage.json')
    template = get_file_json(file_path)
    properties = DeploymentProperties(template=template, parameters=parameters, mode='incremental')

    return _arm_deploy_template(
        get_arm_service_client(cli_ctx).deployments, resource_group_name, deployment_name, properties)
Example #26
    def deploy_arm_template(cli_ctx, resource_group_name,  # pylint: disable=too-many-arguments
                            template_file=None, deployment_name=None,
                            parameters=None, mode=None):
        DeploymentProperties, _ = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                          'DeploymentProperties', 'TemplateLink', mod='models')

        template = {}
        # TODO: get_file_json() can return None if specified, otherwise it can throw an error.
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template

        # So template should always be a dict, otherwise this next line will fail.
        template_obj['resources'] = template_obj.get('resources', [])
        # template_obj is not used after this point, can remove it.
        parameters = BotTemplateDeployer.__process_parameters(parameters) or {}

        # Turn the template into JSON string, then load it back to a dict, list, etc.
        template = json.loads(json.dumps(template))
        parameters = json.loads(json.dumps(parameters))

        properties = DeploymentProperties(template=template, template_link=None,
                                          parameters=parameters, mode=mode)

        smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
        return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=False)
Example #27
def validate_file_or_dict(string):
    import os
    if os.path.exists(string):
        from azure.cli.core.util import get_file_json
        return get_file_json(string)
    else:
        from azure.cli.core.util import shell_safe_json_parse
        return shell_safe_json_parse(string)
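A short usage sketch for the helper above, with hypothetical inputs: an existing path is loaded via get_file_json, anything else is treated as an inline JSON string.

# Hypothetical calls showing both branches of validate_file_or_dict.
inline_value = validate_file_or_dict('{"sku": {"name": "Standard_LRS"}}')  # parsed as inline JSON
# file_value = validate_file_or_dict('parameters.json')                    # loaded from disk when the file exists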
Example #28
    def test_resource_policyset(self, resource_group):
        policy_name = self.create_random_name('azure-cli-test-policy', 30)
        policy_display_name = self.create_random_name('test_policy', 20)
        policy_description = 'desc_for_test_policy_123'
        policyset_name = self.create_random_name('azure-cli-test-policyset', 30)
        policyset_display_name = self.create_random_name('test_policyset', 20)
        policyset_description = 'desc_for_test_policyset_123'
        curr_dir = os.path.dirname(os.path.realpath(__file__))
        rules_file = os.path.join(curr_dir, 'sample_policy_rule.json').replace('\\', '\\\\')
        policyset_file = os.path.join(curr_dir, 'sample_policy_set.json').replace('\\', '\\\\')
        params_def_file = os.path.join(curr_dir, 'sample_policy_param_def.json').replace('\\', '\\\\')

        # create a policy
        policycreatecmd = 'policy definition create -n {} --rules {} --params {} --display-name {} --description {}'
        policy = self.cmd(policycreatecmd.format(policy_name, rules_file, params_def_file, policy_display_name,
                                                 policy_description)).get_output_in_json()

        # create a policy set
        policyset = get_file_json(policyset_file)
        policyset[0]['policyDefinitionId'] = policy['id']
        with open(os.path.join(curr_dir, 'sample_policy_set.json'), 'w') as outfile:
            json.dump(policyset, outfile)
        self.cmd('policy set-definition create -n {} --definitions @"{}" --display-name {} --description {}'.format(
            policyset_name, policyset_file, policyset_display_name, policyset_description),
            checks=[JCheck('name', policyset_name),
                    JCheck('displayName', policyset_display_name),
                    JCheck('description', policyset_description)])

        # update it
        new_policyset_description = policy_description + '_new'
        self.cmd(
            'policy set-definition update -n {} --description {}'.format(policyset_name, new_policyset_description),
            checks=JCheck('description', new_policyset_description))

        # list and show it
        self.cmd('policy set-definition list', checks=JMESPathCheck("length([?name=='{}'])".format(policyset_name), 1))
        self.cmd('policy set-definition show -n {}'.format(policyset_name),
                 checks=[JCheck('name', policyset_name),
                         JCheck('displayName', policyset_display_name)])

        # create a policy assignment on a resource group
        policy_assignment_name = self.create_random_name('azurecli-test-policy-assignment', 40)
        policy_assignment_display_name = self.create_random_name('test_assignment', 20)
        self.cmd('policy assignment create -d {} -n {} --display-name {} -g {}'.format(
            policyset_name, policy_assignment_name, policy_assignment_display_name, resource_group),
            checks=[JCheck('name', policy_assignment_name),
                    JCheck('displayName', policy_assignment_display_name),
                    JCheck('sku.name', 'A0'),
                    JCheck('sku.tier', 'Free')])

        # delete the assignment
        self.cmd('policy assignment delete -n {} -g {}'.format(policy_assignment_name, resource_group))
        self.cmd('policy assignment list --disable-scope-strict-match')

        # delete the policy set
        self.cmd('policy set-definition delete -n {}'.format(policyset_name))
        time.sleep(10)  # ensure the policy is gone when run live.
        self.cmd('policy set-definition list', checks=JCheck("length([?name=='{}'])".format(policyset_name), 0))
def validate_resource_forest_settings(namespace):
    if not namespace.settings:
        return
    import os
    from azure.cli.core.util import get_file_json, shell_safe_json_parse
    if os.path.exists(namespace.settings):
        namespace.settings = get_file_json(namespace.settings)
    else:
        namespace.settings = shell_safe_json_parse(namespace.settings)
Example #30
def create_task(client,
                job_id,
                json_file=None,
                task_id=None,
                command_line=None,
                resource_files=None,
                environment_settings=None,
                affinity_id=None,
                max_wall_clock_time=None,
                retention_time=None,
                max_task_retry_count=None,
                application_package_references=None):
    task = None
    tasks = []
    if json_file:
        json_obj = get_file_json(json_file)
        try:
            task = TaskAddParameter.from_dict(json_obj)
        except DeserializationError:
            tasks = []
            try:
                for json_task in json_obj:
                    tasks.append(TaskAddParameter.from_dict(json_task))
            except (DeserializationError, TypeError):
                raise ValueError(
                    "JSON file '{}' is not formatted correctly.".format(
                        json_file))
    else:
        if command_line is None or task_id is None:
            raise ValueError(
                "Missing required arguments.\nEither --json-file, "
                "or both --task-id and --command-line must be specified.")
        task = TaskAddParameter(
            task_id,
            command_line,
            resource_files=resource_files,
            environment_settings=environment_settings,
            affinity_info=AffinityInformation(affinity_id)
            if affinity_id else None,
            application_package_references=application_package_references)
        if max_wall_clock_time is not None or retention_time is not None \
                or max_task_retry_count is not None:
            task.constraints = TaskConstraints(
                max_wall_clock_time=max_wall_clock_time,
                retention_time=retention_time,
                max_task_retry_count=max_task_retry_count)
    if task is not None:
        client.add(job_id=job_id, task=task)
        return client.get(job_id=job_id, task_id=task.id)

    submitted_tasks = []
    for i in range(0, len(tasks), MAX_TASKS_PER_REQUEST):
        submission = client.add_collection(job_id=job_id,
                                           value=tasks[i:i +
                                                       MAX_TASKS_PER_REQUEST])
        submitted_tasks.extend(submission.value)  # pylint: disable=no-member
    return submitted_tasks
Example #31
def _load_tokens_from_file(file_path):
    if os.path.isfile(file_path):
        try:
            return get_file_json(file_path, throw_on_empty=False) or []
        except (CLIError, ValueError) as ex:
            raise CLIError("Failed to load token files. If you have a repro, please log an issue at "
                           "https://github.com/Azure/azure-cli/issues. At the same time, you can clean "
                           "up by running 'az account clear' and then 'az login'. (Inner Error: {})".format(ex))
    return []
Example #33
def validate_file_or_dict(string):
    import os
    string = os.path.expanduser(string)
    if os.path.exists(string):
        from azure.cli.core.util import get_file_json
        return get_file_json(string)

    from azure.cli.core.util import shell_safe_json_parse
    return shell_safe_json_parse(string)
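A brief usage sketch with hypothetical inputs, showing what the os.path.expanduser call adds over Example #27: a path starting with '~' is expanded before the existence check instead of falling through to shell_safe_json_parse.

# Hypothetical calls; '~/rules.json' is expanded to an absolute path before os.path.exists runs.
rules = validate_file_or_dict('~/rules.json')     # read from the file, if it exists after expansion
tags = validate_file_or_dict('{"env": "dev"}')    # still parsed as inline JSON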
Example #34
    def test_load_json_from_file(self):
        _, pathname = tempfile.mkstemp()

        # test good case
        with open(pathname, 'w') as good_file:
            good_file.write('{"key1":"value1", "key2":"value2"}')
        result = get_file_json(pathname)
        self.assertEqual('value2', result['key2'])

        # test error case
        with open(pathname, 'w') as bad_file:
            try:
                bad_file.write('{"key1":"value1" "key2":"value2"}')
                get_file_json(pathname)
                self.fail('expect throw on reading from badly formatted file')
            except Exception as ex:  # pylint: disable=broad-except
                self.assertTrue(str(ex).find(
                    'contains error: Expecting value: line 1 column 1 (char 0)'))
def _deploy_arm_template_core(cli_ctx, resource_group_name,  # pylint: disable=too-many-arguments
                              template_file=None, template_uri=None, input_yaml_files=None, deployment_name=None,
                              parameters=None, mode=None, validate_only=False,
                              no_wait=False):
    DeploymentProperties, TemplateLink = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                                 'DeploymentProperties', 'TemplateLink', mod='models')
    template = None
    template_link = None
    template_obj = None
    if template_uri:
        template_link = TemplateLink(uri=template_uri)
        template_obj = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        template = get_file_json(template_file, preserve_order=True)
        template_obj = template
    else:
        output_file_path = _invoke_mergeutil(input_yaml_files, parameters)
        parameters = None
        template = get_file_json(output_file_path, preserve_order=True)
        template_obj = template

    template_param_defs = template_obj.get('parameters', {})
    template_obj['resources'] = template_obj.get('resources', [])

    template = json.loads(json.dumps(template))

    if parameters is not None:
        parameters = _process_parameters(template_param_defs, parameters) or {}
        parameters = _get_missing_parameters(parameters, template_obj, _prompt_for_parameters)
        parameters = json.loads(json.dumps(parameters))

    properties = DeploymentProperties(template=template, template_link=template_link,
                                      parameters=parameters, mode=mode)
    # workaround
    properties.mode = 'incremental'
    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)

    logger.warning("Deploying . . .")
    logger.warning("You can get the state of the deployment with the cmd")
    logger.warning("az group deployment show --name {0} --resource-group {1}".format(deployment_name, resource_group_name))
    if validate_only:
        return sdk_no_wait(no_wait, smc.deployments.validate, resource_group_name, deployment_name, properties)

    return sdk_no_wait(no_wait, smc.deployments.create_or_update, resource_group_name, deployment_name, properties)
Example #37
def create_policy_definition(name, rules, display_name=None, description=None):
    if os.path.exists(rules):
        rules = get_file_json(rules)
    else:
        rules = shell_safe_json_parse(rules)

    policy_client = _resource_policy_client_factory()
    parameters = PolicyDefinition(policy_rule=rules, description=description,
                                  display_name=display_name)
    return policy_client.policy_definitions.create_or_update(name, parameters)
Example #38
def create_policy_definition(name, rules, display_name=None, description=None):
    if os.path.exists(rules):
        rules = get_file_json(rules)
    else:
        rules = shell_safe_json_parse(rules)

    policy_client = _resource_policy_client_factory()
    PolicyDefinition = get_sdk(ResourceType.MGMT_RESOURCE_POLICY, 'PolicyDefinition', mod='models')
    parameters = PolicyDefinition(policy_rule=rules, description=description,
                                  display_name=display_name)
    return policy_client.policy_definitions.create_or_update(name, parameters)
Example #39
def create_policy_definition(name, rules, display_name=None, description=None):
    if os.path.exists(rules):
        rules = get_file_json(rules)
    else:
        rules = shell_safe_json_parse(rules)

    policy_client = _resource_policy_client_factory()
    PolicyDefinition = get_sdk(ResourceType.MGMT_RESOURCE_POLICY, 'PolicyDefinition', mod='models')
    parameters = PolicyDefinition(policy_rule=rules, description=description,
                                  display_name=display_name)
    return policy_client.policy_definitions.create_or_update(name, parameters)
    def test_file_string_or_uri(self):
        data = '{ "some": "data here"}'
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp.write(data.encode('utf-8'))
            tmp.close()

            output = _load_file_string_or_uri(tmp.name, 'test')
            self.assertEqual(get_file_json(tmp.name), output)

            uri = urljoin('file:', pathname2url(tmp.name))
            output = _load_file_string_or_uri(uri, 'test')
            self.assertEqual(get_file_json(tmp.name), output)

            os.unlink(tmp.name)

        output = _load_file_string_or_uri(data, 'test')
        self.assertEqual(shell_safe_json_parse(data), output)

        self.assertEqual(None, _load_file_string_or_uri(None, 'test', required=False))
        self.assertRaises(CLIError, _load_file_string_or_uri, None, 'test')
def get_file_or_parse_json(value, value_type):
    if os.path.exists(value):
        return get_file_json(value)

    # Test if provided value is a valid json
    try:
        json_parse = shell_safe_json_parse(value)
    except:
        raise CLIError("The supplied input for '" + value_type + "' is not a valid file path or a valid json object.")
    else:
        return json_parse
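A quick usage sketch of get_file_or_parse_json with hypothetical inputs: an existing file path is read with get_file_json, a valid inline JSON string is parsed, and anything else raises a CLIError naming the supplied value_type.

# Hypothetical calls covering the three outcomes of get_file_or_parse_json.
info = get_file_or_parse_json('{"server": "myregistry.azurecr.io"}', 'registry-info')  # inline JSON
# info = get_file_or_parse_json('registry-info.json', 'registry-info')                 # read from disk
# get_file_or_parse_json('neither-json-nor-a-file', 'registry-info')                   # raises CLIError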
    def test_deployment_parameters(self):

        curr_dir = os.path.dirname(os.path.realpath(__file__))
        template_path = os.path.join(curr_dir,
                                     'param-validation-template.json').replace(
                                         '\\', '\\\\')
        parameters_path = os.path.join(curr_dir,
                                       'param-validation-params.json').replace(
                                           '\\', '\\\\')

        template = get_file_json(template_path, preserve_order=True)
        template_param_defs = template.get('parameters', {})

        # test different ways of passing in parameters
        tests = [
            {  # empty JSON works
                "parameter_list": [["{}"]],
                "expected": {},
            },
            {  # empty parameters works
                "parameter_list": [],
                "expected": {},
            },
            {  # loading from file
                "parameter_list": [[parameters_path]],
                "expected": {"stringParam": {"value": "foo"}, "intParam": {"value": 10}, "madeupParam": {"value": "bar"}},
            },
            {  # KEY=VALUE syntax with extra equal sign
                "parameter_list": [['stringParam=foo=bar']],
                "expected": {"stringParam": {"value": "foo=bar"}},
            },
            {  # raw JSON (same as @file)
                "parameter_list": [['{\"stringParam\": {\"value\": \"foo\"}}']],
                "expected": {"stringParam": {"value": "foo"}},
            },
            {  # multiple KEY=VALUE
                "parameter_list": [['stringParam=foo', 'intParam=3']],
                "expected": {"stringParam": {"value": "foo"}, "intParam": {"value": 3}},
            },
            {  # KEY=VALUE where last in wins
                "parameter_list": [['stringParam=foo', 'stringParam=bar']],
                "expected": {"stringParam": {"value": "bar"}},
            },
            {  # file loading overridden by KEY=VALUE
                "parameter_list": [[parameters_path], ['stringParam=bar']],
                "expected": {"stringParam": {"value": "bar"}, "intParam": {"value": 10}, "madeupParam": {"value": "bar"}},
            }
        ]

        for i, test in enumerate(tests):
            parameter_list = test['parameter_list']
            result_parameters = _process_parameters(template_param_defs,
                                                    parameter_list)
            self.assertEqual(result_parameters, test['expected'], i)
Example #44
def create_task(client,
                job_id, json_file=None, task_id=None, command_line=None, resource_files=None,
                environment_settings=None, affinity_id=None, max_wall_clock_time=None,
                retention_time=None, max_task_retry_count=None,
                application_package_references=None):
    task = None
    tasks = []
    if json_file:
        json_obj = get_file_json(json_file)
        try:
            task = TaskAddParameter.from_dict(json_obj)
        except (DeserializationError, TypeError):
            try:
                task_collection = TaskAddCollectionParameter.from_dict(json_obj)
                tasks = task_collection.value
            except (DeserializationError, TypeError):
                try:
                    for json_task in json_obj:
                        tasks.append(TaskAddParameter.from_dict(json_task))
                except (DeserializationError, TypeError):
                    raise ValueError("JSON file '{}' is not formatted correctly.".format(json_file))
    else:
        if command_line is None or task_id is None:
            raise ValueError("Missing required arguments.\nEither --json-file, "
                             "or both --task-id and --command-line must be specified.")
        task = TaskAddParameter(
            id=task_id,
            command_line=command_line,
            resource_files=resource_files,
            environment_settings=environment_settings,
            affinity_info=AffinityInformation(affinity_id=affinity_id) if affinity_id else None,
            application_package_references=application_package_references)
        if max_wall_clock_time is not None or retention_time is not None \
                or max_task_retry_count is not None:
            task.constraints = TaskConstraints(max_wall_clock_time=max_wall_clock_time,
                                               retention_time=retention_time,
                                               max_task_retry_count=max_task_retry_count)
    if task is not None:
        client.add(job_id=job_id, task=task)
        return client.get(job_id=job_id, task_id=task.id)

    submitted_tasks = []
    for i in range(0, len(tasks), MAX_TASKS_PER_REQUEST):
        submission = client.add_collection(
            job_id=job_id,
            value=tasks[i:i + MAX_TASKS_PER_REQUEST])
        submitted_tasks.extend(submission.value)  # pylint: disable=no-member
    return submitted_tasks
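
The tail of the function submits large task lists in fixed-size slices rather than one call per task. The same chunking pattern in isolation (MAX_TASKS_PER_REQUEST is assumed here to be 100, the documented cap for a single add-task-collection request; the real constant is defined elsewhere in the module):

MAX_TASKS_PER_REQUEST = 100  # assumed value for illustration

def chunked(items, size=MAX_TASKS_PER_REQUEST):
    """Yield successive fixed-size slices of items."""
    for start in range(0, len(items), size):
        yield items[start:start + size]

# Usage mirroring the loop above:
#     submitted = []
#     for batch in chunked(tasks):
#         submitted.extend(client.add_collection(job_id=job_id, value=batch).value)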
def create_volume(client, resource_group_name,
                  name, location,
                  template_file=None, template_uri=None):
    """Create a volume. """
    volume_properties = None

    if template_uri:
        volume_properties = shell_safe_json_parse(_urlretrieve(template_uri).decode('utf-8'), preserve_order=True)
    elif template_file:
        volume_properties = get_file_json(template_file, preserve_order=True)
        volume_properties = json.loads(json.dumps(volume_properties))
    else:
        raise CLIError('One of --template-file or --template-uri has to be specified')

    volume_properties['location'] = location
    return client.create(resource_group_name, name, volume_properties)
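
_urlretrieve is a private helper of the same module and is not shown; a self-contained sketch of the two loading paths (remote URI versus local file) using only the standard library might look like this:

import json
from urllib.request import urlopen

def load_template(template_file=None, template_uri=None):
    # Mirrors the either/or handling above: URI first, then file, otherwise error out.
    if template_uri:
        return json.loads(urlopen(template_uri).read().decode('utf-8'))
    if template_file:
        with open(template_file) as template:
            return json.load(template)
    raise ValueError('One of --template-file or --template-uri has to be specified')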
Example #46
0
def scaffold_autoscale_settings_parameters(client):  # pylint: disable=unused-argument
    """Scaffold fully formed autoscale-settings' parameters as json template """

    import os.path
    from knack.util import CLIError
    from azure.cli.core.util import get_file_json

    # Autoscale settings parameter scaffold file path
    curr_dir = os.path.dirname(os.path.realpath(__file__))
    autoscale_settings_parameter_file_path = os.path.join(
        curr_dir, 'autoscale-parameters-template.json')

    if not os.path.exists(autoscale_settings_parameter_file_path):
        raise CLIError('File {} not found.'.format(autoscale_settings_parameter_file_path))

    return get_file_json(autoscale_settings_parameter_file_path)
Example #47
0
def create_appliance(resource_group_name,
                     appliance_name, managedby_resource_group_id,
                     location, kind, managedapp_definition_id=None,
                     plan_name=None, plan_publisher=None, plan_product=None,
                     plan_version=None, tags=None, parameters=None):
    """ Create a new managed application.
    :param str resource_group_name:the desired resource group name
    :param str appliance_name:the managed application name
    :param str kind:the managed application kind. can be marketplace or servicecatalog
    :param str plan_name:the managed application package plan name
    :param str plan_publisher:the managed application package plan publisher
    :param str plan_product:the managed application package plan product
    :param str plan_version:the managed application package plan version
    :param str tags:tags in 'a=b c' format
    """
    racf = _resource_managedapps_client_factory()
    appliance = Appliance(
        location=location,
        managed_resource_group_id=managedby_resource_group_id,
        kind=kind,
        tags=tags
    )

    if kind.lower() == 'servicecatalog':
        if managedapp_definition_id:
            appliance.appliance_definition_id = managedapp_definition_id
        else:
            raise CLIError('--managedapp-definition-id is required if kind is ServiceCatalog')
    elif kind.lower() == 'marketplace':
        if (plan_name is None and plan_product is None and
                plan_publisher is None and plan_version is None):
            raise CLIError('--plan-name, --plan-product, --plan-publisher and '
                           '--plan-version are all required if kind is MarketPlace')
        else:
            appliance.plan = Plan(plan_name, plan_publisher, plan_product, plan_version)

    appliance_parameters = None

    if parameters:
        if os.path.exists(parameters):
            appliance_parameters = get_file_json(parameters)
        else:
            appliance_parameters = shell_safe_json_parse(parameters)

    appliance.parameters = appliance_parameters

    return racf.appliances.create_or_update(resource_group_name, appliance_name, appliance)
def _create_update_role_definition(role_definition, for_update):
    definitions_client = _auth_client_factory().role_definitions
    if os.path.exists(role_definition):
        role_definition = get_file_json(role_definition)
    else:
        role_definition = shell_safe_json_parse(role_definition)

    # to workaround service defects, ensure property names are camel case
    names = [p for p in role_definition if p[:1].isupper()]
    for n in names:
        new_name = n[:1].lower() + n[1:]
        role_definition[new_name] = role_definition.pop(n)

    role_name = role_definition.get('name', None)
    if not role_name:
        raise CLIError("please provide role name")
    if for_update:  # for update, we need to use guid style unique name
        scopes_in_definition = role_definition.get('assignableScopes', None)
        scope = (scopes_in_definition[0] if scopes_in_definition else
                 '/subscriptions/' + definitions_client.config.subscription_id)
        matched = _search_role_definitions(definitions_client, role_name, scope)
        if len(matched) != 1:
            raise CLIError('Please provide the unique logical name of an existing role')
        role_definition['name'] = matched[0].name
        # ensure correct logical name and guid name. For update we accept both
        role_name = matched[0].properties.role_name
        role_id = matched[0].name
    else:
        role_id = uuid.uuid4()

    if not for_update and 'assignableScopes' not in role_definition:
        raise CLIError("please provide 'assignableScopes'")

    permission = Permission(actions=role_definition.get('actions', None),
                            not_actions=role_definition.get('notActions', None))
    properties = RoleDefinitionProperties(role_name=role_name,
                                          description=role_definition.get('description', None),
                                          type=_CUSTOM_RULE,
                                          assignable_scopes=role_definition['assignableScopes'],
                                          permissions=[permission])

    definition = RoleDefinition(name=role_id, properties=properties)

    return definitions_client.create_or_update(role_definition_id=role_id,
                                               scope=properties.assignable_scopes[0],
                                               role_definition=definition)
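
The key-renaming loop near the top of _create_update_role_definition only lower-cases the first letter of each top-level property name. Pulled out as a standalone helper for illustration (not part of the CLI):

def camel_case_keys(definition):
    """Return a copy of the dict with the first letter of every top-level key lower-cased."""
    return {key[:1].lower() + key[1:]: value for key, value in definition.items()}

# camel_case_keys({'Name': 'Reader Plus', 'AssignableScopes': ['/subscriptions/xxxx']})
# -> {'name': 'Reader Plus', 'assignableScopes': ['/subscriptions/xxxx']}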
Example #49
0
def update_policy_definition(policy_definition_name, rules=None,
                             display_name=None, description=None):
    if rules is not None:
        if os.path.exists(rules):
            rules = get_file_json(rules)
        else:
            rules = shell_safe_json_parse(rules)

    policy_client = _resource_policy_client_factory()
    definition = policy_client.policy_definitions.get(policy_definition_name)
    # pylint: disable=line-too-long,no-member
    PolicyDefinition = get_sdk(ResourceType.MGMT_RESOURCE_POLICY, 'PolicyDefinition', mod='models')
    parameters = PolicyDefinition(
        policy_rule=rules if rules is not None else definition.policy_rule,
        description=description if description is not None else definition.description,
        display_name=display_name if display_name is not None else definition.display_name)
    return policy_client.policy_definitions.create_or_update(policy_definition_name, parameters)
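
The update is a read-modify-write: fetch the existing definition, then prefer any newly supplied field over the stored one. The "new value if given, otherwise keep the existing one" pattern used three times above, as a tiny illustrative helper:

def prefer_new(new_value, existing_value):
    """Return the newly supplied value unless it is None, in which case keep the existing one."""
    return new_value if new_value is not None else existing_value

# e.g. policy_rule=prefer_new(rules, definition.policy_rule), and likewise for
# description and display_name.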
    def test_deployment_prompt_alphabetical_order(self):
        # check that params are prompted for in alphabetical order when the file is loaded with preserve_order=False
        curr_dir = os.path.dirname(os.path.realpath(__file__))
        template_path = os.path.join(curr_dir, 'param-validation-template.json').replace('\\', '\\\\')
        parameters_path = os.path.join(curr_dir, 'param-validation-params.json').replace('\\', '\\\\')
        parameters_with_reference_path = os.path.join(curr_dir, 'param-validation-ref-params.json').replace('\\', '\\\\')

        template = get_file_json(template_path, preserve_order=False)
        template_param_defs = template.get('parameters', {})

        parameter_list = [[parameters_path], [parameters_with_reference_path]]
        result_parameters = _process_parameters(template_param_defs, parameter_list)
        missing_parameters = _find_missing_parameters(result_parameters, template)

        param_alpha_order = ["[u'arrayParam', u'boolParam', u'enumParam', u'objectParam', u'secureParam']",
                             "['arrayParam', 'boolParam', 'enumParam', 'objectParam', 'secureParam']"]
        results = _prompt_for_parameters(dict(missing_parameters), fail_on_no_tty=False)
        self.assertTrue(str(list(results.keys())) in param_alpha_order)
Example #51
0
def deploy_arm_template(cli_ctx, resource_group_name,  # pylint: disable=too-many-arguments
                        template_file=None, deployment_name=None,
                        parameters=None, mode=None):
    DeploymentProperties, _ = get_sdk(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
                                      'DeploymentProperties', 'TemplateLink', mod='models')
    template = get_file_json(template_file, preserve_order=True)
    template_obj = template

    template_obj['resources'] = template_obj.get('resources', [])
    parameters = _process_parameters(parameters) or {}

    import json
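    # round-trip through json to turn OrderedDicts and other mappings into plain dicts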
    template = json.loads(json.dumps(template))
    parameters = json.loads(json.dumps(parameters))

    properties = DeploymentProperties(template=template, template_link=None,
                                      parameters=parameters, mode=mode)

    smc = get_mgmt_service_client(cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES)
    return smc.deployments.create_or_update(resource_group_name, deployment_name, properties, raw=False)
Example #52
0
def update_pool(client,
                pool_id, json_file=None, start_task_command_line=None, certificate_references=None,
                application_package_references=None, metadata=None,
                start_task_environment_settings=None, start_task_wait_for_success=None,
                start_task_max_task_retry_count=None):
    if json_file:
        json_obj = get_file_json(json_file)
        param = None
        try:
            param = PoolUpdatePropertiesParameter.from_dict(json_obj)
        except DeserializationError:
            pass
        if not param:
            raise ValueError("JSON file '{}' is not in correct format.".format(json_file))

        if param.certificate_references is None:
            param.certificate_references = []
        if param.metadata is None:
            param.metadata = []
        if param.application_package_references is None:
            param.application_package_references = []
    else:
        if certificate_references is None:
            certificate_references = []
        if metadata is None:
            metadata = []
        if application_package_references is None:
            application_package_references = []
        param = PoolUpdatePropertiesParameter(
            certificate_references=certificate_references,
            application_package_references=application_package_references,
            metadata=metadata)

        if start_task_command_line:
            param.start_task = StartTask(command_line=start_task_command_line,
                                         environment_settings=start_task_environment_settings,
                                         wait_for_success=start_task_wait_for_success,
                                         max_task_retry_count=start_task_max_task_retry_count)
    client.update_properties(pool_id=pool_id, pool_update_properties_parameter=param)
    return client.get(pool_id)
    def test_deployment_missing_values(self):

        curr_dir = os.path.dirname(os.path.realpath(__file__))
        template_path = os.path.join(curr_dir, 'param-validation-template.json').replace('\\', '\\\\')
        parameters_path = os.path.join(curr_dir, 'param-validation-params.json').replace('\\', '\\\\')
        parameters_with_reference_path = os.path.join(curr_dir, 'param-validation-ref-params.json').replace('\\', '\\\\')

        template = get_file_json(template_path, preserve_order=True)
        template_param_defs = template.get('parameters', {})

        parameter_list = [[parameters_path], [parameters_with_reference_path]]
        result_parameters = _process_parameters(template_param_defs, parameter_list)
        missing_parameters = _find_missing_parameters(result_parameters, template)

        # ensure that parameters with default values are not considered missing
        params_with_defaults = [x for x in template_param_defs if 'defaultValue' in template_param_defs[x]]
        for item in params_with_defaults:
            self.assertTrue(item not in missing_parameters)

        # ensure that a parameter that specifies a reference does not prompt
        self.assertTrue('secretReference' not in missing_parameters)
        self.assertTrue('secretReference' in result_parameters)
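
_find_missing_parameters is another internal helper; a rough sketch of the contract the assertions above rely on, assuming "missing" means declared in the template, lacking a defaultValue, and not already provided (a key vault reference supplied in a parameter file counts as provided):

def find_missing_parameters(provided, template):
    # Sketch only; the real CLI helper may differ in details.
    definitions = template.get('parameters', {})
    provided = provided or {}
    return {name: definition for name, definition in definitions.items()
            if 'defaultValue' not in definition and name not in provided}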
def arm_deploy_template_existing_storage(resource_group_name, #pylint: disable=too-many-arguments
                                         registry_name,
                                         location,
                                         sku,
                                         storage_account_name,
                                         admin_user_enabled,
                                         deployment_name=None):
    '''Deploys ARM template to create a container registry with an existing storage account.
    :param str resource_group_name: The name of resource group
    :param str registry_name: The name of container registry
    :param str location: The name of location
    :param str sku: The SKU of the container registry
    :param str storage_account_name: The name of storage account
    :param bool admin_user_enabled: Enable admin user
    :param str deployment_name: The name of the deployment
    '''
    from azure.mgmt.resource.resources.models import DeploymentProperties
    from azure.cli.core.util import get_file_json
    import os

    storage_account_resource_group = get_resource_group_name_by_storage_account_name(
        storage_account_name)

    parameters = _parameters(
        registry_name=registry_name,
        location=location,
        sku=sku,
        admin_user_enabled=admin_user_enabled,
        storage_account_name=storage_account_name,
        storage_account_resource_group=storage_account_resource_group)

    file_path = os.path.join(os.path.dirname(__file__), 'template_existing_storage.json')
    template = get_file_json(file_path)
    properties = DeploymentProperties(template=template, parameters=parameters, mode='incremental')

    return _arm_deploy_template(
        get_arm_service_client().deployments, resource_group_name, deployment_name, properties)
Example #55
0
    def test_resource_policyset(self, resource_group):
        curr_dir = os.path.dirname(os.path.realpath(__file__))

        self.kwargs.update({
            'pn': self.create_random_name('azure-cli-test-policy', 30),
            'pdn': self.create_random_name('test_policy', 20),
            'desc': 'desc_for_test_policy_123',
            'psn': self.create_random_name('azure-cli-test-policyset', 30),
            'psdn': self.create_random_name('test_policyset', 20),
            'ps_desc': 'desc_for_test_policyset_123',
            'rf': os.path.join(curr_dir, 'sample_policy_rule.json').replace('\\', '\\\\'),
            'psf': os.path.join(curr_dir, 'sample_policy_set.json').replace('\\', '\\\\'),
            'pdf': os.path.join(curr_dir, 'sample_policy_param_def.json').replace('\\', '\\\\')
        })

        # create a policy
        policy = self.cmd('policy definition create -n {pn} --rules {rf} --params {pdf} --display-name {pdn} --description {desc}').get_output_in_json()

        # create a policy set
        policyset = get_file_json(self.kwargs['psf'])
        policyset[0]['policyDefinitionId'] = policy['id']
        with open(os.path.join(curr_dir, 'sample_policy_set.json'), 'w') as outfile:
            json.dump(policyset, outfile)
        self.cmd('policy set-definition create -n {psn} --definitions @"{psf}" --display-name {psdn} --description {ps_desc}', checks=[
            self.check('name', '{psn}'),
            self.check('displayName', '{psdn}'),
            self.check('description', '{ps_desc}')
        ])

        # update it
        self.kwargs['ps_desc'] = self.kwargs['ps_desc'] + '_new'
        self.cmd('policy set-definition update -n {psn} --description {ps_desc}',
                 checks=self.check('description', '{ps_desc}'))

        # list and show it
        self.cmd('policy set-definition list',
                 checks=self.check("length([?name=='{psn}'])", 1))
        self.cmd('policy set-definition show -n {psn}', checks=[
            self.check('name', '{psn}'),
            self.check('displayName', '{psdn}')
        ])

        # create a policy assignment on a resource group
        self.kwargs.update({
            'pan': self.create_random_name('azurecli-test-policy-assignment', 40),
            'padn': self.create_random_name('test_assignment', 20)
        })
        self.cmd('policy assignment create -d {psn} -n {pan} --display-name {padn} -g {rg}', checks=[
            self.check('name', '{pan}'),
            self.check('displayName', '{padn}'),
            self.check('sku.name', 'A0'),
            self.check('sku.tier', 'Free'),
        ])

        # delete the assignment
        self.cmd('policy assignment delete -n {pan} -g {rg}')
        self.cmd('policy assignment list --disable-scope-strict-match')

        # delete the policy set
        self.cmd('policy set-definition delete -n {psn}')
        time.sleep(10)  # ensure the policy is gone when run live.
        self.cmd('policy set-definition list',
                 checks=self.check("length([?name=='{psn}'])", 0))

        # delete the policy
        self.cmd('policy definition delete -n {pn}')
        time.sleep(10)  # ensure the policy is gone when run live.
        self.cmd('policy definition list',
                 checks=self.check("length([?name=='{pn}'])", 0))
Example #56
0
    def test_resource_policyset(self, resource_group):
        policy_name = self.create_random_name('azure-cli-test-policy', 30)
        policy_display_name = self.create_random_name('test_policy', 20)
        policy_description = 'desc_for_test_policy_123'
        policyset_name = self.create_random_name('azure-cli-test-policyset', 30)
        policyset_display_name = self.create_random_name('test_policyset', 20)
        policyset_description = 'desc_for_test_policyset_123'
        curr_dir = os.path.dirname(os.path.realpath(__file__))
        rules_file = os.path.join(curr_dir, 'sample_policy_rule.json').replace('\\', '\\\\')
        policyset_file = os.path.join(curr_dir, 'sample_policy_set.json').replace('\\', '\\\\')
        params_def_file = os.path.join(curr_dir, 'sample_policy_param_def.json').replace('\\', '\\\\')

        # create a policy
        policycreatecmd = 'policy definition create -n {} --rules {} --params {} --display-name {} --description {}'
        policy = self.cmd(policycreatecmd.format(policy_name, rules_file, params_def_file, policy_display_name,
                                                 policy_description)).get_output_in_json()

        # create a policy set
        policyset = get_file_json(policyset_file)
        policyset[0]['policyDefinitionId'] = policy['id']
        with open(os.path.join(curr_dir, 'sample_policy_set.json'), 'w') as outfile:
            json.dump(policyset, outfile)
        self.cmd('policy set-definition create -n {} --definitions @"{}" --display-name {} --description {}'.format(
                 policyset_name, policyset_file, policyset_display_name, policyset_description),
                 checks=[
                    JCheck('name', policyset_name),
                    JCheck('displayName', policyset_display_name),
                    JCheck('description', policyset_description)
                ])

        # update it
        new_policyset_description = policy_description + '_new'
        self.cmd('policy set-definition update -n {} --description {}'.format(policyset_name, new_policyset_description),
                 checks=JCheck('description', new_policyset_description))

        # list and show it
        self.cmd('policy set-definition list', checks=JMESPathCheck("length([?name=='{}'])".format(policyset_name), 1))
        self.cmd('policy set-definition show -n {}'.format(policyset_name), checks=[
            JCheck('name', policyset_name),
            JCheck('displayName', policyset_display_name)
        ])

        # create a policy assignment on a resource group
        policy_assignment_name = self.create_random_name('azurecli-test-policy-assignment', 40)
        policy_assignment_display_name = self.create_random_name('test_assignment', 20)
        self.cmd('policy assignment create -d {} -n {} --display-name {} -g {}'.format(
                 policyset_name, policy_assignment_name, policy_assignment_display_name, resource_group),
                 checks=[
                    JCheck('name', policy_assignment_name),
                    JCheck('displayName', policy_assignment_display_name),
                    JCheck('sku.name', 'A0'),
                    JCheck('sku.tier', 'Free'),
                 ])

        # delete the assignment
        self.cmd('policy assignment delete -n {} -g {}'.format(policy_assignment_name, resource_group))
        self.cmd('policy assignment list --disable-scope-strict-match')

        # delete the policy set
        self.cmd('policy set-definition delete -n {}'.format(policyset_name))
        time.sleep(10)  # ensure the policy is gone when run live.
        self.cmd('policy set-definition list', checks=JCheck("length([?name=='{}'])".format(policyset_name), 0))
def _load_autoscale_settings_parameters(file_path):
    if not os.path.exists(file_path):
        raise CLIError('File {} not found.'.format(file_path))

    return get_file_json(file_path)
def _try_load_file_object(value):
    if os.path.isfile(value):
        parsed = get_file_json(value, throw_on_empty=False)
        return parsed.get('parameters', parsed)
    return None
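
The parsed.get('parameters', parsed) fallback lets the helper accept either a full ARM deployment-parameters file or a bare parameters object; both of the shapes below resolve to the same dict:

# Full parameters file: the nested "parameters" object is returned.
full_file = {
    "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
    "contentVersion": "1.0.0.0",
    "parameters": {"stringParam": {"value": "foo"}}
}

# Bare parameters object: returned as-is.
bare_object = {"stringParam": {"value": "foo"}}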