def set_encryption(client, resource_group_name, account_name, key_type=None,
                   current_key_id=None, key_identifier=None):
    """Set the encryption configuration on a Media Services account.

    :param client: Media Services account operations client.
    :param resource_group_name: Resource group of the account; when falsy,
        the account is resolved across the subscription instead.
    :param account_name: Name of the Media Services account.
    :param key_type: Encryption key type; 'CustomerKey' enables
        customer-managed keys backed by Key Vault.
    :param current_key_id: Identifier of the key currently in use.
    :param key_identifier: Key Vault key identifier (customer key only).
    :raises BadRequestError: when the service rejects the request; includes a
        key-vault-access recommendation for the known forbidden-key error.
    """
    try:
        account_info = client.get(resource_group_name, account_name) \
            if resource_group_name else client.get_by_subscription(account_name)

        # Key Vault properties are only meaningful for customer-managed keys.
        if key_type == 'CustomerKey':
            key_vault_props = KeyVaultProperties(
                key_identifier=key_identifier,
                current_key_identifier=current_key_id)
        else:
            key_vault_props = None

        encryption = AccountEncryption(type=key_type,
                                       key_vault_properties=key_vault_props)

        # Rebuild the account payload, carrying over the existing settings.
        media_service = MediaService(
            location=account_info.location,
            identity=account_info.identity,
            storage_accounts=account_info.storage_accounts,
            encryption=encryption)

        return client.create_or_update(resource_group_name, account_name,
                                       media_service)
    except HttpResponseError as ex:
        recommendation = ''
        if ex.message == '(BadRequest) Access to the Customer Key was forbidden.':
            # BUGFIX: the two fragments previously joined with no separating
            # space, rendering "...media account.For more information...".
            recommendation = ('Please use the Azure Portal to grant the key vault '
                              'access to the media account. '
                              'For more information please visit '
                              'https://aka.ms/keyvaultaccess')
        raise BadRequestError(ex, recommendation)
def unpack(cmd, exported_template, target_dir, template_file_name):
    """Unpack an exported template spec into ``target_dir``.

    Writes each linked-template artifact (relative to ``target_dir``) and the
    main template file, refusing any artifact path that would escape the
    target directory hierarchy.

    :param cmd: CLI command context (used to resolve the SDK model type).
    :param exported_template: Object exposing ``main_template`` and
        ``linked_templates``.
    :param target_dir: Directory to unpack into (created if missing).
    :param template_file_name: File name for the main template.
    :returns: The normalized absolute target directory.
    :raises BadRequestError: if an artifact would land outside ``target_dir``.
    :raises CLIError: if an artifact is not a LinkedTemplateArtifact.
    """
    packaged_template = PackagedTemplate(exported_template.main_template,
                                         exported_template.linked_templates)

    # Ensure paths are normalized:
    template_file_name = os.path.basename(template_file_name)
    target_dir = os.path.abspath(target_dir).rstrip(os.sep)
    # BUGFIX: os.altsep is None on POSIX and str.rstrip(None) strips
    # whitespace, not the separator — only strip it when it exists.
    if os.altsep:
        target_dir = target_dir.rstrip(os.altsep)
    root_template_file_path = os.path.join(target_dir, template_file_name)

    # Iterate through artifacts to ensure no artifact will be placed
    # outside of the target directory (path-traversal guard):
    artifacts = getattr(packaged_template, 'Artifacts')
    if artifacts is not None:
        for artifact in artifacts:
            local_path = os.path.join(
                target_dir,
                _normalize_directory_seperators_for_local_file_system(
                    getattr(artifact, 'path')))
            abs_local_path = os.path.abspath(local_path)
            if os.path.commonpath([target_dir]) != os.path.commonpath(
                    [target_dir, abs_local_path]):
                # BUGFIX: message fragments previously joined without spaces.
                raise BadRequestError(
                    'Unable to unpack linked template ' +
                    getattr(artifact, 'path') +
                    ' because it would create a file outside of the target '
                    'directory hierarchy of ' + target_dir)

        # Process each artifact:
        LinkedTemplateArtifact = get_sdk(
            cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS,
            'LinkedTemplateArtifact', mod='models')
        for artifact in artifacts:
            if not isinstance(artifact, LinkedTemplateArtifact):
                raise CLIError('Unknown linked template type encountered...')
            artifact_path = _normalize_directory_seperators_for_local_file_system(
                getattr(artifact, 'path'))
            abs_local_path = os.path.abspath(
                os.path.join(target_dir, artifact_path))
            if not os.path.exists(os.path.dirname(abs_local_path)):
                os.makedirs(os.path.dirname(abs_local_path))
            with open(abs_local_path, 'w') as artifact_file:
                json.dump(getattr(artifact, 'template'), artifact_file, indent=2)

    # Write our main template file:
    if not os.path.exists(target_dir):
        # BUGFIX: previously created os.path.dirname(target_dir) — the PARENT
        # of the target — so opening root_template_file_path could still fail.
        os.makedirs(target_dir)
    with open(root_template_file_path, 'w') as root_file:
        json.dump(getattr(packaged_template, 'RootTemplate'),
                  root_file, indent=2)
    return target_dir
def migration_create_func(cmd, client, resource_group_name, server_name,
                          properties, migration_name=None):
    """Create a flexible-server migration from a JSON properties file.

    Reads ``properties`` (path relative to the current working directory),
    forces ``TriggerCutover`` to 'true' in its "properties" object, and PUTs
    the payload against the private-preview migrations endpoint.

    :param cmd: CLI command context.
    :param client: Unused here; kept for command-signature compatibility.
    :param resource_group_name: Resource group of the target server.
    :param server_name: Name of the flexible server.
    :param properties: Path to the migration properties JSON file.
    :param migration_name: Optional migration name; a random UUID when omitted.
    :returns: Parsed JSON response of the PUT request.
    :raises FileOperationError: if the properties file does not exist.
    :raises BadRequestError: if the file is not valid/expected JSON.
    """
    subscription_id = get_subscription_id(cmd.cli_ctx)
    properties_filepath = os.path.join(os.path.abspath(os.getcwd()), properties)
    if not os.path.exists(properties_filepath):
        raise FileOperationError(
            "Properties file does not exist in the given location")
    with open(properties_filepath, "r") as f:
        try:
            request_payload = json.load(f)
            # BUGFIX: a payload without a "properties" object previously
            # raised an unhandled TypeError instead of BadRequestError.
            properties_obj = request_payload.get("properties")
            if not isinstance(properties_obj, dict):
                raise ValueError('missing or invalid "properties" object')
            properties_obj['TriggerCutover'] = 'true'
            json_data = json.dumps(request_payload)
        except ValueError as err:
            logger.error(err)
            raise BadRequestError(
                "Invalid json file. Make sure that the json file content is properly formatted."
            ) from err
    if migration_name is None:
        # Convert a UUID to a string of hex digits in standard form
        migration_name = str(uuid.uuid4())
    r = send_raw_request(
        cmd.cli_ctx, "put",
        "https://management.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.DBforPostgreSQL/flexibleServers/{}/migrations/{}?api-version=2020-02-14-privatepreview"
        .format(subscription_id, resource_group_name, server_name, migration_name),
        None, None, json_data)
    return r.json()
def set_mru(client, cmd, resource_group_name, account_name, count=None, type=None):
    """Set the media reserved units (MRUs) for a legacy Media Services account.

    :param client: Media Services account operations client.
    :param cmd: CLI command context (used to build the v2 client).
    :param resource_group_name: Resource group of the account; when falsy the
        account is resolved across the subscription.
    :param account_name: Name of the Media Services account.
    :param count: Desired reserved unit count; current count when omitted.
    :param type: Friendly reserved unit type name; current type when omitted.
        (Shadows the builtin, but the name is part of the CLI interface.)
    :raises BadRequestError: for accounts on the 2020-05-01+ API, which
        auto-scale and no longer support MRUs.
    :raises CLIError: for an unrecognized --type value.
    """
    account_info = client.get(resource_group_name, account_name) \
        if resource_group_name else client.get_by_subscription(account_name)
    if account_info.encryption:
        raise BadRequestError(
            'The media reserved unit operation failed as the Media Services account was created'
            ' with the 2020-05-01 version of the API or later. Accounts created this way no'
            ' longer need to set media reserved units as the system will automatically'
            ' scale up and down based on load.')

    client = MediaV2Client(cmd.cli_ctx, resource_group_name, account_name)
    mru = client.get_mru()
    count = count if count is not None else int(mru['CurrentReservedUnits'])

    if type is None:
        type = int(mru['ReservedUnitType'])
    else:
        try:
            # Map the friendly type name back to its numeric key in _rut_dict.
            type = int(
                list(_rut_dict.keys())[list(_rut_dict.values()).index(type)])
        except ValueError as err:
            # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; list.index raises ValueError on unknown values.
            raise CLIError(
                'Invalid --type argument. Allowed values: {}.'.format(
                    ", ".join(get_mru_type_completion_list()))) from err

    client.set_mru(mru['AccountId'], count, type)
    return _map_mru(client.get_mru())
def refresh_application_credentials(self, object_id):
    """Append a fresh client secret to the AAD application and return it.

    Lists the application's existing password credentials, rejects any legacy
    credential with an empty identifier (those cannot be updated), then pushes
    the full credential list back with one new entry appended.

    :param object_id: Object id of the AAD application to refresh.
    :returns: The newly generated secret (a UUID).
    :raises BadRequestError: if a legacy credential with an empty
        custom_key_identifier is present.
    """
    new_secret = uuid.uuid4()
    new_key_id = uuid.uuid4()
    not_before = datetime.datetime.utcnow()
    # Far-future expiry. NOTE(review): not_before is naive while this is
    # UTC-aware — confirm the Graph API accepts the mix.
    expires = datetime.datetime(2299, 12, 31, tzinfo=datetime.timezone.utc)

    try:
        creds = list(self.client.applications.list_password_credentials(object_id))
    except GraphErrorException as e:
        logger.error(e.message)
        raise

    # keys created with older version of cli are not updatable
    # https://github.com/Azure/azure-sdk-for-python/issues/18131
    if any(cred.custom_key_identifier is None for cred in creds):
        raise BadRequestError(
            "Cluster AAD application contains a client secret with an empty identifier.\n"
            "Please either manually remove the existing client secret and run `az aro update --refresh-credentials`, \n"
            "or manually create a new client secret and run `az aro update --client-secret <ClientSecret>`.")

    # when appending credentials ALL fields must be present, otherwise
    # azure gives ambiguous errors about not being able to update old keys
    creds.append(PasswordCredential(
        custom_key_identifier=str(not_before).encode(),  # bytearray
        key_id=str(new_key_id),
        start_date=not_before,
        end_date=expires,
        value=new_secret))
    self.client.applications.update_password_credentials(object_id, creds)

    return new_secret
def set_encryption(client, resource_group_name, account_name, key_type=None,
                   current_key_id=None, key_identifier=None,
                   system_assigned=None, user_assigned=None):
    """Set the encryption configuration on a Media Services account.

    :param client: Media Services account operations client.
    :param resource_group_name: Resource group of the account; when falsy the
        account is resolved across the subscription.
    :param account_name: Name of the Media Services account.
    :param key_type: AccountEncryptionKeyType value; CUSTOMER_KEY enables
        customer-managed keys backed by Key Vault.
    :param current_key_id: Identifier of the key currently in use.
    :param key_identifier: Key Vault key identifier (customer key only).
    :param system_assigned: Use the system-assigned identity for key access.
    :param user_assigned: User-assigned identity resource id for key access
        (takes precedence over system_assigned).
    :raises BadRequestError: when the service rejects the request; includes a
        key-vault-access recommendation for the known forbidden-key error.
    """
    try:
        account_info: MediaService = client.get(resource_group_name, account_name)\
            if resource_group_name else client.get_by_subscription(account_name)

        if key_type == AccountEncryptionKeyType.CUSTOMER_KEY:
            key_vault_props = KeyVaultProperties(
                key_identifier=key_identifier,
                current_key_identifier=current_key_id)
            # Choose the identity that will access the key vault.
            if user_assigned:
                identity = ResourceIdentity(
                    use_system_assigned_identity=False,
                    user_assigned_identity=user_assigned)
            elif system_assigned:
                identity = ResourceIdentity(
                    use_system_assigned_identity=True,
                    user_assigned_identity=None)
            else:
                identity = None
            encryption = AccountEncryption(
                type=key_type,
                key_vault_properties=key_vault_props,
                identity=identity)
        else:
            encryption = AccountEncryption(
                type=key_type, key_vault_properties=None, identity=None)

        # Rebuild the account payload, carrying over the existing settings.
        media_service: MediaService = MediaService(
            location=account_info.location,
            storage_accounts=account_info.storage_accounts,
            key_delivery=account_info.key_delivery,
            identity=account_info.identity,
            encryption=encryption,
            storage_authentication=account_info.storage_authentication,
            name=account_info.name,
            public_network_access=account_info.public_network_access)

        return client.create_or_update(resource_group_name, account_name,
                                       media_service)
    except HttpResponseError as ex:
        recommendation = ''
        if ex.message == '(BadRequest) Access to the Customer Key was forbidden.':
            # BUGFIX: the two fragments previously joined with no separating
            # space, rendering "...media account.For more information...".
            recommendation = ('Please use the Azure Portal to grant the key vault '
                              'access to the media account. '
                              'For more information please visit '
                              'https://aka.ms/keyvaultaccess')
        raise BadRequestError(ex, recommendation)
def get_mru(client, cmd, resource_group_name, account_name):
    """Return the media reserved unit (MRU) state for a legacy account.

    :param client: Media Services account operations client.
    :param cmd: CLI command context (used to build the v2 client).
    :param resource_group_name: Resource group of the account; when falsy the
        account is resolved across the subscription.
    :param account_name: Name of the Media Services account.
    :raises BadRequestError: for accounts on the 2020-05-01+ API, which
        auto-scale and no longer expose MRUs.
    """
    if resource_group_name:
        account_info = client.get(resource_group_name, account_name)
    else:
        account_info = client.get_by_subscription(account_name)

    if account_info.encryption:
        raise BadRequestError(
            'The media reserved unit operation failed as the Media Services account was created'
            ' with the 2020-05-01 version of the API or later. Accounts created this way no'
            ' longer need to set media reserved units as the system will automatically'
            ' scale up and down based on load.')

    v2_client = MediaV2Client(cmd.cli_ctx, resource_group_name, account_name)
    return _map_mru(v2_client.get_mru())
def map_azure_error_to_cli_error(azure_error):
    """Translate an Azure SDK exception into the matching CLI error type.

    HTTP response errors are mapped by status code (exact codes first, then
    4xx/5xx ranges); transport-level request/response errors map to client
    and response errors; anything else becomes a generic ServiceError.

    :param azure_error: Exception raised by the Azure SDK.
    :returns: A CLI error instance wrapping the original message.
    """
    message = getattr(azure_error, "message", str(azure_error))

    if isinstance(azure_error, HttpResponseError):
        code = getattr(azure_error, "status_code", None)
        if code:  # falsy (None/0) status codes fall through to ServiceError
            code = int(code)
            exact_matches = {
                400: BadRequestError,
                401: UnauthorizedError,
                403: ForbiddenError,
                404: ResourceNotFoundError,
            }
            cli_error_type = exact_matches.get(code)
            if cli_error_type is not None:
                return cli_error_type(message)
            if 400 <= code < 500:
                return UnclassifiedUserFault(message)
            if 500 <= code < 600:
                return AzureInternalError(message)
        return ServiceError(message)

    if isinstance(azure_error, ServiceRequestError):
        return ClientRequestError(message)
    if isinstance(azure_error, ServiceResponseError):
        return AzureResponseError(message)
    return ServiceError(message)
def _pack_artifacts(cmd, template_abs_file_path, context):
    """
    Recursively packs the specified template and its referenced artifacts
    and adds the artifact(s) to the current packing context.
    :param template_abs_file_path: The path to the template spec .json file to pack.
    :type template_abs_file_path : str
    :param context : The packing context of the current packing operation
    :type context : PackingContext
    """
    original_directory = getattr(context, 'CurrentDirectory')
    try:
        context.CurrentDirectory = os.path.dirname(template_abs_file_path)
        template_content = read_file_content(template_abs_file_path)
        artifactable_template_obj = _remove_comments_from_json(template_content)
        template_link_to_artifact_objs = _get_template_links_to_artifacts(
            cmd, artifactable_template_obj, includeNested=True)

        for template_link_obj in template_link_to_artifact_objs:
            relative_path = str(template_link_obj['relativePath'])
            if not relative_path:
                continue
            # This is a templateLink to a local template... Get the absolute path of the
            # template based on its relative path from the current template directory and
            # make sure it exists:
            abs_local_path = os.path.join(getattr(context, 'CurrentDirectory'),
                                          relative_path)
            if not os.path.isfile(abs_local_path):
                # BUGFIX: missing space produced "...<path>not found."
                raise CLIError('File ' + abs_local_path + ' not found.')

            # Let's make sure we're not referencing a file outside of our root directory
            # hierarchy. We won't allow such references for security purposes:
            if (not os.path.commonpath([getattr(context, 'RootTemplateDirectory')]) ==
                    os.path.commonpath([getattr(context, 'RootTemplateDirectory'),
                                        abs_local_path])):
                # BUGFIX: message fragments previously joined without spaces.
                raise BadRequestError(
                    'Unable to handle the reference to file ' + abs_local_path +
                    ' from ' + template_abs_file_path +
                    ' because it exists outside of the root template directory of ' +
                    getattr(context, 'RootTemplateDirectory'))

            # Convert the template relative path to one that is relative to our root
            # directory path, and then if we haven't already processed that template into
            # an artifact elsewhere, we'll do so here...
            as_relative_path = _absolute_to_relative_path(
                getattr(context, 'RootTemplateDirectory'), abs_local_path)

            duplicate_file = False
            for prev_added_artifact in getattr(context, 'Artifact'):
                prev_added_artifact = os.path.join(
                    getattr(context, 'RootTemplateDirectory'),
                    getattr(prev_added_artifact, 'path'))
                if os.path.samefile(prev_added_artifact, abs_local_path):
                    duplicate_file = True
                    continue
            if duplicate_file:
                continue

            # Recurse into the linked template before recording it as an artifact.
            _pack_artifacts(cmd, abs_local_path, context)

            LinkedTemplateArtifact = get_sdk(
                cmd.cli_ctx, ResourceType.MGMT_RESOURCE_TEMPLATESPECS,
                'LinkedTemplateArtifact', mod='models')
            template_content = read_file_content(abs_local_path)
            template_json = json.loads(
                json.dumps(process_template(template_content)))
            artifact = LinkedTemplateArtifact(path=as_relative_path,
                                              template=template_json)
            context.Artifact.append(artifact)
    finally:
        # Always restore the caller's working directory on the context.
        context.CurrentDirectory = original_directory
def execute_query(client, graph_query, first, skip, subscriptions,
                  management_groups, allow_partial_scopes, skip_token):
    # type: (ResourceGraphClient, str, int, int, list[str], list[str], bool, str) -> object
    """Run a Resource Graph query and return its results as a dict.

    Scopes the query to at most __MANAGEMENT_GROUP_LIMIT management groups,
    or (when none are given) at most __SUBSCRIPTION_LIMIT subscriptions,
    warning when either list is trimmed.

    :raises BadRequestError: when the service reports a BadRequest error.
    :raises AzureInternalError: for any other service-side failure.
    """
    mgs_list = management_groups
    if mgs_list is not None and len(mgs_list) > __MANAGEMENT_GROUP_LIMIT:
        mgs_list = mgs_list[:__MANAGEMENT_GROUP_LIMIT]
        warning_message = "The query included more management groups than allowed. "\
                          "Only the first {0} management groups were included for the results. "\
                          "To use more than {0} management groups, "\
                          "see the docs for examples: "\
                          "https://aka.ms/arg-error-toomanysubs".format(__MANAGEMENT_GROUP_LIMIT)
        __logger.warning(warning_message)

    subs_list = None
    if mgs_list is None:
        subs_list = subscriptions or _get_cached_subscriptions()
        if subs_list is not None and len(subs_list) > __SUBSCRIPTION_LIMIT:
            subs_list = subs_list[:__SUBSCRIPTION_LIMIT]
            warning_message = "The query included more subscriptions than allowed. "\
                              "Only the first {0} subscriptions were included for the results. "\
                              "To use more than {0} subscriptions, "\
                              "see the docs for examples: "\
                              "https://aka.ms/arg-error-toomanysubs".format(__SUBSCRIPTION_LIMIT)
            __logger.warning(warning_message)

    response = None
    try:
        result_truncated = False
        request_options = QueryRequestOptions(
            top=first,
            skip=skip,
            skip_token=skip_token,
            result_format=ResultFormat.object_array,
            allow_partial_scopes=allow_partial_scopes)
        request = QueryRequest(query=graph_query,
                               subscriptions=subs_list,
                               management_groups=mgs_list,
                               options=request_options)
        response = client.resources(request)  # type: QueryResponse
        if response.result_truncated == ResultTruncated.true:
            result_truncated = True

        # Paging breaks when the service truncates a page short of `first`.
        if result_truncated and first is not None and len(response.data) < first:
            __logger.warning(
                "Unable to paginate the results of the query. "
                "Some resources may be missing from the results. "
                "To rewrite the query and enable paging, "
                "see the docs for an example: https://aka.ms/arg-results-truncated")
    except HttpResponseError as ex:
        # BUGFIX: ex.model (or its error) can be absent, in which case the old
        # code raised AttributeError and masked the real service failure.
        error = getattr(ex.model, 'error', None) if ex.model is not None else None
        if error is None:
            raise AzureInternalError(str(ex)) from ex
        if error.code == 'BadRequest':
            raise BadRequestError(json.dumps(_to_dict(error), indent=4)) from ex
        raise AzureInternalError(json.dumps(_to_dict(error), indent=4)) from ex

    result_dict = {
        'data': response.data,
        'count': response.count,
        'total_records': response.total_records,
        'skip_token': response.skip_token,
    }
    return result_dict