def disable_protection(cmd, client, resource_group_name, vault_name, item, delete_backup_data):
    """Stop protection for a SQL/SAP HANA workload item.

    :param item: protected item object whose ARM id encodes container and item URIs.
    :param delete_backup_data: when True, delete the protected item (and its backup
        data) instead of merely stopping protection.
    :return: tracked backup job result.
    :raises InvalidArgumentValueError: if the item is not a SQLDataBase or
        SAPHanaDatabase workload.
    """
    container_uri = cust_help.get_protection_container_uri_from_id(item.id)
    item_uri = cust_help.get_protected_item_uri_from_id(item.id)
    # The item URI is '<workloadType>;<...>' — the first segment identifies the workload.
    backup_item_type = item_uri.split(';')[0]
    if not cust_help.is_sql(backup_item_type) and not cust_help.is_hana(backup_item_type):
        # Consistent with update_policy_for_item: an unsupported workload type is an
        # argument-validation error (InvalidArgumentValueError subclasses CLIError,
        # so existing callers catching CLIError still work).
        raise InvalidArgumentValueError("Item must be either of type SQLDataBase or SAPHanaDatabase.")

    if delete_backup_data:
        # Deleting the protected item removes backup data as well.
        result = client.delete(vault_name, resource_group_name, fabric_name,
                               container_uri, item_uri,
                               cls=cust_help.get_pipeline_response)
        return cust_help.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)

    # ProtectionStopped with an empty policy id detaches the item from its policy
    # while retaining backup data.
    properties = _get_protected_item_instance(backup_item_type)
    properties.protection_state = 'ProtectionStopped'
    properties.policy_id = ''
    param = ProtectedItemResource(properties=properties)

    # Trigger disable protection and wait for completion
    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, param,
                                     cls=cust_help.get_pipeline_response)
    return cust_help.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
def update_policy_for_item(cmd, client, resource_group_name, vault_name, item, policy,
                           tenant_id=None, is_critical_operation=False):
    """Assign a new backup policy to a protected SQL/SAP HANA workload item.

    Handles ResourceGuard-protected ("critical") operations, including the
    cross-tenant case where an auxiliary tenant token is required.
    """
    if item.properties.backup_management_type != policy.properties.backup_management_type:
        raise CLIError(
            """
            The policy type should match with the workload being protected.
            Use the relevant get-default policy command and use it to update the policy for the workload.
            """)

    container_uri = cust_help.get_protection_container_uri_from_id(item.id)
    item_uri = cust_help.get_protected_item_uri_from_id(item.id)
    # First URI segment identifies the workload type.
    workload_type = item_uri.split(';')[0]
    if not (cust_help.is_sql(workload_type) or cust_help.is_hana(workload_type)):
        raise InvalidArgumentValueError("Item must be either of type SQLDataBase or SAPHanaDatabase.")

    updated_properties = _get_protected_item_instance(workload_type)
    updated_properties.policy_id = policy.id
    updated_item = ProtectedItemResource(properties=updated_properties)

    if is_critical_operation:
        current_policy_name = item.properties.policy_id.split('/')[-1]
        current_policy = common.show_policy(protection_policies_cf(cmd.cli_ctx),
                                            resource_group_name, vault_name, current_policy_name)
        if cust_help.is_retention_duration_decreased(current_policy, policy, "AzureWorkload"):
            # Retention is being reduced: attach the ResourceGuard operation request,
            # and for the cross-tenant case rebuild the client with an auxiliary token.
            if tenant_id is not None:
                client = get_mgmt_service_client(cmd.cli_ctx, RecoveryServicesBackupClient,
                                                 aux_tenants=[tenant_id]).protected_items
            updated_item.properties.resource_guard_operation_requests = [
                cust_help.get_resource_guard_operation_request(cmd.cli_ctx, resource_group_name,
                                                               vault_name, "updateProtection")]

    # Submit the policy update and track the resulting job.
    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, updated_item,
                                     cls=cust_help.get_pipeline_response)
    return cust_help.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
def update_policy_for_item(cmd, client, resource_group_name, vault_name, item, policy):
    """Point an already-protected Azure file share at a different backup policy."""
    if item.properties.backup_management_type != policy.properties.backup_management_type:
        raise CLIError("""
        The policy type should match with the workload being protected.
        Use the relevant get-default policy command and use it to update the policy for the workload.
        """)

    # Resolve container/item URIs from the protected item's ARM id.
    container_uri = helper.get_protection_container_uri_from_id(item.id)
    item_uri = helper.get_protected_item_uri_from_id(item.id)

    # Build the update payload: same source resource, new policy.
    properties = AzureFileshareProtectedItem()
    properties.policy_id = policy.id
    properties.source_resource_id = item.properties.source_resource_id
    payload = ProtectedItemResource(properties=properties)

    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, payload,
                                     cls=helper.get_pipeline_response)
    return helper.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
def update_policy_for_item(cmd, client, resource_group_name, vault_name, item, policy,
                           tenant_id=None, is_critical_operation=False):
    """Assign a new backup policy to a protected Azure file share.

    Handles ResourceGuard-protected ("critical") operations, including the
    cross-tenant case where an auxiliary tenant token is required.
    """
    if item.properties.backup_management_type != policy.properties.backup_management_type:
        raise CLIError("""
        The policy type should match with the workload being protected.
        Use the relevant get-default policy command and use it to update the policy for the workload.
        """)

    # Resolve container/item URIs from the protected item's ARM id.
    container_uri = helper.get_protection_container_uri_from_id(item.id)
    item_uri = helper.get_protected_item_uri_from_id(item.id)

    # Build the update payload: same source resource, new policy.
    properties = AzureFileshareProtectedItem()
    properties.policy_id = policy.id
    properties.source_resource_id = item.properties.source_resource_id
    payload = ProtectedItemResource(properties=properties)

    if is_critical_operation:
        current_policy_name = item.properties.policy_id.split('/')[-1]
        current_policy = common.show_policy(protection_policies_cf(cmd.cli_ctx),
                                            resource_group_name, vault_name, current_policy_name)
        if helper.is_retention_duration_decreased(current_policy, policy, "AzureStorage"):
            # Retention is being reduced: attach the ResourceGuard operation request,
            # and for the cross-tenant case rebuild the client with an auxiliary token.
            if tenant_id is not None:
                client = get_mgmt_service_client(cmd.cli_ctx, RecoveryServicesBackupClient,
                                                 aux_tenants=[tenant_id]).protected_items
            payload.properties.resource_guard_operation_requests = [
                helper.get_resource_guard_operation_request(cmd.cli_ctx, resource_group_name,
                                                            vault_name, "updateProtection")]

    # Submit the policy update and track the resulting job.
    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, payload,
                                     cls=helper.get_pipeline_response)
    return helper.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
def disable_protection(cmd, client, resource_group_name, vault_name, item):
    """Stop backup protection for an Azure file share item (backup data is retained)."""
    # Resolve container/item URIs from the protected item's ARM id.
    container_uri = helper.get_protection_container_uri_from_id(item.id)
    item_uri = helper.get_protected_item_uri_from_id(item.id)

    # ProtectionStopped with an empty policy id detaches the item from its policy.
    properties = AzureFileshareProtectedItem()
    properties.policy_id = ''
    properties.protection_state = ProtectionState.protection_stopped
    properties.source_resource_id = item.properties.source_resource_id
    payload = ProtectedItemResource(properties=properties)

    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, payload,
                                     cls=helper.get_pipeline_response)
    return helper.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
def update_policy_for_item(cmd, client, resource_group_name, vault_name, item, policy):
    """Assign a new backup policy to a protected SQL/SAP HANA workload item."""
    if item.properties.backup_management_type != policy.properties.backup_management_type:
        raise CLIError(
            """
            The policy type should match with the workload being protected.
            Use the relevant get-default policy command and use it to update the policy for the workload.
            """)

    container_uri = cust_help.get_protection_container_uri_from_id(item.id)
    item_uri = cust_help.get_protected_item_uri_from_id(item.id)
    # First URI segment identifies the workload type.
    workload_type = item_uri.split(';')[0]
    if not (cust_help.is_sql(workload_type) or cust_help.is_hana(workload_type)):
        raise InvalidArgumentValueError("Item must be either of type SQLDataBase or SAPHanaDatabase.")

    updated_properties = _get_protected_item_instance(workload_type)
    updated_properties.policy_id = policy.id
    updated_item = ProtectedItemResource(properties=updated_properties)

    # Submit the policy update and track the resulting job.
    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, updated_item,
                                     cls=cust_help.get_pipeline_response)
    return cust_help.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
def _resolve_storage_account(cmd, resource_group_name, vault_name, storage_account_name):
    """Find the named storage account: registered containers first, then protectable
    (unregistered) ones, refreshing the vault's container list once before giving up.

    :raises CLIError: if the storage account cannot be found after a refresh.
    """
    containers_client = backup_protection_containers_cf(cmd.cli_ctx)
    registered_containers = common.list_containers(containers_client, resource_group_name,
                                                   vault_name, "AzureStorage")
    storage_account = _get_storage_account_from_list(registered_containers, storage_account_name)
    if storage_account is not None:
        return storage_account

    unregistered_containers = list_protectable_containers(cmd.cli_ctx, resource_group_name, vault_name)
    storage_account = _get_storage_account_from_list(unregistered_containers, storage_account_name)
    if storage_account is not None:
        return storage_account

    # Not found — refresh containers in the vault and retry once.
    protection_containers_client = protection_containers_cf(cmd.cli_ctx)
    filter_string = helper.get_filter_string({'backupManagementType': "AzureStorage"})
    refresh_result = protection_containers_client.refresh(vault_name, resource_group_name, fabric_name,
                                                          filter=filter_string,
                                                          cls=helper.get_pipeline_response)
    helper.track_refresh_operation(cmd.cli_ctx, refresh_result, vault_name, resource_group_name)

    unregistered_containers = list_protectable_containers(cmd.cli_ctx, resource_group_name, vault_name)
    storage_account = _get_storage_account_from_list(unregistered_containers, storage_account_name)
    if storage_account is None:
        raise CLIError("Storage account not found or not supported.")
    return storage_account


def enable_for_AzureFileShare(cmd, client, resource_group_name, vault_name, afs_name,
                              storage_account_name, policy_name):
    """Enable backup protection for an Azure file share.

    Resolves and registers the containing storage account, locates the file share
    as a protectable item, and creates the protected item with the given policy.
    If no protectable item is found but the share is already protected, the existing
    protected item is returned instead.

    :return: tracked backup job result, or the existing protected item if the
        file share is already protected.
    :raises CLIError: if the storage account or the file share cannot be found.
    """
    storage_account = _resolve_storage_account(cmd, resource_group_name, vault_name,
                                               storage_account_name)

    # Register the storage account with the vault (idempotent from the caller's
    # perspective; performed unconditionally, matching the original flow).
    protection_containers_client = protection_containers_cf(cmd.cli_ctx)
    container_properties = AzureStorageContainer(
        backup_management_type="AzureStorage",
        source_resource_id=storage_account.properties.container_id,
        workload_type="AzureFileShare")
    container_param = ProtectionContainerResource(properties=container_properties)
    register_result = protection_containers_client.register(
        vault_name, resource_group_name, fabric_name, storage_account.name, container_param,
        cls=helper.get_pipeline_response)
    helper.track_register_operation(cmd.cli_ctx, register_result, vault_name,
                                    resource_group_name, storage_account.name)

    protectable_item = _get_protectable_item_for_afs(cmd.cli_ctx, vault_name, resource_group_name,
                                                     afs_name, storage_account)
    if protectable_item is None:
        # No protectable item: the share may already be protected — return it if so.
        items_client = backup_protected_items_cf(cmd.cli_ctx)
        item = common.show_item(cmd, items_client, resource_group_name, vault_name,
                                storage_account_name, afs_name, "AzureStorage")
        if item is None:
            raise CLIError("Could not find a fileshare with name " + afs_name +
                           " to protect or a protected fileshare of name " + afs_name)
        return item

    policy = common.show_policy(protection_policies_cf(cmd.cli_ctx), resource_group_name,
                                vault_name, policy_name)
    helper.validate_policy(policy)
    helper.validate_azurefileshare_item(protectable_item)

    container_uri = helper.get_protection_container_uri_from_id(protectable_item.id)
    item_uri = helper.get_protectable_item_uri_from_id(protectable_item.id)

    item_properties = AzureFileshareProtectedItem()
    item_properties.policy_id = policy.id
    item_properties.source_resource_id = protectable_item.properties.parent_container_fabric_id
    item = ProtectedItemResource(properties=item_properties)

    result = client.create_or_update(vault_name, resource_group_name, fabric_name,
                                     container_uri, item_uri, item,
                                     cls=helper.get_pipeline_response)
    return helper.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)