Пример #1
0
def _get_backup_request(workload_type, retain_until):
    """Build the BackupRequestResource for an ad-hoc IaaS-VM backup.

    :param workload_type: workload type value, compared against WorkloadType.vm.value.
    :param retain_until: recovery-point expiry time passed to the service (UTC datetime).
    :return: BackupRequestResource wrapping an IaasVMBackupRequest.
    :raises ValueError: if workload_type is not the VM workload.
    """
    # The original left trigger_backup_properties unbound for any non-VM
    # workload, so the constructor call below raised an opaque
    # UnboundLocalError. Fail fast with an explicit error instead.
    if workload_type != WorkloadType.vm.value:
        raise ValueError(
            "Adhoc backup request is only supported for the VM workload type, got: {}".format(workload_type))
    trigger_backup_properties = IaasVMBackupRequest(
        recovery_point_expiry_time_in_utc=retain_until)
    trigger_backup_request = BackupRequestResource(
        properties=trigger_backup_properties)
    return trigger_backup_request
Пример #2
0
def backup_now(cmd, client, resource_group_name, vault_name, item, retain_until, backup_type,
               enable_compression=False):
    """Trigger an on-demand backup of a workload-protected item and track the job.

    :param cmd: CLI command context (supplies cli_ctx for job tracking).
    :param client: SDK client exposing trigger().
    :param resource_group_name: resource group containing the vault.
    :param vault_name: Recovery Services vault name.
    :param item: protected item whose .id encodes the container and item URIs.
    :param retain_until: requested recovery-point expiry; the underlying policy
        overrides it for these workloads, hence the warning below.
    :param backup_type: requested backup type (e.g. 'Full', 'Log').
    :param enable_compression: SQL-only compression flag.
    :raises CLIError: for option/type combinations the service rejects.
    """
    override_note = "For SAPHANA and SQL workload, retain-until parameter value will be overridden by the underlying policy"
    if retain_until is not None:
        logger.warning(override_note)

    # Derive the ARM sub-resource names from the protected item's full id.
    container_uri = cust_help.get_protection_container_uri_from_id(item.id)
    item_uri = cust_help.get_protected_item_uri_from_id(item.id)
    item_type = item_uri.split(';')[0]

    # Compression is valid for SQL items only.
    if not cust_help.is_sql(item_type) and enable_compression:
        raise CLIError(
            """
            Enable compression is not applicable for SAPHanaDatabase item type.
            """)

    # HANA rejects these ad-hoc backup types.
    if cust_help.is_hana(item_type) and backup_type in ('Log', 'CopyOnlyFull'):
        raise CLIError(
            """
            Backup type cannot be Log or CopyOnlyFull for SAPHanaDatabase item type.
            """)

    request = BackupRequestResource(
        properties=AzureWorkloadBackupRequest(
            backup_type=backup_type,
            enable_compression=enable_compression,
            recovery_point_expiry_time_in_utc=retain_until))

    # Fire the backup and hand back a trackable job.
    result = sdk_no_wait(True, client.trigger,
                         vault_name, resource_group_name, fabric_name, container_uri, item_uri,
                         request)
    return cust_help.track_backup_job(cmd.cli_ctx, result, vault_name, resource_group_name)
Пример #3
0
def backup_now(cmd,
               client,
               resource_group_name,
               vault_name,
               item,
               retain_until,
               backup_type,
               enable_compression=False):
    """Trigger an ad-hoc backup of a workload-protected item and track the job.

    :param cmd: CLI command context (supplies cli_ctx for job tracking).
    :param client: SDK client exposing trigger().
    :param resource_group_name: resource group containing the vault.
    :param vault_name: Recovery Services vault name.
    :param item: protected item whose .id encodes the container and item URIs.
    :param retain_until: requested recovery-point expiry; overridden by the
        policy except for CopyOnlyFull backups, and defaulted to 30 days.
    :param backup_type: required backup type (e.g. 'Full', 'CopyOnlyFull').
    :param enable_compression: SQL-only compression flag.
    :raises RequiredArgumentMissingError: when backup_type is omitted.
    :raises CLIError: for option/type combinations the service rejects.
    """
    if backup_type is None:
        raise RequiredArgumentMissingError(
            "Backup type missing. Please provide a valid backup type using "
            "--backup-type argument.")

    override_note = "For SAPHANA and SQL workload, retain-until parameter value will be overridden by the underlying policy"

    # The policy controls retention for everything but CopyOnlyFull: warn when
    # the caller supplied a value that will be ignored, then normalise the
    # expiry to 30 days from now. (After the first branch assigns, the
    # second can never fire, so elif is equivalent to the two separate ifs.)
    if retain_until is not None and backup_type != 'CopyOnlyFull':
        logger.warning(override_note)
        retain_until = datetime.now(timezone.utc) + timedelta(days=30)
    elif retain_until is None:
        retain_until = datetime.now(timezone.utc) + timedelta(days=30)

    # Derive the ARM sub-resource names from the protected item's full id.
    container_uri = cust_help.get_protection_container_uri_from_id(item.id)
    item_uri = cust_help.get_protected_item_uri_from_id(item.id)
    item_type = item_uri.split(';')[0]

    # Compression is valid for SQL items only.
    if not cust_help.is_sql(item_type) and enable_compression:
        raise CLIError("""
            Enable compression is not applicable for SAPHanaDatabase item type.
            """)

    # HANA rejects these ad-hoc backup types.
    if cust_help.is_hana(item_type) and backup_type in ('Log', 'CopyOnlyFull', 'Incremental'):
        raise CLIError("""
            Backup type cannot be Log, CopyOnlyFull, Incremental for SAPHanaDatabase Adhoc backup.
            """)

    request = BackupRequestResource(
        properties=AzureWorkloadBackupRequest(
            backup_type=backup_type,
            enable_compression=enable_compression,
            recovery_point_expiry_time_in_utc=retain_until))

    # Trigger backup and wait for completion
    result = client.trigger(vault_name,
                            resource_group_name,
                            fabric_name,
                            container_uri,
                            item_uri,
                            request,
                            raw=True)
    return cust_help.track_backup_job(cmd.cli_ctx, result, vault_name,
                                      resource_group_name)
    def trigger_backup(self, container_name, protected_item_name):
        """Kick off an ad-hoc IaaS-VM backup and return the resulting job id.

        :param container_name: protection container holding the item.
        :param protected_item_name: protected item to back up.
        :return: the job id reported by the operation-status polling.
        """
        # Keep the recovery point for two days.
        # NOTE(review): datetime.utcnow() returns a naive datetime and is
        # deprecated on modern Python; kept as-is to preserve behaviour.
        expiry = datetime.utcnow() + timedelta(days=2)
        request = BackupRequestResource(
            properties=IaasVMBackupRequest(recovery_point_expiry_time_in_utc=expiry))

        raw_response = self.client.backups.trigger(self.vault_name,
                                                   self.resource_group,
                                                   self.fabric_name,
                                                   container_name,
                                                   protected_item_name,
                                                   request,
                                                   raw=True)
        self._validate_operation_response(raw_response)

        def fetch_result(operation_id):
            # Poll the operation-results endpoint for the raw outcome.
            return self.client.protected_item_operation_results.get(
                self.vault_name,
                self.resource_group,
                self.fabric_name,
                container_name,
                protected_item_name,
                operation_id,
                raw=True)

        def fetch_status(operation_id):
            # Poll the operation-status endpoint.
            return self.client.protected_item_operation_statuses.get(
                self.vault_name,
                self.resource_group,
                self.fabric_name,
                container_name,
                protected_item_name,
                operation_id)

        job_response = self._get_operation_response(container_name,
                                                    protected_item_name,
                                                    raw_response,
                                                    fetch_result,
                                                    fetch_status)

        self.context.assertIsNotNone(job_response.job_id)
        return job_response.job_id
Пример #5
0
def _get_backup_request(retain_until):
    """Wrap an Azure file-share ad-hoc backup request in its resource envelope.

    :param retain_until: recovery-point expiry time (UTC datetime).
    :return: BackupRequestResource carrying an AzureFileShareBackupRequest.
    """
    return BackupRequestResource(
        properties=AzureFileShareBackupRequest(
            recovery_point_expiry_time_in_utc=retain_until))