Example #1
def test_tls_connection(cassandra_service_tls, dcos_ca_bundle):
    """
    Tests writing, reading and deleting data over a secure TLS connection.
    """
    with sdk_jobs.InstallJobContext([
            config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle)
    ]):

        sdk_jobs.run_job(
            config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))

        key_id = os.getenv('AWS_ACCESS_KEY_ID')
        if not key_id:
            assert False, 'AWS credentials are required for this test. ' \
                          'Disable test with e.g. TEST_TYPES="sanity and not aws"'
        plan_parameters = {
            'AWS_ACCESS_KEY_ID': key_id,
            'AWS_SECRET_ACCESS_KEY': os.getenv('AWS_SECRET_ACCESS_KEY'),
            'AWS_REGION': os.getenv('AWS_REGION', 'us-west-2'),
            'S3_BUCKET_NAME': os.getenv('AWS_BUCKET_NAME', 'infinity-framework-test'),
            'SNAPSHOT_NAME': str(uuid.uuid1()),
            'CASSANDRA_KEYSPACES': '"testspace1 testspace2"',
        }

        # Run backup plan, uploading snapshots and schema to the cloud
        sdk_plan.start_plan(config.SERVICE_NAME,
                            'backup-s3',
                            parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, 'backup-s3')

        sdk_jobs.run_job(
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))

        # Run restore plan, downloading snapshots and schema from the cloud
        sdk_plan.start_plan(config.SERVICE_NAME,
                            'restore-s3',
                            parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, 'restore-s3')

    with sdk_jobs.InstallJobContext([
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle)
    ]):

        sdk_jobs.run_job(
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))
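
The config.get_*_data_job helpers above return Metronome job definitions that sdk_jobs installs and runs. As a minimal sketch of the shape such a helper might produce (the command, image, and resource sizes are illustrative assumptions, not the actual Cassandra test configuration):

import uuid

# Hypothetical sketch: a minimal Metronome job dict of the kind the
# config.get_*_data_job helpers are assumed to build.
def get_example_write_data_job(node_address: str = "node-0.cassandra.mesos") -> dict:
    return {
        "id": "write-data-{}".format(uuid.uuid4()),
        "run": {
            "cmd": 'cqlsh -e "..." {}'.format(node_address),  # placeholder CQL command
            "cpus": 0.5,
            "mem": 256,
            "disk": 0,
            "docker": {"image": "cassandra:3.11"},  # any image providing cqlsh
        },
    }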
Example #2
def _delete_hdfs_terasort_files():
    if HDFS_KERBEROS_ENABLED:
        job_dict = TERASORT_DELETE_JOB_KERBEROS
    else:
        job_dict = TERASORT_DELETE_JOB
    LOGGER.info("Deleting hdfs terasort files by running job {}".format(job_dict['id']))
    sdk_jobs.install_job(job_dict)
    sdk_jobs.run_job(job_dict, timeout_seconds=300)
    sdk_jobs.remove_job(job_dict)
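
The install/run/remove sequence above is the manual form of the pattern the other examples express with sdk_jobs.InstallJobContext, which removes the job even if run_job raises. A sketch of the same step using the context manager, assuming the same job dicts:

def _delete_hdfs_terasort_files_ctx() -> None:
    # Same behavior as above, but cleanup is guaranteed by the context manager.
    job_dict = TERASORT_DELETE_JOB_KERBEROS if HDFS_KERBEROS_ENABLED else TERASORT_DELETE_JOB
    LOGGER.info("Deleting hdfs terasort files by running job %s", job_dict["id"])
    with sdk_jobs.InstallJobContext([job_dict]):
        sdk_jobs.run_job(job_dict, timeout_seconds=300)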
Example #3
def test_tls_connection(
    cassandra_service: Dict[str, Any],
    dcos_ca_bundle: str,
) -> None:
    """
    Tests writing, reading and deleting data over a secure TLS connection.
    """
    with sdk_jobs.InstallJobContext(
        [
            config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle),
        ]
    ):

        sdk_jobs.run_job(config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))

        key_id = os.getenv("AWS_ACCESS_KEY_ID")
        if not key_id:
            assert False, (
                "AWS credentials are required for this test. "
                'Disable test with e.g. TEST_TYPES="sanity and not aws"'
            )
        plan_parameters = {
            "AWS_ACCESS_KEY_ID": key_id,
            "AWS_SECRET_ACCESS_KEY": os.getenv("AWS_SECRET_ACCESS_KEY"),
            "AWS_REGION": os.getenv("AWS_REGION", "us-west-2"),
            "S3_BUCKET_NAME": os.getenv("AWS_BUCKET_NAME", "infinity-framework-test"),
            "SNAPSHOT_NAME": str(uuid.uuid1()),
            "CASSANDRA_KEYSPACES": '"testspace1 testspace2"',
        }

        # Run backup plan, uploading snapshots and schema to the cloud
        sdk_plan.start_plan(config.SERVICE_NAME, "backup-s3", parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, "backup-s3")

        sdk_jobs.run_job(config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))

        # Run restore plan, downloading snapshots and schema from the cloud
        sdk_plan.start_plan(config.SERVICE_NAME, "restore-s3", parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, "restore-s3")

    with sdk_jobs.InstallJobContext(
        [
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle),
        ]
    ):

        sdk_jobs.run_job(config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))
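
The assert False guard in this test fails outright when AWS credentials are missing. Under pytest, a skip expresses the same precondition without counting as a failure; whether to skip or fail is a policy choice. A minimal sketch, assuming the test runs under pytest:

import os

import pytest

key_id = os.getenv("AWS_ACCESS_KEY_ID")
if not key_id:
    # Mark the test as skipped instead of failed when credentials are absent.
    pytest.skip('AWS credentials required; disable with TEST_TYPES="sanity and not aws"')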
Example #4
def test_enabling_then_disabling_tls(
    cassandra_service: Dict[str, Any],
    dcos_ca_bundle: str,
) -> None:
    # Write data.
    write_data_job = config.get_write_data_job()
    with sdk_jobs.InstallJobContext([write_data_job]):
        sdk_jobs.run_job(write_data_job)

    # Turn TLS on and off again.
    update_service_transport_encryption(cassandra_service, enabled=True, allow_plaintext=True)
    update_service_transport_encryption(cassandra_service, enabled=True, allow_plaintext=False)
    update_service_transport_encryption(cassandra_service, enabled=False, allow_plaintext=False)

    # Make sure data is still there.
    verify_data_job = config.get_verify_data_job()
    with sdk_jobs.InstallJobContext([verify_data_job]):
        sdk_jobs.run_job(verify_data_job)
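
update_service_transport_encryption is defined elsewhere in the test module. As a hedged sketch of what such a helper could do (the service.security.transport_encryption option path and the sdk_service.update_configuration helper are assumptions, not shown in the snippet above):

from typing import Any, Dict

def update_service_transport_encryption(
    service: Dict[str, Any], enabled: bool, allow_plaintext: bool
) -> None:
    # Assumed option path; the real module may use different keys.
    options = {
        "service": {
            "security": {
                "transport_encryption": {
                    "enabled": enabled,
                    "allow_plaintext": allow_plaintext,
                }
            }
        }
    }
    # Assumed helper: redeploys the service with the merged options and
    # waits for the deploy plan to complete.
    sdk_service.update_configuration(
        config.PACKAGE_NAME, config.SERVICE_NAME, options, config.DEFAULT_TASK_COUNT
    )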
Example #5
def verify_client_can_write_read_udf(job_node_address: str = DEFAULT_NODE_ADDRESS) -> None:

    write_udf_job = get_write_udf_job(node_address=job_node_address)
    verify_udf_data_job = get_verify_udf_data_job(node_address=job_node_address)
    delete_data_job = get_delete_data_job(node_address=job_node_address)
    verify_deletion_job = get_verify_deletion_job(node_address=job_node_address)

    sdk_jobs.run_job(write_udf_job)
    sdk_jobs.run_job(verify_udf_data_job)
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
Example #6
def verify_client_can_write_read_udf(
    job_node_address: str = DEFAULT_NODE_ADDRESS,
) -> None:

    write_udf_job = get_write_udf_job(node_address=job_node_address)
    verify_udf_data_job = get_verify_udf_data_job(node_address=job_node_address)
    delete_data_job = get_delete_data_job(node_address=job_node_address)
    verify_deletion_job = get_verify_deletion_job(node_address=job_node_address)

    sdk_jobs.run_job(write_udf_job)
    sdk_jobs.run_job(verify_udf_data_job)
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
Example #7
def test_tls_connection(cassandra_service_tls, dcos_ca_bundle):
    """
    Tests writing, reading and deleting data over a secure TLS connection.
    """
    with sdk_jobs.InstallJobContext([
            config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle)]):

        sdk_jobs.run_job(config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))

        key_id = os.getenv('AWS_ACCESS_KEY_ID')
        if not key_id:
            assert False, 'AWS credentials are required for this test. ' \
                          'Disable test with e.g. TEST_TYPES="sanity and not aws"'
        plan_parameters = {
            'AWS_ACCESS_KEY_ID': key_id,
            'AWS_SECRET_ACCESS_KEY': os.getenv('AWS_SECRET_ACCESS_KEY'),
            'AWS_REGION': os.getenv('AWS_REGION', 'us-west-2'),
            'S3_BUCKET_NAME': os.getenv('AWS_BUCKET_NAME', 'infinity-framework-test'),
            'SNAPSHOT_NAME': str(uuid.uuid1()),
            'CASSANDRA_KEYSPACES': '"testspace1 testspace2"',
        }

        # Run backup plan, uploading snapshots and schema to the cloud
        sdk_plan.start_plan(config.SERVICE_NAME, 'backup-s3', parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, 'backup-s3')

        sdk_jobs.run_job(config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))

        # Run restore plan, downloading snapshots and schema from the cloud
        sdk_plan.start_plan(config.SERVICE_NAME, 'restore-s3', parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, 'restore-s3')

    with sdk_jobs.InstallJobContext([
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle)]):

        sdk_jobs.run_job(config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))
Example #8
def test_enabling_then_disabling_tls(cassandra_service, dcos_ca_bundle):
    # Write data.
    write_data_job = config.get_write_data_job()
    with sdk_jobs.InstallJobContext([write_data_job]):
        sdk_jobs.run_job(write_data_job)

    # Turn TLS on and off again.
    update_service_transport_encryption(cassandra_service,
                                        enabled=True,
                                        allow_plaintext=True)
    update_service_transport_encryption(cassandra_service,
                                        enabled=True,
                                        allow_plaintext=False)
    update_service_transport_encryption(cassandra_service,
                                        enabled=False,
                                        allow_plaintext=False)

    # Make sure data is still there.
    verify_data_job = config.get_verify_data_job()
    with sdk_jobs.InstallJobContext([verify_data_job]):
        sdk_jobs.run_job(verify_data_job)
Example #9
def verify_client_can_write_read_and_delete_with_auth(
    job_node_address: str = DEFAULT_NODE_ADDRESS,
) -> None:
    write_data_job = get_write_data_job(node_address=job_node_address, auth=True)
    verify_data_job = get_verify_data_job(node_address=job_node_address, auth=True)
    delete_data_job = get_delete_data_job(node_address=job_node_address, auth=True)
    verify_deletion_job = get_verify_deletion_job(node_address=job_node_address, auth=True)

    # Ensure the keyspaces we will use aren't present. In practice this should run once and fail
    # because the data isn't present. When the job is flagged as failed (due to restart=NEVER),
    # the run_job() call will throw.
    try:
        sdk_jobs.run_job(delete_data_job)
    except Exception:
        log.info("Error during delete (normal if no stale data)")
        log.info(traceback.format_exc())

    sdk_jobs.run_job(write_data_job)
    sdk_jobs.run_job(verify_data_job)
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
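
The tolerated-failure delete at the top of this example recurs in several snippets below; factoring it into a helper makes the intent explicit at each call site. A sketch, reusing the sdk_jobs, log, and traceback names already in scope in these examples:

from typing import Any, Dict

def run_job_tolerating_failure(job: Dict[str, Any]) -> None:
    # The delete job is expected to fail when there is no stale data
    # (restart=NEVER marks the run as failed, so run_job raises).
    try:
        sdk_jobs.run_job(job)
    except Exception:
        log.info("Error during delete (normal if no stale data)")
        log.info(traceback.format_exc())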
Example #10
def verify_client_can_write_read_and_delete(
    dcos_ca_bundle: Optional[str] = None,
) -> None:
    write_data_job = config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle)
    verify_data_job = config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle)
    delete_data_job = config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle)
    verify_deletion_job = config.get_verify_deletion_job(dcos_ca_bundle=dcos_ca_bundle)

    with sdk_jobs.InstallJobContext(
        [write_data_job, verify_data_job, delete_data_job, verify_deletion_job]
    ):
        sdk_jobs.run_job(write_data_job)
        sdk_jobs.run_job(verify_data_job)
        sdk_jobs.run_job(delete_data_job)
        sdk_jobs.run_job(verify_deletion_job)
Example #11
def verify_client_can_write_read_and_delete(dcos_ca_bundle=None):
    write_data_job = config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle)
    verify_data_job = config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle)
    delete_data_job = config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle)
    verify_deletion_job = config.get_verify_deletion_job(
        dcos_ca_bundle=dcos_ca_bundle)

    with sdk_jobs.InstallJobContext([
            write_data_job, verify_data_job, delete_data_job,
            verify_deletion_job
    ]):
        sdk_jobs.run_job(write_data_job)
        sdk_jobs.run_job(verify_data_job)
        sdk_jobs.run_job(delete_data_job)
        sdk_jobs.run_job(verify_deletion_job)
Example #12
def verify_client_can_write_read_and_delete_with_auth(
    job_node_address: str = DEFAULT_NODE_ADDRESS,
) -> None:
    write_data_job = get_write_data_job(node_address=job_node_address,
                                        auth=True)
    verify_data_job = get_verify_data_job(node_address=job_node_address,
                                          auth=True)
    delete_data_job = get_delete_data_job(node_address=job_node_address,
                                          auth=True)
    verify_deletion_job = get_verify_deletion_job(
        node_address=job_node_address, auth=True)

    # Ensure the keyspaces we will use aren't present. In practice this should run once and fail
    # because the data isn't present. When the job is flagged as failed (due to restart=NEVER),
    # the run_job() call will throw.
    try:
        sdk_jobs.run_job(delete_data_job)
    except Exception:
        log.info("Error during delete (normal if no stale data)")
        log.info(traceback.format_exc())

    sdk_jobs.run_job(write_data_job)
    sdk_jobs.run_job(verify_data_job)
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
Example #13
def run_backup_and_restore(
    service_name: str,
    backup_plan: str,
    restore_plan: str,
    plan_parameters: Dict[str, Optional[str]],
    job_node_address: str = DEFAULT_NODE_ADDRESS,
) -> None:
    write_data_job = get_write_data_job(node_address=job_node_address)
    verify_data_job = get_verify_data_job(node_address=job_node_address)
    delete_data_job = get_delete_data_job(node_address=job_node_address)
    verify_deletion_job = get_verify_deletion_job(
        node_address=job_node_address)

    # Ensure the keyspaces we will use aren't present. In practice this should run once and fail
    # because the data isn't present. When the job is flagged as failed (due to restart=NEVER),
    # the run_job() call will throw.
    try:
        sdk_jobs.run_job(delete_data_job)
    except Exception:
        log.info("Error during delete (normal if no stale data)")
        log.info(traceback.format_exc())

    # Write data to Cassandra with a metronome job, then verify it was written
    # Note: Write job will fail if data already exists
    sdk_jobs.run_job(write_data_job)
    sdk_jobs.run_job(verify_data_job)

    # Run backup plan, uploading snapshots and schema to the cloud
    sdk_plan.start_plan(service_name, backup_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, backup_plan)

    # Delete all keyspaces and tables with a metronome job
    sdk_jobs.run_job(delete_data_job)

    # Verify that the keyspaces and tables were deleted
    sdk_jobs.run_job(verify_deletion_job)

    # Run restore plan, retrieving snapshots and schema from S3
    sdk_plan.start_plan(service_name, restore_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, restore_plan)

    # Verify that the data we wrote and then deleted has been restored
    sdk_jobs.run_job(verify_data_job)

    # Delete data in preparation for any other backup tests
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
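
A caller supplies the plan names and cloud credentials as plan parameters. A sketch of an S3-backed invocation, reusing the parameter keys passed to start_plan in Example #1 and assuming run_backup_and_restore is imported into scope:

import os
import uuid

plan_parameters = {
    "AWS_ACCESS_KEY_ID": os.getenv("AWS_ACCESS_KEY_ID"),
    "AWS_SECRET_ACCESS_KEY": os.getenv("AWS_SECRET_ACCESS_KEY"),
    "AWS_REGION": os.getenv("AWS_REGION", "us-west-2"),
    "S3_BUCKET_NAME": os.getenv("AWS_BUCKET_NAME", "infinity-framework-test"),
    "SNAPSHOT_NAME": str(uuid.uuid1()),
    "CASSANDRA_KEYSPACES": '"testspace1 testspace2"',
}

run_backup_and_restore(
    service_name=config.SERVICE_NAME,
    backup_plan="backup-s3",
    restore_plan="restore-s3",
    plan_parameters=plan_parameters,
)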
Example #14
def run_backup_and_restore(service_name,
                           backup_plan,
                           restore_plan,
                           plan_parameters,
                           job_node_address=DEFAULT_NODE_ADDRESS):
    write_data_job = get_write_data_job(node_address=job_node_address)
    verify_data_job = get_verify_data_job(node_address=job_node_address)
    delete_data_job = get_delete_data_job(node_address=job_node_address)
    verify_deletion_job = get_verify_deletion_job(
        node_address=job_node_address)

    # Write data to Cassandra with a metronome job, then verify it was written
    # Note: Write job will fail if data already exists
    sdk_jobs.run_job(write_data_job)
    sdk_jobs.run_job(verify_data_job)

    # Run backup plan, uploading snapshots and schema to the cloud
    sdk_plan.start_plan(service_name, backup_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, backup_plan)

    # Delete all keyspaces and tables with a metronome job
    sdk_jobs.run_job(delete_data_job)

    # Verify that the keyspaces and tables were deleted
    sdk_jobs.run_job(verify_deletion_job)

    # Run restore plan, retrieving snapshots and schema from S3
    sdk_plan.start_plan(service_name, restore_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, restore_plan)

    # Verify that the data we wrote and then deleted has been restored
    sdk_jobs.run_job(verify_data_job)

    # Delete data in preparation for any other backup tests
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
Example #15
def test_tls_connection(
    cassandra_service: Dict[str, Any],
    dcos_ca_bundle: str,
) -> None:
    """
    Tests writing, reading and deleting data over a secure TLS connection.
    """
    with sdk_jobs.InstallJobContext([
            config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle),
    ]):

        sdk_jobs.run_job(
            config.get_write_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))

        key_id = os.getenv("AWS_ACCESS_KEY_ID")
        if not key_id:
            assert False, (
                "AWS credentials are required for this test. "
                'Disable test with e.g. TEST_TYPES="sanity and not aws"'
            )
        plan_parameters = {
            "AWS_ACCESS_KEY_ID": key_id,
            "AWS_SECRET_ACCESS_KEY": os.getenv("AWS_SECRET_ACCESS_KEY"),
            "AWS_REGION": os.getenv("AWS_REGION", "us-west-2"),
            "S3_BUCKET_NAME": os.getenv("AWS_BUCKET_NAME", "infinity-framework-test"),
            "SNAPSHOT_NAME": str(uuid.uuid1()),
            "CASSANDRA_KEYSPACES": '"testspace1 testspace2"',
        }

        # Run backup plan, uploading snapshots and schema to the cloud
        sdk_plan.start_plan(config.SERVICE_NAME,
                            "backup-s3",
                            parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, "backup-s3")

        sdk_jobs.run_job(
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))

        # Run restore plan, downloading snapshots and schema from the cloud
        sdk_plan.start_plan(config.SERVICE_NAME,
                            "restore-s3",
                            parameters=plan_parameters)
        sdk_plan.wait_for_completed_plan(config.SERVICE_NAME, "restore-s3")

    with sdk_jobs.InstallJobContext([
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle),
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle),
    ]):

        sdk_jobs.run_job(
            config.get_verify_data_job(dcos_ca_bundle=dcos_ca_bundle))
        sdk_jobs.run_job(
            config.get_delete_data_job(dcos_ca_bundle=dcos_ca_bundle))
Example #16
def run_backup_and_restore(
        service_name,
        backup_plan,
        restore_plan,
        plan_parameters,
        job_node_address=DEFAULT_NODE_ADDRESS):
    write_data_job = get_write_data_job(node_address=job_node_address)
    verify_data_job = get_verify_data_job(node_address=job_node_address)
    delete_data_job = get_delete_data_job(node_address=job_node_address)
    verify_deletion_job = get_verify_deletion_job(node_address=job_node_address)

    # Write data to Cassandra with a metronome job, then verify it was written
    # Note: Write job will fail if data already exists
    sdk_jobs.run_job(write_data_job)
    sdk_jobs.run_job(verify_data_job)

    # Run backup plan, uploading snapshots and schema to the cloud
    sdk_plan.start_plan(service_name, backup_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, backup_plan)

    # Delete all keyspaces and tables with a metronome job
    sdk_jobs.run_job(delete_data_job)

    # Verify that the keyspaces and tables were deleted
    sdk_jobs.run_job(verify_deletion_job)

    # Run restore plan, retrieving snapshots and schema from S3
    sdk_plan.start_plan(service_name, restore_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, restore_plan)

    # Verify that the data we wrote and then deleted has been restored
    sdk_jobs.run_job(verify_data_job)

    # Delete data in preparation for any other backup tests
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)
Example #17
def run_backup_and_restore(
    service_name: str,
    backup_plan: str,
    restore_plan: str,
    plan_parameters: Dict[str, Optional[str]],
    job_node_address: str = DEFAULT_NODE_ADDRESS,
) -> None:
    write_data_job = get_write_data_job(node_address=job_node_address)
    verify_data_job = get_verify_data_job(node_address=job_node_address)
    delete_data_job = get_delete_data_job(node_address=job_node_address)
    verify_deletion_job = get_verify_deletion_job(node_address=job_node_address)

    # Ensure the keyspaces we will use aren't present. In practice this should run once and fail
    # because the data isn't present. When the job is flagged as failed (due to restart=NEVER),
    # the run_job() call will throw.
    try:
        sdk_jobs.run_job(delete_data_job)
    except Exception:
        log.info("Error during delete (normal if no stale data)")
        log.info(traceback.format_exc())

    # Write data to Cassandra with a metronome job, then verify it was written
    # Note: Write job will fail if data already exists
    sdk_jobs.run_job(write_data_job)
    sdk_jobs.run_job(verify_data_job)

    # Run backup plan, uploading snapshots and schema to the cloud
    sdk_plan.start_plan(service_name, backup_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, backup_plan)

    # Delete all keyspaces and tables with a metronome job
    sdk_jobs.run_job(delete_data_job)

    # Verify that the keyspaces and tables were deleted
    sdk_jobs.run_job(verify_deletion_job)

    # Run restore plan, retrieving snapshots and schema from S3
    sdk_plan.start_plan(service_name, restore_plan, parameters=plan_parameters)
    sdk_plan.wait_for_completed_plan(service_name, restore_plan)

    # Verify that the data we wrote and then deleted has been restored
    sdk_jobs.run_job(verify_data_job)

    # Delete data in preparation for any other backup tests
    sdk_jobs.run_job(delete_data_job)
    sdk_jobs.run_job(verify_deletion_job)