Example #1
0
def test_sigv4_only_region(tmpdir, monkeypatch):
    """Round-trip a blob through a SigV4-only S3 region.

    Forces eu-central-1 (which accepts only AWS signature version 4),
    uploads a small file, and verifies the downloaded contents match
    what was written.
    """
    monkeypatch.setenv('AWS_REGION', 'eu-central-1')
    sigv4_check_apply()

    bucket = bucket_name_mangle('sigv4')
    credentials = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                              os.getenv('AWS_SECRET_ACCESS_KEY'))
    connection = calling_format.from_store_name(bucket).connect(credentials)

    # The bucket may survive from an earlier run; that is acceptable.
    try:
        connection.create_bucket(bucket, location='eu-central-1')
    except boto.exception.S3CreateError:
        pass

    payload = 'abcdefghijklmnopqrstuvwxyz\n' * 100
    src_path = unicode(tmpdir.join('source'))
    with open(src_path, 'wb') as fp:
        fp.write(payload)

    blob_url = 's3://{0}/data'.format(bucket)

    with open(src_path) as fp:
        uri_put_file(credentials, blob_url, fp)

    fetched = uri_get_file(credentials, blob_url)
    assert payload == fetched
Example #2
0
def test_sigv4_only_region(tmpdir, monkeypatch):
    """Exercise S3 upload/download against a region that requires SigV4.

    eu-central-1 rejects signature version 2, so this checks the SigV4
    code path end to end: create a bucket, put a file, get it back.
    """
    monkeypatch.setenv('AWS_REGION', 'eu-central-1')
    sigv4_check_apply()

    store_name = bucket_name_mangle('sigv4')
    aws_creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                            os.getenv('AWS_SECRET_ACCESS_KEY'))
    fmt = calling_format.from_store_name(store_name)
    s3_conn = fmt.connect(aws_creds)

    # Ignore "already exists" from a prior test run.
    try:
        s3_conn.create_bucket(store_name, location='eu-central-1')
    except boto.exception.S3CreateError:
        pass

    local_file = unicode(tmpdir.join('source'))
    expected = 'abcdefghijklmnopqrstuvwxyz\n' * 100
    with open(local_file, 'wb') as out:
        out.write(expected)

    url = 's3://{0}/data'.format(store_name)

    with open(local_file) as src:
        uri_put_file(aws_creds, url, src)

    assert expected == uri_get_file(aws_creds, url)
Example #3
0
    def validate_bucket():
        """Confirm that the eu-central-1 bucket exists.

        Uses the subdomain-style calling format, which routes requests
        to eu-central-1, and asks S3 to validate the bucket.
        """
        sigv4_check_apply()
        fmt = calling_format.from_store_name(bucket_name)
        fmt.connect(creds).get_bucket(bucket_name, validate=True)
Example #4
0
    def validate_bucket():
        """Verify the eu-central-1 bucket can be reached and exists.

        Connects through the subdomain that points to eu-central-1 and
        performs a validated get_bucket call.
        """
        sigv4_check_apply()
        region_conn = calling_format.from_store_name(bucket_name).connect(creds)
        region_conn.get_bucket(bucket_name, validate=True)
Example #5
0
def configure_backup_cxt(args):
    """Build a backend-specific backup operator from args and environment.

    Resolves the storage prefix (file, GS, S3, Swift, or WABS) from
    command-line options or WALE_*_PREFIX environment variables,
    optionally layers GPG encryption on top, and returns an object
    adhering to the 'operator.Backup' protocol for that backend.

    :param args: parsed command-line arguments
    :raises UserException: when no storage prefix is configured, or a
        backend's required credentials are missing
    :raises UserCritical: when the prefix maps to no known backend
    """
    # Try to find some WAL-E prefix to store data in.
    prefix = (args.file_prefix or args.gs_prefix or args.s3_prefix
              or args.wabs_prefix or os.getenv('WALE_FILE_PREFIX')
              or os.getenv('WALE_GS_PREFIX') or os.getenv('WALE_S3_PREFIX')
              or os.getenv('WALE_SWIFT_PREFIX')
              or os.getenv('WALE_WABS_PREFIX'))

    if prefix is None:
        raise UserException(msg='no storage prefix defined',
                            hint=('Either set one of the'
                                  ' --file-prefix,'
                                  ' --gs-prefix,'
                                  ' --s3-prefix or'
                                  ' --wabs-prefix options'
                                  ' or define one of the'
                                  ' WALE_FILE_PREFIX,'
                                  ' WALE_GS_PREFIX,'
                                  ' WALE_S3_PREFIX,'
                                  ' WALE_SWIFT_PREFIX or'
                                  ' WALE_WABS_PREFIX,'
                                  ' environment variables.'))

    # The prefix's URL scheme determines which backend branch runs below.
    store = storage.StorageLayout(prefix)

    # GPG can be optionally layered atop of every backend, so a common
    # code path suffices.
    gpg_key_id = args.gpg_key_id or os.getenv('WALE_GPG_KEY_ID')
    if gpg_key_id is not None:
        # Fail fast if the gpg binary is not runnable.
        external_program_check([GPG_BIN])

    # Enumeration of reading in configuration for all supported
    # backend data stores, yielding value adhering to the
    # 'operator.Backup' protocol.
    #
    # Backend imports are deferred into each branch so that only the
    # selected backend's dependencies need to be importable.
    if store.is_s3:
        # Instance-profile credentials can be requested either by CLI
        # flag or via the AWS_INSTANCE_PROFILE environment variable.
        use_instance_profile = args.aws_instance_profile or \
            parse_boolean_envvar(os.getenv('AWS_INSTANCE_PROFILE'))
        if use_instance_profile:
            creds = s3_instance_profile()
        else:
            creds = s3_explicit_creds(args)
        from wal_e.blobstore import s3
        # NOTE(review): presumably switches signing to SigV4 when the
        # region requires it — confirm against the s3 blobstore module.
        s3.sigv4_check_apply()

        from wal_e.operator import s3_operator

        return s3_operator.S3Backup(store, creds, gpg_key_id)
    elif store.is_wabs:
        account_name = args.wabs_account_name or os.getenv('WABS_ACCOUNT_NAME')
        if account_name is None:
            raise UserException(msg='WABS account name is undefined',
                                hint=_config_hint_generate(
                                    'wabs-account-name', True))

        # Either an access key or a SAS token is sufficient.
        access_key = os.getenv('WABS_ACCESS_KEY')
        access_token = os.getenv('WABS_SAS_TOKEN')
        if not (access_key or access_token):
            raise UserException(
                msg='WABS access credentials is required but not provided',
                hint=('Define one of the WABS_ACCESS_KEY or '
                      'WABS_SAS_TOKEN environment variables.'))

        from wal_e.blobstore import wabs
        from wal_e.operator.wabs_operator import WABSBackup

        creds = wabs.Credentials(account_name, access_key, access_token)

        return WABSBackup(store, creds, gpg_key_id)
    elif store.is_swift:
        from wal_e.blobstore import swift
        from wal_e.operator.swift_operator import SwiftBackup

        # Swift configuration is environment-only; unset variables are
        # passed through as None for the credentials object to handle.
        creds = swift.Credentials(
            os.getenv('SWIFT_AUTHURL'),
            os.getenv('SWIFT_USER'),
            os.getenv('SWIFT_PASSWORD'),
            os.getenv('SWIFT_TENANT'),
            os.getenv('SWIFT_REGION'),
            os.getenv('SWIFT_ENDPOINT_TYPE', 'publicURL'),
            os.getenv('SWIFT_AUTH_VERSION', '2'),
            os.getenv('SWIFT_DOMAIN_ID'),
            os.getenv('SWIFT_DOMAIN_NAME'),
            os.getenv('SWIFT_TENANT_ID'),
            os.getenv('SWIFT_USER_ID'),
            os.getenv('SWIFT_USER_DOMAIN_ID'),
            os.getenv('SWIFT_USER_DOMAIN_NAME'),
            os.getenv('SWIFT_PROJECT_ID'),
            os.getenv('SWIFT_PROJECT_NAME'),
            os.getenv('SWIFT_PROJECT_DOMAIN_ID'),
            os.getenv('SWIFT_PROJECT_DOMAIN_NAME'),
        )
        return SwiftBackup(store, creds, gpg_key_id)
    elif store.is_gs:
        # GS credentials are handled by the operator/SDK, not here.
        from wal_e.operator.gs_operator import GSBackup
        return GSBackup(store, gpg_key_id)
    elif store.is_file:
        from wal_e.blobstore import file
        from wal_e.operator.file_operator import FileBackup

        creds = file.Credentials()
        return FileBackup(store, creds, gpg_key_id)
    else:
        raise UserCritical(msg='no unsupported blob stores should get here',
                           hint='Report a bug.')
Example #6
0
def configure_backup_cxt(args):
    """Build a backend-specific backup operator from args and environment.

    Resolves the storage prefix (S3, WABS, GS, or Swift) from
    command-line options or WALE_*_PREFIX environment variables,
    optionally layers GPG encryption on top, and returns an object
    adhering to the 'operator.Backup' protocol for that backend.

    :param args: parsed command-line arguments
    :raises UserException: when no storage prefix is configured, or a
        backend's required credentials are missing
    :raises UserCritical: when the prefix maps to no known backend
    """
    # Try to find some WAL-E prefix to store data in.
    prefix = (args.s3_prefix or args.wabs_prefix or args.gs_prefix
              or os.getenv('WALE_S3_PREFIX') or os.getenv('WALE_WABS_PREFIX')
              or os.getenv('WALE_GS_PREFIX') or os.getenv('WALE_SWIFT_PREFIX'))

    if prefix is None:
        raise UserException(
            msg='no storage prefix defined',
            hint=(
                'Either set one of the --wabs-prefix, --s3-prefix or '
                '--gs-prefix options or define one of the WALE_WABS_PREFIX, '
                'WALE_S3_PREFIX, WALE_SWIFT_PREFIX or WALE_GS_PREFIX '
                'environment variables.'
            )
        )

    # The prefix's URL scheme determines which backend branch runs below.
    store = storage.StorageLayout(prefix)

    # GPG can be optionally layered atop of every backend, so a common
    # code path suffices.
    gpg_key_id = args.gpg_key_id or os.getenv('WALE_GPG_KEY_ID')
    if gpg_key_id is not None:
        # Fail fast if the gpg binary is not runnable.
        external_program_check([GPG_BIN])

    # Enumeration of reading in configuration for all supported
    # backend data stores, yielding value adhering to the
    # 'operator.Backup' protocol.
    #
    # Backend imports are deferred into each branch so that only the
    # selected backend's dependencies need to be importable.
    if store.is_s3:
        # Instance-profile credentials can be requested either by CLI
        # flag or via the AWS_INSTANCE_PROFILE environment variable.
        use_instance_profile = args.aws_instance_profile or \
            parse_boolean_envvar(os.getenv('AWS_INSTANCE_PROFILE'))
        if use_instance_profile:
            creds = s3_instance_profile()
        else:
            creds = s3_explicit_creds(args)
        from wal_e.blobstore import s3
        # NOTE(review): presumably switches signing to SigV4 when the
        # region requires it — confirm against the s3 blobstore module.
        s3.sigv4_check_apply()

        from wal_e.operator import s3_operator

        return s3_operator.S3Backup(store, creds, gpg_key_id)
    elif store.is_wabs:
        account_name = args.wabs_account_name or os.getenv('WABS_ACCOUNT_NAME')
        if account_name is None:
            raise UserException(
                msg='WABS account name is undefined',
                hint=_config_hint_generate('wabs-account-name', True))

        # Either an access key or a SAS token is sufficient.
        access_key = os.getenv('WABS_ACCESS_KEY')
        access_token = os.getenv('WABS_SAS_TOKEN')
        if not (access_key or access_token):
            raise UserException(
                msg='WABS access credentials is required but not provided',
                hint=(
                    'Define one of the WABS_ACCESS_KEY or '
                    'WABS_SAS_TOKEN environment variables.'
                ))

        from wal_e.blobstore import wabs
        from wal_e.operator.wabs_operator import WABSBackup

        creds = wabs.Credentials(account_name, access_key, access_token)

        return WABSBackup(store, creds, gpg_key_id)
    elif store.is_swift:
        from wal_e.blobstore import swift
        from wal_e.operator.swift_operator import SwiftBackup

        # Swift configuration is environment-only; unset variables are
        # passed through as None for the credentials object to handle.
        creds = swift.Credentials(
            os.getenv('SWIFT_AUTHURL'),
            os.getenv('SWIFT_USER'),
            os.getenv('SWIFT_PASSWORD'),
            os.getenv('SWIFT_TENANT'),
            os.getenv('SWIFT_REGION'),
            os.getenv('SWIFT_ENDPOINT_TYPE', 'publicURL'),
            os.getenv('SWIFT_AUTH_VERSION', '2'),
            os.getenv('SWIFT_DOMAIN_ID'),
            os.getenv('SWIFT_DOMAIN_NAME'),
            os.getenv('SWIFT_TENANT_ID'),
            os.getenv('SWIFT_USER_ID'),
            os.getenv('SWIFT_USER_DOMAIN_ID'),
            os.getenv('SWIFT_USER_DOMAIN_NAME'),
            os.getenv('SWIFT_PROJECT_ID'),
            os.getenv('SWIFT_PROJECT_NAME'),
            os.getenv('SWIFT_PROJECT_DOMAIN_ID'),
            os.getenv('SWIFT_PROJECT_DOMAIN_NAME'),
        )
        return SwiftBackup(store, creds, gpg_key_id)
    elif store.is_gs:
        # GS credentials are handled by the operator/SDK, not here.
        from wal_e.operator.gs_operator import GSBackup
        return GSBackup(store, gpg_key_id)
    else:
        raise UserCritical(
            msg='no unsupported blob stores should get here',
            hint='Report a bug.')