Example #1
def test_sigv4_only_region(tmpdir, monkeypatch):
    monkeypatch.setenv('AWS_REGION', 'eu-central-1')
    sigv4_check_apply()

    bucket_name = bucket_name_mangle('sigv4')
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))
    cinfo = calling_format.from_store_name(bucket_name)
    conn = cinfo.connect(creds)

    try:
        conn.create_bucket(bucket_name, location='eu-central-1')
    except boto.exception.S3CreateError:
        pass

    source = unicode(tmpdir.join('source'))
    contents = 'abcdefghijklmnopqrstuvwxyz\n' * 100
    with open(source, 'wb') as f:
        f.write(contents)

    data_url = 's3://{0}/data'.format(bucket_name)

    with open(source) as f:
        uri_put_file(creds, data_url, f)

    results = uri_get_file(creds, data_url)
    assert contents == results
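
The sigv4_check_apply() call above is a WAL-E test helper whose body is not shown in these examples. As a rough, hypothetical sketch of what opting into Signature Version 4 looks like with boto 2 (which eu-central-1 requires), rather than WAL-E's actual implementation, one might write:

import os

from boto.s3.connection import OrdinaryCallingFormat, S3Connection

# Hypothetical sketch only: boto 2 honors either the S3_USE_SIGV4
# environment variable or the "[s3] use-sigv4" config option, and
# SigV4 requests must target a concrete regional endpoint.
os.environ['S3_USE_SIGV4'] = 'True'

conn = S3Connection(os.getenv('AWS_ACCESS_KEY_ID'),
                    os.getenv('AWS_SECRET_ACCESS_KEY'),
                    host='s3.eu-central-1.amazonaws.com',
                    calling_format=OrdinaryCallingFormat())
bucket = conn.get_bucket('some-existing-bucket', validate=False)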
Example #3
def test_get_bucket_vs_certs():
    """Integration test for bucket naming issues."""
    import boto.s3.connection

    # Add dots to try to trip up TLS certificate validation.
    bucket_name = bucket_name_mangle('wal-e.test.dots', delimiter='.')

    with pytest.raises(boto.https_connection.InvalidCertificateException):
        with FreshBucket(bucket_name, calling_format=SubdomainCallingFormat()):
            pass
Example #4
def test_404_termination(tmpdir):
    bucket_name = bucket_name_mangle("wal-e-test-404-termination")
    creds = Credentials(os.getenv("AWS_ACCESS_KEY_ID"), os.getenv("AWS_SECRET_ACCESS_KEY"))

    with FreshBucket(bucket_name, host="s3.amazonaws.com", calling_format=OrdinaryCallingFormat()) as fb:
        fb.create()

        target = str(tmpdir.join("target"))
        ret = do_lzop_get(creds, "s3://" + bucket_name + "/not-exist.lzo", target, False)
        assert ret is False
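
The point of this test is that do_lzop_get reports a missing source key by returning False instead of raising or retrying indefinitely. Below is a hypothetical sketch of that termination rule on top of boto 2; fetch_or_none and its signature are illustrative and not WAL-E's API:

import boto.exception

def fetch_or_none(bucket, key_name, local_path):
    # A missing key can surface either as get_key() returning None or
    # as a 404 S3ResponseError; both terminate the fetch with False.
    key = bucket.get_key(key_name)
    if key is None:
        return False
    try:
        key.get_contents_to_filename(local_path)
    except boto.exception.S3ResponseError as e:
        if e.status == 404:
            return False
        raise
    return True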
Example #6
def test_sigv4_only_region(tmpdir, monkeypatch):
    bucket_name = bucket_name_mangle('sigv4')
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))
    monkeypatch.setenv('AWS_REGION', 'eu-central-1')

    def create_bucket_if_not_exists():
        """Create a bucket via path-based API calls.

        This is because the preferred "$BUCKETNAME.s3.amazonaws.com"
        subdomain doesn't yet exist for a not-yet-created bucket.

        """
        monkeypatch.setenv('WALE_S3_ENDPOINT',
                           'https+path://s3-eu-central-1.amazonaws.com')
        cinfo = calling_format.from_store_name(bucket_name)
        conn = cinfo.connect(creds)
        try:
            conn.create_bucket(bucket_name, location='eu-central-1')
        except boto.exception.S3CreateError:
            pass
        monkeypatch.delenv('WALE_S3_ENDPOINT')

    create_bucket_if_not_exists()

    def validate_bucket():
        """Validate the eu-central-1 bucket's existence

        This is done using the subdomain that points to eu-central-1.

        """

        sigv4_check_apply()
        cinfo = calling_format.from_store_name(bucket_name)
        conn = cinfo.connect(creds)
        conn.get_bucket(bucket_name, validate=True)

    validate_bucket()

    def upload_download():
        """ Test uri_put_file and uri_get_file in eu-central-1"""
        source = str(tmpdir.join('source'))
        contents = b'abcdefghijklmnopqrstuvwxyz\n' * 100
        with open(source, 'wb') as f:
            f.write(contents)

        data_url = 's3://{0}/data'.format(bucket_name)

        with open(source) as f:
            uri_put_file(creds, data_url, f)

        results = uri_get_file(creds, data_url)
        assert contents == results

    upload_download()
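
The WALE_S3_ENDPOINT value used above encodes both a protocol and an addressing convention in its scheme ("https+path://..."). The real parsing lives in WAL-E's calling_format module; the sketch below only illustrates the split that the docstring relies on, and the helper name is made up:

from boto.s3.connection import OrdinaryCallingFormat, SubdomainCallingFormat

def parse_endpoint_sketch(value):
    # 'https+path://host' -> protocol 'https', path-style addressing.
    # In this sketch, anything other than '+path' falls back to
    # subdomain addressing.
    scheme, host = value.split('://', 1)
    protocol, _, convention = scheme.partition('+')
    calling_format = (OrdinaryCallingFormat()
                      if convention == 'path'
                      else SubdomainCallingFormat())
    return protocol, host, calling_format

protocol, host, cf = parse_endpoint_sketch(
    'https+path://s3-eu-central-1.amazonaws.com')
assert (protocol, host) == ('https', 's3-eu-central-1.amazonaws.com')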
Example #7
def test_sigv4_only_region(tmpdir, monkeypatch):
    bucket_name = bucket_name_mangle("sigv4")
    creds = Credentials(os.getenv("AWS_ACCESS_KEY_ID"), os.getenv("AWS_SECRET_ACCESS_KEY"))
    monkeypatch.setenv("AWS_REGION", "eu-central-1")

    def create_bucket_if_not_exists():
        """Create a bucket via path-based API calls.

        This is because the preferred "$BUCKETNAME.s3.amazonaws.com"
        subdomain doesn't yet exist for a not-yet-created bucket.

        """
        monkeypatch.setenv("WALE_S3_ENDPOINT", "https+path://s3-eu-central-1.amazonaws.com")
        cinfo = calling_format.from_store_name(bucket_name)
        conn = cinfo.connect(creds)
        try:
            conn.create_bucket(bucket_name, location="eu-central-1")
        except boto.exception.S3CreateError:
            pass
        monkeypatch.delenv("WALE_S3_ENDPOINT")

    create_bucket_if_not_exists()

    def validate_bucket():
        """Validate the eu-central-1 bucket's existence

        This is done using the subdomain that points to eu-central-1.

        """

        sigv4_check_apply()
        cinfo = calling_format.from_store_name(bucket_name)
        conn = cinfo.connect(creds)
        conn.get_bucket(bucket_name, validate=True)

    validate_bucket()

    def upload_download():
        """ Test uri_put_file and uri_get_file in eu-central-1"""
        source = str(tmpdir.join("source"))
        contents = b"abcdefghijklmnopqrstuvwxyz\n" * 100
        with open(source, "wb") as f:
            f.write(contents)

        data_url = "s3://{0}/data".format(bucket_name)

        with open(source) as f:
            uri_put_file(creds, data_url, f)

        results = uri_get_file(creds, data_url)
        assert contents == results

    upload_download()
Example #8
def test_404_termination(tmpdir):
    bucket_name = bucket_name_mangle('wal-e-test-404-termination')
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    with FreshBucket(bucket_name, host='s3.amazonaws.com',
                     calling_format=OrdinaryCallingFormat()) as fb:
        fb.create()

        target = unicode(tmpdir.join('target'))
        ret = do_lzop_get(creds, 's3://' + bucket_name + '/not-exist.lzo',
                          target, False)
        assert ret is False
Example #9
def test_empty_latest_listing():
    """Test listing a 'backup-list LATEST' on an empty prefix."""

    bucket_name = bucket_name_mangle('wal-e-test-empty-listing')
    layout = storage.StorageLayout('s3://{0}/test-prefix'
                                   .format(bucket_name))

    with FreshBucket(bucket_name, host='s3.amazonaws.com',
                     calling_format=OrdinaryCallingFormat()) as fb:
        fb.create()
        bl = BackupList(fb.conn, layout, False)
        found = list(bl.find_all('LATEST'))
        assert len(found) == 0
Example #10
def test_empty_latest_listing():
    """Test listing a 'backup-list LATEST' on an empty prefix."""

    bucket_name = bucket_name_mangle('wal-e-test-empty-listing')
    layout = storage.StorageLayout('s3://{0}/test-prefix'.format(bucket_name))

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=OrdinaryCallingFormat()) as fb:
        fb.create()
        bl = BackupList(fb.conn, layout, False)
        found = list(bl.find_all('LATEST'))
        assert len(found) == 0
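
FreshBucket comes from WAL-E's integration-test support code and is not reproduced in these examples. A minimal sketch, assuming boto 2 and ignoring details such as error handling and retries, of the create/clean-up shape implied by its use here (fb.create(), fb.conn, and the keys argument):

import os

import boto.exception
from boto.s3.connection import S3Connection

class FreshBucketSketch(object):
    """Illustrative stand-in for the real FreshBucket helper."""

    def __init__(self, bucket_name, keys=(), **conn_kwargs):
        self.bucket_name = bucket_name
        self.keys = list(keys)
        # The real helper presumably picks up credentials from the
        # environment in much the same way.
        self.conn = S3Connection(os.getenv('AWS_ACCESS_KEY_ID'),
                                 os.getenv('AWS_SECRET_ACCESS_KEY'),
                                 **conn_kwargs)

    def __enter__(self):
        return self

    def create(self, **kwargs):
        try:
            return self.conn.create_bucket(self.bucket_name, **kwargs)
        except boto.exception.S3CreateError:
            # Bucket already owned by this account: reuse it.
            return self.conn.get_bucket(self.bucket_name, validate=False)

    def __exit__(self, *exc_info):
        # Delete the keys named up front so the bucket itself can go.
        bucket = self.conn.get_bucket(self.bucket_name, validate=False)
        for name in self.keys:
            bucket.delete_key(name)
        self.conn.delete_bucket(self.bucket_name)
        return False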
Example #11
def test_301_redirect():
    """Integration test for bucket naming issues this test."""
    import boto.s3.connection

    bucket_name = bucket_name_mangle('wal-e-test-301-redirect')

    with pytest.raises(boto.exception.S3ResponseError) as e:
        # Just initiating the bucket manipulation API calls is enough
        # to provoke a 301 redirect.
        with FreshBucket(bucket_name,
                         calling_format=OrdinaryCallingFormat()) as fb:
            fb.create(location='us-west-1')

    assert e.value.status == 301
Example #12
def test_404_termination(tmpdir):
    bucket_name = bucket_name_mangle('wal-e-test-404-termination')
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=OrdinaryCallingFormat()) as fb:
        fb.create()

        target = unicode(tmpdir.join('target'))
        ret = do_lzop_get(creds, 's3://' + bucket_name + '/not-exist.lzo',
                          target, False)
        assert ret is False
Example #14
def test_subdomain_compatible():
    """Exercise a case where connecting is region-oblivious."""
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    bucket_name = bucket_name_mangle('wal-e-test-us-west-1-no-dots')

    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(creds)

        assert cinfo.region is None
        assert cinfo.calling_format is connection.SubdomainCallingFormat
        assert isinstance(conn.calling_format,
                          connection.SubdomainCallingFormat)
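
The examples lean on a naming rule: bucket names containing dots (as in Example #3) defeat wildcard TLS certificate matching under subdomain addressing, so calling_format.from_store_name falls back to path-style access for them, while dot-free names such as this one get SubdomainCallingFormat. A simplified sketch of that selection, not WAL-E's actual implementation:

from boto.s3.connection import OrdinaryCallingFormat, SubdomainCallingFormat

def choose_calling_format_sketch(bucket_name):
    # '.' in a bucket name breaks the *.s3.amazonaws.com wildcard
    # certificate, so such buckets must be addressed path-style.
    if '.' in bucket_name:
        return OrdinaryCallingFormat()
    return SubdomainCallingFormat()

assert isinstance(choose_calling_format_sketch('wal-e.test.dots'),
                  OrdinaryCallingFormat)
assert isinstance(choose_calling_format_sketch('wal-e-test-us-west-1-no-dots'),
                  SubdomainCallingFormat)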
Example #16
def test_backup_list(sts_conn, monkeypatch):
    """Test BackupList's compatibility with a test policy."""
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    bn = bucket_name_mangle('wal-e.sts.backup.list')
    h = 's3-us-west-1.amazonaws.com'
    cf = connection.OrdinaryCallingFormat()
    fed = sts_conn.get_federation_token('wal-e-test-backup-list',
                                        policy=make_policy(bn, 'test-prefix'))
    layout = StorageLayout('s3://{0}/test-prefix'.format(bn))
    creds = Credentials(fed.credentials.access_key, fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, calling_format=cf, host=h) as fb:
        fb.create(location='us-west-1')

        cinfo = calling_format.from_store_name(bn)
        conn = cinfo.connect(creds)
        conn.host = h

        backups = list(BackupList(conn, layout, True))
        assert not backups
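
A brief sketch of the STS hand-off the sts_conn fixture implies, assuming boto 2 (the region choice and token name below are placeholders): the federation token's access key, secret key, and session token must be presented together, which is why the tests bundle all three into one Credentials object.

import boto
import boto.sts

sts = boto.sts.connect_to_region('us-east-1')
fed = sts.get_federation_token('wal-e-test-sketch',
                               policy=None)  # the tests pass make_policy(...)

# boto 2 accepts the temporary session token via security_token.
s3 = boto.connect_s3(aws_access_key_id=fed.credentials.access_key,
                     aws_secret_access_key=fed.credentials.secret_key,
                     security_token=fed.credentials.session_token)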
Example #17
def test_uri_put_file(sts_conn, monkeypatch):
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    bn = bucket_name_mangle('wal-e.sts.uri.put.file')
    cf = connection.OrdinaryCallingFormat()
    policy_text = make_policy(bn, 'test-prefix', allow_get_location=True)
    fed = sts_conn.get_federation_token('wal-e-test-uri-put-file',
                                        policy=policy_text)

    key_path = 'test-prefix/test-key'

    creds = Credentials(fed.credentials.access_key,
                        fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, keys=[key_path], calling_format=cf,
                     host='s3-us-west-1.amazonaws.com') as fb:
        fb.create(location='us-west-1')
        uri_put_file(creds, 's3://' + bn + '/' + key_path,
                     StringIO('test-content'))
        k = connection.Key(fb.conn.get_bucket(bn, validate=False))
        k.name = key_path
        assert k.get_contents_as_string() == b'test-content'
Example #18
def test_backup_list(sts_conn, monkeypatch):
    """Test BackupList's compatibility with a test policy."""
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    bn = bucket_name_mangle('wal-e.sts.backup.list')
    h = 's3-us-west-1.amazonaws.com'
    cf = connection.OrdinaryCallingFormat()
    fed = sts_conn.get_federation_token('wal-e-test-backup-list',
                                        policy=make_policy(bn, 'test-prefix'))
    layout = StorageLayout('s3://{0}/test-prefix'.format(bn))
    creds = Credentials(fed.credentials.access_key,
                        fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, calling_format=cf, host=h) as fb:
        fb.create(location='us-west-1')

        cinfo = calling_format.from_store_name(bn)
        conn = cinfo.connect(creds)
        conn.host = h

        backups = list(BackupList(conn, layout, True))
        assert not backups
Example #19
def test_uri_put_file(sts_conn, monkeypatch):
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    bn = bucket_name_mangle('wal-e.sts.uri.put.file')
    cf = connection.OrdinaryCallingFormat()
    policy_text = make_policy(bn, 'test-prefix', allow_get_location=True)
    fed = sts_conn.get_federation_token('wal-e-test-uri-put-file',
                                        policy=policy_text)

    key_path = 'test-prefix/test-key'

    creds = Credentials(fed.credentials.access_key, fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn,
                     keys=[key_path],
                     calling_format=cf,
                     host='s3-us-west-1.amazonaws.com') as fb:
        fb.create(location='us-west-1')
        uri_put_file(creds, 's3://' + bn + '/' + key_path,
                     StringIO('test-content'))
        k = connection.Key(fb.conn.get_bucket(bn, validate=False))
        k.name = key_path
        assert k.get_contents_as_string() == b'test-content'
Example #20
def test_policy(sts_conn, monkeypatch):
    """Sanity checks for the intended ACLs of the policy"""
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    # Use periods to force OrdinaryCallingFormat when using
    # calling_format.from_store_name.
    bn = bucket_name_mangle('wal-e.sts.list.test')
    h = 's3-us-west-1.amazonaws.com'
    cf = connection.OrdinaryCallingFormat()

    fed = sts_conn.get_federation_token('wal-e-test-list-bucket',
                                        policy=make_policy(bn, 'test-prefix'))
    test_payload = 'wal-e test'

    keys = [
        'test-prefix/hello', 'test-prefix/world', 'not-in-prefix/goodbye',
        'not-in-prefix/world'
    ]
    creds = Credentials(fed.credentials.access_key, fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, keys=keys, calling_format=cf, host=h) as fb:
        # Superuser creds, for testing keys not in the prefix.
        bucket_superset_creds = fb.create(location='us-west-1')

        cinfo = calling_format.from_store_name(bn)
        conn = cinfo.connect(creds)
        conn.host = h

        # Bucket using the token, subject to the policy.
        bucket = conn.get_bucket(bn, validate=False)

        for name in keys:
            if name.startswith('test-prefix/'):
                # Test the PUT privilege.
                k = connection.Key(bucket)
            else:
                # Not in the prefix, so PUT will not work.
                k = connection.Key(bucket_superset_creds)

            k.key = name
            k.set_contents_from_string(test_payload)

        # Test listing keys within the prefix.
        prefix_fetched_keys = list(bucket.list(prefix='test-prefix/'))
        assert len(prefix_fetched_keys) == 2

        # Test the GET privilege.
        for key in prefix_fetched_keys:
            assert key.get_contents_as_string() == b'wal-e test'

        # Try a bogus listing outside the valid prefix.
        with pytest.raises(exception.S3ResponseError) as e:
            list(bucket.list(prefix=''))

        assert e.value.status == 403

        # Test the rejection of PUT outside of prefix.
        k = connection.Key(bucket)
        k.key = 'not-in-prefix/world'

        with pytest.raises(exception.S3ResponseError) as e:
            k.set_contents_from_string(test_payload)

        assert e.value.status == 403
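
make_policy is another test helper that is not shown here. A hypothetical sketch of a policy with the behavior this test asserts (object reads and writes allowed only under the prefix, listing restricted to the prefix, everything else denied by omission and therefore answered with 403):

import json

def make_policy_sketch(bucket_name, prefix, allow_get_location=False):
    statements = [
        {'Effect': 'Allow',
         'Action': ['s3:GetObject', 's3:PutObject', 's3:DeleteObject'],
         'Resource': 'arn:aws:s3:::{0}/{1}/*'.format(bucket_name, prefix)},
        {'Effect': 'Allow',
         'Action': 's3:ListBucket',
         'Resource': 'arn:aws:s3:::{0}'.format(bucket_name),
         'Condition': {'StringLike': {'s3:prefix': prefix + '/*'}}},
    ]
    if allow_get_location:
        # test_uri_put_file passes allow_get_location=True for this.
        statements.append(
            {'Effect': 'Allow',
             'Action': 's3:GetBucketLocation',
             'Resource': 'arn:aws:s3:::{0}'.format(bucket_name)})
    return json.dumps({'Statement': statements})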
Example #21
def test_policy(sts_conn, monkeypatch):
    """Sanity checks for the intended ACLs of the policy"""
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    # Use periods to force OrdinaryCallingFormat when using
    # calling_format.from_store_name.
    bn = bucket_name_mangle('wal-e.sts.list.test')
    h = 's3-us-west-1.amazonaws.com'
    cf = connection.OrdinaryCallingFormat()

    fed = sts_conn.get_federation_token('wal-e-test-list-bucket',
                                        policy=make_policy(bn, 'test-prefix'))
    test_payload = 'wal-e test'

    keys = ['test-prefix/hello', 'test-prefix/world',
            'not-in-prefix/goodbye', 'not-in-prefix/world']
    creds = Credentials(fed.credentials.access_key,
                        fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, keys=keys, calling_format=cf, host=h) as fb:
        # Superuser creds, for testing keys not in the prefix.
        bucket_superset_creds = fb.create(location='us-west-1')

        cinfo = calling_format.from_store_name(bn)
        conn = cinfo.connect(creds)
        conn.host = h

        # Bucket using the token, subject to the policy.
        bucket = conn.get_bucket(bn, validate=False)

        for name in keys:
            if name.startswith('test-prefix/'):
                # Test the PUT privilege.
                k = connection.Key(bucket)
            else:
                # Not in the prefix, so PUT will not work.
                k = connection.Key(bucket_superset_creds)

            k.key = name
            k.set_contents_from_string(test_payload)

        # Test listing keys within the prefix.
        prefix_fetched_keys = list(bucket.list(prefix='test-prefix/'))
        assert len(prefix_fetched_keys) == 2

        # Test the GET privilege.
        for key in prefix_fetched_keys:
            assert key.get_contents_as_string() == b'wal-e test'

        # Try a bogus listing outside the valid prefix.
        with pytest.raises(exception.S3ResponseError) as e:
            list(bucket.list(prefix=''))

        assert e.value.status == 403

        # Test the rejection of PUT outside of prefix.
        k = connection.Key(bucket)
        k.key = 'not-in-prefix/world'

        with pytest.raises(exception.S3ResponseError) as e:
            k.set_contents_from_string(test_payload)

        assert e.value.status == 403