# Imports assumed by the examples below.  The wal_e and test-helper module
# paths are assumptions and may need adjusting for the WAL-E version in use;
# SUBDOMAIN_OK and SUBDOMAIN_BOGUS are bucket-name fixtures defined alongside
# these tests.
import inspect
import os

import boto
import boto.exception
import boto.s3.bucket
import boto.s3.connection
import boto.s3.key
import pytest

from urlparse import urlparse  # Python 3: from urllib.parse import urlparse

from boto.s3 import connection
from boto.s3.connection import Location
from wal_e.blobstore.s3 import calling_format

from s3_integration_help import FreshBucket, no_real_s3_credentials


def test_str_repr_call_info():
    """Ensure CallingInfo renders sensibly.

    Try a few cases sensitive to the bucket name.
    """
    if boto.__version__ <= '2.2.0':
        pytest.skip('Class name output is unstable on older boto versions')

    cinfo = calling_format.from_bucket_name('hello-world')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(hello-world, "
        "<class 'boto.s3.connection.SubdomainCallingFormat'>, "
        "None, None)")

    cinfo = calling_format.from_bucket_name('hello.world')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(hello.world, "
        "<class 'boto.s3.connection.OrdinaryCallingFormat'>, "
        "None, None)")

    cinfo = calling_format.from_bucket_name('Hello-World')
    assert repr(cinfo) == str(cinfo)
    assert repr(cinfo) == (
        "CallingInfo(Hello-World, "
        "<class 'boto.s3.connection.OrdinaryCallingFormat'>, "
        "'us-standard', 's3.amazonaws.com')")
def prepare_s3_default_test_bucket():
    # Check credentials are present: this procedure should not be
    # called otherwise.
    if no_real_s3_credentials():
        assert False

    bucket_name = 'wale-test-default-' + os.getenv('AWS_ACCESS_KEY_ID').lower()
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')
    cinfo = calling_format.from_bucket_name(bucket_name)
    conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

    def _clean():
        bucket = conn.get_bucket(bucket_name)
        for key in bucket.list():
            key.delete()
    try:
        conn.create_bucket(bucket_name, location=Location.USWest)
    except boto.exception.S3CreateError as e:
        if e.status == 409:
            # Conflict: bucket already present.  Re-use it, but
            # clean it out first.
            _clean()
        else:
            raise


def test_cipher_suites():
    # Imported for its side effects of setting up ssl cipher suites
    # and gevent.
    from wal_e import cmd

    # Quiet pyflakes.
    assert cmd

    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    cinfo = calling_format.from_bucket_name('irrelevant')
    conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

    # Warm up the pool and the connection in it; new_http_connection
    # seems to be a more natural choice, but leaves the '.sock'
    # attribute null.
    conn.get_all_buckets()

    # Set up 'port' keyword argument for newer Botos that require it.
    spec = inspect.getargspec(conn._pool.get_http_connection)
    kw = {'host': 's3.amazonaws.com', 'is_secure': True}
    if 'port' in spec.args:
        kw['port'] = 443

    htcon = conn._pool.get_http_connection(**kw)

    chosen_cipher_suite = htcon.sock.cipher()[0].split('-')

    # Test for the expected cipher suite.
    #
    # This can change or vary on different platforms somewhat
    # harmlessly, but do the simple thing and insist on an exact match
    # for now.
    assert chosen_cipher_suite == ['AES256', 'SHA']
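

# A minimal sketch, for illustration only, of how a module such as wal_e.cmd
# could pin the cipher suite asserted above: build an ssl context restricted
# to a single OpenSSL cipher string.  The function name and approach are
# assumptions, not the actual wal_e implementation.
def _pinned_ssl_context():
    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    # 'AES256-SHA' splits on '-' into ['AES256', 'SHA'], matching the
    # assertion in test_cipher_suites above.
    ctx.set_ciphers('AES256-SHA')
    return ctx

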
def test_us_standard_default_for_bogus():
    """Test degradation to us-standard for all weird bucket names.

    Such bucket names are not supported outside of us-standard by
    WAL-E.
    """
    for bn in SUBDOMAIN_BOGUS:
        cinfo = calling_format.from_bucket_name(bn)
        assert cinfo.region == 'us-standard'
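

# Hypothetical illustration of the kind of values the SUBDOMAIN_BOGUS fixture
# holds: bucket names that cannot be addressed with SubdomainCallingFormat at
# all.  The real list is defined with the tests; these entries are assumptions.
_SUBDOMAIN_BOGUS_EXAMPLE = [
    'Hello-World',       # upper-case characters
    'hello_world',       # underscores are not DNS-compatible
    '-leading-dash',     # cannot begin with a dash
    'exceedingly' * 10,  # far longer than the 63-character DNS label limit
]

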
def uri_to_key(aws_access_key_id, aws_secret_access_key, s3_uri):
    assert s3_uri.startswith('s3://')

    url_tup = urlparse(s3_uri)
    bucket_name = url_tup.netloc
    cinfo = calling_format.from_bucket_name(bucket_name)
    conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)
    bucket = boto.s3.bucket.Bucket(connection=conn, name=bucket_name)
    return boto.s3.key.Key(bucket=bucket, name=url_tup.path)
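

# Hypothetical usage of the helper above (the bucket and key names are made
# up for illustration):
#
#     key = uri_to_key(aws_access_key_id, aws_secret_access_key,
#                      's3://some-bucket/some/key.txt')
#     data = key.get_contents_as_string()

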
def test_get_location_errors(monkeypatch):
    """Simulate situations where get_location fails

    Exercise both the case where IAM refuses the privilege to get the
    bucket location and where some other S3ResponseError is raised
    instead.
    """
    bucket_name = 'wal-e.test.403.get.location'

    def just_403(self):
        raise boto.exception.S3ResponseError(status=403,
                                             reason=None,
                                             body=None)

    def unhandled_404(self):
        raise boto.exception.S3ResponseError(status=404,
                                             reason=None,
                                             body=None)

    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    with FreshBucket(bucket_name,
                     calling_format=connection.OrdinaryCallingFormat()):
        cinfo = calling_format.from_bucket_name(bucket_name)

        # Provoke a 403 when trying to get the bucket location.
        monkeypatch.setattr(boto.s3.bucket.Bucket, 'get_location', just_403)
        cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat

        cinfo = calling_format.from_bucket_name(bucket_name)

        # Provoke an unhandled S3ResponseError, in this case 404 not
        # found.
        monkeypatch.setattr(boto.s3.bucket.Bucket, 'get_location',
                            unhandled_404)

        with pytest.raises(boto.exception.S3ResponseError) as e:
            cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert e.value.status == 404
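

# FreshBucket above comes from WAL-E's test helpers.  A minimal sketch of the
# interface these tests assume (context manager, optional host/calling_format
# connection kwargs, and a create() wrapper); the real helper also retries and
# cleans out leftover keys on exit, so treat this as an approximation.
class _FreshBucketSketch(object):
    def __init__(self, bucket_name, **conn_kwargs):
        self.bucket_name = bucket_name
        self.conn = boto.s3.connection.S3Connection(
            os.getenv('AWS_ACCESS_KEY_ID'),
            os.getenv('AWS_SECRET_ACCESS_KEY'),
            **conn_kwargs)

    def __enter__(self):
        return self

    def create(self, **kwargs):
        return self.conn.create_bucket(self.bucket_name, **kwargs)

    def __exit__(self, typ, value, traceback):
        # Best-effort cleanup; ignore errors from buckets that were never
        # created or are already gone.
        try:
            self.conn.delete_bucket(self.bucket_name)
        except boto.exception.S3ResponseError:
            pass
        return False

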
def test_cert_validation_sensitivity():
    """Test degradation of dotted bucket names to OrdinaryCallingFormat

    Although such bucket names are legal with SubdomainCallingFormat,
    they run afoul of TLS certificate validation, and so they are
    forced to fall back to OrdinaryCallingFormat.
    """
    for bn in SUBDOMAIN_OK:
        if '.' not in bn:
            cinfo = calling_format.from_bucket_name(bn)
            assert (cinfo.calling_format ==
                    boto.s3.connection.SubdomainCallingFormat)
        else:
            assert '.' in bn

            cinfo = calling_format.from_bucket_name(bn)
            assert (cinfo.calling_format == connection.OrdinaryCallingFormat)
            assert cinfo.region is None
            assert cinfo.ordinary_endpoint is None
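

# A rough sketch of the decision rule the tests in this module exercise; an
# assumption for illustration, not the actual from_bucket_name implementation.
# Lowercase, dot-free, DNS-compatible names can use virtual-hosted (subdomain)
# addressing; everything else falls back to OrdinaryCallingFormat, with the
# region resolved later, e.g. via get_location() at connect() time.
def _guess_calling_format(bucket_name):
    import re

    if re.match(r'^[a-z0-9][a-z0-9-]{1,61}[a-z0-9]$', bucket_name):
        return boto.s3.connection.SubdomainCallingFormat
    return boto.s3.connection.OrdinaryCallingFormat

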
def test_classic_get_location():
    """Exercise get location on a s3-classic bucket."""
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    bucket_name = ('wal-e-test.classic.get.location.' +
                   aws_access_key_id.lower())

    cinfo = calling_format.from_bucket_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create()
        conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3.amazonaws.com'


def test_subdomain_compatible():
    """Exercise a case where connecting is region-oblivious."""
    aws_access_key = os.getenv('AWS_ACCESS_KEY_ID')
    bucket_name = 'wal-e-test-us-west-1-no-dots' + aws_access_key.lower()

    cinfo = calling_format.from_bucket_name(bucket_name)

    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region is None
        assert cinfo.calling_format is connection.SubdomainCallingFormat
        assert isinstance(conn.calling_format,
                          connection.SubdomainCallingFormat)


def test_real_get_location():
    """Exercise a case where a get location call is needed.

    In cases where a bucket has offensive characters -- like dots --
    that would otherwise break TLS, test sniffing the right endpoint
    so it can be used to address the bucket.
    """
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    bucket_name = ('wal-e-test-us-west-1.get.location.' +
                   aws_access_key_id.lower())

    cinfo = calling_format.from_bucket_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region == 'us-west-1'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3-us-west-1.amazonaws.com'
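

# A hypothetical sketch of the endpoint sniffing described in the docstring
# above: ask S3 for the bucket's location and map it onto a region-specific
# endpoint so a dotted bucket name can be addressed safely over TLS with
# OrdinaryCallingFormat.  Not the actual WAL-E implementation.
def _sniff_ordinary_endpoint(conn, bucket_name):
    bucket = boto.s3.bucket.Bucket(connection=conn, name=bucket_name)
    location = bucket.get_location()  # '' means the classic us-standard region
    if not location:
        return 's3.amazonaws.com'
    return 's3-' + location + '.amazonaws.com'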