Example #1
    def setUp(self):
        """Setup users, projects, and start a test server."""
        super(S3APITestCase, self).setUp()
        self.flags(auth_driver='nova.auth.ldapdriver.FakeLdapDriver',
                   buckets_path=os.path.join(OSS_TEMPDIR, 'buckets'),
                   s3_host='127.0.0.1')

        self.auth_manager = manager.AuthManager()
        self.admin_user = self.auth_manager.create_user('admin', admin=True)
        self.admin_project = self.auth_manager.create_project('admin',
                                                              self.admin_user)

        shutil.rmtree(FLAGS.buckets_path)
        os.mkdir(FLAGS.buckets_path)

        router = s3server.S3Application(FLAGS.buckets_path)
        server = wsgi.Server()
        server.start(router, FLAGS.s3_port, host=FLAGS.s3_host)

        if not boto.config.has_section('Boto'):
            boto.config.add_section('Boto')
        boto.config.set('Boto', 'num_retries', '0')
        conn = s3.S3Connection(aws_access_key_id=self.admin_user.access,
                               aws_secret_access_key=self.admin_user.secret,
                               host=FLAGS.s3_host,
                               port=FLAGS.s3_port,
                               is_secure=False,
                               calling_format=s3.OrdinaryCallingFormat())
        self.conn = conn

        def get_http_connection(host, is_secure):
            """Get a new S3 connection, don't attempt to reuse connections."""
            return self.conn.new_http_connection(host, is_secure)

        self.conn.get_http_connection = get_http_connection
Example #2
 def __init__(self, config):
     """
     Create the manager. Since the backend is only S3-compatible,
     the endpoint must be specified in the config.
     """
     super(CleversafeClient, self).__init__(__name__)
     self._config = config
     self._host = config["host"]
     self._public_host = config["public_host"]
     self._access_key = config["aws_access_key_id"]
     self._secret_key = config["aws_secret_access_key"]
     self._username = config["username"]
     self._password = config["password"]
     self._permissions_order = {
         "read-storage": 1,
         "write-storage": 2,
         "admin-storage": 3,
         "disabled": 0,
     }
     self._permissions_value = ["disabled", "readOnly", "readWrite", "owner"]
     self._auth = requests.auth.HTTPBasicAuth(self._username, self._password)
     self._conn = connect_s3(
         aws_access_key_id=self._access_key,
         aws_secret_access_key=self._secret_key,
         host=self._public_host,
         calling_format=connection.OrdinaryCallingFormat(),
     )
     self._bucket_name_id_table = {}
     self._update_bucket_name_id_table()
     self._user_name_id_table = {}
     self._user_id_name_table = {}
     self._update_user_name_id_table()
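
A minimal sketch of the config this constructor expects; the key names mirror the config[...] lookups above, while every value is a hypothetical placeholder.

# Hypothetical values; the key names match the config[...] lookups above.
config = {
    "host": "cleversafe-mgmt.example.com",       # management API endpoint
    "public_host": "cleversafe-s3.example.com",  # S3-compatible endpoint
    "aws_access_key_id": "ACCESS",
    "aws_secret_access_key": "SECRET",
    "username": "admin",                         # management API credentials
    "password": "not-a-real-password",
}
client = CleversafeClient(config)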
Example #3
    def setUp(self):
        """Setup users, projects, and start a test server."""
        super(S3APITestCase, self).setUp()
        tempdir = self.useFixture(fixtures.TempDir())
        conf = self.useFixture(config_fixture.Config())
        conf.config(buckets_path=tempdir.path,
                    s3_listen='127.0.0.1',
                    s3_listen_port=0)

        self.server = s3server.get_wsgi_server()
        # NOTE(ft): this requires eventlet.monkey_patch, which is called in
        # tests/unit/__init__.py. Remove it from there if you get rid of the
        # server run in these tests.
        self.server.start()
        self.addCleanup(self.server.stop)

        if not boto.config.has_section('Boto'):
            boto.config.add_section('Boto')

        boto.config.set('Boto', 'num_retries', '0')
        conn = s3.S3Connection(aws_access_key_id='fake',
                               aws_secret_access_key='fake',
                               host=CONF.s3_listen,
                               port=self.server.port,
                               is_secure=False,
                               calling_format=s3.OrdinaryCallingFormat())
        self.conn = conn

        def get_http_connection(*args):
            """Get a new S3 connection, don't attempt to reuse connections."""
            return self.conn.new_http_connection(*args)

        self.conn.get_http_connection = get_http_connection
Example #4
File: S3.py Project: veezor/Nimbus
    def __init__(self, username, access_key, secret_key,
                       rate_limit=None, host=None):

        self.username = username
        self.access_key = access_key
        self.secret_key = secret_key
        self.rate_limit = rate_limit
        self.rate_limiter = RateLimiter(self.rate_limit)
        self.callbacks = CallbackAggregator()
        self.multipart_status_callbacks = CallbackAggregator()
        self.host = host
        self.logger = logging.getLogger(__name__)

        if self.rate_limit:
            self.callbacks.add_callback(self.rate_limiter)

        if self.host == 's3.amazonaws.com':
            self.connection = boto.connect_s3(self.access_key, self.secret_key)
        else:
            self.connection = boto.connect_s3(
                aws_access_key_id=self.access_key,
                aws_secret_access_key=self.secret_key,
                is_secure=False,
                host=self.host,
                port=8773,
                calling_format=boto_s3_connection_class.OrdinaryCallingFormat(),
                path="/services/Walrus")

        if not self.connection:
            raise S3AuthError("check access_key and secret_key")

        self.bucket = self.connection.lookup(username)
        if not self.bucket:
            raise S3AuthError("check access_key and secret_key")
Example #5
    def setUp(self):
        """Setup users, projects, and start a test server."""
        super(S3APITestCase, self).setUp()
        self.flags(buckets_path=os.path.join(OSS_TEMPDIR, 'buckets'),
                   s3_host='127.0.0.1')

        shutil.rmtree(CONF.buckets_path)
        os.mkdir(CONF.buckets_path)

        router = s3server.S3Application(CONF.buckets_path)
        self.server = wsgi.Server("S3 Objectstore",
                                  router,
                                  host=CONF.s3_host,
                                  port=0)
        self.server.start()

        if not boto.config.has_section('Boto'):
            boto.config.add_section('Boto')

        boto.config.set('Boto', 'num_retries', '0')
        conn = s3.S3Connection(aws_access_key_id='fake',
                               aws_secret_access_key='fake',
                               host=CONF.s3_host,
                               port=self.server.port,
                               is_secure=False,
                               calling_format=s3.OrdinaryCallingFormat())
        self.conn = conn

        def get_http_connection(*args):
            """Get a new S3 connection, don't attempt to reuse connections."""
            return self.conn.new_http_connection(*args)

        self.conn.get_http_connection = get_http_connection
Example #6
def _create_s3_connection(config, region):
    calling_format = bsc.OrdinaryCallingFormat()
    if region:
        conn = bs.connect_to_region(
            region,
            aws_access_key_id=config[tac.key_id],
            aws_secret_access_key=config[tac.secret_key],
            proxy=config.get(tac.proxy_hostname),
            proxy_port=config.get(tac.proxy_port),
            proxy_user=config.get(tac.proxy_username),
            proxy_pass=config.get(tac.proxy_password),
            is_secure=True,
            validate_certs=True,
            calling_format=calling_format)
    else:
        if (not os.environ.get("S3_USE_SIGV4") and
                not config.get(asc.bucket_name)):
            calling_format = bsc.SubdomainCallingFormat()

        conn = boto.connect_s3(
            host=config[asc.host_name],
            aws_access_key_id=config[tac.key_id],
            aws_secret_access_key=config[tac.secret_key],
            proxy=config.get(tac.proxy_hostname),
            proxy_port=config.get(tac.proxy_port),
            proxy_user=config.get(tac.proxy_username),
            proxy_pass=config.get(tac.proxy_password),
            is_secure=True,
            validate_certs=True,
            calling_format=calling_format)
    return conn
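
A hedged usage sketch of the branch above: with a region, the connection goes through bs.connect_to_region and boto resolves the endpoint itself; without one, the explicit host is used. The tac/asc key constants and all values here are assumptions.

# Assumed key constants and values, for illustration only.
config = {
    tac.key_id: "ACCESS",
    tac.secret_key: "SECRET",
    asc.host_name: "s3.example.com",
}
# Region given: boto picks the regional endpoint.
conn = _create_s3_connection(config, region="us-west-1")
# No region: the explicit host is used, and SubdomainCallingFormat may be
# chosen when S3_USE_SIGV4 is unset and no bucket name is configured.
conn = _create_s3_connection(config, region=None)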
Example #7
    def __init__(self, config, lazy=False, host_aliases={}):
        """
        Config map should be a map from hostname to args, e.g.:
        {
            "s3.amazonaws.com": {
                "aws_access_key_id": "foo",
                "aws_secret_access_key": "bar",
                "is_secure": False,
                . . .
            },
        }

        :param host_aliases:
            A *REGEX* map from names that match the regex to hostnames
            provided in config
            e.g. ``{'aws\.accessor1\.mirror': 's3.amazonaws.com'}``
        """

        self.config = config
        for host, kwargs in self.config.iteritems():
            # we need to pass the host argument in when we connect, so
            # set it here
            kwargs["host"] = host
            if 'calling_format' not in kwargs:
                kwargs["calling_format"] = connection.OrdinaryCallingFormat()

        self.host_aliases = host_aliases

        self.conns = {}
        if not lazy:
            self.connect()
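
A usage sketch under assumptions: the enclosing class name (here BotoManager) and the hosts are hypothetical; the config shape follows the docstring above.

# Hypothetical class name and hosts; config shape follows the docstring.
config = {
    "s3.amazonaws.com": {
        "aws_access_key_id": "foo",
        "aws_secret_access_key": "bar",
        "is_secure": False,
    },
}
host_aliases = {r"aws\.accessor1\.mirror": "s3.amazonaws.com"}
manager = BotoManager(config, lazy=True, host_aliases=host_aliases)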
Example #8
def _s3connection_opts_from_uri(impl):
    # 'impl' should look like:
    #
    #    <protocol>+<calling_format>://[user:pass]@<host>[:port]
    #
    # A concrete example:
    #
    #     https+virtualhost://user:pass@localhost:1235
    o = urlparse.urlparse(impl, allow_fragments=False)

    if o.scheme is not None:
        proto_match = re.match(
            r'(?P<protocol>http|https)\+'
            r'(?P<format>virtualhost|path|subdomain)', o.scheme)
        if proto_match is None:
            raise UserException(
                msg='WALE_S3_ENDPOINT URI scheme is invalid',
                detail='The scheme defined is ' + repr(o.scheme),
                hint='An example of a valid scheme is https+virtualhost.')

    opts = {}

    if proto_match.group('protocol') == 'http':
        opts['is_secure'] = False
    else:
        # Constrained by prior regexp.
        assert proto_match.group('protocol') == 'https'
        opts['is_secure'] = True

    f = proto_match.group('format')
    if f == 'virtualhost':
        opts['calling_format'] = connection.VHostCallingFormat()
    elif f == 'path':
        opts['calling_format'] = connection.OrdinaryCallingFormat()
    elif f == 'subdomain':
        opts['calling_format'] = connection.SubdomainCallingFormat()
    else:
        # Constrained by prior regexp.
        assert False

    if o.username is not None or o.password is not None:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support username or password')

    if o.hostname is not None:
        opts['host'] = o.hostname

    if o.port is not None:
        opts['port'] = o.port

    if o.path:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support a URI path',
            detail='Path is {0!r}'.format(o.path))

    if o.query:
        raise UserException(
            msg='WALE_S3_ENDPOINT does not support query parameters')

    return opts
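
Two illustrative calls tracing the parsing above; the hosts and ports are made up.

# Illustrative only; the return values follow from the code above.
opts = _s3connection_opts_from_uri('https+virtualhost://localhost:1235')
# {'is_secure': True, 'calling_format': VHostCallingFormat(),
#  'host': 'localhost', 'port': 1235}

opts = _s3connection_opts_from_uri('http+path://s3.example.com')
# {'is_secure': False, 'calling_format': OrdinaryCallingFormat(),
#  'host': 's3.example.com'}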
Example #9
    def connection(self):
        if self._connection:
            return self._connection

        self._connection = boto.connect_s3(
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key,
            host=self.s3_connection_host,
            calling_format=boto_s3_connection.OrdinaryCallingFormat())
        return self._connection
Example #10
 def bucket(self):
     boto_connection = boto.connect_s3(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     try:
         return boto_connection.get_bucket(self.config.bucket)
     except boto.exception.S3ResponseError as e:
         if e.status == 404:
             logging.info('Creating bucket: {}'.format(self.config.bucket))
             return boto_connection.create_bucket(self.config.bucket)
         raise
Example #11
 def bucket(self):
   if self.use_interoperable_auth:
     gs_connection = boto.connect_gs(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     # Always use our internal cacerts.txt file. This fixes an issue with the
     # PyInstaller-based frozen distribution, while allowing us to continue to
     # verify certificates and use a secure connection.
     gs_connection.ca_certificates_file = _certs_path
   else:
     gs_connection = storage.get_connection(
         self.config.project, self.config.email, self.config.key_path)
   return gs_connection.get_bucket(self.config.bucket)
Example #12
class EucalyptusClient(ec2client.EC2Client):
    cloudType = 'eucalyptus'
    # XXX will need their own image type
    RBUILDER_BUILD_TYPE = 'RAW_FS_IMAGE'
    ImagePrefix = 'emi-'
    CallingFormat = s3connection.OrdinaryCallingFormat()

    configurationDescriptorXmlData = _configurationDescriptorXmlData
    credentialsDescriptorXmlData = _credentialsDescriptorXmlData

    PermittedS3Users = None

    class Cloud(ec2client.EC2_Cloud):
        pass
    class Image(ec2client.EC2_Image):
        pass
    class Instance(ec2client.EC2_Instance):
        pass

    _configNameMap = []

    class _ImageMap(ec2client.EC2Client._ImageMap):
        def __init__(self, imageList):
            ec2client.EC2Client._ImageMap.__init__(self, imageList)
            for img in imageList:
                # Hash images by target image id too
                if img._targetImageId is not None:
                    self._ids[img._targetImageId] = img

    def drvCreateCloud(self, descriptorData):
        return ec2client.baseDriver.BaseDriver.drvCreateCloud(self,
            descriptorData)

    def drvVerifyCloudConfiguration(self, config):
        certificateData = config.get('certificateData')
        certificateKeyData = config.get('certificateKeyData')
        config.update(certificateData=certificateData,
            certificateKeyData=certificateKeyData)
        config.update((k, self._strip(v)) for k, v in config.items())

        # Seed the target configuration
        self._targetConfig = config
        # Validate credentials
        cli = self.drvCreateCloudClient(config)
        # Do a call to force cred validation
        try:
            cli.get_all_regions()
        except ec2client.EC2ResponseError as e:
            raise errors.ResponseError(e.status, self._getErrorMessage(e), e.body)
        self._targetConfig = None
Example #13
 def bucket(self):
     if self.config.oauth2:
         enable_oauth2_auth_handler()
     gs_connection = boto.connect_gs(
         self.config.access_key, self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     # Always use our internal cacerts.txt file. This fixes an issue with the
     # PyInstaller-based frozen distribution, while allowing us to continue to
     # verify certificates and use a secure connection.
     gs_connection.ca_certificates_file = utils.get_cacerts_path()
     try:
         return gs_connection.get_bucket(self.config.bucket)
     except boto.exception.GSResponseError as e:
         if e.status == 404:
             logging.info('Creating bucket: {}'.format(self.config.bucket))
             return gs_connection.create_bucket(self.config.bucket)
         raise
Example #14
def test_classic_get_location():
    """Exercise get location on a s3-classic bucket."""
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    bucket_name = 'wal-e-test.classic.get.location'

    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create()
        conn = cinfo.connect(creds)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3.amazonaws.com'
Example #15
 def connect(self):
     try:
         self._client = boto.connect_s3(
             aws_access_key_id=self.config["access_key"],
             aws_secret_access_key=self.config["secret_key"],
             host=self.config["host"],
             is_secure=self.config["is_secure"],
             port=self.config["port"],
             calling_format=s3_connection.OrdinaryCallingFormat(),
         )
         self._bucket = self._client.get_bucket(self.config["bucket"])
     except Exception:
         log("CephAdapter connect error",
             params={log_const.KEY_NAME: log_const.HANDLED_EXCEPTION_VALUE},
             level="ERROR",
             exc_info=True)
         monitoring.got_counter("ceph_connection_exception")
         raise
Example #16
def test_get_location_errors(monkeypatch):
    """Simulate situations where get_location fails

    Exercise both the case where IAM refuses the privilege to get the
    bucket location and where some other S3ResponseError is raised
    instead.
    """
    bucket_name = 'wal-e.test.403.get.location'

    def just_403(self):
        raise boto.exception.S3ResponseError(status=403,
                                             reason=None,
                                             body=None)

    def unhandled_404(self):
        raise boto.exception.S3ResponseError(status=404,
                                             reason=None,
                                             body=None)

    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    with FreshBucket(bucket_name,
                     calling_format=connection.OrdinaryCallingFormat()):
        cinfo = calling_format.from_store_name(bucket_name)

        # Provoke a 403 when trying to get the bucket location.
        monkeypatch.setattr(boto.s3.bucket.Bucket, 'get_location', just_403)
        cinfo.connect(creds)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat

        cinfo = calling_format.from_store_name(bucket_name)

        # Provoke an unhandled S3ResponseError, in this case 404 not
        # found.
        monkeypatch.setattr(boto.s3.bucket.Bucket, 'get_location',
                            unhandled_404)

        with pytest.raises(boto.exception.S3ResponseError) as e:
            cinfo.connect(creds)

        assert e.value.status == 404
Example #17
def test_classic_get_location():
    """Exercise get location on a s3-classic bucket."""
    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    bucket_name = ('wal-e-test.classic.get.location.' +
                   aws_access_key_id.lower())

    cinfo = calling_format.from_bucket_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create()
        conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region == 'us-standard'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3.amazonaws.com'
Example #18
def test_subdomain_compatible():
    """Exercise a case where connecting is region-oblivious."""
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    bucket_name = 'wal-e-test-us-west-1-no-dots'

    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(creds)

        assert cinfo.region is None
        assert cinfo.calling_format is connection.SubdomainCallingFormat
        assert isinstance(conn.calling_format,
                          connection.SubdomainCallingFormat)
Example #19
 def create_bucket(self, bucket_name, access_key=None, secret_key=None):
     """
      Requires a default template created on Cleversafe.
     """
     if not access_key:
         access_key = self._access_key
     if not secret_key:
         secret_key = self._secret_key
     creds = {"host": self._public_host}
     creds["aws_access_key_id"] = access_key
     creds["aws_secret_access_key"] = secret_key
     conn = connect_s3(calling_format=connection.OrdinaryCallingFormat(), **creds)
     try:
         bucket = conn.create_bucket(bucket_name)
         self._update_bucket_name_id_table()
         return bucket
     except S3ResponseError as exce:
         msg = "Create bucket failed with error code: {0}"
         self.logger.error(msg.format(exce.error_code))
         raise RequestError(str(exce), exce.error_code)
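
A hedged usage sketch, assuming the configured CleversafeClient instance from Example #2 and a default bucket template already present on the appliance.

# Assumes `client` is a configured CleversafeClient (see Example #2).
bucket = client.create_bucket("test-bucket")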
Example #20
def test_backup_list(sts_conn):
    """Test BackupList's compatibility with a test policy."""
    bn = 'wal-e.sts.backup.list'
    h = 's3-us-west-1.amazonaws.com'
    cf = connection.OrdinaryCallingFormat()
    fed = sts_conn.get_federation_token('wal-e-test-backup-list',
                                        policy=make_policy(bn, 'test-prefix'))
    layout = StorageLayout('s3://{0}/test-prefix'.format(bn))
    creds = Credentials(fed.credentials.access_key, fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, calling_format=cf, host=h) as fb:
        fb.create(location='us-west-1')

        cinfo = calling_format.from_store_name(bn)
        conn = cinfo.connect(creds)
        conn.host = h

        backups = list(BackupList(conn, layout, True))
        assert not backups
Example #21
def test_subdomain_compatible():
    """Exercise a case where connecting is region-oblivious."""
    aws_access_key = os.getenv('AWS_ACCESS_KEY_ID')
    bucket_name = 'wal-e-test-us-west-1-no-dots' + aws_access_key.lower()

    cinfo = calling_format.from_bucket_name(bucket_name)

    aws_access_key_id = os.getenv('AWS_ACCESS_KEY_ID')
    aws_secret_access_key = os.getenv('AWS_SECRET_ACCESS_KEY')

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(aws_access_key_id, aws_secret_access_key)

        assert cinfo.region is None
        assert cinfo.calling_format is connection.SubdomainCallingFormat
        assert isinstance(conn.calling_format,
                          connection.SubdomainCallingFormat)
Example #22
def test_uri_put_file(sts_conn):
    bn = 'wal-e.sts.uri.put.file'
    cf = connection.OrdinaryCallingFormat()
    policy_text = make_policy(bn, 'test-prefix', allow_get_location=True)
    fed = sts_conn.get_federation_token('wal-e-test-uri-put-file',
                                        policy=policy_text)

    key_path = 'test-prefix/test-key'

    creds = Credentials(fed.credentials.access_key, fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn,
                     keys=[key_path],
                     calling_format=cf,
                     host='s3-us-west-1.amazonaws.com') as fb:
        fb.create(location='us-west-1')
        uri_put_file(creds, 's3://' + bn + '/' + key_path,
                     StringIO('test-content'))
        k = connection.Key(fb.conn.get_bucket(bn, validate=False))
        k.name = key_path
        assert k.get_contents_as_string() == 'test-content'
Example #23
def test_real_get_location():
    """Exercise a case where a get location call is needed.

    In cases where a bucket has offensive characters -- like dots --
    that would otherwise break TLS, test sniffing the right endpoint
    so it can be used to address the bucket.
    """
    creds = Credentials(os.getenv('AWS_ACCESS_KEY_ID'),
                        os.getenv('AWS_SECRET_ACCESS_KEY'))

    bucket_name = 'wal-e-test-us-west-1.get.location'

    cinfo = calling_format.from_store_name(bucket_name)

    with FreshBucket(bucket_name,
                     host='s3-us-west-1.amazonaws.com',
                     calling_format=connection.OrdinaryCallingFormat()) as fb:
        fb.create(location='us-west-1')
        conn = cinfo.connect(creds)

        assert cinfo.region == 'us-west-1'
        assert cinfo.calling_format is connection.OrdinaryCallingFormat
        assert conn.host == 's3-us-west-1.amazonaws.com'
Example #24
 def bucket(self):
     boto_connection = boto.connect_s3(
         self.config.access_key,
         self.config.access_secret,
         calling_format=connection.OrdinaryCallingFormat())
     return boto_connection.get_bucket(self.config.bucket)
Example #25
parser.add_argument('--host', type=str, default='10.5.100.1', required=False)
# Assumed: args.port is used below but was not defined in the snippet.
parser.add_argument('--port', type=int, default=8080, required=False)
parser.add_argument('-n', '--num-objs', type=int, default=1, required=False)
parser.add_argument('--bytes', type=int, default=1024 * 110, required=False)
parser.add_argument('--objnamelen', type=int, default=0, required=False)
parser.add_argument('--bucket', type=str, default='testbucket', required=False)
args = parser.parse_args()

print "Using S3 endpoint '%s:%s'" % (args.host, args.port)
access_key, secret_key = get_ec2_creds()
conn = boto.connect_s3(
    aws_access_key_id=access_key,
    aws_secret_access_key=secret_key,
    host=args.host,
    port=args.port,
    is_secure=False,
    calling_format=connection.OrdinaryCallingFormat(),
)

bname = args.bucket
all_buckets = conn.get_all_buckets()
bucket = [b for b in all_buckets if b.name == bname]
if bucket:
    bucket = bucket[0]
    num_objs = len(list(bucket.list()))
    print('Bucket {} already exists and contains {} objects'.format(
        bucket.name, num_objs))
else:
    print('Creating new bucket {}'.format(bname))
    bucket = conn.create_bucket(bname)

k = key.Key(bucket)
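
One plausible continuation of the script, writing the requested number of objects; the object-naming scheme is an assumption, not part of the original snippet.

# Assumed continuation: write num_objs objects of the requested size.
payload = 'x' * args.bytes
for i in range(args.num_objs):
    k.key = ('obj-%d' % i).ljust(args.objnamelen, '_')  # hypothetical naming
    k.set_contents_from_string(payload)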
Example #26
def test_policy(sts_conn, monkeypatch):
    """Sanity checks for the intended ACLs of the policy"""
    monkeypatch.setenv('AWS_REGION', 'us-west-1')
    # Use periods to force OrdinaryCallingFormat when using
    # calling_format.from_store_name.
    bn = bucket_name_mangle('wal-e.sts.list.test')
    h = 's3-us-west-1.amazonaws.com'
    cf = connection.OrdinaryCallingFormat()

    fed = sts_conn.get_federation_token('wal-e-test-list-bucket',
                                        policy=make_policy(bn, 'test-prefix'))
    test_payload = 'wal-e test'

    keys = [
        'test-prefix/hello', 'test-prefix/world', 'not-in-prefix/goodbye',
        'not-in-prefix/world'
    ]
    creds = Credentials(fed.credentials.access_key, fed.credentials.secret_key,
                        fed.credentials.session_token)

    with FreshBucket(bn, keys=keys, calling_format=cf, host=h) as fb:
        # Superuser creds, for testing keys not in the prefix.
        bucket_superset_creds = fb.create(location='us-west-1')

        cinfo = calling_format.from_store_name(bn)
        conn = cinfo.connect(creds)
        conn.host = h

        # Bucket using the token, subject to the policy.
        bucket = conn.get_bucket(bn, validate=False)

        for name in keys:
            if name.startswith('test-prefix/'):
                # Test the PUT privilege.
                k = connection.Key(bucket)
            else:
                # Not in the prefix, so PUT will not work.
                k = connection.Key(bucket_superset_creds)

            k.key = name
            k.set_contents_from_string(test_payload)

        # Test listing keys within the prefix.
        prefix_fetched_keys = list(bucket.list(prefix='test-prefix/'))
        assert len(prefix_fetched_keys) == 2

        # Test the GET privilege.
        for key in prefix_fetched_keys:
            assert key.get_contents_as_string() == b'wal-e test'

        # Try a bogus listing outside the valid prefix.
        with pytest.raises(exception.S3ResponseError) as e:
            list(bucket.list(prefix=''))

        assert e.value.status == 403

        # Test the rejection of PUT outside of prefix.
        k = connection.Key(bucket)
        k.key = 'not-in-prefix/world'

        with pytest.raises(exception.S3ResponseError) as e:
            k.set_contents_from_string(test_payload)

        assert e.value.status == 403
Example #27
    def __init__(self,
                 config={},
                 lazy=False,
                 host_aliases={},
                 stream_status=False):
        """
        Config map should be a map from hostname to args, e.g.:
        {
            "cleversafe.service.consul: {
                "aws_access_key_id": "foo",
                "aws_secret_access_key": "bar",
                "is_secure": False,
                . . .
            },
        }

        :param host_aliases:
            A *REGEX* map from names that match the regex to hostnames
            provided in config
            e.g. ``{'aws\.accessor1\.mirror': 'cleversafe.service.consul'}``
        """

        self.config = config
        for host, kwargs in iteritems(self.config):
            # we need to pass the host argument in when we connect, so
            # set it here
            kwargs["host"] = host
            if 'calling_format' not in kwargs:
                kwargs["calling_format"] = connection.OrdinaryCallingFormat()

        self.host_aliases = host_aliases

        self.conns = {}
        if not lazy:
            self.connect()

        self.s3_inst_info = {
            'ceph': {
                'secure': True,
                'url': 'ceph.service.consul',
                'access_key': "",
                'secret_key': ""
            },
            'ceph2': {
                'secure': True,
                'url': 'gdc-cephb-objstore.osdc.io',
                'access_key': "",
                'secret_key': ""
            },
            'cleversafe': {
                'secure': True,
                'url': 'gdc-accessors.osdc.io',
                'access_key': "",
                'secret_key': ""
            }
        }
        self.stream_status = stream_status

        # magic number here for multipart chunk size, change with care
        self.mp_chunk_size = 1073741824  # 1GiB

        # semi-magic number here, worth playing with if speed issues seen
        self.chunk_size = 16777216  # 16 MiB