def test_creates_bucket_when_missing(self):
    """The default bucket is created when lookup says it does not exist.

    Bug fix: the original patched ``S3Connection.lookup`` twice; the inner
    patch (return_value='YES') overrode the outer ``return_value=None``, so
    the missing-bucket path was never exercised and ``create_bucket`` was
    never mocked.  The inner patch must target ``create_bucket`` instead.
    """
    with mock.patch('boto.s3.connection.S3Connection.lookup',
                    return_value=None):
        # lookup -> None simulates a missing bucket, so S3Storage is
        # expected to fall back to creating it.
        with mock.patch(
                'boto.s3.connection.S3Connection.create_bucket'
        ) as mock_create:
            fs = S3Storage(*self.cred)
            mock_create.assert_called_with(self.default_bucket_name)
def get_storage(cls, access_key_id, secret_access_key, bucket_name):
    """Build an awss3-backed storage whose keys live under 'my-prefix/'."""
    from depot.io.awss3 import S3Storage

    storage = S3Storage(
        access_key_id,
        secret_access_key,
        bucket_name,
        prefix='my-prefix/',
    )
    return storage
def setupClass(cls):
    """Create a per-run S3Storage backed by a uniquely named test bucket."""
    # uuid1 carries a clock_seq, which keeps run ids distinct even when
    # Travis workers collide on pid and hostid.
    cls.run_id = '%s-%s' % (uuid.uuid1().hex, os.getpid())

    try:
        global S3Storage
        from depot.io.awss3 import S3Storage
    except ImportError:
        raise SkipTest('Boto not installed')

    key_id = os.environ.get('AWS_ACCESS_KEY_ID')
    secret = os.environ.get('AWS_SECRET_ACCESS_KEY')
    if key_id is None or secret is None:
        raise SkipTest('Amazon S3 credentials not available')

    cls.default_bucket_name = 'filedepot-%s' % (key_id.lower(), )
    cls.cred = (key_id, secret)

    test_bucket = 'filedepot-testfs-%s' % cls.run_id
    cls.fs = S3Storage(key_id, secret, test_bucket)
    # Poll until the bucket is actually visible, to avoid flaky tests.
    while not cls.fs._conn.lookup(test_bucket):
        time.sleep(0.5)
def setup_class(cls):
    """Create an S3Storage against a bucket unique to this node and pid."""
    try:
        from depot.io.awss3 import S3Storage
    except ImportError:
        raise SkipTest('Boto not installed')

    access_key_id = os.environ.get('AWS_ACCESS_KEY_ID')
    secret_access_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    if access_key_id is None or secret_access_key is None:
        raise SkipTest('Amazon S3 credentials not available')

    # Mix in the uuid1 node part and the pid so concurrent workers
    # never share a bucket.
    node = str(uuid.uuid1()).rsplit('-', 1)[-1]
    pid = os.getpid()
    bucket = 'fdtest-%s-%s-%s' % (access_key_id.lower(), node, pid)

    cls.fs = S3Storage(access_key_id, secret_access_key, bucket)
def setup(self):
    """Per-test setup: credentials, default bucket name, and a fresh fs."""
    try:
        global S3Storage
        from depot.io.awss3 import S3Storage
    except ImportError:
        raise SkipTest('Boto not installed')

    access_key_id = os.environ.get('AWS_ACCESS_KEY_ID')
    secret_access_key = os.environ.get('AWS_SECRET_ACCESS_KEY')
    if access_key_id is None or secret_access_key is None:
        raise SkipTest('Amazon S3 credentials not available')

    # Travis runs multiple tests concurrently, so the real bucket name
    # mixes in the uuid1 node portion and the pid for uniqueness.
    node = str(uuid.uuid1()).rsplit('-', 1)[-1]
    pid = os.getpid()

    self.default_bucket_name = 'filedepot-%s' % (access_key_id.lower(), )
    self.cred = (access_key_id, secret_access_key)
    self.fs = S3Storage(
        access_key_id,
        secret_access_key,
        'filedepot-testfs-%s-%s-%s' % (access_key_id.lower(), node, pid),
    )
def setup(self):
    """Bind this test to the shared per-run bucket, waiting for it to exist."""
    try:
        global S3Storage
        from depot.io.awss3 import S3Storage
    except ImportError:
        raise SkipTest('Boto not installed')

    key_id = os.environ.get('AWS_ACCESS_KEY_ID')
    secret = os.environ.get('AWS_SECRET_ACCESS_KEY')
    if key_id is None or secret is None:
        raise SkipTest('Amazon S3 credentials not available')

    self.default_bucket_name = 'filedepot-%s' % (key_id.lower(), )
    self.cred = (key_id, secret)

    # NOTE(review): run_id is expected to be provided by the class —
    # presumably set in a class-level setup; verify against the class body.
    shared_bucket = 'filedepot-testfs-%s' % self.run_id
    self.fs = S3Storage(key_id, secret, shared_bucket)
    # Wait for bucket to exist, to avoid flaky tests...
    while not self.fs._conn.lookup(shared_bucket):
        time.sleep(0.5)
def test_default_bucket_name(self):
    """When lookup finds a bucket, the storage adopts it as its driver bucket."""
    lookup_patch = mock.patch('boto.s3.connection.S3Connection.lookup',
                              return_value='YES')
    with lookup_patch:
        storage = S3Storage(*self.cred)
        # Whatever lookup returned is wired straight into the bucket driver.
        assert storage._bucket_driver.bucket == 'YES'
def get_storage(cls, access_key_id, secret_access_key, bucket_name):
    """Build a boto3-backed S3Storage with no key prefix."""
    from depot.io.boto3 import S3Storage

    return S3Storage(
        access_key_id,
        secret_access_key,
        bucket_name,
    )