def create_bucket_and_key(bucket_name=BUCKET_NAME, key_name=KEY_NAME, contents=None, num_attempts=12, sleep_time=5):
    """Create (or reuse) an S3 bucket and a key inside it, retrying until ready.

    :param str bucket_name: Name of the bucket to create if it does not exist.
    :param str key_name: Name of the key to reference (and write, if contents given).
    :param contents: Optional body to write to the key.
    :param int num_attempts: How many times to retry while the bucket settles.
    :param int sleep_time: Seconds to sleep between retries.
    :returns: (bucket, key) tuple of boto3 resource objects.
    :raises RuntimeError: If the bucket is still not usable after all attempts.
    """
    # fake (or not) connection, bucket and key
    logger.debug('%r', locals())
    s3 = boto3.resource('s3')
    bucket_exist = cleanup_bucket(s3)
    if not bucket_exist:
        # Side effect only; the usable Bucket handle is fetched in the loop below.
        s3.create_bucket(Bucket=bucket_name)
    #
    # In real life, it can take a few seconds for the bucket to become ready.
    # If we try to write to the key while the bucket isn't ready, we
    # will get a ClientError: NoSuchBucket.
    #
    for attempt in range(num_attempts):
        try:
            mybucket = s3.Bucket(bucket_name)
            mykey = s3.Object(bucket_name, key_name)
            if contents is not None:
                mykey.put(Body=contents)
            return mybucket, mykey
        except botocore.exceptions.ClientError as err:
            logger.error('caught %r, retrying', err)
            time.sleep(sleep_time)
    # assert is stripped under `python -O`; raise explicitly so the failure
    # is reported even in optimized runs.
    raise RuntimeError('failed to create bucket after %d attempts' % num_attempts)
def populate_bucket(num_keys=10):
    """Write *num_keys* sequentially numbered keys into the shared test bucket."""
    # fake (or not) connection, bucket and key
    logger.debug('%r', locals())
    s3 = boto3.resource('s3')
    for i in range(num_keys):
        s3.Object(BUCKET_NAME, 'key_%d' % i).put(Body=str(i))
def setUp(self):
    """Create the test bucket, wait for it, and seed it with a known body."""
    ignore_resource_warnings()
    s3 = _resource('s3')
    new_bucket = s3.create_bucket(Bucket=BUCKET_NAME)
    new_bucket.wait_until_exists()
    self.body = b'hello'
    s3.Object(BUCKET_NAME, KEY_NAME).put(Body=self.body)
def create_bucket_and_key(bucket_name='mybucket', key_name='mykey', contents=None):
    """Create a bucket and a key handle, optionally writing *contents* to the key.

    Returns the (bucket, key) pair of boto3 resource objects.
    """
    # fake connection, bucket and key
    _LOGGER.debug('%r', locals())
    s3 = boto3.resource('s3')
    bucket = s3.create_bucket(Bucket=bucket_name)
    key = s3.Object(bucket_name, key_name)
    if contents is not None:
        key.put(Body=contents)
    return bucket, key
def populate_bucket(bucket_name=BUCKET_NAME, num_keys=10):
    """Ensure *bucket_name* exists, then fill it with *num_keys* numbered keys.

    :param str bucket_name: Bucket to (re)create and populate.
    :param int num_keys: Number of 'key_<i>' objects to write.
    """
    # fake (or not) connection, bucket and key
    logger.debug('%r', locals())
    s3 = boto3.resource('s3')
    bucket_exist = cleanup_bucket(s3)
    if not bucket_exist:
        # Side effect only — the original bound the result to an unused local.
        s3.create_bucket(Bucket=bucket_name)
    for key_number in range(num_keys):
        key_name = 'key_%d' % key_number
        s3.Object(bucket_name, key_name).put(Body=str(key_number))
def setUp(self):
    """Shrink the multipart-upload threshold, then create and seed the test bucket."""
    # lower the multipart upload size, to speed up these tests
    self.old_min_part_size = smart_open.s3.DEFAULT_MIN_PART_SIZE
    smart_open.s3.DEFAULT_MIN_PART_SIZE = 5 * 1024**2

    ignore_resource_warnings()
    super().setUp()

    s3 = _resource('s3')
    bucket = s3.create_bucket(Bucket=BUCKET_NAME)
    bucket.wait_until_exists()
    self.body = u"hello wořld\nhow are you?".encode('utf8')
    s3.Object(BUCKET_NAME, KEY_NAME).put(Body=self.body)
def create_bucket_and_key(bucket_name=BUCKET_NAME, key_name=KEY_NAME, contents=None):
    """Ensure the bucket exists, then return a (bucket, key) pair of handles.

    :param str bucket_name: Bucket to (re)create if missing.
    :param str key_name: Key to reference inside the bucket.
    :param contents: Optional body to write to the key.
    :returns: (bucket, key) tuple of boto3 resource objects.
    """
    # fake (or not) connection, bucket and key
    logger.debug('%r', locals())
    s3 = boto3.resource('s3')
    bucket_exist = cleanup_bucket(s3)
    if not bucket_exist:
        # Side effect only — the original bound this to `mybucket` and then
        # immediately shadowed it with s3.Bucket() below (dead store).
        s3.create_bucket(Bucket=bucket_name)
    mybucket = s3.Bucket(bucket_name)
    mykey = s3.Object(bucket_name, key_name)
    if contents is not None:
        mykey.put(Body=contents)
    return mybucket, mykey
def populate_bucket(num_keys=10):
    """Fill the shared test bucket with *num_keys* sequentially numbered keys."""
    s3 = boto3.resource('s3')
    for idx in range(num_keys):
        s3.Object(BUCKET_NAME, 'key_%d' % idx).put(Body=str(idx))