# Example #1
# 0
def openS3(keySize=None):
    """
    Create a temporary AWS S3 bucket, optionally populated with one random key.

    If keySize is given, a key named 'test' holding keySize random bytes is
    created and its handle is yielded. If no keySize is given the empty bucket
    handle is yielded. The bucket and all keys created in it are deleted on
    exit, regardless of errors raised in the body.

    :param int keySize: Size in bytes of the key to be created, or None for an
           empty bucket. Must be non-negative (zero yields an empty key).
    :raises ValueError: if keySize is negative.
    """
    if keySize is not None and keySize < 0:
        # The guard permits 0 (an empty key), so the message must say
        # "non-negative" -- the previous text wrongly claimed "greater than zero".
        raise ValueError('Key size must be non-negative')
    with closing(boto.s3.connect_to_region(AWSMultipartCopyTest.region)) as s3:
        bucket = s3.create_bucket('multipart-transfer-test-%s' % uuid.uuid4(),
                                  location=region_to_bucket_location(
                                      AWSMultipartCopyTest.region))
        try:
            keyName = 'test'
            if keySize is None:
                yield bucket
            else:
                key = bucket.new_key(keyName)
                content = os.urandom(keySize)
                key.set_contents_from_string(content)
                yield bucket.get_key(keyName)
        finally:
            # S3 refuses to delete a non-empty bucket, so empty it first.
            for key in bucket.list():
                key.delete()
            bucket.delete()
# Example #2
# 0
def openS3(keySize=None):
    """
    Create a temporary AWS S3 bucket, optionally populated with one random key.

    If keySize is given, a key named 'test' holding keySize random bytes is
    created and its handle is yielded. If no keySize is given the empty bucket
    handle is yielded. The bucket and all keys created in it are deleted on
    exit, regardless of errors raised in the body.

    :param int keySize: Size in bytes of the key to be created, or None for an
           empty bucket. Must be non-negative (zero yields an empty key).
    :raises ValueError: if keySize is negative.
    """
    if keySize is not None and keySize < 0:
        # The guard permits 0 (an empty key), so the message must say
        # "non-negative" -- the previous text wrongly claimed "greater than zero".
        raise ValueError('Key size must be non-negative')
    with closing(boto.s3.connect_to_region(AWSMultipartCopyTest.region)) as s3:
        bucket = s3.create_bucket('multipart-transfer-test-%s' % uuid.uuid4(),
                                  location=region_to_bucket_location(AWSMultipartCopyTest.region))
        try:
            keyName = 'test'
            if keySize is None:
                yield bucket
            else:
                key = bucket.new_key(keyName)
                content = os.urandom(keySize)
                key.set_contents_from_string(content)
                yield bucket.get_key(keyName)
        finally:
            # S3 refuses to delete a non-empty bucket, so empty it first.
            for key in bucket.list():
                key.delete()
            bucket.delete()
# Example #3
# 0
 def _createExternalStore(self):
     """
     Create a fresh S3 bucket to serve as an external store for import/export tests.

     The bucket name is randomized with a UUID to avoid collisions between
     concurrent test runs. If bucket creation fails, the connection is closed
     before the exception propagates (``panic`` presumably logs and re-raises
     -- confirm against its definition).

     :rtype: Bucket
     """
     import boto.s3
     s3 = boto.s3.connect_to_region(self.awsRegion())
     try:
         return s3.create_bucket(bucket_name='import-export-test-%s' % uuid.uuid4(),
                                 location=region_to_bucket_location(self.awsRegion()))
     except BaseException:
         # Explicit BaseException replaces the bare 'except:' -- identical
         # semantics (KeyboardInterrupt/SystemExit still trigger cleanup)
         # but no longer trips linters and makes the intent visible.
         with panic(log=logger):
             s3.close()
 def _createExternalStore(self):
     """
     Create a fresh S3 bucket to serve as an external store for import/export tests.

     The bucket name is randomized with a UUID to avoid collisions between
     concurrent test runs. If bucket creation fails, the connection is closed
     before the exception propagates (``panic`` presumably logs and re-raises
     -- confirm against its definition).

     :rtype: Bucket
     """
     import boto.s3
     s3 = boto.s3.connect_to_region(self.testRegion)
     try:
         return s3.create_bucket(bucket_name='import-export-test-%s' % uuid.uuid4(),
                                 location=region_to_bucket_location(self.testRegion))
     except BaseException:
         # Explicit BaseException replaces the bare 'except:' -- identical
         # semantics (KeyboardInterrupt/SystemExit still trigger cleanup)
         # but no longer trips linters and makes the intent visible.
         with panic(log=logger):
             s3.close()
# Example #5
# 0
 def _getOrCreateBucket(self, bucket_name, versioning=False):
     """
     Return the job store bucket, creating it if it does not yet exist.

     Loops to tolerate the transient 'BucketAlreadyOwnedByYou' race during
     creation (see https://github.com/BD2KGenomics/toil/issues/955), until the
     bucket can either be fetched or created in the expected region with the
     expected versioning state.

     :param str bucket_name: name of the bucket; must satisfy the S3 length
            and character constraints (sanity-checked via the assertions
            below -- note asserts are stripped under ``python -O``).
     :param bool versioning: whether the bucket is expected to have (and, on
            creation, is configured with) versioning enabled.
     :rtype: Bucket
     """
     assert self.minBucketNameLen <= len(bucket_name) <= self.maxBucketNameLen
     assert self.bucketNameRe.match(bucket_name)
     log.info("Setting up job store bucket '%s'.", bucket_name)
     while True:
         log.debug("Looking up job store bucket '%s'.", bucket_name)
         try:
             bucket = self.s3.get_bucket(bucket_name, validate=True)
             assert self.__getBucketRegion(bucket) == self.region
             assert versioning is self.__getBucketVersioning(bucket)
             log.debug("Using existing job store bucket '%s'.", bucket_name)
             return bucket
         except S3ResponseError as e:
             if e.error_code == 'NoSuchBucket':
                 log.debug("Bucket '%s' does not exist. Creating it.", bucket_name)
                 try:
                     location = region_to_bucket_location(self.region)
                     bucket = self.s3.create_bucket(bucket_name, location=location)
                 except S3CreateError as e:
                     if e.error_code == 'BucketAlreadyOwnedByYou':
                         # https://github.com/BD2KGenomics/toil/issues/955
                         # 'warning', not the deprecated 'warn' alias.
                         log.warning('Got %s, retrying.', e)
                         # and loop
                     else:
                         raise
                 else:
                     assert self.__getBucketRegion(bucket) == self.region
                     if versioning:
                         bucket.configure_versioning(versioning)
                     log.debug("Created new job store bucket '%s'.", bucket_name)
                     return bucket
             else:
                 raise