def __init__(self, *args, keep_local=False, provider=None, **kwargs):
    """Initialize the remote object and pre-populate a static test bucket.

    Creates the fixed test bucket and uploads each expected test file
    that is not already present, so S3-dependent tests see a known state.
    """
    super(RemoteObject, self).__init__(
        *args, keep_local=keep_local, provider=provider, **kwargs
    )

    bucket_name = 'test-static-remote-bucket'
    test_files = ('test.txt', 'out1.txt', 'out2.txt')

    # Ensure the bucket exists before seeding it.
    s3 = boto3.resource('s3')
    s3.create_bucket(Bucket=bucket_name)

    # "Upload" files that should be in S3 before tests...
    s3c = S3Helper()
    for fname in test_files:
        if s3c.exists_in_bucket(bucket_name, fname):
            continue  # already seeded; nothing to do
        logger.debug("Pre-populating remote bucket {} with file {}".format(
            bucket_name, fname))
        s3c.upload_to_s3(bucket_name, fname)
def __init__(self, *args, keep_local=False, provider=None, **kwargs):
    """Initialize the remote object and seed the test bucket (boto v2).

    Creates the test bucket if it does not yet exist and uploads the
    single expected test file when missing.
    """
    super(RemoteObject, self).__init__(
        *args, keep_local=keep_local, provider=provider, **kwargs
    )

    bucket_name = 'test-remote-bucket'
    test_file = "test.txt"

    # Only create the bucket when it is absent from the account.
    conn = boto.connect_s3()
    existing_buckets = {b.name for b in conn.get_all_buckets()}
    if bucket_name not in existing_buckets:
        conn.create_bucket(bucket_name)

    # "Upload" files that should be in S3 before tests...
    s3c = S3Helper()
    already_present = s3c.exists_in_bucket(bucket_name, test_file)
    if not already_present:
        s3c.upload_to_s3(bucket_name, test_file)
def __init__(
    self, *args, keep_local=False, stay_on_remote=False, provider=None, **kwargs
):
    """Initialize the remote object and seed the test bucket (boto3).

    Creates the fixed test bucket and uploads the expected test file if
    it is not already present, so S3-dependent tests see a known state.

    Bug fix: ``stay_on_remote`` was accepted here but the super() call
    hardcoded ``stay_on_remote=False``, silently discarding the caller's
    value. It is now forwarded unchanged; the default (False) keeps
    existing callers' behavior identical.
    """
    super(RemoteObject, self).__init__(
        *args,
        keep_local=keep_local,
        stay_on_remote=stay_on_remote,  # forward the caller's value, not a constant
        provider=provider,
        **kwargs
    )

    bucket_name = "test-remote-bucket"
    test_file = "test.txt"

    # Ensure the bucket exists before seeding it.
    s3 = boto3.resource("s3")
    s3.create_bucket(Bucket=bucket_name)

    # "Upload" files that should be in S3 before tests...
    s3c = S3Helper()
    if not s3c.exists_in_bucket(bucket_name, test_file):
        s3c.upload_to_s3(bucket_name, test_file)