import pytest
from boto.s3.connection import S3Connection, OrdinaryCallingFormat


# `s3_server` and `docker` are expected to be provided by the surrounding test
# environment (the same helpers the bring-up scripts below import).
@pytest.fixture
def client(request):
    class S3Client(object):
        def __init__(self, scheme, host_name, access_key, secret_key, bucket):
            [ip, port] = host_name.split(':')
            self.scheme = scheme
            self.host_name = host_name
            self.access_key = access_key
            self.secret_key = secret_key
            self.bucket = bucket
            self.conn = S3Connection(self.access_key, self.secret_key,
                                     host=ip, port=int(port), is_secure=False,
                                     calling_format=OrdinaryCallingFormat())

        def list(self, file_id):
            # List the objects stored under the '<file_id>/' prefix.
            bucket = self.conn.get_bucket(self.bucket, validate=False)
            return list(bucket.list(prefix=file_id + '/', delimiter='/'))

    # Bring up an S3 proxy container with a single 'data' bucket.
    bucket = 'data'
    result = s3_server.up('onedata/s3proxy', [bucket], 'storage', '1')
    [container] = result['docker_ids']

    # Tear the container down when the test using this fixture finishes.
    def fin():
        docker.remove([container], force=True, volumes=True)

    request.addfinalizer(fin)

    return S3Client('http', result['host_name'], result['access_key'],
                    result['secret_key'], bucket)
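# A minimal sketch of a test that could consume the `client` fixture above;
# the test name, the object key and the upload step are hypothetical, and it
# assumes the proxied backend accepts plain boto uploads into the pre-created
# 'data' bucket.
from boto.s3.key import Key


def test_list_shows_uploaded_objects(client):
    bucket = client.conn.get_bucket(client.bucket, validate=False)

    # Upload one object under the 'file1/' prefix...
    key = Key(bucket, 'file1/block0')
    key.set_contents_from_string('some data')

    # ...and expect the fixture's list() helper to report it.
    assert [k.name for k in client.list('file1')] == ['file1/block0']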
from __future__ import print_function

import argparse
import json

# `common` and `s3` are assumed to come from the same `environment` helper
# package that the variant of this script below imports.
from environment import common, s3

parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    description='Bring up S3 storage.')
parser.add_argument(
    '-i', '--image',
    action='store',
    default='lphoward/fake-s3',
    help='docker image to use for the container',
    dest='image')
parser.add_argument(
    '-b', '--bucket',
    action='append',
    default=[],
    help='bucket name',
    dest='buckets')
parser.add_argument(
    '-u', '--uid',
    action='store',
    default=common.generate_uid(),
    help='uid that will be concatenated to docker names',
    dest='uid')

args = parser.parse_args()
config = s3.up(args.image, args.buckets, 'storage', args.uid)
print(json.dumps(config))
from __future__ import print_function

import argparse
import json

from environment import s3

parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    description='Bring up S3 storage.')
parser.add_argument(
    '-i', '--image',
    action='store',
    default='lphoward/fake-s3',
    help='docker image to use for the container',
    dest='image')
parser.add_argument(
    '-b', '--bucket',
    action='append',
    default=[],
    help='bucket name',
    dest='buckets')

args = parser.parse_args()
config = s3.up(args.image, args.buckets)
print(json.dumps(config))
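# A minimal sketch of how the JSON printed by these bring-up scripts could be
# consumed from another process. The script name 's3_up.py' is hypothetical,
# and the keys read from the config are assumed to match those used by the
# fixture above ('host_name', 'access_key', 'secret_key').
import json
import subprocess


def bring_up_s3(bucket):
    output = subprocess.check_output(
        ['python', 's3_up.py', '--bucket', bucket])
    config = json.loads(output.decode('utf-8'))

    # host_name is '<ip>:<port>'; split it the same way the fixture does.
    ip, port = config['host_name'].split(':')
    return ip, int(port), config['access_key'], config['secret_key']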