def gettmpdir(prefix='tmp/', suffix='', bucket='bodylabs-temp', uuid_generator=None): # pylint: disable=redefined-outer-name
    '''
    Make a directory on S3 with a known unique name.

    The directory will be ``s3://<bucket>/<prefix><UUID><suffix>/``, marked
    by an empty ``.tempdir`` sentinel key (directories on S3 only "exist"
    through the keys that share their prefix).

    uuid_generator: a zero-argument callable producing unique tokens;
        defaults to ``uuid.uuid4``. Injectable for testing.

    Note that there _is_ a race condition in this code; if two clients
    happen to be trying to create a tmpdir and somehow come up with the
    same uuid exactly simultaneously, they could both get the same dir.
    But in practice this is sufficiently one in a billion that we'll not
    worry about it for now.
    '''
    from boto.s3.key import Key
    from baiji.connection import S3Connection
    if uuid_generator is None:
        from uuid import uuid4 as uuid_generator # pragma: no cover
    # s3.ls and s3.path.parse generate paths with a leading slash, so
    # strip it to avoid an empty leading path component in the key.
    prefix = prefix.lstrip('/')
    b = S3Connection()._bucket(bucket) # pylint: disable=protected-access
    while True:
        tmppath = "%s%s%s" % (prefix, uuid_generator(), suffix)
        # any() short-circuits on the first key found, instead of
        # materializing the entire listing just to truthiness-test it.
        if not any(b.list(prefix=tmppath)):
            k = Key(b)
            k.key = "%s/.tempdir" % tmppath
            k.set_contents_from_string('')
            return "s3://%s/%s/" % (bucket, tmppath)
def __init__(self, key, mode='r', connection=None, encrypt=True, version_id=None):
    '''
    Open a local path or s3 key as a file-like object.

    key: a local path or an s3 uri.
    mode: a FileMode string; local files allow 'arwxb+t', s3 keys 'rwxbt'.
    connection: an S3Connection to reuse; a new one is created when None.
    encrypt: request server-side encryption when the s3 key is written.
    version_id: for s3 reads, the object version to download.

    Raises KeyExists when exclusive-create ('x') finds the target already
    present, and KeyNotFound when a local read target does not exist.
    '''
    from baiji.connection import S3Connection
    self.encrypt = encrypt
    self.key = key
    if path.islocal(key):
        self.should_upload_on_close = False
        self.mode = FileMode(mode, allowed_modes='arwxb+t')
        from six.moves import builtins
        local_path = path.parse(key).path
        if self.mode.is_output and not os.path.exists(os.path.dirname(local_path)):
            from baiji.util.shutillib import mkdir_p
            mkdir_p(os.path.dirname(local_path))
        try:
            # Use os.open to catch exclusive access to the file, but use open to get a nice, useful file object
            self.fd = os.open(local_path, self.mode.flags)
            self.f = builtins.open(local_path, self.mode.mode.replace('x', 'w'))
            os.close(self.fd)
        except OSError as e:
            import errno
            # Compare errno values with ==, not `is`: identity comparison
            # of ints relies on CPython's small-int cache and is not a
            # guaranteed equality test.
            if e.errno == errno.EEXIST:
                raise KeyExists("Local file exists: %s" % local_path)
            elif e.errno == errno.ENOENT:
                raise KeyNotFound("Local file does not exist: %s" % local_path)
            else:
                raise IOError(e.errno, "%s: %s" % (e.strerror, e.filename))
    else:
        if connection is None:
            connection = S3Connection()
        self.connection = connection
        self.mode = FileMode(mode, allowed_modes='rwxbt')
        self.should_upload_on_close = self.mode.is_output
        if self.mode.creating_exclusively:
            if self.connection.exists(self.key):
                raise KeyExists("Key exists in bucket: %s" % self.key)
            else:
                self.connection.touch(self.key, encrypt=self.encrypt)
        # Use w+ so we can read back the contents in upload()
        new_mode = ('w+' +
                    (self.mode.binary and 'b' or '') +
                    (self.mode.text and 't' or ''))
        from baiji.util import tempfile
        self.f = tempfile.NamedTemporaryFile(
            mode=new_mode,
            suffix=os.path.splitext(path.parse(self.key).path)[1])
        self.name = self.f.name
        self.remotename = key # Used by some serialization code to find files which sit along side the file in question, like textures which sit next to a mesh file
        if self.mode.reading:
            self.connection.cp(self.key, self.name, force=True, version_id=version_id)
def __init__(self, kwargs_for_cp):
    '''
    Worker that copies files over a shared, bucket-caching connection.

    Progress reporting is handled by the worker itself, so the 'progress'
    flag is captured into ``self.verbose`` and suppressed in the kwargs
    that get forwarded to cp.
    '''
    from baiji.connection import S3Connection
    self.connection = S3Connection(cache_buckets=True)
    self.kwargs_for_cp = kwargs_for_cp
    if 'progress' in self.kwargs_for_cp:
        self.verbose = self.kwargs_for_cp['progress']
        self.kwargs_for_cp['progress'] = False
    else:
        self.verbose = False
    super(MultifileCopyWorker, self).__init__()
def isfile(key):
    '''
    Return true if key is a file, whether local or on s3.
    '''
    from baiji.connection import S3Connection
    from baiji.exceptions import InvalidSchemeException
    parsed = parse(key)
    if islocal(key):
        # scheme == 'file': defer to the local filesystem.
        return os.path.isfile(parsed.path)
    elif isremote(key):
        # scheme == 's3': exists only matches files here, since s3 has no
        # real directories, only keys.
        return S3Connection().exists(key)
    raise InvalidSchemeException("URI Scheme {} is not implemented".format(parsed.scheme))
def isdir(key):
    '''
    Return true if key is directory-ish. That is, it ends with a path
    separator, or is a local directory that actually exists. On S3 a
    "directory" is considered to exist if one or more files exist that
    have the "directory" (ending with sep) as a prefix.
    '''
    from baiji.connection import S3Connection
    from baiji.exceptions import InvalidSchemeException
    parsed = parse(key)
    if islocal(key):
        # scheme == 'file': defer to the local filesystem.
        return os.path.isdir(parsed.path)
    elif isremote(key):
        # Normalize to a trailing separator so the prefix listing only
        # matches keys *inside* the would-be directory.
        if not parsed.path.endswith(sep):
            parsed = parse(key + sep)
        listing = S3Connection().ls(parsed.geturl())
        try:
            next(listing)
        except StopIteration:
            return False
        return True
    raise InvalidSchemeException("URI Scheme {} is not implemented".format(parsed.scheme))
def etag_matches(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.etag_matches.'''
    connection = S3Connection()
    return connection.etag_matches(*args, **kwargs)

def etag(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.etag.'''
    connection = S3Connection()
    return connection.etag(*args, **kwargs)

def size(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.size.'''
    connection = S3Connection()
    return connection.size(*args, **kwargs)

def list_buckets(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.list_buckets.'''
    connection = S3Connection()
    return connection.list_buckets(*args, **kwargs)

def put_string(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.put_string.'''
    connection = S3Connection()
    return connection.put_string(*args, **kwargs)

def touch(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.touch.'''
    connection = S3Connection()
    return connection.touch(*args, **kwargs)

def ls(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.ls.'''
    connection = S3Connection()
    return connection.ls(*args, **kwargs)

def rm_r(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.rm_r.'''
    connection = S3Connection()
    return connection.rm_r(*args, **kwargs)

def cp_many(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.cp_many.'''
    connection = S3Connection()
    return connection.cp_many(*args, **kwargs)

def disable_versioning(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.disable_versioning.'''
    connection = S3Connection()
    return connection.disable_versioning(*args, **kwargs)

def enable_versioning(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.enable_versioning.'''
    connection = S3Connection()
    return connection.enable_versioning(*args, **kwargs)

def create_bucket(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.create_bucket.'''
    connection = S3Connection()
    return connection.create_bucket(*args, **kwargs)

def bucket_info(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.bucket_info.'''
    connection = S3Connection()
    return connection.bucket_info(*args, **kwargs)

def encrypt_at_rest(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.encrypt_at_rest.'''
    connection = S3Connection()
    return connection.encrypt_at_rest(*args, **kwargs)

def mv(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.mv.'''
    connection = S3Connection()
    return connection.mv(*args, **kwargs)

def glob(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.glob.'''
    connection = S3Connection()
    return connection.glob(*args, **kwargs)

def sync(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.sync.'''
    connection = S3Connection()
    return connection.sync(*args, **kwargs)

def get_url(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.get_url.'''
    connection = S3Connection()
    return connection.get_url(*args, **kwargs)

def restore(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.restore.'''
    connection = S3Connection()
    return connection.restore(*args, **kwargs)

def exists(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.exists.'''
    connection = S3Connection()
    return connection.exists(*args, **kwargs)

def info(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.info.'''
    connection = S3Connection()
    return connection.info(*args, **kwargs)

def get_string(*args, **kwargs):
    '''Module-level convenience wrapper for S3Connection.get_string.'''
    connection = S3Connection()
    return connection.get_string(*args, **kwargs)