def remove(self, path):
    """Delete the S3 object stored at *path*.

    Raises:
        OSError: when no key exists at the normalized path.
    """
    full_path = self._init_path(path)
    target = boto.s3.key.Key(self._s3_bucket, full_path)
    if target.exists():
        # NOTE(review): this deletes a single key only — whether
        # "folder" prefixes need handling here is unconfirmed.
        target.delete()
        return
    raise OSError("No such key: '{}'".format(full_path))
def fetch(s3bucket, s3key, aws_access_key, aws_secret_key, output_file,
          headers=None):
    """Download S3 object *s3key* from *s3bucket* into *output_file*.

    Args:
        s3bucket: bucket name to look up.
        s3key: key name inside the bucket.
        aws_access_key / aws_secret_key: AWS credentials.
        output_file: a file-like object, or a path string; when a path is
            given the file is opened here and closed before returning.
        headers: optional extra headers passed to the GET.

    Returns:
        0 on success, 1 when the bucket or key cannot be found.
    """
    if isinstance(output_file, basestring):
        # 'wb': S3 payloads are bytes; text mode would corrupt the data
        # on platforms that translate newlines.
        output_file = open(output_file, 'wb')
        close_when_done = True
    else:
        close_when_done = False
    try:
        connection = S3Connection(aws_access_key, aws_secret_key)
        bucket = connection.lookup(s3bucket)
        if bucket is None:
            sys.stderr.write(
                'bucket does not exist, may be caused by incorrect credentials')
            return 1
        key = boto.s3.key.Key(bucket, s3key)
        if not key.exists():
            sys.stderr.write('key does not exist within given bucket')
            return 1
        key.get_contents_to_file(output_file, headers=headers)
        return 0
    finally:
        # Previously the opened file leaked on both error-return paths;
        # always close a file we opened ourselves.
        if close_when_done:
            output_file.close()
def stream_read_file(self, path):
    """Return a file-like object streaming the cloud object at *path*.

    Raises:
        IOError: when the key does not exist.
    """
    self._initialize_cloud_conn()
    normalized = self._init_path(path)
    key = self._key_class(self._cloud_bucket, normalized)
    if key.exists():
        return StreamReadKeyAsFile(key)
    raise IOError("No such key: '{0}'".format(normalized))
def fetch(s3bucket, s3key, aws_access_key, aws_secret_key, output_file,
          headers=None):
    """Download S3 object *s3key* from *s3bucket* into *output_file*.

    Args:
        s3bucket: bucket name to look up.
        s3key: key name inside the bucket.
        aws_access_key / aws_secret_key: AWS credentials.
        output_file: a file-like object, or a path string; when a path is
            given the file is opened here and closed before returning.
        headers: optional extra headers passed to the GET.

    Returns:
        0 on success, 1 when the bucket or key cannot be found.
    """
    if isinstance(output_file, basestring):
        # 'wb': S3 payloads are bytes; text mode would corrupt the data
        # on platforms that translate newlines.
        output_file = open(output_file, 'wb')
        close_when_done = True
    else:
        close_when_done = False
    try:
        connection = S3Connection(aws_access_key, aws_secret_key)
        bucket = connection.lookup(s3bucket)
        if bucket is None:
            sys.stderr.write(
                'bucket does not exist, may be caused by incorrect credentials')
            return 1
        key = boto.s3.key.Key(bucket, s3key)
        if not key.exists():
            sys.stderr.write('key does not exist within given bucket')
            return 1
        key.get_contents_to_file(output_file, headers=headers)
        return 0
    finally:
        # Previously the opened file leaked on both error-return paths;
        # always close a file we opened ourselves.
        if close_when_done:
            output_file.close()
def stream_write(self, path, fp):
    """Stream the contents of *fp* to S3 at *path* via multipart upload.

    The data is uploaded to a temporary key first, then copied to its
    final location so the resulting key carries the correct etag/md5.

    Raises:
        IOError: when reading fp fails, or when the uploaded key cannot
            be found afterwards.
    """
    # Minimum size of upload part size on S3 is 5MB
    part_size = 5 * 1024 * 1024
    if self.buffer_size > part_size:
        part_size = self.buffer_size
    path = self._init_path(path)
    tmp_path = "tmp/%s" % path
    mp = self._boto_bucket.initiate_multipart_upload(
        tmp_path, encrypt_key=(self._config.s3_encrypt is True))
    num_part = 1
    try:
        while True:
            buf = fp.read(part_size)
            if not buf:
                break
            part = compat.StringIO(buf)
            try:
                mp.upload_part_from_file(part, num_part)
            finally:
                # Close the buffer even when the upload of a part fails
                # (previously it leaked on the exception path).
                part.close()
            num_part += 1
    except IOError:
        # Abort the multipart upload so partial parts are not left
        # accumulating (and billed) on S3, then re-raise with the
        # original traceback ('raise e' would have truncated it).
        mp.cancel_upload()
        raise
    mp.complete_upload()
    # do this to get the etag correct as the md5
    key = self.makeKey(tmp_path)
    if not key.exists():
        raise IOError("No such key: '{0}'".format(path))
    new_key = key.copy(self._config.boto_bucket, path)
    if not new_key.exists():
        raise IOError("No such key: '{0}'".format(path + "-tmp"))
    key.delete()
class S3Image(_BaseImage):
    """Image whose source archive lives in S3.

    *source* has the form 's3://bucket/path/to/object'; the object is
    downloaded and unpacked lazily, on first access of ``files``.
    """

    def __init__(self, source, s3_access_key, s3_secret_key, attrs=None,
                 check_existence=False):
        _BaseImage.__init__(self)
        # Fail fast if the module never imported boto; the S3 work below
        # depends on it.
        if 'boto' not in sys.modules:
            raise ImportError('boto S3 connection module not found')
        self.source = source
        self._s3_access_key = s3_access_key
        self._s3_secret_key = s3_secret_key
        if check_existence:
            if not self.exists():
                raise ObjectNotFoundError(source)
        # None marks "not downloaded yet"; __getattribute__ below
        # intercepts the first read and populates it.
        self.files = None
        self._set_attributes(attrs)
        return

    def __getattribute__(self, name):
        # Lazy materialization hook: the first read of 'files' downloads
        # the S3 object into the temp dir and unpacks it; every other
        # attribute access passes straight through.
        value = _BaseImage.__getattribute__(self, name)
        if name == 'files' and value is None:
            # source is 's3://bucket/path/to/object'
            (bucket_name, object_name) = self.source[5:].split('/', 1)
            s3 = S3Connection(self._s3_access_key, self._s3_secret_key,
                              calling_format=OrdinaryCallingFormat())
            bucket = s3.get_bucket(bucket_name)
            key = boto.s3.key.Key(bucket)
            key.key = object_name
            self._source_base = os.path.basename(self.source)
            self._temp_source = '%s/%s' % (self._tempdir, self._source_base)
            key.get_contents_to_filename(self._temp_source)
            s3.close()
            # _unpack (inherited) is presumably what fills self.files —
            # TODO(review): confirm against _BaseImage.
            self._unpack()
            return self.files
        return value

    def exists(self):
        """report whether the file or S3 object exists"""
        (bucket_name, object_name) = self.source[5:].split('/', 1)
        s3 = S3Connection(self._s3_access_key, self._s3_secret_key,
                          calling_format=OrdinaryCallingFormat())
        try:
            bucket = s3.get_bucket(bucket_name)
        # Python 2 except syntax; a 404 means the bucket itself is
        # missing, which counts as "does not exist".
        except S3ResponseError, data:
            if data.args[0] == 404:
                s3.close()
                return False
            raise
        key = boto.s3.key.Key(bucket)
        key.key = object_name
        rv = key.exists()
        s3.close()
        return rv
def content_redirect_url(self, path):
    """Return a short-lived (20 minute) signed GET URL for *path*.

    Raises:
        IOError: when the key does not exist.
    """
    normalized = self._init_path(path)
    key = self.makeKey(normalized)
    if not key.exists():
        raise IOError("No such key: '{0}'".format(normalized))
    return key.generate_url(expires_in=1200, method='GET', query_auth=True)
def stream_read(self, path):
    """Yield successive buffer_size chunks of the S3 object at *path*.

    Raises:
        IOError: when the key does not exist.
    """
    full_path = self._init_path(path)
    key = boto.s3.key.Key(self._s3_bucket, full_path)
    if not key.exists():
        raise IOError("No such key: '{0}'".format(full_path))
    chunk = key.read(self.buffer_size)
    while chunk:
        yield chunk
        chunk = key.read(self.buffer_size)
def stream_read(self, path):
    """Generator yielding the cloud object at *path* in buffer-sized chunks.

    Raises:
        IOError: when the key does not exist.
    """
    self._initialize_cloud_conn()
    normalized = self._init_path(path)
    key = self._key_class(self._cloud_bucket, normalized)
    if not key.exists():
        raise IOError("No such key: '{0}'".format(normalized))
    chunk = key.read(self.buffer_size)
    while chunk:
        yield chunk
        chunk = key.read(self.buffer_size)
def remove(self, path):
    """Delete *path*: a single key, or every key under it as a prefix.

    When *path* names an existing key it is deleted directly; otherwise
    it is treated as a directory and all keys beneath it are removed.
    """
    path = self._init_path(path)
    key = boto.s3.key.Key(self._s3_bucket, path)
    if key.exists():
        # It's a file
        key.delete()
        return
    # We assume it's a directory
    if not path.endswith('/'):
        path += '/'
    # List WITHOUT a delimiter so keys in nested "subdirectories" are
    # returned (and deleted) too. The old delimiter='/' listing yielded
    # CommonPrefix objects for subdirectories — which have no delete()
    # — and left their contents behind. Sibling remove() implementations
    # in this file already list without a delimiter.
    for key in self._s3_bucket.list(prefix=path):
        key.delete()
def keySize(s3bucket, s3key, aws_access_key, aws_secret_key):
    """Return the size in bytes of *s3key* inside *s3bucket*.

    Raises:
        NonExistingBucket: when the bucket cannot be found.
        NonExistingKey: when the key is absent from the bucket.
    """
    connection = S3Connection(aws_access_key, aws_secret_key)
    bucket = connection.lookup(s3bucket)
    if bucket is None:
        raise NonExistingBucket()
    key = boto.s3.key.Key(bucket, s3key)
    if not key.exists():
        # BUG FIX: this exception was previously *returned* rather than
        # raised, so callers got an exception instance where an integer
        # size was expected.
        raise NonExistingKey()
    return key.size
def remove(self, path):
    """Delete the key at *path*, or everything under it when it is a prefix."""
    self._initialize_cloud_conn()
    target = self._init_path(path)
    key = self._key_class(self._cloud_bucket, target)
    if key.exists():
        # Single object: delete it and stop.
        key.delete()
        return
    # Otherwise treat it as a directory prefix and sweep its contents.
    prefix = target if target.endswith("/") else target + "/"
    for entry in self._cloud_bucket.list(prefix=prefix):
        entry.delete()
def content_redirect_url(self, path):
    """Produce a signed download URL for *path*.

    Uses the CloudFront signer when one is configured, otherwise a
    query-authenticated bucket URL valid for 20 minutes.

    Raises:
        IOError: when the key does not exist.
    """
    target = self._init_path(path)
    key = self.makeKey(target)
    if not key.exists():
        raise IOError("No such key: '{0}'".format(target))
    if self.signer:
        # Have cloudfront? Sign it
        return self.signer(target, expire_time=60)
    # No cloudfront? Sign to the bucket
    return key.generate_url(expires_in=1200, method='GET', query_auth=True)
def content_redirect_url(self, path):
    """Return a temporary signed URL for downloading *path*.

    Raises:
        IOError: when the key does not exist.
    """
    resolved = self._init_path(path)
    key = self.makeKey(resolved)
    if not key.exists():
        raise IOError("No such key: '{0}'".format(resolved))
    # Without a CloudFront signer, fall back to a query-authenticated
    # bucket URL (20 minute expiry).
    if not self.signer:
        return key.generate_url(expires_in=1200, method='GET',
                                query_auth=True)
    # CloudFront signer configured: 60 second expiry.
    return self.signer(resolved, expire_time=60)
def does_key_exist(self, bucket_name, key_name):
    """ Queries Amazon S3 to see if the named file exists.

    Args:
      bucket_name: A str containing the name of the bucket that the
        file exists in.
      key_name: A str containing the name of the key that identifies
        the file.
    Returns:
      True if a file does exist in the named bucket with the provided
      key name, and False otherwise.
    """
    bucket = self.connection.lookup(bucket_name)
    if bucket is None:
        # lookup() returns None for a missing/inaccessible bucket;
        # previously this fell through to Key(None).exists(), raising
        # AttributeError instead of returning False as documented.
        return False
    key = boto.s3.key.Key(bucket)
    key.key = key_name
    return key.exists()
def exists(self, path):
    """Return True when the cloud bucket holds an object at *path*."""
    self._initialize_cloud_conn()
    normalized = self._init_path(path)
    return self._key_class(self._cloud_bucket, normalized).exists()
def remove(self, path):
    """Delete the S3 key at *path*.

    Raises:
        OSError: when the key does not exist.
    """
    resolved = self._init_path(path)
    key = boto.s3.key.Key(self._s3_bucket, resolved)
    if key.exists():
        key.delete()
    else:
        raise OSError("No such key: '{0}'".format(resolved))
def exists(self, path):
    """Return True when the S3 bucket holds an object at *path*."""
    resolved = self._init_path(path)
    return boto.s3.key.Key(self._s3_bucket, resolved).exists()
def get_content(self, path):
    """Return the raw contents of the S3 object at *path*.

    Raises:
        IOError: when the key does not exist.
    """
    resolved = self._init_path(path)
    key = boto.s3.key.Key(self._s3_bucket, resolved)
    if key.exists():
        return key.get_contents_as_string()
    raise IOError("No such key: '{0}'".format(resolved))
def get_content(self, path):
    """Return the S3 object at *path* decoded as UTF-8 text.

    Raises:
        IOError: when the key does not exist.
    """
    resolved = self._init_path(path)
    key = boto.s3.key.Key(self._s3_bucket, resolved)
    if not key.exists():
        raise IOError("No such key: '{}'".format(resolved))
    raw = key.get_contents_as_string()
    return raw.decode('utf-8')