def post_start_creation(self, filename, options):
  """Start object creation with a POST.

  This implements the resumable upload XML API.

  Args:
    filename: gs filename of form /bucket/filename.
    options: a dict containing all user specified request headers.
      e.g. {'content-type': 'foo', 'x-goog-meta-bar': 'bar'}.

  Returns:
    a token used for continuing upload. Also used as blobkey to store
    the content.
  """
  common.validate_file_path(filename)
  token = self._filename_to_blobkey(filename)
  # Any prior upload session or finalized file under this token is
  # discarded before the new session starts.
  stale = _AE_GCSFileInfo_.get_by_key_name(token)
  self._cleanup_old_file(stale)
  upload = _AE_GCSFileInfo_(key_name=token, filename=filename,
                            finalized=False)
  upload.options = options
  upload.put()
  return token
def put_copy(self, src, dst):
  """Copy file from src to dst. Metadata is copied.

  Args:
    src: /bucket/filename. This file must exist.
    dst: /bucket/filename
  """
  common.validate_file_path(src)
  common.validate_file_path(dst)
  saved_namespace = namespace_manager.get_namespace()
  try:
    # Metadata entities are read and written in the empty namespace.
    namespace_manager.set_namespace('')
    source_key = self._filename_to_blobkey(src)
    source_info = _AE_GCSFileInfo_.get_by_key_name(source_key)
    dest_key = self._filename_to_blobkey(dst)
    dest_info = _AE_GCSFileInfo_(key_name=dest_key, filename=dst,
                                 finalized=True)
    dest_info.options = source_info.options
    dest_info.etag = source_info.etag
    dest_info.size = source_info.size
    dest_info.creation = datetime.datetime.utcnow()
    dest_info.put()
  finally:
    namespace_manager.set_namespace(saved_namespace)

  # Duplicate the blob content under the destination key.
  blob = self.blob_storage.OpenBlob(source_key)
  self.blob_storage.StoreBlob(dest_key, blob)
def get_object(self, filename, start=0, end=None):
  """Get file content with a GET.

  Args:
    filename: gcs filename of form '/bucket/filename'.
    start: start offset to request. Inclusive.
    end: end offset to request. Inclusive. None means read to EOF.

  Returns:
    The segment of file content requested.

  Raises:
    ValueError: if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(blobkey)
  gcsfileinfo = db.get(key)
  if not gcsfileinfo or not gcsfileinfo.finalized:
    raise ValueError('File does not exist.')
  local_file = self.blob_storage.OpenBlob(blobkey)
  local_file.seek(start)
  # Bug fix: 'if end:' treated end=0 (a valid one-byte range [0, 0])
  # as "read to EOF" because 0 is falsy; compare against None instead.
  if end is not None:
    return local_file.read(end - start + 1)
  else:
    return local_file.read()
def get_object(self, filename, start=0, end=None):
  """Get file content with a GET.

  Args:
    filename: gs filename of form '/bucket/filename'.
    start: start offset to request. Inclusive.
    end: end offset to request. Inclusive. None means read to EOF.

  Returns:
    The segment of file content requested.

  Raises:
    ValueError: if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  gsfileinfo = _AE_GCSFileInfo_.get_by_key_name(blobkey)
  if not gsfileinfo or not gsfileinfo.finalized:
    raise ValueError('File does not exist.')
  local_file = self.blob_storage.OpenBlob(blobkey)
  local_file.seek(start)
  # Bug fix: 'if end:' treated end=0 (a valid one-byte range [0, 0])
  # as "read to EOF" because 0 is falsy; compare against None instead.
  if end is not None:
    return local_file.read(end - start + 1)
  else:
    return local_file.read()
def post_start_creation(self, filename, options):
  """Start object creation with a POST.

  This implements the resumable upload XML API.

  Only major limitation of current implementation is that we don't
  support multiple upload sessions for the same GCS file. Previous
  _AE_GCSFileInfo (which represents either a finalized file, or an
  upload session) will be removed when a new upload session is
  created.

  Args:
    filename: gcs filename of form /bucket/filename.
    options: a dict containing all user specified request headers.
      e.g. {'content-type': 'foo', 'x-goog-meta-bar': 'bar'}.

  Returns:
    a token (blobkey) used for continuing upload.
  """
  saved_namespace = namespace_manager.get_namespace()
  try:
    # All stub metadata lives in the empty namespace.
    namespace_manager.set_namespace('')
    common.validate_file_path(filename)
    token = self._filename_to_blobkey(filename)
    existing = _AE_GCSFileInfo_.get_by_key_name(token)
    self._cleanup_old_file(existing)
    session = _AE_GCSFileInfo_(key_name=token, filename=filename,
                               finalized=False)
    session.options = options
    session.put()
    return token
  finally:
    namespace_manager.set_namespace(saved_namespace)
def put_copy(self, src, dst):
  """Copy file from src to dst. Metadata is copied.

  Args:
    src: /bucket/filename. This file must exist.
    dst: /bucket/filename
  """
  common.validate_file_path(src)
  common.validate_file_path(dst)
  # Source metadata is looked up before switching to the empty
  # namespace; destination metadata is written inside it.
  src_key = self._filename_to_blobkey(src)
  src_info = _AE_GCSFileInfo_.get_by_key_name(src_key)
  saved_namespace = namespace_manager.get_namespace()
  try:
    namespace_manager.set_namespace('')
    dst_key = self._filename_to_blobkey(dst)
    dst_info = _AE_GCSFileInfo_(key_name=dst_key, filename=dst,
                                finalized=True)
    dst_info.options = src_info.options
    dst_info.etag = src_info.etag
    dst_info.size = src_info.size
    dst_info.creation = datetime.datetime.utcnow()
    dst_info.put()
  finally:
    namespace_manager.set_namespace(saved_namespace)

  # Duplicate the blob content under the destination key.
  blob = self.blob_storage.OpenBlob(src_key)
  self.blob_storage.StoreBlob(dst_key, blob)
def _filename_to_blobkey(self, filename):
  """Get blobkey for filename.

  Args:
    filename: gs filename of form /bucket/filename.

  Returns:
    blobinfo's datastore's key name, aka, blobkey.
  """
  common.validate_file_path(filename)
  # NOTE(review): a sibling version of this helper encodes
  # filename[1:] (leading '/' stripped) instead of the full path —
  # confirm which form the blobstore stub expects.
  encode = blobstore_stub.BlobstoreServiceStub.CreateEncodedGoogleStorageKey
  return encode(filename)
def _filename_to_blobkey(self, filename):
  """Get blobkey for filename.

  Args:
    filename: gcs filename of form /bucket/filename.

  Returns:
    blobinfo's datastore's key name, aka, blobkey.
  """
  common.validate_file_path(filename)
  # Encode 'bucket/filename' — the leading '/' is dropped.
  encode = blobstore_stub.BlobstoreServiceStub.CreateEncodedGoogleStorageKey
  return encode(filename[1:])
def delete_object(self, filename):
  """Delete file with a DELETE.

  Args:
    filename: gs filename of form '/bucket/filename'

  Returns:
    True if file is deleted. False if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  info = _AE_GCSFileInfo_.get_by_key_name(blobkey)
  if not info:
    return False
  # Remove the metadata entity first, then the blob content.
  info.delete()
  self.blob_storage.DeleteBlob(blobkey)
  return True
def delete_object(self, filename):
  """Delete file with a DELETE.

  Args:
    filename: gcs filename of form '/bucket/filename'

  Returns:
    True if file is deleted. False if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  datastore_key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(
      blobkey)
  info = db.get(datastore_key)
  if not info:
    return False
  # DeleteBlob handles both the blob content and its bookkeeping.
  blobstore_stub.BlobstoreServiceStub.DeleteBlob(blobkey, self.blob_storage)
  return True
def put_copy(self, src, dst, options):
  """Copy file from src to dst. Metadata is copied.

  Args:
    src: /bucket/filename. This file must exist.
    dst: /bucket/filename.
    options: a dict containing all user specified request headers.
      e.g. {'content-type': 'foo', 'x-goog-meta-bar': 'bar'}. If None,
      old metadata is copied.
  """
  common.validate_file_path(src)
  common.validate_file_path(dst)
  saved_namespace = namespace_manager.get_namespace()
  try:
    # Metadata entities are read and written in the empty namespace.
    namespace_manager.set_namespace('')
    src_key = self._filename_to_blobkey(src)
    src_info = _AE_GCSFileInfo_.get_by_key_name(src_key)
    dst_key = self._filename_to_blobkey(dst)
    dst_info = _AE_GCSFileInfo_(key_name=dst_key, filename=dst,
                                finalized=True)
    # Caller-supplied headers replace the source metadata when given.
    dst_info.options = options if options else src_info.options
    dst_info.etag = src_info.etag
    dst_info.size = src_info.size
    dst_info.creation = src_info.creation
    dst_info.put()
  finally:
    namespace_manager.set_namespace(saved_namespace)

  # Only duplicate the content for a genuine copy; a same-key "copy"
  # (metadata-only update) keeps the existing blob.
  if src_key != dst_key:
    blob = self.blob_storage.OpenBlob(src_key)
    self.blob_storage.StoreBlob(dst_key, blob)
def head_object(self, filename):
  """Get file stat with a HEAD.

  Args:
    filename: gs filename of form '/bucket/filename'

  Returns:
    A CSFileStat object containing file stat. None if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  info = _AE_GCSFileInfo_.get_by_key_name(blobkey)
  # Unfinalized upload sessions are invisible to HEAD.
  if not info or not info.finalized:
    return None
  return common.CSFileStat(
      filename=info.filename,
      st_size=info.size,
      etag=info.etag,
      st_ctime=calendar.timegm(info.creation.utctimetuple()),
      content_type=info.content_type,
      metadata=common.get_metadata(info.options))
def head_object(self, filename):
  """Get file stat with a HEAD.

  Args:
    filename: gs filename of form '/bucket/filename'

  Returns:
    A CSFileStat object containing file stat. None if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  info = _AE_GCSFileInfo_.get_by_key_name(blobkey)
  # Only finalized files have a stat; pending uploads report missing.
  if info is not None and info.finalized:
    created_at = calendar.timegm(info.creation.utctimetuple())
    return common.CSFileStat(filename=info.filename,
                             st_size=info.size,
                             etag=info.etag,
                             st_ctime=created_at,
                             content_type=info.content_type,
                             metadata=common.get_metadata(info.options))
  return None
def head_object(self, filename):
  """Get file stat with a HEAD.

  Args:
    filename: gcs filename of form '/bucket/filename'

  Returns:
    A GCSFileStat object containing file stat. None if file doesn't exist.
  """
  common.validate_file_path(filename)
  blobkey = self._filename_to_blobkey(filename)
  datastore_key = blobstore_stub.BlobstoreServiceStub.ToDatastoreBlobKey(
      blobkey)
  info = db.get(datastore_key)
  # Unfinalized upload sessions are invisible to HEAD.
  if not info or not info.finalized:
    return None
  return common.GCSFileStat(
      filename=info.filename,
      st_size=info.size,
      etag=info.etag,
      st_ctime=calendar.timegm(info.creation.utctimetuple()),
      content_type=info.content_type,
      metadata=common.get_metadata(info.options))