def remove(self, path):
    """Delete the file at ``path`` from the backing store.

    Arguments:
        path (str): Path to a file on the filesystem.

    Raises:
        errors.FileExpected: If ``path`` refers to a directory.
    """
    _path = self.validatepath(path)
    info = self.getinfo(path)
    if info.is_dir:
        # Directories must be removed with removedir(), not remove().
        raise errors.FileExpected(path)
    self.client.clean(_path.encode('utf-8'))
def openbin(self, path: str, mode: str = "r", buffering: int = -1, **options) -> "GCSFile":
    """Open a binary file-like object backed by a GCS blob.

    Arguments:
        path: Path to the file on the filesystem.
        mode: Mode string (must be a valid binary mode, e.g. "rb", "wb", "ab").
        buffering: Accepted for API compatibility; not used here.
        **options: Accepted for API compatibility; not used here.

    Returns:
        GCSFile: A buffered file object whose contents are uploaded to GCS
        on close (for create/write modes).

    Raises:
        errors.ResourceNotFound: If the parent directory (in create mode) or
            the blob itself (in strict read mode) does not exist.
        errors.FileExists: If the file exists and mode is exclusive ("x").
        errors.FileExpected: If ``path`` refers to a directory.
    """
    _mode = Mode(mode)
    _mode.validate_bin()
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)

    def on_close(gcs_file):
        """Upload the buffered data to GCS when the file is closed."""
        # try/finally ensures the local buffer is released even if the
        # upload raises — mirrors the S3 implementation's on_close.
        try:
            if _mode.create or _mode.writing:
                gcs_file.raw.seek(0)
                blob = self._get_blob(_key)
                if not blob:
                    blob = self.bucket.blob(_key)
                mime_type, encoding = mimetypes.guess_type(path)
                # A non-None encoding (e.g. gzip for ".gz") means the guessed
                # mime type describes the *decoded* payload — don't use it.
                if encoding is not None:
                    mime_type = None
                blob.upload_from_file(gcs_file.raw, content_type=mime_type)
        finally:
            gcs_file.raw.close()

    if _mode.create:
        # The parent "directory" (a pseudo-dir marker blob) must exist.
        dir_path = dirname(_path)
        if dir_path != "/":
            _dir_key = self._path_to_dir_key(dir_path)
            if not self.bucket.get_blob(_dir_key):
                raise errors.ResourceNotFound(path)

        try:
            info = self.getinfo(path)
        except errors.ResourceNotFound:
            pass
        else:
            if _mode.exclusive:
                raise errors.FileExists(path)
            if info.is_dir:
                raise errors.FileExpected(path)

        gcs_file = GCSFile.factory(path, _mode, on_close=on_close)

        if _mode.appending:
            blob = self._get_blob(_key)
            if blob:
                # An existing blob in GCS: download it into the buffer so new
                # writes append; download leaves the position at end-of-stream.
                gcs_file.seek(0, os.SEEK_END)
                blob.download_to_file(gcs_file.raw)

        return gcs_file

    if self.strict:
        info = self.getinfo(path)
        if info.is_dir:
            raise errors.FileExpected(path)

    # Read (and read/write) mode: the blob must already exist.
    gcs_file = GCSFile.factory(path, _mode, on_close=on_close)
    blob = self._get_blob(_key)
    if not blob:
        # Fix: ResourceNotFound requires the path argument — raising the bare
        # class would fail with TypeError instead of the intended error.
        raise errors.ResourceNotFound(path)
    blob.download_to_file(gcs_file.raw)
    gcs_file.seek(0)
    return gcs_file
def openbin(self, path, mode="r", buffering=-1, **options):
    """Open a binary file-like object backed by an S3 object.

    Arguments:
        path (str): Path to the file on the filesystem.
        mode (str): Mode string (must be a valid binary mode, e.g.
            "rb", "wb", "ab").
        buffering (int): Accepted for API compatibility; not used here.
        **options: Accepted for API compatibility; not used here.

    Returns:
        S3File: A buffered file object; written data is uploaded to S3
        when the file is closed.

    Raises:
        errors.ResourceNotFound: In strict read mode, if the object does
            not exist (raised by ``getinfo``/``s3errors``).
        errors.FileExists: If the file exists and mode is exclusive ("x").
        errors.FileExpected: If ``path`` refers to a directory.
    """
    _mode = Mode(mode)
    _mode.validate_bin()
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)

    if _mode.create:

        def on_close_create(s3file):
            """Called when the S3 file closes, to upload data."""
            # try/finally releases the local buffer even if the upload
            # raises.
            try:
                s3file.raw.seek(0)
                with s3errors(path):
                    self.client.upload_fileobj(
                        s3file.raw, self._bucket_name, _key
                    )
            finally:
                s3file.raw.close()

        # Pre-flight checks against any existing object at this path.
        try:
            info = self.getinfo(path)
        except errors.ResourceNotFound:
            pass
        else:
            if _mode.exclusive:
                raise errors.FileExists(path)
            if info.is_dir:
                raise errors.FileExpected(path)

        s3file = S3File.factory(path, _mode, on_close=on_close_create)
        if _mode.appending:
            # Seed the buffer with the existing object (if any) and seek
            # to the end so subsequent writes append; a missing object is
            # not an error in append mode.
            try:
                with s3errors(path):
                    self.client.download_fileobj(
                        self._bucket_name, _key, s3file.raw
                    )
            except errors.ResourceNotFound:
                pass
            else:
                s3file.seek(0, os.SEEK_END)

        return s3file

    if self.strict:
        info = self.getinfo(path)
        if info.is_dir:
            raise errors.FileExpected(path)

    def on_close(s3file):
        """Called when the S3 file closes, to upload the data."""
        try:
            # Only r+/w-style modes re-upload; pure reads have nothing
            # to flush back.
            if _mode.writing:
                s3file.raw.seek(0, os.SEEK_SET)
                with s3errors(path):
                    self.client.upload_fileobj(
                        s3file.raw, self._bucket_name, _key
                    )
        finally:
            s3file.raw.close()

    # Read (and read/write) mode: download the existing object into the
    # buffer, then rewind so the caller reads from the start.
    s3file = S3File.factory(path, _mode, on_close=on_close)
    with s3errors(path):
        self.client.download_fileobj(
            self._bucket_name, _key, s3file.raw
        )
    s3file.seek(0, os.SEEK_SET)
    return s3file