def upload_blob(self, source_path, destination_path, destination_bucket_name, content_type):
    destination_bucket = self.__client.get_bucket(destination_bucket_name)
    blob = Blob(destination_path, destination_bucket)
    with open(source_path, 'rb') as f:
        blob.upload_from_file(f, content_type=content_type)
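# Hedged usage sketch (not part of the method above): the same upload can be
# driven directly through the google-cloud-storage client API, assuming that
# package is what backs self.__client. Bucket name, object path, local file,
# and content type below are illustrative assumptions.
from google.cloud import storage

client = storage.Client()                              # application default credentials
bucket = client.get_bucket('my-bucket')                # assumed bucket name
blob = storage.Blob('exports/report.csv', bucket)      # assumed destination path
with open('report.csv', 'rb') as f:                    # assumed local source file
    blob.upload_from_file(f, content_type='text/csv')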
def _save(self, name, content):
    """
    Saves new content to the file specified by name. The content should be
    a proper File object or any Python file-like object, ready to be read
    from the beginning.
    """
    path = os.path.join(self._location, content.name) if self._location else content.name
    blob = Blob(path, self._bucket)
    blob.upload_from_file(content, size=content.size)
    return blob.name
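# Hedged usage sketch: in a Django project this _save would normally be
# reached through Storage.save() on the backend instance. The backend class
# name (GoogleCloudStorage) and the file name/content are assumptions.
from django.core.files.base import ContentFile

storage_backend = GoogleCloudStorage()                 # assumed backend class exposing _save
saved_name = storage_backend.save(
    'exports/report.csv',
    ContentFile(b'id,value\n1,42\n', name='exports/report.csv'),
)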
class GoogleCloudFile(File):
    def __init__(self, name, mode, storage, buffer_size=None):
        self.name = name
        self._mode = mode
        self._storage = storage
        self.blob = Blob(self.name, storage.bucket)
        self._file = None
        self._is_dirty = False

    @property
    def size(self):
        return self.blob.size

    def _get_file(self):
        if self._file is None:
            # Buffer the blob locally; spills to disk past max_memory_size.
            self._file = SpooledTemporaryFile(
                max_size=self._storage.max_memory_size,
                suffix=".GSStorageFile",
                dir=setting("FILE_UPLOAD_TEMP_DIR", None))
            if 'r' in self._mode:
                self._is_dirty = False
                self.blob.download_to_file(self._file)
                self._file.seek(0)
        return self._file

    def _set_file(self, value):
        self._file = value

    file = property(_get_file, _set_file)

    def read(self, *args, **kwargs):
        if 'r' not in self._mode:
            raise AttributeError("File was not opened in read mode.")
        return super(GoogleCloudFile, self).read(*args, **kwargs)

    def write(self, content, *args, **kwargs):
        if 'w' not in self._mode:
            raise AttributeError("File was not opened in write mode.")
        self._is_dirty = True
        return super(GoogleCloudFile, self).write(force_bytes(content), *args, **kwargs)

    def close(self):
        if self._file is not None:
            if self._is_dirty:
                self.file.seek(0)
                content_type, _ = mimetypes.guess_type(self.name)
                content_type = getattr(self.file, 'content_type', content_type)
                # Default to None so a buffer without a size attribute does not
                # raise AttributeError; upload_from_file accepts size=None.
                size = getattr(self.file, 'size', None)
                self.blob.upload_from_file(self.file, content_type=content_type, size=size)
            self._file.close()
            self._file = None
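# Hedged sketch: a django-storages style backend would typically hand blobs
# back as GoogleCloudFile instances from _open. The prefix handling via
# self._location mirrors _save above; this body is an assumption for
# illustration, not the library's exact implementation.
def _open(self, name, mode='rb'):
    path = os.path.join(self._location, name) if self._location else name
    return GoogleCloudFile(path, mode, self)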
def upload_file_object(self, file_obj, blob_name=None, connection=None):
    """Shortcut method to upload a file object into this bucket.

    Use this method to quickly put a local file in Cloud Storage.

    For example::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> bucket.upload_file_object(open('~/my-file.txt'), 'remote-text-file.txt')
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, remote-text-file.txt>]

    If you don't provide a blob name, we will try to upload the file
    using the local filename (**not** the complete path)::

      >>> from gcloud import storage
      >>> connection = storage.get_connection()
      >>> bucket = storage.get_bucket('my-bucket', connection=connection)
      >>> bucket.upload_file_object(open('~/my-file.txt'))
      >>> print bucket.list_blobs()
      [<Blob: my-bucket, my-file.txt>]

    :type file_obj: file
    :param file_obj: A file handle open for reading.

    :type blob_name: string
    :param blob_name: The name of the blob to upload the file to.
                      If this is blank, we will try to upload the file
                      to the root of the bucket with the same name as on
                      your local file system.

    :type connection: :class:`gcloud.storage.connection.Connection` or
                      ``NoneType``
    :param connection: Optional. The connection to use when sending
                       requests. If not provided, falls back to default.

    :rtype: :class:`Blob`
    :returns: The updated Blob object.
    """
    if blob_name is None:
        blob_name = os.path.basename(file_obj.name)
    blob = Blob(bucket=self, name=blob_name)
    blob.upload_from_file(file_obj, connection=connection)
    return blob