def info_path(self, path: str, use_cache=True) -> Optional[OInfo]:
    """Look up metadata for *path*; return None if nothing exists there."""
    if path == "/":
        # The root is synthesized locally — dropbox returns no metadata for "/".
        return OInfo(DIRECTORY, self._root_id, None, "/", size=0, mtime=0)
    try:
        log.debug("res info path %s", path)
        res = self._api('files_get_metadata', path)
        log.debug("res info path %s", res)
        oid = res.id
        if not oid.startswith('id:'):
            log.warning("invalid oid %s from path %s", oid, path)
        if isinstance(res, files.FolderMetadata):
            otype, fhash, size, mtime = DIRECTORY, None, 0, 0
        else:
            otype = FILE
            fhash = res.content_hash
            size = res.size
            mtime = self._mtime_from_metadata(res)
        # Prefer the canonical server-side path when one is reported.
        return OInfo(otype, oid, fhash, res.path_display or path,
                     size=size, mtime=mtime)
    except CloudFileNotFoundError:
        return None
def _box_get_oinfo(self, client: Client, box_object: BoxItem, parent_path=None, use_cache=True) -> Optional[OInfo]:
    """Translate a box SDK item into an OInfo, resolving its path when possible."""
    assert isinstance(client, Client)
    if box_object is None:
        return None
    # Files and folders both carry a content_modified_at timestamp.
    mtime = self._parse_time(box_object.content_modified_at)
    if box_object.object_type == 'folder':
        obj_type = DIRECTORY
        size = 0
    else:
        obj_type = FILE
        size = box_object.size
    # Path resolution: explicit parent wins, then the oid->path cache.
    if parent_path:
        path = self.join(parent_path, box_object.name)
    elif use_cache:
        path = self.__cache.get_path(box_object.object_id)
    else:
        path = None
    return OInfo(
        oid=box_object.object_id,
        path=path,
        otype=obj_type,
        hash=box_object.sha1 if obj_type == FILE else None,
        size=size,
        mtime=mtime,
    )
def info_oid(self, oid: str, use_cache=True) -> Optional[OInfo]:
    """Look up metadata by object id; return None if the id is unknown."""
    if oid == self._root_id:
        # Root is synthesized locally; dropbox has no metadata entry for it.
        return OInfo(DIRECTORY, oid, None, "/", size=0, mtime=0)
    try:
        res = self._api('files_get_metadata', oid)
        log.debug("res info oid %s", res)
        path = res.path_display
        if isinstance(res, files.FolderMetadata):
            return OInfo(DIRECTORY, oid, None, path, size=0, mtime=0)
        return OInfo(FILE, oid, res.content_hash, path,
                     size=res.size, mtime=self._mtime_from_metadata(res))
    except CloudFileNotFoundError:
        return None
def create(self, path, file_like, metadata=None) -> 'OInfo':
    """Create a new file at *path* from *file_like* and return its OInfo.

    Raises CloudFileExistsError if something already exists at the path,
    CloudDisconnectedError on transport-level OSError, and
    CloudTemporaryError if drive returns an empty response.
    """
    if not metadata:
        metadata = {}
    if self.exists_path(path):
        raise CloudFileExistsError()
    ul, size = self._media_io(file_like)
    fields = 'id, md5Checksum, size, modifiedTime'
    # Cache is accurate, just refreshed from exists_path() call
    parent_oid = self._get_parent_id(path, use_cache=True)
    metadata['appProperties'] = self._prep_app_properties(parent_oid)
    gdrive_info = self._prep_upload(path, metadata)
    gdrive_info['parents'] = [parent_oid]
    try:
        def api_call():
            return self._api('files', 'create',
                             body=gdrive_info,
                             media_body=ul,
                             fields=fields)
        if self._client:
            # NOTE(review): follow_redirects is disabled on the private http
            # object for the duration of the upload — presumably to keep the
            # transport from re-sending the body on redirect; confirm this is
            # still required for the installed client library version.
            with patch.object(self._client._http.http, "follow_redirects", False):  # pylint: disable=protected-access
                res = api_call()
        else:
            res = api_call()
    except OSError as e:
        # Treat transport failures as a disconnect so callers can reconnect.
        self.disconnect()
        raise CloudDisconnectedError("OSError in file create: %s" % repr(e))
    log.debug("response from create %s : %s", path, res)
    if not res:
        raise CloudTemporaryError("unknown response from drive on upload")
    # Record the new oid in the path cache.
    self._ids[path] = res['id']
    log.debug("path cache %s", self._ids)
    # Prefer the server-reported size/mtime over the locally computed ones.
    size = int(res.get("size", 0))
    mtime = res.get('modifiedTime')
    mtime = mtime and self._parse_time(mtime)
    cache_ent = self.get_quota.get()  # pylint: disable=no-member
    if cache_ent:
        # Keep the cached quota roughly in sync without a refetch.
        cache_ent["used"] += size
    return OInfo(otype=FILE, oid=res['id'], hash=res['md5Checksum'],
                 path=path, size=size, mtime=mtime)
def upload(self, oid, file_like, metadata=None) -> 'OInfo':
    """Overwrite the contents of the existing file identified by *oid*."""
    metadata = metadata or {}
    gdrive_info = self._prep_upload(None, metadata)
    ul, size = self._media_io(file_like)
    fields = 'id, md5Checksum, modifiedTime'

    def api_call():
        return self._api('files', 'update',
                         body=gdrive_info,
                         fileId=oid,
                         media_body=ul,
                         fields=fields)

    try:
        if not self._client:
            res = api_call()
        else:
            with patch.object(self._client._http.http, "follow_redirects", False):  # pylint: disable=protected-access
                res = api_call()
    except OSError as e:
        # Transport failure: flag the connection as dead before surfacing it.
        self.disconnect()
        raise CloudDisconnectedError("OSError in file upload: %s" % repr(e))

    log.debug("response from upload %s", res)
    if not res:
        raise CloudTemporaryError("unknown response from drive on upload")

    mtime = res.get('modifiedTime')
    if mtime:
        mtime = self._parse_time(mtime)

    # md5Checksum is absent when the target oid turns out to be a folder.
    md5 = res.get('md5Checksum', None)
    if md5 is None:
        possible_conflict = self._info_oid(oid)
        if possible_conflict and possible_conflict.otype == DIRECTORY:
            raise CloudFileExistsError("Can only upload to a file: %s" % possible_conflict.path)
    return OInfo(otype=FILE, oid=res['id'], hash=md5, path=None,
                 size=size, mtime=mtime)
def info_path(self, path: str, use_cache=True) -> Optional[OInfo]:
    """Return OInfo for *path*, or None if nothing exists there.

    Tries the local metadata cache first (when use_cache is True) and
    falls back to a box API lookup, refreshing the cache with the result.
    """
    if path in ("/", ''):
        # The root folder is never cached; always fetch it directly.
        with self._api() as client:
            box_object = client.root_folder()
            box_object = self._unsafe_box_object_populate(client, box_object)
            return self._box_get_oinfo(client, box_object)
    cached_type = None
    cached_oid = None
    if use_cache:
        cached_type = self.__cache.get_type(path=path)
        cached_oid = self.__cache.get_oid(path=path)
        log.debug("cached oid = %s", cached_oid)
        if cached_type:
            metadata = self.__cache.get_metadata(path=path)
            if metadata:
                ohash = metadata.get("hash")
                size = metadata.get("size")
                mtime = metadata.get("mtime")
                # Bugfix: compare size against None instead of testing its
                # truthiness — otherwise cached zero-byte files always fall
                # through to an unnecessary API round trip.
                if cached_oid and ohash and size is not None:
                    return OInfo(cached_type, cached_oid, ohash, path, size, mtime=mtime)
    with self._api() as client:
        log.debug("getting box object for %s:%s", cached_oid, path)
        box_object = self._get_box_object(client, oid=cached_oid, path=path,
                                          object_type=cached_type or NOTKNOWN,
                                          strict=False, use_cache=use_cache)
        log.debug("got box object for %s:%s %s", cached_oid, path, box_object)
        # Refresh the cache with whatever the API returned.
        _, dir_info = self.__box_cache_object(client, box_object, path)
        log.debug("dirinfo = %s", dir_info)  # pylint: disable=no-member
        if dir_info:
            return OInfo(dir_info.otype, dir_info.oid, dir_info.hash,
                         dir_info.path, dir_info.size, mtime=dir_info.mtime)
        return None
def download(self, oid, file_like):
    """Stream the file's contents into *file_like* and return its OInfo."""
    result = self._api('files_download', oid)
    if not result:
        raise CloudFileNotFoundError()
    meta, content = result
    # Copy the response body in fixed-size chunks.
    for chunk in content.iter_content(self.upload_block_size):
        file_like.write(chunk)
    return OInfo(otype=FILE,
                 oid=oid,
                 hash=meta.content_hash,
                 path=meta.path_display,
                 size=meta.size,
                 mtime=self._mtime_from_metadata(meta))
def _upload(self, oid, file_like, metadata=None) -> OInfo:
    """Upload *file_like* to dropbox target *oid*, overwriting any existing file.

    Files smaller than self.large_file_size go up in one files_upload call;
    larger files use a chunked upload session (start / append / finish).
    """
    res = None
    metadata = metadata or {}
    # Determine total size by seeking to the end, then rewind for the read.
    file_like.seek(0, io.SEEK_END)
    size = file_like.tell()
    file_like.seek(0)
    if size < self.large_file_size:
        res = self._api('files_upload', file_like.read(), oid, mode=files.WriteMode('overwrite'))
    else:
        cursor = None
        while True:
            data = file_like.read(self.upload_block_size)
            if not data:
                if cursor:
                    # Honor a caller-supplied mtime; fall back to "now".
                    local_mtime = arrow.get(metadata.get('mtime', time.time())).datetime
                    commit = files.CommitInfo(path=oid,
                                              mode=files.WriteMode.overwrite,
                                              autorename=False,
                                              client_modified=local_mtime,
                                              mute=True)
                    # The final (empty) chunk closes the session and commits.
                    res = self._api('files_upload_session_finish', data, cursor, commit)
                break
            if not cursor:
                # First chunk opens the session; the cursor starts at len(data).
                res = self._api('files_upload_session_start', data)
                cursor = files.UploadSessionCursor(res.session_id, len(data))
            else:
                self._api('files_upload_session_append_v2', data, cursor)
                cursor.offset += len(data)
    if res is None:
        # NOTE(review): only reachable when the large-file path saw an empty
        # stream — CloudFileExistsError is a surprising exception for that;
        # confirm this is the intended signal to callers.
        raise CloudFileExistsError()
    ret = OInfo(otype=FILE, oid=res.id, hash=res.content_hash, path=res.path_display,
                size=size, mtime=self._mtime_from_metadata(res))
    log.debug('upload result is %s', ret)
    return ret