def openbin(self, path, mode='r', buffering=-1, **options):
    """Open the file at ``path`` as a binary stream.

    Validates the path and mode, performs existence/type checks under the
    filesystem lock, and returns an ``_UploadOnClose`` stream that uploads
    its buffer back to the remote store when closed.
    """
    _log.info(f'openbin({path}, {mode}, {buffering}, {options})')
    path = self.validatepath(path)
    with self._lock:
        parsedMode = Mode(mode)
        idsFromPath = self._itemsFromPath(path)
        item = idsFromPath.get(path)
        # exclusive ("x") creation must fail when the target already exists
        if item is not None and parsedMode.exclusive:
            raise FileExists(path)
        # reading without create requires an existing file
        if item is None and parsedMode.reading and not parsedMode.create:
            raise ResourceNotFound(path)
        # directories cannot be opened as binary files
        if item is not None and item['mimeType'] == _folderMimeType:
            raise FileExpected(path)
        parentDir = dirname(path)
        _log.debug(
            f'looking up id for {parentDir} in {list(idsFromPath.keys())}')
        parentDirItem = idsFromPath.get(parentDir)
        # make sure that the parent directory exists if we're writing
        if parsedMode.writing and parentDirItem is None:
            raise ResourceNotFound(parentDir)
        return _UploadOnClose(fs=self,
                              path=path,
                              thisMetadata=item,
                              parentMetadata=parentDirItem,
                              parsedMode=parsedMode,
                              **options)
def add_shortcut(self, shortcut_path, target_path):
    """Create a Drive shortcut at ``shortcut_path`` pointing at ``target_path``.

    The target must be an existing non-folder item, the shortcut path must
    not already exist, and its parent directory must exist.
    """
    _log.info(f"add_shortcut: {shortcut_path}, {target_path}")
    _CheckPath(shortcut_path)
    _CheckPath(target_path)
    with self._lock:
        idsFromTargetPath = self._itemsFromPath(target_path)
        targetItem = idsFromTargetPath.get(target_path)
        if targetItem is None:
            raise ResourceNotFound(path=target_path)
        # shortcuts are only created for files here, never folders
        if targetItem["mimeType"] == _folderMimeType:
            raise FileExpected(target_path)
        idsFromShortcutPath = self._itemsFromPath(shortcut_path)
        if shortcut_path in idsFromShortcutPath:
            raise DestinationExists(shortcut_path)
        shortcutParentDir, shortcutName = split(shortcut_path)
        shortcutParentDirItem = idsFromShortcutPath.get(shortcutParentDir)
        if shortcutParentDirItem is None:
            raise ResourceNotFound(shortcutParentDir)
        metadata = {
            "name": shortcutName,
            "parents": [shortcutParentDirItem["id"]],
            "mimeType": _shortcutMimeType,
            "shortcutDetails": {"targetId": targetItem["id"]},
            "enforceSingleParent": self.enforceSingleParent,
        }
        _ = self.drive.files().create(body=metadata, fields="id").execute(
            num_retries=self.retryCount)
def openbin(self, path, mode="r", buffering=-1, **options):  # pylint: disable=unused-argument
    """Open ``path`` as a binary stream backed by an upload-on-close file.

    Existence and type constraints are checked under the filesystem lock
    before the ``_UploadOnClose`` wrapper is handed back.
    """
    _CheckPath(path)
    with self._lock:
        _log.info(f"openbin: {path}, {mode}, {buffering}")
        parsedMode = Mode(mode)
        itemsByPath = self._itemsFromPath(path)
        targetItem = itemsByPath.get(path)
        # exclusive creation requires that the file does not yet exist
        if targetItem is not None and parsedMode.exclusive:
            raise FileExists(path)
        # reading without create requires an existing file
        if targetItem is None and parsedMode.reading and not parsedMode.create:
            raise ResourceNotFound(path)
        # only files may be opened, never folders
        if targetItem is not None and targetItem["mimeType"] == _folderMimeType:
            raise FileExpected(path)
        parentDir = dirname(path)
        _log.debug(f"looking up id for {parentDir}")
        parentItem = itemsByPath.get(parentDir)
        # make sure that the parent directory exists if we're writing
        if parsedMode.writing and parentItem is None:
            raise ResourceNotFound(parentDir)
        return _UploadOnClose(fs=self,
                              path=path,
                              thisMetadata=targetItem,
                              parentMetadata=parentItem,
                              parsedMode=parsedMode,
                              **options)
def add_parent(self, path, parent_dir):
    """Link the existing item at ``path`` into ``parent_dir`` as an extra parent.

    Google Drive items may have several parents; this adds one without
    moving the item, refusing to create a duplicate filename in the new
    directory.
    """
    _log.info(f"add_parent: {path} -> {parent_dir}")
    _CheckPath(path)
    _CheckPath(parent_dir)
    with self._lock:
        targetPath = join(parent_dir, basename(path))
        idsFromPath = self._itemsFromPath(targetPath)
        # don't allow violation of our requirement to keep filename unique inside new directory
        if targetPath in idsFromPath:
            raise FileExists(targetPath)
        newParentItem = idsFromPath.get(parent_dir)
        if newParentItem is None:
            raise ResourceNotFound(parent_dir)
        if newParentItem["mimeType"] != _folderMimeType:
            raise DirectoryExpected(parent_dir)
        sourceItem = self._itemFromPath(path)
        if sourceItem is None:
            raise ResourceNotFound(path)
        self.drive.files().update(
            fileId=sourceItem["id"],
            addParents=newParentItem["id"],
            body={}).execute(num_retries=self.retryCount)
def openbin(self, path, mode="r", buffering=-1, **options):
    """Open ``path`` in binary mode, returning an upload-on-close stream.

    Text modes are rejected up front; for writing modes the parent
    directory is verified server-side before the stream is created.
    """
    _CheckPath(path)
    with self._lock:
        if "t" in mode:
            raise ValueError("Text mode is not allowed in openbin")
        parsedMode = Mode(mode)
        exists = self.exists(path)
        if exists and parsedMode.exclusive:
            raise FileExists(path)
        if not exists and parsedMode.reading and not parsedMode.create:
            raise ResourceNotFound(path)
        if self.isdir(path):
            raise FileExpected(path)
        if parsedMode.writing:
            # make sure that the parent directory exists
            parentDir = dirname(path)
            parentResponse = self.session.get(_PathUrl(parentDir, ""))
            if parentResponse.status_code == 404:
                raise ResourceNotFound(parentDir)
            parentResponse.raise_for_status()
        itemId = None
        if exists:
            itemResponse = self.session.get(_PathUrl(path, ""))
            itemResponse.raise_for_status()
            itemId = itemResponse.json()["id"]
        return _UploadOnClose(session=self.session,
                              path=path,
                              itemId=itemId,
                              mode=parsedMode)
def copy(self, src_path, dst_path, overwrite=False):
    """Copy the file at ``src_path`` to ``dst_path`` on Google Drive.

    When ``overwrite`` is True an existing destination is deleted first
    and then re-created via the Drive copy endpoint.
    """
    info(f"copy: {src_path} -> {dst_path}, {overwrite}")
    _CheckPath(src_path)
    _CheckPath(dst_path)
    with self._lock:
        parentDir = dirname(dst_path)
        parentDirItem = self._itemFromPath(parentDir)
        if parentDirItem is None:
            raise ResourceNotFound(parentDir)
        dstItem = self._itemFromPath(dst_path)
        if dstItem is not None and overwrite is False:
            raise DestinationExists(dst_path)
        srcItem = self._itemFromPath(src_path)
        if srcItem is None:
            raise ResourceNotFound(src_path)
        if srcItem["mimeType"] == _folderMimeType:
            raise FileExpected(src_path)
        # TODO - we should really replace the contents of the existing file
        # with the new contents, so that the history is correct
        if dstItem is not None:
            self.drive.files().delete(
                fileId=dstItem["id"]).execute(num_retries=self.retryCount)
        newMetadata = {
            "parents": [parentDirItem["id"]],
            "name": basename(dst_path),
        }
        self.drive.files().copy(
            fileId=srcItem["id"],
            body=newMetadata).execute(num_retries=self.retryCount)
def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
    """Copy the file at ``src_path`` to ``dst_path`` via the Graph copy API.

    The copy is asynchronous: the POST returns 202 Accepted plus a monitor
    URL in the ``Location`` header, which is polled until the job reports
    ``completed``.

    :param overwrite: allow replacing an existing destination
    :param preserve_time: accepted for pyfilesystem API compatibility
    :raises errors.DestinationExists: dst exists and overwrite is False
    :raises ResourceNotFound: source or destination parent dir is missing
    :raises errors.FileExpected: source is a folder
    """
    _log.info(
        f'copy({src_path}, {dst_path}, {overwrite}, {preserve_time})')
    src_path = self.validatepath(src_path)
    dst_path = self.validatepath(dst_path)
    with self._lock:
        if not overwrite and self.exists(dst_path):
            raise errors.DestinationExists(dst_path)
        driveItemResponse = self.session.get_path(src_path)
        if driveItemResponse.status_code == 404:
            raise ResourceNotFound(src_path)
        driveItemResponse.raise_for_status()
        driveItem = driveItemResponse.json()
        # Graph marks folders with a 'folder' facet on the item
        if 'folder' in driveItem:
            raise errors.FileExpected(src_path)
        newParentDir = dirname(dst_path)
        newFilename = basename(dst_path)
        parentDirResponse = self.session.get_path(newParentDir)
        if parentDirResponse.status_code == 404:
            # Fix: this previously raised ResourceNotFound(src_path), but the
            # source is known to exist here - the missing resource is the
            # destination's parent directory.
            raise ResourceNotFound(newParentDir)
        parentDirResponse.raise_for_status()
        parentDirItem = parentDirResponse.json()
        # This just asynchronously starts the copy.
        # Fix: the endpoint string had been mangled ("/[email protected]=replace");
        # restored to the Graph copy action with conflictBehavior=replace.
        response = self.session.post_item(
            driveItem['id'],
            '/copy?@microsoft.graph.conflictBehavior=replace',
            json={
                'parentReference': {
                    'driveId': parentDirItem['parentReference']['driveId'],
                    'id': parentDirItem['id']
                },
                'name': newFilename,
            })
        response.raise_for_status()
        assert response.status_code == 202, 'Response code should be 202 (Accepted)'
        monitorUri = response.headers['Location']
        while True:
            # monitor uris don't require authentication
            # (https://docs.microsoft.com/en-us/onedrive/developer/rest-api/concepts/long-running-actions)
            jobStatusResponse = requests.get(monitorUri)
            jobStatusResponse.raise_for_status()
            jobStatus = jobStatusResponse.json()
            if 'operation' in jobStatus and jobStatus[
                    'operation'] != 'itemCopy':
                _log.warning(
                    f'Unexpected operation: {jobStatus["operation"]}')
            if jobStatus['status'] not in [
                    'inProgress', 'completed', 'notStarted'
            ]:
                _log.warning(f'Unexpected status: {jobStatus}')
            if jobStatus['status'] == 'completed':
                break
def move(self, src_path, dst_path, overwrite=False):
    """Move/rename the file at ``src_path`` to ``dst_path``.

    Implemented as a single PATCH of the drive item (rename and/or
    re-parent).  A 409 response is handled specially: with ``overwrite``
    the destination is deleted and the PATCH retried; without it the PATCH
    is retried once since the 409 may be spurious (see issue #7).

    :raises DestinationExists: dst exists and overwrite is False
    :raises ResourceNotFound: src (or dst's parent dir) does not exist
    :raises FileExpected: src is a folder
    """
    _CheckPath(src_path)
    _CheckPath(dst_path)
    with self._lock:
        if not overwrite and self.exists(dst_path):
            raise DestinationExists(dst_path)
        driveItemResponse = self.session.get(_PathUrl(src_path, ""))
        if driveItemResponse.status_code == 404:
            raise ResourceNotFound(src_path)
        driveItemResponse.raise_for_status()
        driveItem = driveItemResponse.json()
        # items carrying a 'folder' facet are directories - not movable here
        if "folder" in driveItem:
            raise FileExpected(src_path)
        itemUpdate = {}
        newFilename = basename(dst_path)
        # rename only when dst is not a directory and the name actually changes
        if not self.isdir(dst_path) and newFilename != basename(src_path):
            itemUpdate["name"] = newFilename
        parentDir = dirname(dst_path)
        if parentDir != dirname(src_path):
            # moving between directories: resolve the new parent's item id
            parentDirItem = self.session.get(_PathUrl(parentDir, ""))
            if parentDirItem.status_code == 404:
                raise ResourceNotFound(parentDir)
            parentDirItem.raise_for_status()
            itemUpdate["parentReference"] = {
                "id": parentDirItem.json()["id"]
            }
        itemId = driveItem["id"]
        response = self.session.patch(_ItemUrl(itemId, ""), json=itemUpdate)
        if response.status_code == 409 and overwrite is True:
            # delete the existing version and then try again
            response = self.session.delete(_PathUrl(dst_path, ""))
            response.raise_for_status()
            # try again
            response = self.session.patch(_ItemUrl(itemId, ""),
                                          json=itemUpdate)
            response.raise_for_status()
            return
        if response.status_code == 409 and overwrite is False:
            debug(
                "Retrying move in case it's an erroneous error (see issue #7)"
            )
            response = self.session.patch(_ItemUrl(itemId, ""),
                                          json=itemUpdate)
            response.raise_for_status()
            return
        response.raise_for_status()
def copy(self, src_path, dst_path, overwrite=False):
    """Copy the file at ``src_path`` to ``dst_path`` via the async copy API.

    The POST returns 202 Accepted with a monitor URL which is polled
    until the server reports the copy job as ``completed``.

    :param overwrite: allow replacing an existing destination
    :raises DestinationExists: dst exists and overwrite is False
    :raises ResourceNotFound: source or destination parent dir is missing
    :raises FileExpected: source is a folder
    """
    _CheckPath(src_path)
    _CheckPath(dst_path)
    with self._lock:
        if not overwrite and self.exists(dst_path):
            raise DestinationExists(dst_path)
        driveItemResponse = self.session.get(_PathUrl(src_path, ""))
        if driveItemResponse.status_code == 404:
            raise ResourceNotFound(src_path)
        driveItemResponse.raise_for_status()
        driveItem = driveItemResponse.json()
        # items carrying a 'folder' facet are directories
        if "folder" in driveItem:
            raise FileExpected(src_path)
        newParentDir = dirname(dst_path)
        newFilename = basename(dst_path)
        parentDirResponse = self.session.get(_PathUrl(newParentDir, ""))
        if parentDirResponse.status_code == 404:
            # Fix: this previously raised ResourceNotFound(src_path), but the
            # source is known to exist by this point; the missing resource is
            # the destination's parent directory.
            raise ResourceNotFound(newParentDir)
        parentDirResponse.raise_for_status()
        parentDirItem = parentDirResponse.json()
        # This just asynchronously starts the copy
        response = self.session.post(
            _ItemUrl(driveItem["id"], "/copy"),
            json={
                "parentReference": {
                    "driveId": parentDirItem["parentReference"]["driveId"],
                    "id": parentDirItem["id"]
                },
                "name": newFilename
            })
        response.raise_for_status()
        assert response.status_code == 202, "Response code should be 202 (Accepted)"
        monitorUri = response.headers["Location"]
        while True:
            # monitor uris don't require authentication
            # (https://docs.microsoft.com/en-us/onedrive/developer/rest-api/concepts/long-running-actions?view=odsp-graph-online)
            jobStatusResponse = get(monitorUri)
            jobStatusResponse.raise_for_status()
            jobStatus = jobStatusResponse.json()
            assert jobStatus[
                "operation"] == "ItemCopy", f"Unexpected status: {jobStatus}"
            assert jobStatus["status"] in [
                "inProgress", "completed", "notStarted"
            ], f"Unexpected status: {jobStatus}"
            if jobStatus["status"] == "completed":
                break
def move(self, src_path, dst_path, overwrite=False, preserve_time=False):
    """Move the file at ``src_path`` to ``dst_path`` on Google Drive.

    Implemented as a single files().update that renames the item and swaps
    its parent (addParents/removeParents), so no data is re-uploaded.

    :param overwrite: delete an existing destination before moving
    :param preserve_time: carry the source's modifiedTime onto the moved item
    :raises DestinationExists: dst exists and overwrite is False
    :raises ResourceNotFound: src, src's parent, or dst's parent is missing
    :raises FileExpected: src is a folder
    """
    _log.info(
        f'move({src_path}, {dst_path}, {overwrite}, {preserve_time})')
    src_path = self.validatepath(src_path)
    dst_path = self.validatepath(dst_path)
    with self._lock:
        dstItem = self._itemFromPath(dst_path)
        if overwrite is False and dstItem is not None:
            raise DestinationExists(dst_path)
        # the source's parent id is needed below for removeParents
        srcParentItem = self._itemFromPath(dirname(src_path))
        if srcParentItem is None:
            raise ResourceNotFound(src_path)
        # TODO - it would be more efficient to go directly from srcParentItem to it's child here
        srcItem = self._itemFromPath(src_path)
        if srcItem is None:
            raise ResourceNotFound(src_path)
        if srcItem['mimeType'] == _folderMimeType:
            raise FileExpected(src_path)
        dstParentDir = dirname(dst_path)
        dstParentDirItem = self._itemFromPath(dstParentDir)
        if dstParentDirItem is None:
            raise ResourceNotFound(dstParentDir)
        if dstItem is not None:
            # only reachable when overwrite was requested (checked above)
            assert overwrite is True
            self._drive.files().delete(
                fileId=dstItem['id'],
                **self._file_kwargs,
            ).execute(num_retries=self.retryCount)
        metadata = {
            'name': basename(dst_path),
            'enforceSingleParent': True
        }
        if preserve_time is True:
            # keep the original modification timestamp on the moved item
            metadata['modifiedTime'] = srcItem['modifiedTime']
        self._drive.files().update(
            fileId=srcItem['id'],
            addParents=dstParentDirItem['id'],
            removeParents=srcParentItem['id'],
            body=metadata,
            **self._file_kwargs,
        ).execute(num_retries=self.retryCount)
def setinfo(self, path, info):
    """Apply the ``info`` namespaces to the Drive item at ``path``.

    Supports details/modified (converted to RFC 3339 for Drive) plus the
    custom google/indexableText and google/appProperties fields; anything
    else is silently ignored.
    """
    _log.info(f'setinfo({path}, {info})')
    path = self.validatepath(path)
    with self._lock:
        metadata = self._itemFromPath(path)
        if metadata is None:
            raise ResourceNotFound(path=path)
        updatedData = {}
        for namespace in info:
            for name, value in info[namespace].items():
                if namespace == 'details' and name == 'modified':
                    # incoming datetimes should be utc timestamps,
                    # Google Drive expects RFC 3339
                    rfc3339 = epoch_to_datetime(value).replace(
                        tzinfo=timezone.utc).isoformat()
                    updatedData['modifiedTime'] = rfc3339
                elif namespace == 'google' and name == 'indexableText':
                    updatedData['contentHints'] = {'indexableText': value}
                elif namespace == 'google' and name == 'appProperties':
                    assert isinstance(value, dict)
                    updatedData['appProperties'] = value
        self._drive.files().update(
            fileId=metadata['id'],
            body=updatedData,
            **self._file_kwargs,
        ).execute(num_retries=self.retryCount)
def makedir(self, path, permissions=None, recreate=False):
    """Create the directory ``path``; its parent must already exist.

    :raises ResourceNotFound: if the parent directory does not exist
    :raises DirectoryExists: if ``path`` already exists and recreate is False
    :returns: a non-closing ``SubFS`` rooted at the new directory
    """
    _CheckPath(path)
    with self._lock:
        parentDir = dirname(path)
        # parentDir here is expected to have a leading slash
        assert parentDir[0] == "/"
        parentResponse = self.session.get(_PathUrl(parentDir, ""))
        if parentResponse.status_code == 404:
            raise ResourceNotFound(parentDir)
        parentResponse.raise_for_status()
        if recreate is False:
            existingResponse = self.session.get(_PathUrl(path, ""))
            if existingResponse.status_code != 404:
                raise DirectoryExists(path)
        createResponse = self.session.post(
            _PathUrl(parentDir, ":/children"),
            json={
                "name": basename(path),
                "folder": {}
            })
        # TODO - will need to deal with these errors locally but don't know what they are yet
        createResponse.raise_for_status()
        # don't need to close this filesystem so we return the non-closing version
        return SubFS(self, path)
def openbin(self, path: str, mode: str = "r", buffering: int = -1,
            **options):
    """Open file from filesystem.

    :param path: Path to file on filesystem
    :param mode: Mode to open file in
    :param buffering: TBD
    returns: `BinaryIO` stream
    """
    path = self.validatepath(path)
    # Fix: reject text modes before anything else.  The old check ran after
    # create() and was unreachable anyway, since Mode(mode + 'b') already
    # rejects a mode containing both 't' and 'b'.
    if "t" in mode:
        raise ValueError('Text-mode not allowed in openbin')
    mode = Mode(mode + 'b')
    if mode.create:
        if mode.exclusive:
            # "x" requires that the file does not yet exist
            try:
                self.getinfo(path)
            except ResourceNotFound:
                pass
            else:
                raise FileExists(path)
        self.create(path)
    # propagates ResourceNotFound if the file is still missing
    # (fix: previously wrapped in a redundant re-raise of the same error)
    info = self.getinfo(path)
    if info.is_dir:
        raise FileExpected(path)
    return FatIO(self.fs, path, mode)
def makedir(self, path, permissions=None, recreate=False):
    """Create the directory ``path`` under an existing parent (Graph API).

    :raises ResourceNotFound: if the parent directory does not exist
    :raises DirectoryExists: if ``path`` already exists and recreate is False
    :returns: a non-closing ``SubFS`` rooted at the new directory
    """
    _log.info(f'makedir({path}, {permissions}, {recreate})')
    path = self.validatepath(path)
    with self._lock:
        parentDir = dirname(path)
        # parentDir here is expected to have a leading slash
        assert parentDir[0] == '/'
        parentResponse = self.session.get_path(parentDir)
        if parentResponse.status_code == 404:
            raise ResourceNotFound(parentDir)
        parentResponse.raise_for_status()
        if recreate is False:
            existing = self.session.get_path(path)
            if existing.status_code != 404:
                raise DirectoryExists(path)
        creation = self.session.post_path(
            parentDir, '/children',
            json={
                'name': basename(path),
                'folder': {},
                '@microsoft.graph.conflictBehavior': 'replace'
            })
        # TODO - will need to deal with these errors locally but don't know what they are yet
        creation.raise_for_status()
        # don't need to close this filesystem so we return the non-closing version
        return SubFS(self, path)
def getinfo(self, path: str, namespaces=None):
    """Generate PyFilesystem2's `Info` struct.

    :param path: Path to file or directory on filesystem
    :param namespaces: Info namespaces to query, `NotImplemented`
    :returns `Info`
    """
    try:
        entry = self.fs.root_dir.get_entry(path)
    except PyFATException as e:
        # translate FAT-level "missing"/"not a directory" into PyFS errors
        if e.errno in (errno.ENOTDIR, errno.ENOENT):
            raise ResourceNotFound(path)
        raise e
    basic = {
        "name": repr(entry),
        "is_dir": entry.is_directory(),
    }
    details = {
        "accessed": entry.get_atime().timestamp(),
        "created": entry.get_ctime().timestamp(),
        "metadata_changed": None,
        "modified": entry.get_mtime().timestamp(),
        "size": entry.filesize,
        "type": self.gettype(path),
    }
    return Info({"basic": basic, "details": details})
def removedir(self, path: str):
    """Remove empty directories from the filesystem.

    :param path: `str`: Directory to remove
    """
    parts = path.split("/")
    parent_path = "/".join(parts[:-1])
    entry_name = parts[-1]
    # Plausability checks
    try:
        parent = self.opendir(parent_path)
    except DirectoryExpected:
        raise ResourceNotFound(path)
    dir_entry = self._get_dir_entry(path)
    # Verify that the directory is empty before removing it
    if not dir_entry.is_empty():
        raise DirectoryNotEmpty(path)
    # Remove entry from parent directory
    parent.remove_subdirectory(entry_name)
    self.fs.update_directory_entry(parent)
    # Free the directory's cluster chain in the FAT
    self.fs.free_cluster_chain(dir_entry.get_cluster())
    del dir_entry
def openbin(self, path: str, mode: str = "r", buffering: int = -1,
            **options: Dict[str, Any]) -> SwiftFile:
    """Open the Swift object at ``path`` as an in-memory binary file.

    The object's contents are fetched eagerly; on close, any non-"r" mode
    writes the buffer back via ``_put_object``.

    Fix: missing-object detection for read modes used ``mode in ("r", "r+")``,
    which missed variants such as "rb"/"rt"/"rb+" and silently returned an
    empty file instead; any mode starting with "r" now raises
    ``ResourceNotFound`` when the object does not exist.
    """
    def close_callback(f: SwiftFile):
        # Write the buffer back to Swift unless opened plain read-only.
        # NOTE(review): "rb" also triggers this write-back path - confirm
        # that is intended before tightening the condition.
        if mode != "r":
            f.seek(0)
            self._put_object(path, f.read())

    try:
        meta, contents = self._get_object(path)
        swift_file = SwiftFile(close_callback=close_callback, mode=mode,
                               data=contents)
        if mode.startswith("a"):
            # append modes start positioned at the end of the data
            swift_file.seek(0, io.SEEK_END)
        return swift_file
    except ClientException as e:
        if e.http_status == 404:
            # any read mode ("r", "r+", "rb", ...) requires an existing object
            if mode.startswith("r"):
                raise ResourceNotFound(path) from e
            return SwiftFile(close_callback=close_callback, mode=mode,
                             data=bytes())
        raise e
def openbin(self, path: str, mode: str = "r", buffering: int = -1, **options):
    """Open file from filesystem.

    :param path: Path to file on filesystem
    :param mode: Mode to open file in
    :param buffering: TBD
    returns: `BinaryIO` stream
    """
    path = self.validatepath(path)
    # write modes create the file up front if it does not exist
    if 'w' in mode:
        self.create(path)
    try:
        info = self.getinfo(path)
    except ResourceNotFound:
        raise ResourceNotFound(path)
    if info.is_dir:
        raise FileExpected(path)
    return FatIO(self.fs, path, mode)
def getinfo(self, path, namespaces=None):  # pylint: disable=unused-argument
    """Return the ``Info`` object for the item at ``path``.

    :raises ResourceNotFound: if nothing exists at ``path``
    """
    _CheckPath(path)
    with self._lock:
        itemMetadata = self._itemFromPath(path)
        if itemMetadata is None:
            raise ResourceNotFound(path=path)
        return self._infoFromMetadata(itemMetadata)
def getinfo(
        self,
        path: str,
        namespaces: Optional[Collection[str]] = ("basic", )) -> Info:
    """Return basic ``Info`` for ``path``.

    A HEAD on the object's temporary URL decides between file and
    directory: a 404 with a non-empty listing is treated as a directory,
    a 404 with an empty listing raises ``ResourceNotFound``.

    Fix: non-404 HEAD errors (e.g. 500) were previously swallowed and the
    path reported as an existing file; they now surface via
    ``raise_for_status()``.
    """
    cscs_url = self._get_tmp_url(path)
    head_response = self.cscs_session.head(cscs_url.raw)
    if head_response.status_code == 404:
        # no object at this exact path - a non-empty listing means it is
        # a "directory" prefix rather than a missing resource
        objects = self.listdir(path, limit=3)
        if len(objects) == 0:
            raise ResourceNotFound(path)
        is_dir = True
    else:
        head_response.raise_for_status()
        is_dir = False
    return Info(
        raw_info={
            "basic": {
                "name": PurePosixPath(path).name,
                "is_dir": is_dir,
            },
            "details": {
                "type": ResourceType.directory if is_dir else ResourceType.file
            },
        })
def share(self, path, email=None, role='reader'):
    """
    Shares item.

    :param path: item path
    :param email: email of gmail-user to share item. If None, will share with anybody.
    :param role: google drive sharing role
    :return: URL
    """
    _CheckPath(path)
    with self._lock:
        metadata = self._itemFromPath(path)
        if metadata is None or isinstance(metadata, list):
            raise ResourceNotFound(path=path)
        allowedRoles = ('reader', 'writer', 'commenter', 'fileOrganizer',
                        'organizer', 'owner')
        if role not in allowedRoles:
            raise OperationFailed(path=path,
                                  msg=f'unknown sharing role: {role}')
        if email:
            permissions = {
                'role': role,
                'type': 'user',
                'emailAddress': email
            }
        else:
            # no email given: share with anyone who has the link
            permissions = {'role': role, 'type': 'anyone'}
        self.drive.permissions().create(
            fileId=metadata['id'],
            body=permissions).execute(num_retries=self.retryCount)
        return self.geturl(path)
def setinfo(self, path, info):  # pylint: disable=redefined-outer-name,too-many-branches,unused-argument
    """Apply the ``info`` namespaces to the Drive item at ``path``.

    Handles details/modified (converted to RFC 3339 for Drive) and the
    custom google/indexableText and google/appProperties entries; other
    namespaces are ignored.
    """
    _CheckPath(path)
    with self._lock:
        metadata = self._itemFromPath(path)
        if metadata is None or isinstance(metadata, list):
            raise ResourceNotFound(path=path)
        updatedData = {}
        for namespace in info:
            for name, value in info[namespace].items():
                if namespace == "details" and name == "modified":
                    # incoming datetimes should be utc timestamps,
                    # Google Drive expects RFC 3339
                    rfc3339 = epoch_to_datetime(value).replace(
                        tzinfo=timezone.utc).isoformat()
                    updatedData["modifiedTime"] = rfc3339
                elif namespace == "google" and name == "indexableText":
                    updatedData["contentHints"] = {"indexableText": value}
                elif namespace == "google" and name == "appProperties":
                    assert isinstance(value, dict)
                    updatedData["appProperties"] = value
        self.drive.files().update(
            fileId=metadata["id"],
            body=updatedData).execute(num_retries=self.retryCount)
def _head_object(self, subpath: str) -> "CaseInsensitiveDict[str]":
    """HEAD the object at ``subpath`` and return its response headers.

    :raises ResourceNotFound: if the server answers 404
    """
    target = self.read_url.concatpath(subpath)
    resp = self.session.head(target.raw, verify=self.requests_verify)
    if resp.status_code == 404:
        raise ResourceNotFound(subpath)
    # surface any other HTTP error as-is
    resp.raise_for_status()
    return resp.headers
def getinfo(self, path, namespaces=None):
    """Return an ``Info`` object describing the item at ``path``.

    :raises ResourceNotFound: if nothing exists at ``path``
    """
    _log.info(f'getinfo({path}, {namespaces})')
    path = self.validatepath(path)
    with self._lock:
        itemMetadata = self._itemFromPath(path)
        if itemMetadata is None:
            raise ResourceNotFound(path=path)
        return _InfoFromMetadata(itemMetadata)
def openbin(self, path, mode="r", buffering=-1, **options):  # pylint: disable=unused-argument
    """Open ``path`` as a binary stream that uploads its contents on close."""
    _CheckPath(path)
    with self._lock:
        info(f"openbin: {path}, {mode}, {buffering}")
        parsedMode = Mode(mode)
        exists = self.exists(path)
        if exists and parsedMode.exclusive:
            raise FileExists(path)
        if not exists and parsedMode.reading and not parsedMode.create:
            raise ResourceNotFound(path)
        if self.isdir(path):
            raise FileExpected(path)
        if parsedMode.writing:
            # make sure that the parent directory exists
            parentDir = dirname(path)
            if self._itemFromPath(parentDir) is None:
                raise ResourceNotFound(parentDir)
        return _UploadOnClose(fs=self, path=path, parsedMode=parsedMode)
def getinfo(self, path, namespaces=None):
    """Fetch the item metadata for ``path`` from the server as ``Info``.

    :raises ResourceNotFound: if the server answers 404 for the path
    """
    assert path[0] == "/"
    _CheckPath(path)
    with self._lock:
        response = self.session.get(_PathUrl(path, ""))
        if response.status_code == 404:
            raise ResourceNotFound(path=path)
        response.raise_for_status()
        itemData = response.json()
        return self._itemInfo(itemData)
def openbin(self, path, mode="r", buffering=-1, **options):
    """Open ``path`` in binary mode, returning an upload-on-close stream.

    Fix: ``self.exists(path)`` was evaluated up to twice (once per branch
    of the mode checks), doubling the remote lookup and widening the
    race window between the two answers; it is now queried exactly once.

    :raises FileExists: exclusive mode and the path already exists
    :raises ResourceNotFound: read mode without create and path missing
    :raises FileExpected: the path refers to a directory
    """
    parsedMode = Mode(mode)
    exists = self.exists(path)  # single lookup, reused by both checks
    if parsedMode.exclusive and exists:
        raise FileExists(path)
    if parsedMode.reading and not parsedMode.create and not exists:
        raise ResourceNotFound(path)
    if self.isdir(path):
        raise FileExpected(path)
    return UploadOnClose(client=self.client, path=path, mode=parsedMode)
def copy(self, src_path, dst_path, overwrite=False, preserve_time=False):
    """Copy the file at ``src_path`` to ``dst_path`` on Google Drive.

    :param overwrite: delete an existing destination before copying
    :param preserve_time: carry the source's modifiedTime onto the copy
    :raises ResourceNotFound: source or destination parent dir is missing
    :raises DestinationExists: dst exists and overwrite is False
    :raises FileExpected: source is a folder
    """
    _log.info(
        f'copy({src_path}, {dst_path}, {overwrite}, {preserve_time})')
    src_path = self.validatepath(src_path)
    dst_path = self.validatepath(dst_path)
    with self._lock:
        parentDir = dirname(dst_path)
        parentDirItem = self._itemFromPath(parentDir)
        if parentDirItem is None:
            raise ResourceNotFound(parentDir)
        dstItem = self._itemFromPath(dst_path)
        if overwrite is False and dstItem is not None:
            raise DestinationExists(dst_path)
        srcItem = self._itemFromPath(src_path)
        if srcItem is None:
            raise ResourceNotFound(src_path)
        if srcItem['mimeType'] == _folderMimeType:
            raise FileExpected(src_path)
        # TODO - we should really replace the contents of the existing file with the new contents, so that the history is correct
        if dstItem is not None:
            self._drive.files().delete(
                fileId=dstItem['id'],
                **self._file_kwargs,
            ).execute(num_retries=self.retryCount)
        newMetadata = {
            'parents': [parentDirItem['id']],
            'name': basename(dst_path),
            'enforceSingleParent': True
        }
        if preserve_time is True:
            # keep the original modification timestamp on the new copy
            newMetadata['modifiedTime'] = srcItem['modifiedTime']
        self._drive.files().copy(
            fileId=srcItem['id'],
            body=newMetadata,
            **self._file_kwargs,
        ).execute(num_retries=self.retryCount)
def remove(self, path):
    """Delete the file at ``path`` from Google Drive.

    :raises ResourceNotFound: if nothing exists at ``path``
    :raises FileExpected: if ``path`` refers to a folder
    """
    _CheckPath(path)
    with self._lock:
        info(f"remove: {path}")
        metadata = self._itemFromPath(path)
        if metadata is None:
            raise ResourceNotFound(path=path)
        if metadata["mimeType"] == _folderMimeType:
            raise FileExpected(path=path)
        # Fix: pass num_retries like every other Drive execute() call here;
        # this one previously ran without the configured retry count.
        self.drive.files().delete(fileId=metadata["id"]).execute(
            num_retries=self.retryCount)
def getinfo(self, path, namespaces=None):
    """Return ``Info`` for ``path`` via the Dropbox metadata API.

    The root path is answered locally; any Dropbox ``ApiError`` is mapped
    to ``ResourceNotFound``.
    """
    if path == "/":
        return Info({"basic": {"name": "", "is_dir": True}})
    # Dropbox requires a leading slash on every non-root path
    if not path.startswith("/"):
        path = "/" + path
    try:
        metadata = self.dropbox.files_get_metadata(path,
                                                   include_media_info=True)
    except ApiError as e:
        raise ResourceNotFound(path=path, exc=e)
    return self._infoFromMetadata(metadata)