def getinfo(self, path, namespaces=None):
    """Return resource info for *path* (an S3 object or virtual directory).

    Raises:
        fs.errors.ResourceNotFound: if the path (or its parent) does not exist.
    """
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    # The parent "directory" key must exist before we look at the object itself.
    try:
        dir_path = dirname(_path)
        if dir_path != "/":
            _dir_key = self._path_to_dir_key(dir_path)
            with s3errors(path):
                self.s3.Object(self._bucket_name, _dir_key).load()
    except errors.ResourceNotFound:
        raise errors.ResourceNotFound(path)
    if _path == "/":
        # The bucket root is always a directory.
        return Info({
            "basic": {"name": "", "is_dir": True},
            "details": {"type": int(ResourceType.directory)},
        })
    obj = self._get_object(path, _key)
    return Info(self._info_from_object(obj, namespaces))
def openbin(self, path, mode="r", buffering=-1, **options):
    """Open a binary file-like object on Dropbox.

    Raises:
        fs.errors.ResourceNotFound: if the path is missing and ``create``
            is not in the mode, or the parent directory is missing.
        fs.errors.FileExpected: if the path is a directory.
        fs.errors.FileExists: if the file exists and the mode is exclusive.
    """
    path = self.fix_path(path)
    _mode = Mode(mode)
    _mode.validate_bin()
    _path = self.validatepath(path)
    log.debug("openbin: %s, %s", path, _mode)
    with self._lock:
        try:
            info = self.getinfo(_path)
            log.debug("Info: %s", info)
        except errors.ResourceNotFound:
            if not _mode.create:
                raise errors.ResourceNotFound(path)
            # Creating a new file: the parent must be an existing directory.
            if not self.getinfo(self.get_parent(_path)).is_dir:
                raise errors.DirectoryExpected(path)
        else:
            if info.is_dir:
                raise errors.FileExpected(path)
            if _mode.exclusive:
                raise errors.FileExists(path)
    # NOTE: the Mode object (not the original string) is handed to DropboxFile.
    return DropboxFile(self.dropbox, path, _mode)
def copy(self, src_path, dst_path, overwrite=False):
    """Copy the file at *src_path* to *dst_path* within the bucket.

    Raises:
        fs.errors.DestinationExists: if *dst_path* exists and not *overwrite*.
        fs.errors.ResourceNotFound: if the source (or, in strict mode, the
            destination's parent directory) does not exist.
        fs.errors.FileExpected: if *src_path* is a directory.
    """
    if not overwrite and self.exists(dst_path):
        raise errors.DestinationExists(dst_path)
    _src_path = self.validatepath(src_path)
    _dst_path = self.validatepath(dst_path)
    if self.strict and not self.isdir(dirname(_dst_path)):
        raise errors.ResourceNotFound(dst_path)
    _src_key = self._path_to_key(_src_path)
    _dst_key = self._path_to_key(_dst_path)
    try:
        with osserrors(src_path):
            self.client.copy_object(
                Bucket=self._bucket_name,
                Key=_dst_key,
                CopySource={"Bucket": self._bucket_name, "Key": _src_key},
            )
    except errors.ResourceNotFound:
        # No such key -- if the path still "exists" it must be a directory.
        if self.exists(src_path):
            raise errors.FileExpected(src_path)
        raise
def writebytes(self, path, contents):
    """Write *contents* (bytes) to *path*, replacing any existing file.

    Raises:
        TypeError: if *contents* is not bytes.
        fs.errors.ResourceNotFound: in strict mode, if the parent directory
            does not exist.
        fs.errors.FileExpected: if *path* is a directory.
    """
    if not isinstance(contents, bytes):
        raise TypeError("contents must be bytes")
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    if self.strict and not self.isdir(dirname(path)):
        raise errors.ResourceNotFound(path)
    try:
        if self._getinfo(path).is_dir:
            raise errors.FileExpected(path)
    except errors.ResourceNotFound:
        # Path does not exist yet -- fine, we are creating it.
        pass
    with osserrors(path):
        self.client.upload_fileobj(
            io.BytesIO(contents),
            self._bucket_name,
            _key,
            ExtraArgs=self._get_upload_args(_key),
        )
def makedir(self, path: str, permissions: Optional[Permissions] = None, recreate: bool = False) -> SubFS[FS]:
    """Create a "directory" at *path* by writing an empty marker blob.

    GCS is a key-value store with no native directory concept, so empty
    blobs are written as a workaround.
    See: https://fs-s3fs.readthedocs.io/en/latest/#limitations

    The ``permissions`` argument is currently ignored; marker blobs are
    written with default permissions.
    """
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_dir_key(_path)
    if not self.isdir(dirname(_path)):
        raise errors.ResourceNotFound(path)
    try:
        self.getinfo(path)
    except errors.ResourceNotFound:
        pass
    else:
        # Something already lives at this path.
        if not recreate:
            raise errors.DirectoryExists(path)
        return self.opendir(_path)
    self.bucket.blob(_key).upload_from_string(b"")
    return SubFS(self, path)
def makedir(self, path, permissions=None, recreate=False):
    """Create a folder on Dropbox.

    Arguments:
        path (str): path of the directory to create.
        permissions: ignored by this backend.
        recreate (bool): do not raise if the directory already exists.

    Returns:
        SubFS: a sub-filesystem rooted at the new directory.

    Raises:
        fs.errors.DirectoryExists: if the path exists and not *recreate*.
        fs.errors.DirectoryExpected: if the path exists as a file, or the
            Dropbox API rejects the creation.
        fs.errors.ResourceNotFound: if the parent directory does not exist.
    """
    path = self.fix_path(path)
    # Cache the existence check instead of calling self.exists(path) twice.
    exists = self.exists(path)
    if exists and not recreate:
        raise errors.DirectoryExists(path)
    if path == "/":
        return SubFS(self, path)
    if exists:
        # Only reachable with recreate=True (handled above otherwise).
        meta = self.getinfo(path)
        if meta.is_dir:
            return SubFS(self, path)
        if meta.is_file:
            raise errors.DirectoryExpected(path)
    ppath = self.get_parent(path)
    if not self.exists(ppath):
        raise errors.ResourceNotFound(ppath)
    try:
        self.dropbox.files_create_folder_v2(path)
    except ApiError:
        raise errors.DirectoryExpected(path=path)
    return SubFS(self, path)
def copy(self, src_path, dst_path, overwrite=False):
    """Copy a file from *src_path* to *dst_path* on Dropbox.

    Raises:
        fs.errors.FileExpected: if *src_path* is a directory.
        fs.errors.ResourceNotFound: if the source or the destination's
            parent does not exist.
        fs.errors.DestinationExists: if *dst_path* exists and not *overwrite*.
    """
    src_path = self.fix_path(src_path)
    dst_path = self.fix_path(dst_path)
    try:
        if self.getinfo(src_path).is_dir:
            raise errors.FileExpected(src_path)
    except ApiError:
        # BUG FIX: the original did `raise errors.ResourceNotFound` --
        # raising the class with no path argument.
        raise errors.ResourceNotFound(src_path)
    dst_meta = None
    try:
        dst_meta = self.getinfo(dst_path)
    except Exception:
        # Destination missing (or unreadable): treated as "does not exist".
        pass
    if dst_meta is not None:
        if overwrite:
            self.remove(dst_path)
        else:
            raise errors.DestinationExists(dst_path)
    parent_path = self.get_parent(dst_path)
    if not self.exists(parent_path):
        raise errors.ResourceNotFound(dst_path)
    self.dropbox.files_copy_v2(src_path, dst_path)
def osserrors(path):
    """Translate OSS errors to FSErrors.

    Generator body for a context manager (presumably wrapped with
    ``@contextmanager`` at the definition site): any backend exception
    raised inside the ``with`` block is re-raised as the matching
    ``fs.errors`` exception for *path*.
    """
    try:
        yield
    except ClientError as error:
        response = error.response
        _error = response.get("Error", {})
        error_code = _error.get("Code", None)
        http_status = response.get("ResponseMetadata", {}).get("HTTPStatusCode", 200)
        error_msg = _error.get("Message", None)
        # A missing bucket is a configuration problem, not a missing path.
        if error_code == "NoSuchBucket":
            raise errors.ResourceError(path, exc=error, msg=error_msg)
        if http_status == 404:
            raise errors.ResourceNotFound(path)
        elif http_status == 403:
            raise errors.PermissionDenied(path=path, msg=error_msg)
        else:
            raise errors.OperationFailed(path=path, exc=error)
    except SSLError as error:
        raise errors.OperationFailed(path, exc=error)
    except EndpointConnectionError as error:
        raise errors.RemoteConnectionError(path, exc=error, msg="{}".format(error))
def getinfo(self, path, namespaces=None):
    """Return an `Info` object for the WebDAV resource at *path*.

    Raises:
        fs.errors.ResourceNotFound: if *path* does not exist remotely.
    """
    _path = self.validatepath(path)
    namespaces = namespaces or ()
    # BUG FIX: was `_path in '/'`, a substring test (also true for the empty
    # string); equality with the root path is what was intended.
    if _path == '/':
        info_dict = {
            "basic": {"name": "", "is_dir": True},
            "details": {"type": ResourceType.directory},
        }
    else:
        try:
            info = self.client.info(_path.encode('utf-8'))
            info_dict = self._create_info_dict(info)
            if self.client.is_dir(_path.encode('utf-8')):
                info_dict['basic']['is_dir'] = True
                info_dict['details']['type'] = ResourceType.directory
        except we.RemoteResourceNotFound as exc:
            raise errors.ResourceNotFound(path, exc=exc)
    return Info(info_dict)
def copy(self, src_path, dst_path, overwrite=False):
    """Copy the file at *src_path* to *dst_path* on the WebDAV server.

    Raises:
        fs.errors.FileExpected: if the source is not a file.
        fs.errors.DestinationExists: if the destination exists and not
            *overwrite*.
        fs.errors.ResourceNotFound: if the source, or the destination's
            parent, is missing on the server.
    """
    _src = self.validatepath(src_path)
    _dst = self.validatepath(dst_path)
    with self._lock:
        if not self.getinfo(_src).is_file:
            raise errors.FileExpected(src_path)
        if not overwrite and self.exists(_dst):
            raise errors.DestinationExists(dst_path)
        try:
            self.client.copy(_src.encode('utf-8'), _dst.encode('utf-8'))
        except we.RemoteResourceNotFound as exc:
            raise errors.ResourceNotFound(src_path, exc=exc)
        except we.RemoteParentNotFound as exc:
            raise errors.ResourceNotFound(dst_path, exc=exc)
def openbin(self, path: str, mode: str = "r", buffering: int = -1, **options) -> "GCSFile":
    """Open a binary file-like object backed by a GCS blob.

    Read modes download the blob into a local buffer; write/create modes
    upload the local buffer back to GCS when the file is closed.
    """
    _mode = Mode(mode)
    _mode.validate_bin()
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)

    def on_close(gcs_file):
        # Flush the local buffer back to the blob on close when the file
        # was opened for creation or writing.
        if _mode.create or _mode.writing:
            gcs_file.raw.seek(0)
            blob = self._get_blob(_key)
            if not blob:
                blob = self.bucket.blob(_key)
            blob.upload_from_file(gcs_file.raw)
        gcs_file.raw.close()

    if _mode.create:
        # The parent "directory" marker blob must already exist.
        dir_path = dirname(_path)
        if dir_path != "/":
            _dir_key = self._path_to_dir_key(dir_path)
            if not self.bucket.get_blob(_dir_key):
                raise errors.ResourceNotFound(path)
        try:
            info = self.getinfo(path)
        except errors.ResourceNotFound:
            pass
        else:
            if _mode.exclusive:
                raise errors.FileExists(path)
            if info.is_dir:
                raise errors.FileExpected(path)
        gcs_file = GCSFile.factory(path, _mode, on_close=on_close)
        if _mode.appending:
            blob = self._get_blob(_key)
            if blob:
                # in case there is an existing blob in GCS, we download it and seek until the end of the stream
                # NOTE(review): the seek-to-end runs BEFORE the download into
                # gcs_file.raw -- confirm this leaves the write position at the
                # end of the downloaded data as the comment above implies.
                gcs_file.seek(0, os.SEEK_END)
                blob.download_to_file(gcs_file.raw)
        return gcs_file

    if self.strict:
        info = self.getinfo(path)
        if info.is_dir:
            raise errors.FileExpected(path)

    gcs_file = GCSFile.factory(path, _mode, on_close=on_close)
    blob = self._get_blob(_key)
    if not blob:
        # NOTE(review): raised without a path argument, unlike every other
        # raise in this method -- confirm ResourceNotFound(path) was intended.
        raise errors.ResourceNotFound
    blob.download_to_file(gcs_file.raw)
    gcs_file.seek(0)
    return gcs_file
def getinfo(self, path, namespaces=None):
    """Return resource info for *path* using the pCloud folder listing.

    The pCloud API has no single metadata call that works for both files
    and folders, so the parent folder's listing is fetched and the entry
    matching *path* is extracted from it.
    """
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    # Strip the last component to get the parent folder path.
    if path == '/':
        parent_path = '/'
    else:
        parent_path = '/'.join(_path.split('/')[:-1]) or '/'
    folder_list = self.pcloud.listfolder(path=parent_path)
    metadata = None
    if 'metadata' in folder_list:
        if _path == '/':
            metadata = folder_list['metadata']
        else:
            metadata = next(
                (item for item in folder_list['metadata']['contents']
                 if item['path'] == _path),
                None,
            )
    if metadata is None:
        raise errors.ResourceNotFound(path=path)
    return self._info_from_metadata(metadata, namespaces)
def writebytes(self, path, contents):
    # type: (Text, bytes) -> None
    # FIXME(@althonos): accept bytearray and memoryview as well ?
    """Copy binary data to a file.

    Arguments:
        path (str): Destination path on the filesystem.
        contents (bytes): Data to be written.

    Raises:
        TypeError: if contents is not bytes.
    """
    if not isinstance(contents, bytes):
        raise TypeError("contents must be bytes")
    with self._lock:
        _res = self._getresource(path)
        if not _res:
            # New file: the parent directory must already exist.
            _path = self.validatepath(path)
            dir_path, file_name = split(_path)
            _dir_res = self._getresource(dir_path)
            if not _dir_res or not _dir_res.isdir():
                raise errors.ResourceNotFound(path)
            _res = data_fs.mkfile(self._prep_path(_path))
        else:
            # Existing resource: must be a file; drop its current content.
            if not _res.isfile():
                raise errors.FileExpected(path)
            _res.truncate(0)
        _res.put_content(contents)
def download(self, path, file, chunk_size=None, **options):
    # type: (Text, BinaryIO, Optional[int], **Any) -> None
    """Copy a file from the filesystem into a writable file-like object.

    Arguments:
        path (str): Path to a resource.
        file (file-like): Object open for writing in binary mode.
        chunk_size (int, optional): Ignored here; the resource always
            downloads in chunks of its own choosing.
        **options: Implementation specific options.

    Note that ``file`` is *not* closed by this method -- close it yourself,
    ideally with a context manager.

    Example:
        >>> with open('starwars.mov', 'wb') as write_file:
        ...     my_fs.download('/movies/starwars.mov', write_file)
    """
    with self._lock:
        resource = self._getresource(path)
        if not resource:
            raise errors.ResourceNotFound(path)
        if not resource.isfile():
            raise errors.FileExpected(path)
        # Chunked writing is always used, regardless of chunk_size.
        resource.download(file)
def listdir(self, path):
    # type: (Text) -> List[Text]
    """Return the names of the resources in the directory at ``path``.

    Arguments:
        path (str): A path to a directory on the filesystem.

    Returns:
        list: names relative to ``path``.

    Raises:
        fs.errors.ResourceNotFound: If ``path`` does not exist.
        fs.errors.DirectoryExpected: If ``path`` is not a directory.
    """
    with self._lock:
        resource = self._getresource(path)
        if not resource:
            raise errors.ResourceNotFound(path)
        if not resource.isdir():
            raise errors.DirectoryExpected(path)
        return resource.listdir()
def move(self, src_path, dst_path, overwrite=False):
    # type: (Text, Text, bool) -> None
    """Move a file from ``src_path`` to ``dst_path``.

    Arguments:
        src_path (str): A path on the filesystem to move.
        dst_path (str): A path on the filesystem where the source file
            will be written to.
        overwrite (bool): If `True`, destination path will be overwritten
            if it exists.

    Raises:
        fs.errors.FileExpected: If ``src_path`` maps to a directory
            instead of a file.
        fs.errors.DestinationExists: If ``dst_path`` exists, and
            ``overwrite`` is `False`.
        fs.errors.ResourceNotFound: If a parent directory of ``dst_path``
            does not exist.
    """
    _src_path = self.validatepath(src_path)
    _dst_path = self.validatepath(dst_path)
    with self._lock:
        if not overwrite and self.exists(dst_path):
            raise errors.DestinationExists(dst_path)
        dir_path, file_name = split(_dst_path)
        _dir_res = self._getresource(dir_path)
        if not _dir_res or not _dir_res.is_collection:
            raise errors.ResourceNotFound(dst_path)
        _src_res = self._getresource(src_path)
        if not _src_res:
            raise errors.ResourceNotFound(src_path)
        if _src_res.is_collection:
            raise errors.FileExpected(src_path)
        if not overwrite and _src_res.support_recursive_move(_dst_path):
            _src_res.move_recursive(_dst_path)
        else:
            # CHECKME: this doesn't actually seem to delete _src_res in DAV Provider
            _src_res.copy_move_single(_dst_path, True)
            try:
                _src_res.delete()
            except Exception:
                # BUG FIX: was a bare `except:` (which also swallows
                # KeyboardInterrupt/SystemExit). Delete remains best-effort.
                pass
def openbin(
    self,
    path,  # type: Text
    mode="r",  # type: Text
    buffering=-1,  # type: int
    **options  # type: Any
):
    # type: (...) -> BinaryIO
    """Open a binary file-like object.

    Only read modes are supported: a raw stream resource is returned
    as-is, and a Firestore document is served as a JSON dump.

    Arguments:
        path (str): A path on the filesystem.
        mode (str): Mode to open file (must be ``"r"`` or ``"rb"``).
        buffering (int): Buffering policy (unused).
        **options: additional filesystem-specific options (unused).

    Returns:
        io.IOBase: a *file-like* object.

    Raises:
        fs.errors.ResourceReadOnly: For any mode other than read.
        fs.errors.ResourceNotFound: If the path does not exist.
        TypeError: if the resource is neither a raw stream nor a document.
    """
    # TODO: handle BLOB properties here
    if mode not in ("r", "rb"):
        raise errors.ResourceReadOnly(path)
    _res = self._getresource(path)
    if not _res:
        raise errors.ResourceNotFound(path)
    if isinstance(_res, io.RawIOBase):
        return _res
    if not isinstance(_res, (db.DocumentReference, db.DocumentSnapshot)):
        raise TypeError("io stream expected")
    # CHECKME: someone wants to read the whole document, so let's give it to them as a json dump
    doc = _res.get() if isinstance(_res, db.DocumentReference) else _res
    info = doc.to_dict()
    # add other doc properties too?
    info.update(doc.__dict__)
    payload = json.dumps(info, indent=2, default=lambda o: repr(o))
    stream = io.BytesIO(payload.encode("utf-8"))
    return make_stream(str(doc.id) + ".json", stream, "rb")
def setinfo(self, path, info):
    # type: (Text, RawInfo) -> None
    """Set info on a resource.

    This method is the complement to `~fs.base.FS.getinfo` and is used to
    set info values on a resource.

    Arguments:
        path (str): Path to a resource on the filesystem.
        info (dict): Dictionary of resource info, in the same format as
            the raw info returned by ``getinfo(file).raw``.

    Raises:
        fs.errors.ResourceNotFound: If ``path`` does not exist on the
            filesystem.

    Example:
        >>> details_info = {"details": {
        ...     "modified": time.time()
        ... }}
        >>> my_fs.setinfo('file.txt', details_info)
    """
    with self._lock:
        _res = self._getresource(path)
        if not _res:
            raise errors.ResourceNotFound(path)
        if "details" in info:
            details = info["details"]
            if ("accessed" in details or "modified" in details
                    or "created" in details):
                accessed_time = int(details.get("accessed", 0))
                modified_time = int(details.get("modified", 0))
                # "created" cannot be set through the DAV provider; it is
                # still parsed so invalid values raise like the others.
                # (Removed the dead `if created_time: pass` branch.)
                int(details.get("created", 0))
                if accessed_time and not modified_time:
                    modified_time = accessed_time
                if modified_time:
                    dt = epoch_to_datetime(modified_time)
                    rfc1123_time = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
                    _res.set_last_modified(_res.path, rfc1123_time, False)
        if "properties" in info:
            prop_names = _res.get_property_names(True)
            for prop_name in prop_names:
                # let the DAV provider handle the standard live properties
                if prop_name.startswith("{DAV:}"):
                    continue
                # skip unknown properties
                if prop_name not in info["properties"]:
                    continue
                _res.set_property_value(prop_name, info["properties"][prop_name])
def scandir(
    self,
    path,  # type: Text
    namespaces=None,  # type: Optional[Collection[Text]]
    page=None,  # type: Optional[Tuple[int, int]]
):
    # type: (...) -> Iterator[Info]
    """Get an iterator of resource info.

    Arguments:
        path (str): A path to a directory on the filesystem.
        namespaces (list, optional): A list of namespaces to include in
            the resource information, e.g. ``['basic', 'access']``.
        page (tuple, optional): May be a tuple of ``(<start>, <end>)``
            indexes to return an iterator of a subset of the resource
            info, or `None` to iterate over the entire directory.
            Paging a directory scan may be necessary for very large
            directories.

    Returns:
        ~collections.abc.Iterator: an iterator of `Info` objects.

    Raises:
        fs.errors.DirectoryExpected: If ``path`` is not a directory.
        fs.errors.ResourceNotFound: If ``path`` does not exist.
    """
    namespaces = namespaces or ()
    if path in ("/", "") or path is None:
        return self._scandir_root(namespaces)
    _res = self._getresource(path)
    if not _res:
        raise errors.ResourceNotFound(path)
    if not _res.isdir():
        raise errors.DirectoryExpected(path)
    # Paging is pushed down to the resource scan; removed the obsolete
    # commented-out getinfo/islice implementation.
    if page is not None:
        start, end = page
        return self._scandir_from_resource(_res, namespaces, end - start, start)
    return self._scandir_from_resource(_res, namespaces, self._limit, 0)
def copy(self, src_path: str, dst_path: str, overwrite: bool = False) -> None:
    """Copy the blob at *src_path* to *dst_path* within the bucket.

    Raises:
        fs.errors.DestinationExists: if *dst_path* exists and not *overwrite*.
        fs.errors.ResourceNotFound: if the source is missing (or, in strict
            mode, the destination's parent directory).
        fs.errors.FileExpected: if *src_path* is a directory.
    """
    if not overwrite and self.exists(dst_path):
        raise errors.DestinationExists(dst_path)
    _src_path = self.validatepath(src_path)
    _dst_path = self.validatepath(dst_path)
    if self.strict:
        if not self.isdir(dirname(_dst_path)):
            raise errors.ResourceNotFound(dst_path)
    _src_key = self._path_to_key(_src_path)
    _dst_key = self._path_to_key(_dst_path)
    blob = self.bucket.get_blob(_src_key)
    if not blob:
        if self.exists(src_path):
            raise errors.FileExpected(src_path)
        # BUG FIX: report the user-facing path, not the internal bucket key.
        raise errors.ResourceNotFound(src_path)
    self.bucket.copy_blob(blob, self.bucket, new_name=_dst_key)
def remove(self, path: Text) -> None:
    """Remove a file.

    Raises:
        fs.errors.ResourceNotFound: if *path* does not exist.
        fs.errors.FileExpected: if *path* is a directory.
    """
    npath = self.normalize_path(path)
    if not self.exists(npath):
        raise errors.ResourceNotFound(path)
    if self.isdir(npath):
        raise errors.FileExpected(path)
    cursor = self.connection.cursor()
    try:
        cursor.execute("DELETE FROM sqlar WHERE name = ?", (npath,))
    finally:
        # BUG FIX: the cursor leaked if execute() raised; close it always.
        cursor.close()
def remove(self, path: str) -> None:
    """Delete the blob backing the file at *path*.

    Raises:
        fs.errors.FileExpected: in strict mode, if *path* is a directory.
        fs.errors.ResourceNotFound: if the blob does not exist.
    """
    self.check()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    if self.strict and self.getinfo(path).is_dir:
        raise errors.FileExpected(path)
    try:
        self.bucket.delete_blob(_key)
    except google.cloud.exceptions.NotFound:
        raise errors.ResourceNotFound(path)
def makedir(
    self,
    path,  # type: Text
    permissions=None,  # type: Optional[Permissions]
    recreate=False,  # type: bool
):
    # type: (...) -> SubFS[FS]
    """Make a directory.

    Arguments:
        path (str): Path to directory from root.
        permissions (~fs.permissions.Permissions, optional): a
            `Permissions` instance, or `None` to use default.
        recreate (bool): Set to `True` to avoid raising an error if
            the directory already exists (defaults to `False`).

    Returns:
        ~fs.subfs.SubFS: a filesystem whose root is the new directory.

    Raises:
        fs.errors.DirectoryExists: If the path already exists.
        fs.errors.ResourceNotFound: If the path is not found.
    """
    # mode = Permissions.get_mode(permissions)
    _path = self.validatepath(path)
    with self._lock:
        if _path == "/":
            if not recreate:
                raise errors.DirectoryExists(path)
            return self.opendir(path)
        # Drop a single trailing slash, if present.
        _path = _path[:-1] if _path.endswith("/") else _path
        dir_path, dir_name = split(_path)
        _dir_res = self._getresource(dir_path)
        if not _dir_res or not _dir_res.is_collection:
            raise errors.ResourceNotFound(path)
        if dir_name in _dir_res.get_member_names():
            if not recreate:
                raise errors.DirectoryExists(path)
            _res = self._getresource(path)
            if _res and _res.is_collection:
                return self.opendir(path)
        _dir_res.create_collection(dir_name)
        return self.opendir(path)
def remove(self, path):
    """Delete the file at *path* from Dropbox.

    Raises:
        fs.errors.FileExpected: if *path* is a directory, or the API call
            fails for a reason other than a missing path.
        fs.errors.ResourceNotFound: if *path* does not exist.
    """
    _path = self.fix_path(path)
    try:
        if self.getinfo(path).is_dir:
            raise errors.FileExpected(path=path)
        self.dropbox.files_delete_v2(_path)
    except ApiError as err:
        # A lookup failure from the API means the path never existed.
        if isinstance(err.error._value, LookupError):
            raise errors.ResourceNotFound(path=path)
        log.debug(err)
        raise errors.FileExpected(path=path, exc=err)
def getinfo(self, path, namespaces=None):
    """Return an Info instance for the resource (file or directory).

    :param str path: Path pointing to a file or directory.
    :param set namespaces: The list of PyFilesystem `Info` namespaces
        which should be included in the response.
    """
    # type: (Text, list) -> bool
    path = ensureUnicode(path)
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path).encode('ascii', 'replace')
    try:
        attr = self._odfs.stat(_path)
    except RuntimeError as error:
        # NOTE(review): matching on the message text is fragile, but the
        # backend only raises RuntimeError.
        if error.message == 'No such file or directory':
            raise errors.ResourceNotFound(path)
        raise error
    # Map the stat mode to a PyFilesystem resource type.
    if stat.S_ISREG(attr.mode):
        rtype = ResourceType.file
    elif stat.S_ISDIR(attr.mode):
        rtype = ResourceType.directory
    else:
        rtype = ResourceType.unknown
    return Info({
        "basic": {
            "name": basename(_path),
            "is_dir": stat.S_ISDIR(attr.mode),
        },
        "details": {
            "accessed": attr.atime,
            "modified": attr.mtime,
            "size": attr.size,
            "uid": attr.uid,
            "gid": attr.gid,
            "type": rtype,
        },
        "access": {
            "uid": attr.uid,
            "gid": attr.gid,
            "permissions": statToPermissions(attr),
        },
    })
def setbinfile(self, path, file):
    """Upload the readable binary *file* object to *path*.

    Raises:
        fs.errors.ResourceNotFound: if the parent directory does not exist.
        fs.errors.FileExpected: if *path* is a directory.
    """
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    if not self.isdir(dirname(path)):
        raise errors.ResourceNotFound(path)
    try:
        if self.getinfo(path).is_dir:
            raise errors.FileExpected(path)
    except errors.ResourceNotFound:
        # Path does not exist yet -- fine, we are creating it.
        pass
    with s3errors(path):
        self.client.upload_fileobj(file, self._bucket_name, _key)
def listdir(self, path, limit=None, offset=0):
    # type: (Text) -> List[Text]
    """Get a list of the resource names in a directory.

    For the root, top-level collection ids are returned; for a document,
    its subcollection ids; for a collection, its document ids (paged by
    ``limit``/``offset``).

    Arguments:
        path (str): A path to a directory on the filesystem.

    Returns:
        list: list of names, relative to ``path``.

    Raises:
        fs.errors.ResourceNotFound: If ``path`` does not exist.
    """
    log.info(path)
    if path in ("/", "") or path is None:
        return [coll_ref.id for coll_ref in db.list_root()]
    _res = self._getresource(path)
    if _res is None:
        raise errors.ResourceNotFound(path)
    log.info("%r" % _res)
    if isinstance(_res, db.DocumentReference):
        # A DirectoryExpected error would be correct here, but listing the
        # document's subcollections makes navigation easier.
        return [coll_ref.id for coll_ref in _res.collections()]
    if limit is None:
        limit = self._limit
    iter_info = _res.list_documents()
    if limit:
        iter_info = itertools.islice(iter_info, offset, offset + limit)
    return [doc_ref.id for doc_ref in iter_info]
def move(self, src_path, dst_path, overwrite=False):
    """Move a file from *src_path* to *dst_path* on Dropbox.

    Raises:
        fs.errors.FileExpected: if *src_path* is not a file.
        fs.errors.DestinationExists: if *dst_path* exists and not *overwrite*.
        fs.errors.ResourceNotFound: if the source, or the destination's
            parent directory, does not exist.
    """
    _src_path = self.fix_path(src_path)
    _dst_path = self.fix_path(dst_path)
    if not self.getinfo(_src_path).is_file:
        raise errors.FileExpected(src_path)
    if not overwrite and self.exists(_dst_path):
        raise errors.DestinationExists(dst_path)
    if "/" in dst_path and not self.exists(self.get_parent(_dst_path)):
        # BUG FIX: the missing resource is the destination's parent, so the
        # error must name dst_path (the original reported src_path).
        raise errors.ResourceNotFound(dst_path)
    with self._lock:
        try:
            if overwrite:
                try:
                    # Best-effort delete of any existing destination file.
                    self.dropbox.files_delete_v2(_dst_path)
                except Exception:
                    pass
            self.dropbox.files_move_v2(_src_path, _dst_path)
        except ApiError as e:
            raise errors.ResourceNotFound(src_path, exc=e)
def setinfo(self, path, info):
    # type: (Text, RawInfo) -> None
    """Set info on a resource.

    This method is the complement to `~fs.base.FS.getinfo` and is used to
    set info values on a resource.

    Arguments:
        path (str): Path to a resource on the filesystem.
        info (dict): Dictionary of resource info, in the same format as
            the raw info returned by ``getinfo(file).raw``.

    Raises:
        fs.errors.ResourceNotFound: If ``path`` does not exist on the
            filesystem.

    Example:
        >>> details_info = {"details": {
        ...     "modified": time.time()
        ... }}
        >>> my_fs.setinfo('file.txt', details_info)
    """
    with self._lock:
        _res = self._getresource(path)
        if not _res:
            raise errors.ResourceNotFound(path)
        if "details" not in info:
            return
        details = info["details"]
        if not any(key in details for key in ("accessed", "modified", "created")):
            return
        accessed_time = int(details.get("accessed", 0))
        modified_time = int(details.get("modified", 0))
        created_time = int(details.get("created", 0))
        # An access timestamp alone is treated as a modification time.
        if accessed_time and not modified_time:
            modified_time = accessed_time
        if created_time:
            _res.create_time = datetime.datetime.fromtimestamp(
                created_time, datetime.timezone.utc
            )
        if modified_time:
            _res.modify_time = datetime.datetime.fromtimestamp(
                modified_time, datetime.timezone.utc
            )
def openbin(self, path: Text, mode: Text = "r", buffering: int = -1, **options: Any) -> BinaryIO:
    """Open a binary file stored in the ``sqlar`` table.

    Raises:
        fs.errors.FilesystemClosed: if the filesystem has been closed.
        fs.errors.ResourceNotFound: if the parent directory is missing, or
            the file is missing in a read mode.
        fs.errors.FileExists: in exclusive mode, if the file exists.
        fs.errors.FileExpected: if the path is a directory.
        ValueError: if a text mode is requested.
    """
    npath = self.normalize_path(path)
    if self._closed:
        raise errors.FilesystemClosed
    if not self.exists(dirname(npath)):
        raise errors.ResourceNotFound(dirname(path))
    if "t" in mode:
        raise ValueError
    if "b" not in mode:
        mode += "b"
    file_mode = Mode(mode)
    exists = self.exists(npath)
    if file_mode.exclusive and exists:
        raise errors.FileExists(path)
    if file_mode.reading and not exists:
        raise errors.ResourceNotFound(path)
    if (file_mode.reading or (file_mode.writing and exists)) and self.isdir(path):
        raise errors.FileExpected(path)

    def _execute(sql, params):
        # Run a single statement on a short-lived cursor.
        cur = self.connection.cursor()
        cur.execute(sql, params)
        cur.close()

    # sqlar stores gzip-compressed content; an empty payload still needs
    # a valid gzip header.
    empty = gzip.compress(b"")
    if file_mode.create and not exists:
        _execute(
            "INSERT INTO sqlar (name, mode, mtime, sz, data) VALUES (?, ?, ?, ?, ?)",
            (npath, 0o700, datetime.utcnow().timestamp(), 0, empty),
        )
    elif file_mode.truncate:
        _execute("UPDATE sqlar SET data = ? WHERE name = ?", (empty, npath))
    return SQLiteFile(self, npath, file_mode)