def getinfo(self, path, namespaces=None):
    """Return resource information for ``path``.

    Raises fs.errors.ResourceNotFound if the key does not exist.
    """
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    # The root directory is virtual; synthesize its Info directly.
    if _path == "/":
        return Info(
            {
                "basic": {"name": "", "is_dir": True},
                "details": {"type": int(ResourceType.directory)},
            }
        )
    info = None
    try:
        with dlkerrors(path):
            info = self.dlk.info(_key)
    except errors.ResourceNotFound:
        # Re-raise carrying the caller-supplied path rather than the key.
        raise errors.ResourceNotFound(path)
    return Info(self._info_from_object(info, namespaces))
def getinfo(self, path, namespaces=None):
    """Return an Info object describing the resource at ``path``."""
    # Root is not backed by a real remote file; report it as a directory.
    if path == "/":
        return Info(
            {
                "basic": {"name": "", "is_dir": True},
                "details": {"type": int(ResourceType.directory)},
            }
        )
    f = self._c.get_file_by_path(self._project_id, path)
    is_dir = f.mime_type == "directory"
    resource_type = ResourceType.directory if is_dir else ResourceType.file
    return Info(
        {
            "basic": {"name": f.name, "is_dir": is_dir},
            "modified": datetime_to_epoch(f.mtime),
            "size": f.size,
            "type": int(resource_type),
        }
    )
def getinfo(self, path, namespaces=None):
    """Return resource information for ``path``.

    Raises fs.errors.ResourceNotFound when the parent directory (or the
    entry itself) does not exist.
    """
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    # Verify the parent directory exists before inspecting the entry.
    try:
        dir_path = dirname(_path)
        if dir_path != '/':
            _dir_key = self._path_to_dir_key(dir_path)
            self._get_object(dir_path, _dir_key)
    except errors.ResourceNotFound:
        # Report the original caller-supplied path, not the parent.
        raise errors.ResourceNotFound(path)
    # The root is virtual and always a directory.
    if _path == '/':
        return Info(
            {
                "basic": {"name": "", "is_dir": True},
                "details": {"type": int(ResourceType.directory)},
            }
        )
    obj = self._get_object(path, _key)
    return Info(self._info_from_object(obj, namespaces))
def test_has_namespace(self):
    """Namespaces present in the raw dict are reported; absent ones are not."""
    info = Info({"basic": {}, "details": {}})
    for namespace in ('basic', 'details'):
        self.assertTrue(info.has_namespace(namespace))
    self.assertFalse(info.has_namespace('access'))
def test_copy(self):
    """A copied Info carries an equal raw mapping."""
    original = Info({"basic": {"name": "bar", "is_dir": False}})
    duplicate = original.copy()
    self.assertEqual(original.raw, duplicate.raw)
def test_basic(self):
    """Basic namespace exposes name, is_dir and drives repr()."""
    # A plain file.
    file_info = Info({"basic": {"name": "bar", "is_dir": False}})
    self.assertEqual(file_info.name, "bar")
    self.assertIsInstance(file_info.is_dir, bool)
    self.assertFalse(file_info.is_dir)
    self.assertEqual(repr(file_info), "<file 'bar'>")
    # A directory.
    dir_info = Info({"basic": {"name": "foo", "is_dir": True}})
    self.assertTrue(dir_info.is_dir)
    self.assertEqual(repr(dir_info), "<dir 'foo'>")
def test_scandir(self):
    """Second scandir of the same path is served from the cache."""
    key = operator.attrgetter("name")
    expected = [
        Info({"basic": {"name": "egg", "is_dir": False}}),
        Info({"basic": {"name": "foo", "is_dir": True}}),
    ]
    # First scan must be delegated to the wrapped filesystem...
    with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
        self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected)
        scandir.assert_has_calls([mock.call('/', namespaces=None, page=None)])
    # ...while the second is answered entirely from the cache.
    with mock.patch.object(self.fs, "scandir", wraps=self.fs.scandir) as scandir:
        self.assertEqual(sorted(self.cached.scandir("/"), key=key), expected)
        scandir.assert_not_called()
def test_suffix(self): info = Info({"basic": {"name": "foo.tar.gz"}}) self.assertEqual(info.suffix, ".gz") self.assertEqual(info.suffixes, [".tar", ".gz"]) self.assertEqual(info.stem, "foo") info = Info({"basic": {"name": "foo"}}) self.assertEqual(info.suffix, "") self.assertEqual(info.suffixes, []) self.assertEqual(info.stem, "foo") info = Info({"basic": {"name": ".foo"}}) self.assertEqual(info.suffix, "") self.assertEqual(info.suffixes, []) self.assertEqual(info.stem, ".foo")
def _infoFromMetadata(self, metadata):  # pylint: disable=no-self-use
    """Build an Info object from a Google Drive metadata dict.

    ``metadata`` may also be a list (or equal ``_rootMetadata``), which
    denotes the drive root.
    """
    isRoot = isinstance(metadata, list) or metadata == _rootMetadata
    isFolder = isRoot or (metadata["mimeType"] == _folderMimeType)
    rfc3339 = "%Y-%m-%dT%H:%M:%S.%fZ"
    permissions = metadata.get("permissions", None)
    rawInfo = {
        "basic": {
            "name": "" if isRoot else metadata["name"],
            "is_dir": isFolder
        },
        "details": {
            "accessed": None,  # not supported by Google Drive API
            "created": None if isRoot else datetime_to_epoch(
                datetime.strptime(metadata["createdTime"], rfc3339)),
            "metadata_changed": None,  # not supported by Google Drive API
            "modified": None if isRoot else datetime_to_epoch(
                datetime.strptime(metadata["modifiedTime"], rfc3339)),
            # folders, native google documents etc have no size
            "size": int(metadata["size"]) if "size" in metadata else None,
            "type": ResourceType.directory if isFolder else ResourceType.file
        },
        "sharing": {
            "id": None if isRoot else metadata["id"],
            "permissions": permissions,
            "is_shared": len(permissions) > 1 if permissions is not None else None
        }
    }
    # BUG FIX: the previous code issued two separate
    # rawInfo.update({"google": {...}}) calls; the second replaced the whole
    # "google" namespace, so appProperties silently discarded indexableText
    # when both were present.  Accumulate and assign the namespace once.
    google = {}
    if "contentHints" in metadata and "indexableText" in metadata["contentHints"]:
        google["indexableText"] = metadata["contentHints"]["indexableText"]
    if "appProperties" in metadata:
        google["appProperties"] = metadata["appProperties"]
    if google:
        rawInfo["google"] = google
    # there is also file-type-specific metadata like imageMediaMetadata
    return Info(rawInfo)
def getinfo(self, path, namespaces=None):
    """Return resource information for ``path``.

    Raises fs.errors.ResourceNotFound when the remote resource is missing.
    """
    _path = self.validatepath(path)
    namespaces = namespaces or ()
    # BUG FIX: this previously read ``if _path in '/'`` — a substring test
    # that is also true for the empty string; an equality check is intended.
    if _path == '/':
        info_dict = {
            "basic": {"name": "", "is_dir": True},
            "details": {"type": ResourceType.directory}
        }
    else:
        try:
            info = self.client.info(_path.encode('utf-8'))
            if 'path' not in info:
                info['path'] = _path
            info_dict = self._create_info_dict(info)
            # The WebDAV info call does not flag directories; probe explicitly.
            if self.client.is_dir(_path.encode('utf-8')):
                info_dict['basic']['is_dir'] = True
                info_dict['details']['type'] = ResourceType.directory
        except we.RemoteResourceNotFound as exc:
            raise errors.ResourceNotFound(path, exc=exc)
    return Info(info_dict)
def _infoFromMetadata(self, metadata):  # pylint: disable=no-self-use
    """Translate a Google Drive metadata dict into an Info object."""
    isFolder = (metadata["mimeType"] == _folderMimeType)
    rfc3339 = "%Y-%m-%dT%H:%M:%S.%fZ"

    def _epoch(key):
        # Drive timestamps are RFC 3339 strings in UTC.
        return datetime_to_epoch(datetime.strptime(metadata[key], rfc3339))

    details = {
        "accessed": None,  # not supported by Google Drive API
        "created": _epoch("createdTime"),
        "metadata_changed": None,  # not supported by Google Drive API
        "modified": _epoch("modifiedTime"),
        "size": None if isFolder else int(metadata["size"]),  # folders have no size
        "type": ResourceType.directory if isFolder else ResourceType.file,
    }
    sharing = {
        "id": metadata["id"],
        "permissions": metadata["permissions"],
        "is_shared": len(metadata["permissions"]) > 1,
    }
    # there is also file-type-specific metadata like imageMediaMetadata
    return Info({
        "basic": {"name": metadata["name"], "is_dir": isFolder},
        "details": details,
        "sharing": sharing,
    })
def getinfo(self, path, namespaces=None):
    """Return resource information for ``path``, honouring ``namespaces``."""
    self.check()
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    _stat = self._fs.getinfo(_path)
    mode = _stat["st_mode"]
    info = {
        "basic": {
            "name": basename(_path),
            "is_dir": stat.S_ISDIR(mode),
        }
    }
    if "details" in namespaces:
        resource_type = self.STAT_TO_RESOURCE_TYPE.get(
            stat.S_IFMT(mode), ResourceType.unknown
        )
        info["details"] = {
            "_write": ["accessed", "modified"],
            "accessed": _stat["st_atime"],
            "modified": _stat["st_mtime"],
            "size": _stat["st_size"],
            "type": int(resource_type),
        }
    if "stat" in namespaces:
        info["stat"] = _stat
    if "access" in namespaces:
        info["access"] = {
            "permissions": Permissions(mode=mode).dump(),
            "uid": 1000,  # TODO: fix
            "gid": 100,  # TODO: fix
        }
    return Info(info)
def _infoFromMetadata(self, metadata):  # pylint: disable=no-self-use
    """Translate a Google Drive metadata dict into an Info object."""
    isFolder = (metadata["mimeType"] == _folderMimeType)
    rfc3339 = "%Y-%m-%dT%H:%M:%S.%fZ"
    rawInfo = {
        "basic": {
            "name": metadata["name"],
            "is_dir": isFolder,
        },
        "details": {
            "accessed": None,  # not supported by Google Drive API
            "created": datetime_to_epoch(
                datetime.strptime(metadata["createdTime"], rfc3339)),
            "metadata_changed": None,  # not supported by Google Drive API
            "modified": datetime_to_epoch(
                datetime.strptime(metadata["modifiedTime"], rfc3339)),
            # folders have no size; the API returns size as a string, so
            # convert like the sibling implementation does.
            "size": int(metadata["size"]) if isFolder is False else None,
            # BUG FIX: files were reported as 0, which is
            # ResourceType.unknown in fs.enums; use the enum like the
            # sibling implementations (directory == 1, file == 2).
            "type": ResourceType.directory if isFolder else ResourceType.file
        }
    }
    # there is also file-type-specific metadata like imageMediaMetadata
    return Info(rawInfo)
def getinfo(self, path: str, namespaces: Any = None) -> Info:
    """Return an Info object for ``path``.

    The pyfilesystem2 documentation says ``namespaces`` should be a list
    of strings, but the test-suite also passes a single string, so both
    forms are accepted here.

    Raises ValueError if ``path`` does not resolve to a node.
    """
    self.check()
    if not namespaces:
        namespaces = ["basic"]
    # IDIOM FIX: use isinstance() rather than ``type(...) is not list``;
    # also accepts list subclasses.
    if not isinstance(namespaces, list):
        namespaces = [namespaces]
    node = self._resolve_path_to_node(path)
    if not node:
        raise ValueError(f"Invalid path: {path}.")
    info = {}  # Dict[str, Dict[str, object]]
    info["basic"] = {
        "is_dir": self.prims.is_dir(node),
        "name": fs.path.basename(node.path),
    }
    if "details" in namespaces:
        sys_path = self.getsyspath(path)
        if sys_path:
            # Back the details namespace with a real os.stat when the node
            # has a system path.
            with convert_os_errors("getinfo", path):
                _stat = os.stat(sys_path)
            info["details"] = self._make_details_from_stat(_stat)
        else:
            info["details"] = self._make_default_details(node)
    return Info(info)
def getinfo(
    self, path: str, namespaces: Optional[Collection[str]] = None
) -> Info:
    """Return basic resource information for ``path``."""
    path = self.validatepath(path)
    # TODO: Support namespaces other than 'basic'.
    namespaces = ["basic"] if namespaces is None else namespaces
    # XXX: Just for now, raise error when an unsupported namespace is
    # encountered.
    if any(ns != "basic" for ns in namespaces):
        raise NotImplementedError(
            "TODO: What to do about unsupported namespaces?"
        )
    path_obj, resource = self._get_file_at_path(path)
    is_root = path == "/"
    # Resource is a directory if it suffixes include '.d' or '.c'.
    is_dir = is_root or bool({".d", ".c"} & set(path_obj.suffixes))
    raw_info = {
        "basic": {
            # Special-case root-dir.
            "name": "/" if is_root else path_obj.name,
            "is_dir": is_dir,
        }
    }
    return Info(raw_info)
def _getinfo(self, path, namespaces=None):
    """Gets info without checking for parent dir."""
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    _key = self._path_to_key(_path)
    # The root is virtual and always a directory.
    if _path == "/":
        root_raw = {
            "basic": {"name": "", "is_dir": True},
            "details": {"type": int(ResourceType.directory)},
        }
        return Info(root_raw)
    obj = self._get_object(path, _key)
    return Info(self._info_from_object(obj, namespaces))
def test_details(self):
    """Details namespace round-trips epochs back to aware datetimes."""
    base = datetime.datetime(2016, 7, 5, tzinfo=pytz.UTC)
    dates = [base + datetime.timedelta(days=i) for i in range(4)]
    epochs = [datetime_to_epoch(d) for d in dates]
    info = Info({
        "details": {
            "accessed": epochs[0],
            "modified": epochs[1],
            "created": epochs[2],
            "metadata_changed": epochs[3],
            "type": int(ResourceType.file),
        }
    })
    self.assertEqual(info.accessed, dates[0])
    self.assertEqual(info.modified, dates[1])
    self.assertEqual(info.created, dates[2])
    self.assertEqual(info.metadata_changed, dates[3])
    self.assertIsInstance(info.type, ResourceType)
    self.assertEqual(info.type, ResourceType.file)
    self.assertEqual(info.type, 2)
def getinfo(self, path: str, namespaces=None):
    """Generate PyFilesystem2's `Info` struct.

    :param path: Path to file or directory on filesystem
    :param namespaces: Info namespaces to query, `NotImplemented`
    :returns `Info`
    """
    try:
        entry = self.fs.root_dir.get_entry(path)
    except PyFATException as e:
        # Map FAT "missing entry" errors onto the pyfilesystem exception.
        if e.errno in (errno.ENOTDIR, errno.ENOENT):
            raise ResourceNotFound(path)
        raise e
    basic = {
        "name": repr(entry),
        "is_dir": entry.is_directory(),
    }
    details = {
        "accessed": entry.get_atime().timestamp(),
        "created": entry.get_ctime().timestamp(),
        "metadata_changed": None,
        "modified": entry.get_mtime().timestamp(),
        "size": entry.filesize,
        "type": self.gettype(path),
    }
    return Info({"basic": basic, "details": details})
def getinfo(
        self,
        path: str,
        namespaces: Optional[Collection[str]] = ("basic", )) -> Info:
    """Return resource information for ``path``.

    Raises ResourceNotFound when neither an object nor a non-empty
    "directory" prefix exists at ``path``.
    """
    cscs_url = self._get_tmp_url(path)
    head_response = self.cscs_session.head(cscs_url.raw)
    if head_response.status_code != 404:
        is_dir = False
    else:
        # No object at this key: it is either a "directory" prefix with
        # children, or missing entirely.
        if not self.listdir(path, limit=3):
            raise ResourceNotFound(path)
        is_dir = True
    resource_type = ResourceType.directory if is_dir else ResourceType.file
    return Info(
        raw_info={
            "basic": {
                "name": PurePosixPath(path).name,
                "is_dir": is_dir,
            },
            "details": {"type": resource_type},
        })
def _infoFromMetadata(self, metadata):  # pylint: disable=no-self-use
    """Translate Dropbox SDK file/folder metadata into an Info object."""
    rawInfo = {
        "basic": {
            "name": metadata.name,
            "is_dir": isinstance(metadata, FolderMetadata),
        }
    }
    if isinstance(metadata, FileMetadata):
        rawInfo.update({
            "details": {
                "accessed": None,  # not supported by Dropbox API
                "created": None,  # not supported by Dropbox API?,
                "metadata_changed": None,  # not supported by Dropbox
                "modified": datetime_to_epoch(metadata.server_modified),  # API documentation says that this is reliable
                "size": metadata.size,
                "type": 0
            },
            "dropbox": {
                "content_hash": metadata.content_hash,  # see https://www.dropbox.com/developers/reference/content-hash
                "rev": metadata.rev,
                "client_modified": metadata.client_modified  # unverified value coming from dropbox clients
            }
        })
        if metadata.media_info is not None and metadata.media_info.is_metadata() is True:
            media_info_metadata = metadata.media_info.get_metadata()
            # BUG FIX: the fields below used to be written with separate
            # rawInfo.update({"media_info": {...}}) calls; each call replaced
            # the whole "media_info" namespace, discarding previously stored
            # keys (e.g. taken_date_time was lost whenever location or
            # dimensions were also present).  Accumulate, then assign once.
            media_info = {}
            if media_info_metadata.time_taken is not None:
                media_info["taken_date_time"] = datetime_to_epoch(
                    media_info_metadata.time_taken)
            if media_info_metadata.location is not None:
                media_info["location_latitude"] = media_info_metadata.location.latitude
                media_info["location_longitude"] = media_info_metadata.location.longitude
            # Dropbox doesn't parse some jpgs properly
            if media_info_metadata.dimensions is not None:
                media_info["dimensions_height"] = media_info_metadata.dimensions.height
                media_info["dimensions_width"] = media_info_metadata.dimensions.width
            if media_info:
                rawInfo["media_info"] = media_info
    elif isinstance(metadata, FolderMetadata):
        rawInfo.update({
            "details": {
                "accessed": None,  # not supported by Dropbox API
                "created": None,  # not supported by Dropbox API,
                "metadata_changed": None,  # not supported by Dropbox
                "modified": None,  # not supported for folders
                "size": None,  # not supported for folders
                "type": 1
            }})
    else:
        assert False, f"{metadata.name}, {metadata}, {type(metadata)}"
    return Info(rawInfo)
def test_link(self):
    """Link namespace marks the Info as a link and exposes its target."""
    info = Info({'link': {'target': 'foo'}})
    self.assertTrue(info.is_link)
    self.assertEqual(info.target, 'foo')
def _scandir(
        self,
        path: Text,
        namespaces: Optional[Collection[Text]] = None) -> Iterator[Info]:
    """Yield a basic Info for each entry of the directory at ``path``."""
    for meta in self._client.scan_dir(path):
        raw = {"basic": {"name": meta.name, "is_dir": meta.is_dir}}
        yield Info(raw)
def getinfo(self, path, namespaces=None):
    """Return resource information for ``path`` from the Dropbox API."""
    # Dropbox has no metadata record for the root folder.
    if path == "/":
        return Info({"basic": {"name": "", "is_dir": True}})
    # The Dropbox API requires a leading slash.
    if not path.startswith("/"):
        path = "/" + path
    try:
        metadata = self.dropbox.files_get_metadata(
            path, include_media_info=True)
    except ApiError as e:
        raise ResourceNotFound(path=path, exc=e)
    return self._infoFromMetadata(metadata)
def _dir_info(self, name: str) -> Info:
    """Build a directory Info from a raw listing ``name``."""
    # Strip the trailing delimiter that marks a "directory" key.
    trimmed = name.rstrip(self.DELIMITER)
    raw = {
        "basic": {"name": trimmed, "is_dir": True},
        "details": {"type": int(ResourceType.directory)},
    }
    return Info(raw)
def getinfo(self, path: str, namespaces: Optional[Collection[str]] = None) -> Info:
    """Return basic resource information for ``path``."""
    if path == "/":
        return Info({"basic": {"name": "/", "is_dir": True}})
    _dirpath, name = fs.path.split(path)
    # An entry is a directory iff its stored record is a _DirInfo.
    is_dir = isinstance(self.path_dict[path], _DirInfo)
    return Info({"basic": {"name": name, "is_dir": is_dir}})
def test_suffix(self):
    """suffix/suffixes/stem behave like pathlib's for dotted names."""
    for name, suffix, suffixes, stem in [
        ('foo.tar.gz', '.gz', ['.tar', '.gz'], 'foo'),
        ('foo', '', [], 'foo'),
        ('.foo', '', [], '.foo'),  # a leading dot belongs to the stem
    ]:
        info = Info({'basic': {'name': name}})
        self.assertEqual(info.suffix, suffix)
        self.assertEqual(info.suffixes, suffixes)
        self.assertEqual(info.stem, stem)
def scandir(self, path, namespaces=None, page=None):
    """Yield an Info for each entry of the GitHub directory at ``path``."""
    from fs.info import Info

    for entry in self.repo.get_dir_contents(self.path + path):
        yield Info({
            "basic": {
                "name": entry.name,
                "is_dir": entry.type == "dir",
            },
            "hash": {
                "sha1": entry.sha,
            },
        })
def gen_info():
    """Yield an Info per S3 listing entry: prefixes (dirs), then objects."""
    for page in _paginate:
        # Common prefixes are the pseudo-directories under this prefix.
        for prefix in page.get("CommonPrefixes", ()):
            dir_name = prefix.get("Prefix")[prefix_len:]
            if not dir_name:
                continue
            yield Info({
                "basic": {
                    "name": dir_name.rstrip(self.delimiter),
                    "is_dir": True,
                }
            })
        for entry in page.get("Contents", ()):
            obj_name = entry["Key"][prefix_len:]
            if not obj_name:
                continue
            with s3errors(path):
                obj = self.s3.Object(_bucket_name_copy, entry["Key"])
            yield Info(self._info_from_object(obj, namespaces))
def _make_info_from_resource(_res, namespaces):
    """Build a pyfilesystem raw-info mapping for the resource ``_res``.

    CLEANUP: removed a nested ``epoch`` helper that was never called and
    several blocks of commented-out code (lstat/link/access namespaces);
    behavior is unchanged.
    """
    doc = _res.get_doc()
    # When combined with FS2DAVProvider(), size None tells WsgiDAV to read
    # until EOF; fall back to 0 when the document stores no size.
    st_size = doc.to_dict().get("size", 0)
    # Timestamps come as (seconds, nanos) protobuf pairs.
    st_mtime = doc.update_time.seconds + float(doc.update_time.nanos / 1000000000.0)
    st_ctime = doc.create_time.seconds + float(doc.create_time.nanos / 1000000000.0)
    st_atime = st_mtime  # access time is not tracked; mirror mtime
    info = {"basic": {"name": _res.basename(_res.path), "is_dir": _res.isdir()}}
    if "details" in namespaces:
        info["details"] = {
            "_write": [],
            "accessed": st_atime,
            "modified": st_mtime,
            "created": st_ctime,
            "size": st_size,
            # fs.ResourceType values: 1 == directory, 2 == file.
            "type": 1 if _res.isdir() else 2,
        }
    if "stat" in namespaces:
        info["stat"] = {
            "st_size": st_size,
            "st_atime": st_atime,
            "st_mtime": st_mtime,
            "st_ctime": st_ctime,
        }
    return Info(info)
def scandir(
    self,
    path,  # type: Text
    namespaces=None,  # type: Optional[Collection[Text]]
):
    # type: (...) -> Iterator[Info]
    """Yield an Info for each entry of the WebDAV directory at ``path``."""
    namespaces = namespaces or ()
    _path = self.validatepath(path)
    entries = self.client.list(_path.encode('utf-8'), get_info=True)
    for entry in entries:
        yield Info(self._create_info_dict(entry))