def __load_all_filenames(self, directory, whitelist=None):
    """
    Recursively walk ``directory`` (restricted by ``whitelist``) and
    return a flat, path-sorted list of dicts describing every file found.

    Each entry carries the file's path relative to ``directory``, its
    size in bytes, and a formatted change time.
    """
    entries = []
    if not self.__safe_directory(directory, whitelist=whitelist):
        log.warning("The directory \"%s\" does not exist." % directory)
        return entries
    for dirpath, _dirnames, filenames in safe_walk(directory, whitelist=whitelist):
        for name in filenames:
            full_path = os.path.join(dirpath, name)
            stat_result = os.lstat(full_path)  # lstat: symlinks are not followed
            entries.append({
                "path": os.path.relpath(full_path, directory),
                "size": stat_result.st_size,
                "ctime": time.strftime("%m/%d/%Y %I:%M:%S %p", time.localtime(stat_result.st_ctime)),
            })
    # sort by path
    return sorted(entries, key=itemgetter("path"))
def extra_files(self, trans, history_content_id, history_id, **kwd):
    """
    GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}/extra_files
    Generate list of extra files.
    """
    hda = self.hda_manager.get_accessible(self.decode_id(history_content_id), trans.user)
    base_path = hda.extra_files_path
    listing = []
    # Report every directory, then every file, for each walked root —
    # paths are relative to the dataset's extra-files root.
    for root, dirnames, filenames in safe_walk(base_path):
        listing.extend(
            {"class": "Directory", "path": os.path.relpath(os.path.join(root, d), base_path)}
            for d in dirnames
        )
        listing.extend(
            {"class": "File", "path": os.path.relpath(os.path.join(root, f), base_path)}
            for f in filenames
        )
    return listing
def find_required_files(self, tool_directory: str) -> List[str]:
    """
    Walk ``tool_directory`` and return the relative paths of all files
    matching the configured ``includes`` and not matching ``excludes``.

    :param tool_directory: root directory to scan.
    :returns: list of paths relative to ``tool_directory``.
    """
    def matches(ie_list: List, rel_path: str) -> bool:
        # Each include/exclude item is a dict with a "path" and an optional
        # "path_type": "literal" (default), "prefix", "glob", or anything
        # else is treated as a regular expression matched from the start.
        for ie_item in ie_list:
            ie_item_path = ie_item["path"]
            ie_item_type = ie_item.get("path_type", "literal")
            if ie_item_type == "literal":
                if rel_path == ie_item_path:
                    return True
            elif ie_item_type == "prefix":
                if rel_path.startswith(ie_item_path):
                    return True
            elif ie_item_type == "glob":
                if fnmatch.fnmatch(rel_path, ie_item_path):
                    return True
            else:
                if re.match(ie_item_path, rel_path) is not None:
                    return True
        return False

    # BUGFIX: copy before extending. The previous code appended the
    # default excludes directly to ``self.excludes``, mutating the shared
    # configuration so repeated calls accumulated duplicate entries.
    excludes = list(self.excludes)
    if self.extend_default_excludes:
        excludes.append({"path": "tool-data", "path_type": "prefix"})
        excludes.append({"path": "test-data", "path_type": "prefix"})
        excludes.append({"path": ".hg", "path_type": "prefix"})
    files: List[str] = []
    for (dirpath, _, filenames) in safe_walk(tool_directory):
        for filename in filenames:
            rel_path = join(dirpath, filename).replace(tool_directory + os.path.sep, '')
            # Match against the local (possibly extended) copy of excludes.
            if matches(self.includes, rel_path) and not matches(excludes, rel_path):
                files.append(rel_path)
    return files
def __create_jstree(self, directory, disable='folders', whitelist=None):
    """
    Recursively walk the given directory (restricted by ``whitelist``)
    and build a jstree representation of its folders and files.

    ``disable`` selects which node kind ('folders' or 'files') is
    rendered in a disabled state.
    """
    if not self.__safe_directory(directory, whitelist=whitelist):
        raise exceptions.ConfigDoesNotAllowException('The given directory does not exist.')
    paths = []
    for dirpath, dirnames, filenames in safe_walk(directory, whitelist=whitelist):
        # Emit folder nodes first, then file nodes, for each walked root.
        for node_type, plural, names in (('folder', 'folders', dirnames), ('file', 'files', filenames)):
            for name in names:
                rel_path = os.path.relpath(os.path.join(dirpath, name), directory)
                path_hash = hashlib.sha1(smart_str(rel_path)).hexdigest()
                paths.append(jstree.Path(rel_path, path_hash, {
                    'type': node_type,
                    'state': {'disabled': disable == plural},
                    'li_attr': {'full_path': rel_path},
                }))
    return jstree.JSTree(paths)
def _list(self, path="/", recursive=True, user_context=None):
    """
    List the resources under ``path`` as dicts, optionally recursing
    into subdirectories.

    :raises exceptions.ObjectNotFound: when the resolved native path is
        not a safe/existing directory.
    """
    dir_path = self._to_native_path(path, user_context=user_context)
    if not self._safe_directory(dir_path):
        raise exceptions.ObjectNotFound(f'The specified directory does not exist [{dir_path}].')
    if not recursive:
        # Shallow listing: describe direct children relative to ``path``.
        to_dict = functools.partial(self._resource_info_to_dict, path, user_context=user_context)
        return [to_dict(entry) for entry in os.listdir(dir_path)]
    results = []
    for walk_dir, subdirs, filenames in safe_walk(dir_path, allowlist=self._allowlist):
        rel_dir = os.path.relpath(walk_dir, dir_path)
        to_dict = functools.partial(self._resource_info_to_dict, rel_dir, user_context=user_context)
        results += [to_dict(name) for name in subdirs]
        results += [to_dict(name) for name in filenames]
    return results
def extra_files(self, trans, history_content_id, history_id, **kwd):
    """
    GET /api/histories/{encoded_history_id}/contents/{encoded_content_id}/extra_files
    Generate list of extra files.
    """
    content_id = self.decode_id(history_content_id)
    dataset = self.hda_manager.get_accessible(content_id, trans.user)
    extra_root = dataset.extra_files_path

    def entry(klass, root, name):
        # Paths are reported relative to the dataset's extra-files root.
        return {"class": klass, "path": os.path.relpath(os.path.join(root, name), extra_root)}

    listing = []
    for root, dirnames, filenames in safe_walk(extra_root):
        for dirname in dirnames:
            listing.append(entry("Directory", root, dirname))
        for filename in filenames:
            listing.append(entry("File", root, filename))
    return listing
def extra_files(
    self,
    trans: ProvidesHistoryContext,
    history_content_id: EncodedDatabaseIdField,
):
    """
    Generate list of extra files.
    """
    hda = self.hda_manager.get_accessible(self.decode_id(history_content_id), trans.user)
    root_path = hda.extra_files_path
    listing = []
    # Directories first, then files, for each walked root — paths are
    # relative to the dataset's extra-files root.
    for current_dir, child_dirs, child_files in safe_walk(root_path):
        for klass, names in (("Directory", child_dirs), ("File", child_files)):
            for name in names:
                rel = os.path.relpath(os.path.join(current_dir, name), root_path)
                listing.append({"class": klass, "path": rel})
    return listing
def __load_all_filenames(self, directory, whitelist=None):
    """
    Recursively collect every file under ``directory`` (honoring the
    ``whitelist``) and return a flat, path-sorted list of dicts with
    each file's relative path, size, and formatted change time.
    """
    if not self.__safe_directory(directory, whitelist=whitelist):
        log.warning("The directory \"%s\" does not exist." % directory)
        return []
    records = []
    for dirpath, dirnames, filenames in safe_walk(directory, whitelist=whitelist):
        for filename in filenames:
            absolute = os.path.join(dirpath, filename)
            info = os.lstat(absolute)  # lstat so symlinks are not followed
            records.append(
                dict(
                    path=os.path.relpath(absolute, directory),
                    size=info.st_size,
                    ctime=time.strftime("%m/%d/%Y %I:%M:%S %p", time.localtime(info.st_ctime)),
                )
            )
    # sort by path
    records.sort(key=itemgetter("path"))
    return records
def __create_jstree(self, directory, disable='folders', whitelist=None):
    """
    Build and return a ``jstree.JSTree`` describing all folders and
    files found by recursively walking ``directory``.

    Nodes of the kind named by ``disable`` ('folders' or 'files') are
    marked disabled in the tree.
    """
    if not self.__safe_directory(directory, whitelist=whitelist):
        raise exceptions.ConfigDoesNotAllowException('The given directory does not exist.')

    def make_node(rel_path, node_type, is_disabled):
        # jstree node id is the sha1 of the relative path.
        digest = hashlib.sha1(smart_str(rel_path)).hexdigest()
        return jstree.Path(
            rel_path,
            digest,
            {'type': node_type, 'state': {'disabled': is_disabled}, 'li_attr': {'full_path': rel_path}},
        )

    tree_paths = []
    for dirpath, dirnames, filenames in safe_walk(directory, whitelist=whitelist):
        for dirname in dirnames:
            rel = os.path.relpath(os.path.join(dirpath, dirname), directory)
            tree_paths.append(make_node(rel, 'folder', disable == 'folders'))
        for filename in filenames:
            rel = os.path.relpath(os.path.join(dirpath, filename), directory)
            tree_paths.append(make_node(rel, 'file', disable == 'files'))
    return jstree.JSTree(tree_paths)