def ilistdirinfo(self, path="/", wildcard=None, full=False, absolute=False,
                 dirs_only=False, files_only=False):
    self._log(DEBUG, "Listing directory (listdirinfo) %s" % path)
    if dirs_only and files_only:
        raise ValueError("dirs_only and files_only can not both be True")
    for item in self.tahoeutil.list(self.dircap, path):
        if dirs_only and item['type'] == 'filenode':
            continue
        elif files_only and item['type'] == 'dirnode':
            continue
        if wildcard is not None:
            if isinstance(wildcard, basestring):
                if not fnmatch.fnmatch(item['name'], wildcard):
                    continue
            else:
                if not wildcard(item['name']):
                    continue
        if full:
            item_path = relpath(pathjoin(path, item['name']))
        elif absolute:
            item_path = abspath(pathjoin(path, item['name']))
        else:
            item_path = item['name']
        yield (item_path, item)
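# --- Example: the wildcard convention used by ilistdirinfo() above. A string
# wildcard is matched with fnmatch; anything else is assumed to be a callable
# predicate. Standalone sketch of that dual behaviour, not tied to a
# Tahoe-LAFS filesystem (the `match` helper is hypothetical):
import fnmatch

def match(name, wildcard):
    # str stands in here for Python 2's basestring used in the original
    if isinstance(wildcard, str):
        return fnmatch.fnmatch(name, wildcard)
    return wildcard(name)

assert match('photo.jpg', '*.jpg')
assert match('photo.jpg', lambda n: n.endswith('.jpg'))
assert not match('notes.txt', '*.jpg')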
def __init__(self, filesystem_url, settings_path, server="main",
             logging=None, disable_autoreload=False,
             breakpoint=False, breakpoint_startup=False, validate_db=False):
    self.filesystem_url = filesystem_url
    self.settings_path = settings_path
    self.server_ref = server
    self.logging = logging
    self.breakpoint = breakpoint
    self.validate_db = validate_db
    self.watching_fs = None
    self.rebuild_required = False
    self._new_build_lock = RLock()
    self.archive = None
    self._self = weakref.ref(self, self.on_close)

    if logging is not None:
        init_logging(pathjoin(self.filesystem_url, logging))

    try:
        self.build(breakpoint=breakpoint_startup)
    except Exception as e:
        startup_log.critical(text_type(e))
        raise

    if self.archive.auto_reload and not disable_autoreload:
        watch_location = self.archive.cfg.get('autoreload', 'location', '')
        watch_location = pathjoin(self.filesystem_url, watch_location)
        self.watcher = ReloadChangeWatcher(watch_location, self)
def listdirinfo(self, path="/", wildcard=None, full=False, absolute=False,
                dirs_only=False, files_only=False):
    self.wrapped_fs._log(DEBUG, "Listing directory (listdirinfo) %s" % path)
    _fixpath = self.wrapped_fs._fixpath
    _path = _fixpath(path)
    if dirs_only and files_only:
        raise ValueError("dirs_only and files_only can not both be True")
    result = []
    for item in self.wrapped_fs.tahoeutil.list(self.dircap, _path):
        if dirs_only and item['type'] == 'filenode':
            continue
        elif files_only and item['type'] == 'dirnode':
            continue
        if wildcard is not None and \
                not fnmatch.fnmatch(item['name'], wildcard):
            continue
        if full:
            item_path = relpath(pathjoin(_path, item['name']))
        elif absolute:
            item_path = abspath(pathjoin(_path, item['name']))
        else:
            item_path = item['name']
        cache_name = self.wrapped_fs._fixpath(u"%s/%s" % (path, item['name']))
        self._cache_set(cache_name, 'getinfo', (), {}, (True, item))
        result.append((item_path, item))
    return result
def copydir(self, src, dst, overwrite=False, parallel=True):
    """Copy a directory from source to destination.

    By default the copy is done by recreating the source directory
    structure at the destination, and then copying files in parallel
    from source to destination.

    :param src: Source directory path.
    :type src: str
    :param dst: Destination directory path.
    :type dst: str
    :param overwrite: If True then any existing files in the destination
        directory will be overwritten.
    :type overwrite: bool
    :param parallel: If True (default), the copy will be done in parallel.
    :type parallel: bool

    """
    if not self.isdir(src):
        if self.isfile(src):
            raise ResourceInvalidError(
                src, msg="Source is not a directory: %(path)s")
        raise ResourceNotFoundError(src)
    if self.exists(dst):
        if overwrite:
            if self.isdir(dst):
                self.removedir(dst, force=True)
            elif self.isfile(dst):
                self.remove(dst)
        else:
            raise DestinationExistsError(dst)

    if parallel:
        process = CopyProcess()

        def process_copy(src, dst, overwrite=False):
            process.add_job(src, dst)

        copyfile = process_copy
    else:
        copyfile = self.copy

    self.makedir(dst, allow_recreate=True)
    for src_dirpath, filenames in self.walk(src):
        dst_dirpath = pathcombine(dst, frombase(src, src_dirpath))
        self.makedir(dst_dirpath, allow_recreate=True, recursive=True)
        for filename in filenames:
            src_filename = pathjoin(src_dirpath, filename)
            dst_filename = pathjoin(dst_dirpath, filename)
            copyfile(src_filename, dst_filename, overwrite=overwrite)
    if parallel:
        process.prepare()
        process.run()
    return True
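# --- Example: how copydir() derives each destination directory. frombase()
# strips the src prefix and pathcombine() re-roots the remainder under dst.
# A minimal sketch, assuming fs.path from PyFilesystem 0.x is importable.
from fs.path import frombase, pathcombine

src, dst = '/project/src', '/backup'
src_dirpath = '/project/src/docs/img'
print(pathcombine(dst, frombase(src, src_dirpath)))
# expected: '/backup/docs/img'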
def send_head(self):
    """Common code for GET and HEAD commands.

    This sends the response code and MIME headers.

    Return value is either a file object (which has to be copied
    to the outputfile by the caller unless the command was HEAD,
    and must be closed by the caller under all circumstances), or
    None, in which case the caller has nothing further to do.

    """
    path = self.translate_path(self.path)
    f = None
    if self._fs.isdir(path):
        if not self.path.endswith('/'):
            # redirect browser - doing basically what apache does
            self.send_response(301)
            self.send_header("Location", self.path + "/")
            self.end_headers()
            return None
        for index in ("index.html", "index.htm"):
            index = pathjoin(path, index)
            if self._fs.exists(index):
                path = index
                break
        else:
            return self.list_directory(path)
    ctype = self.guess_type(path)
    try:
        info = self._fs.getinfo(path)
        f = self._fs.open(path, 'rb')
    except FSError as e:
        self.send_error(404, str(e))
        return None
def walkfiles(self, path='/', wildcard=None, dir_wildcard=None,
              search='breadth', ignore_errors=False):
    # Bypass the WrapFS optimization because it avoids using
    # the "walk" method from this class.
    items = self.walk(path, wildcard=wildcard, dir_wildcard=dir_wildcard,
                      search=search, ignore_errors=ignore_errors)
    for path, filenames in items:
        for filename in filenames:
            yield pathjoin(path, filename)
def read(self, path, app=None, mime_type=None):
    """Read a file"""
    if not path.startswith('/'):
        if app is None:
            raise RelativePathError(
                "Can't use relative data paths with an application")
        path = pathjoin(app.data_directory, path)
    filename = basename(path)
    if mime_type is None:
        mime_type, encoding = mimetypes.guess_type(filename)
    _type, sub_type = mime_type.split('/', 1)
    if mime_type == "text/plain":
        data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
    elif mime_type == "application/json":
        with self.fs.open(path, 'rb') as f:
            data = json.load(f)
    elif mime_type == "application/octet-stream":
        data = self.fs.getcontents(path, mode="rb")
    elif _type == "text":
        data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
    else:
        raise UnknownFormat(
            "Moya doesn't know how to read file '{}' (in {!r})".format(
                path, self.fs))
    return data
def logic(self, context):
    params = self.get_parameters(context)
    if params.fsobj is not None:
        fs = params.fsobj
    else:
        try:
            fs = self.archive.filesystems[params.fs]
        except KeyError:
            self.throw("moya.image.no-fs",
                       "No filesystem called '{}'".format(params.fs))
            return
    path = pathjoin(params.dirpath, params.filename)
    img = params.image._img
    save_params = self.get_let_map(context)
    try:
        with fs.makeopendir(params.dirpath, recursive=True) as dir_fs:
            with dir_fs.open(params.filename, 'wb') as f:
                img.save(f, params.format, **save_params)
    except Exception as e:
        self.throw(
            'moya.image.write-fail',
            "Failed to write {} to '{}' in {} ({})".format(
                params.image, path, fs, e))
def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False): if not path: raise PathError(path) path = normpath(path) item = self._get_item_by_path(path) if not item: raise ResourceNotFoundError(path) if item['type'] != _ITEM_TYPE_FOLDER: raise ResourceInvalidError(path) item_children = self._get_children_items(item['id']) result = [] for child in item_children.values(): child_type = child['type'] if dirs_only and child_type != _ITEM_TYPE_FOLDER: continue if files_only and child_type != _ITEM_TYPE_FILE: continue child_path = child['name'] if full: child_path = pathjoin(path, child_path) result.append(child_path) return result
def getinfo(p):
    try:
        if full or absolute:
            return self.getinfo(p)
        else:
            return self.getinfo(pathjoin(path, p))
    except FSError:
        return {}
def _p(self, path, encoding='utf-8'):
    """Join path to base path."""
    # fs.path.pathjoin() omits the first '/' in self.base_path.
    # It is resolved by adding on an additional '/' to its return value.
    if isinstance(path, binary_type):
        path = path.decode(encoding)  # pathjoin always returns unicode
    return '/' + pathjoin(self.base_path, path)
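# --- Example: why _p() prepends '/'. If base_path is stored without a
# leading slash, the joined path comes back relative, so the method re-roots
# it. A minimal sketch, assuming fs.path from PyFilesystem 0.x.
from fs.path import pathjoin

print(pathjoin('data/base', 'file.txt'))        # 'data/base/file.txt'
print('/' + pathjoin('data/base', 'file.txt'))  # '/data/base/file.txt'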
def build_map_recursive(path, parent_id):
    for child in get_children(parent_id):
        if not child["title"]:
            continue
        child_path = pathjoin(path, child["title"])
        ids[child_path] = child["id"]
        build_map_recursive(child_path, child["id"])
def exists(self, path, app):
    """Check if a file exists"""
    if not path.startswith('/'):
        if app is None:
            raise RelativePathError(
                "Can't use relative data paths with an application")
        path = pathjoin(app.data_directory, path)
    try:
        return self.fs.isfile(path)
    except FSError:
        return False
def resolve_symlink(linkpath):
    linkinfo = self.getinfo(linkpath)
    if 'resolved' not in linkinfo:
        linkinfo['resolved'] = linkpath
    if is_symlink(linkinfo):
        target = linkinfo['target']
        base, fname = pathsplit(linkpath)
        return resolve_symlink(pathjoin(base, target))
    else:
        return linkinfo
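# --- Note: resolve_symlink() above recurses with no cycle detection, so a
# symlink loop (a -> b -> a) would recurse until the stack overflows. A
# hedged sketch of a guard; resolve_symlink_safe and its getinfo argument
# are hypothetical, not part of the original code, and the symlink test is
# simplified to checking for a 'target' key:
from fs.path import pathjoin, pathsplit

def resolve_symlink_safe(getinfo, linkpath, _seen=None):
    seen = _seen if _seen is not None else set()
    if linkpath in seen:
        raise RuntimeError("symlink cycle detected at %s" % linkpath)
    seen.add(linkpath)
    linkinfo = getinfo(linkpath)
    if 'target' in linkinfo:
        base, fname = pathsplit(linkpath)
        return resolve_symlink_safe(getinfo,
                                    pathjoin(base, linkinfo['target']),
                                    seen)
    return linkinfo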
def _makedir(self, parent_id, dname):
    self._querycur.execute(
        "SELECT fullpath FROM FsDirMetaData WHERE rowid=?", (parent_id,))
    row = fetchone(self._querycur)
    assert row is not None
    parentpath = row[0]
    fullpath = pathjoin(parentpath, dname)
    fullpath = remove_end_slash(fullpath)
    created = datetime.datetime.now().isoformat()
    self._updatecur.execute(
        "INSERT INTO FsDirMetaData(name, fullpath, parentid, created) "
        "VALUES (?,?,?,?)", (dname, fullpath, parent_id, created))
def resolve_template_path(self, path, app_name, base_path="/"):
    """Get a template path in the appropriate directory for an app"""
    if path.startswith('/'):
        return path
    if isinstance(app_name, Application):
        app = app_name
    else:
        app = self.find_app(text_type(app_name))
    template_path = abspath(pathjoin(base_path, app.templates_directory, path))
    return template_path
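# --- Example: abspath(pathjoin(...)) as used by resolve_template_path()
# collapses the pieces into a single rooted path. A minimal sketch, assuming
# fs.path from PyFilesystem 0.x; the directory names are illustrative only.
from fs.path import abspath, pathjoin

print(abspath(pathjoin('/', 'shop/templates', 'email/receipt.html')))
# expected: '/shop/templates/email/receipt.html'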
def _p(self, path, encoding='utf-8'):
    """Join path to base path."""
    # fs.path.pathjoin() omits the first '/' in self.base_path.
    # It is resolved by adding on an additional '/' to its return value.
    if isinstance(path, binary_type):
        path = path.decode(encoding)  # pathjoin always returns unicode
    return (
        u'/' + pathjoin(self.base_path.decode('utf-8'), path)
    ).encode(encoding)
def copydir_progress(progress_callback, fs1, fs2, create_destination=True,
                     ignore_errors=False, chunk_size=64 * 1024):
    """Copies the contents of a directory from one fs to another, with a
    callback function to display progress.

    `progress_callback` should be a function with two parameters: `step` and
    `num_steps`. `num_steps` is the number of steps in the copy process, and
    `step` is the current step. `num_steps` may be None if the number of
    steps is still being calculated.

    """
    if isinstance(fs1, tuple):
        fs1, dir1 = fs1
        fs1 = fs1.opendir(dir1)
    if isinstance(fs2, tuple):
        fs2, dir2 = fs2
        if create_destination:
            fs2.makedir(dir2, allow_recreate=True, recursive=True)
        fs2 = fs2.opendir(dir2)

    def do_callback(step, num_steps):
        try:
            progress_callback(step, num_steps)
        except Exception:
            pass

    do_callback(0, None)

    file_count = 0
    copy_paths = []
    for dir_path, file_paths in fs1.walk():
        copy_paths.append((dir_path, file_paths))
        file_count += len(file_paths)
    do_callback(0, file_count)

    step = 0
    for i, (dir_path, file_paths) in enumerate(copy_paths):
        try:
            fs2.makedir(dir_path, allow_recreate=True)
            for path in file_paths:
                copy_path = pathjoin(dir_path, path)
                with fs1.open(copy_path, 'rb') as src_file:
                    fs2.setcontents(copy_path, src_file, chunk_size=chunk_size)
                step += 1
        except Exception:
            if ignore_errors:
                continue
            raise
    do_callback(step, file_count)
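# --- Usage sketch for copydir_progress(): the callback receives
# (step, num_steps), and num_steps is None during the initial scan. This is
# a minimal sketch assuming PyFilesystem 0.x with src_fs/dst_fs already
# opened (for example via fs.opener.fsopendir); the call itself is left
# commented because it depends on those hypothetical filesystems.
def report(step, num_steps):
    if num_steps is None:
        print('scanning...')
    else:
        print('copied %d of %d files' % (step, num_steps))

# hypothetical call; either argument may also be an (fs, subdir) tuple:
# copydir_progress(report, src_fs, (dst_fs, 'backup'))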
def publish(self):
    super(PyFS, self).publish()
    deploy_fs = OSFS(self.site.config.deploy_root_path.path)
    for (dirnm, local_filenms) in deploy_fs.walk():
        logger.info("Making directory: %s", dirnm)
        self.fs.makedir(dirnm, allow_recreate=True)
        remote_fileinfos = self.fs.listdirinfo(dirnm, files_only=True)
        # Process each local file, to see if it needs updating.
        for filenm in local_filenms:
            filepath = pathjoin(dirnm, filenm)
            # Try to find an existing remote file, to compare metadata.
            for (nm, info) in remote_fileinfos:
                if nm == filenm:
                    break
            else:
                info = {}
            # Skip it if the etags match.
            if self.check_etag and "etag" in info:
                with deploy_fs.open(filepath, "rb") as f:
                    local_etag = self._calculate_etag(f)
                if info["etag"] == local_etag:
                    logger.info("Skipping file [etag]: %s", filepath)
                    continue
            # Skip it if the mtime is more recent remotely.
            if self.check_mtime and "modified_time" in info:
                local_mtime = deploy_fs.getinfo(filepath)["modified_time"]
                if info["modified_time"] > local_mtime:
                    logger.info("Skipping file [mtime]: %s", filepath)
                    continue
            # Upload it to the remote filesystem.
            logger.info("Uploading file: %s", filepath)
            with deploy_fs.open(filepath, "rb") as f:
                self.fs.setcontents(filepath, f)
        # Process each remote file, to see if it needs deleting.
        for (filenm, info) in remote_fileinfos:
            filepath = pathjoin(dirnm, filenm)
            if filenm not in local_filenms:
                logger.info("Removing file: %s", filepath)
                self.fs.remove(filepath)
def rename(self, src, dst):
    """Rename a file or directory.

    :param src: path to rename.
    :type src: string
    :param dst: new name.
    :type dst: string

    :raises DestinationExistsError: if destination already exists.
    :raises ResourceNotFoundError: if source does not exist.

    """
    src = self._p(src)
    dst = self._p(pathjoin(dirname(src), dst))
    if not self.exists(src):
        raise ResourceNotFoundError(src)
    return self._move(src, dst, overwrite=False)
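# --- Example: rename() above interprets dst relative to src's parent, so
# rename('/docs/a.txt', 'b.txt') targets '/docs/b.txt'. A minimal sketch of
# that path arithmetic, assuming fs.path from PyFilesystem 0.x.
from fs.path import dirname, pathjoin

src = '/docs/a.txt'
print(pathjoin(dirname(src), 'b.txt'))  # '/docs/b.txt'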
def _get_file_list(self, dirpath, full):
    assert dirpath is not None
    if full:
        searchpath = dirpath + r"%"
        self._querycur.execute(
            "SELECT FsFileMetaData.name, FsDirMetaData.fullpath "
            "FROM FsFileMetaData, FsDirMetaData "
            "WHERE FsFileMetaData.parent=FsDirMetaData.ROWID "
            "AND FsFileMetaData.parent IN "
            "(SELECT rowid FROM FsDirMetaData WHERE fullpath LIKE ?)",
            (searchpath,))
    else:
        parentid = self._get_dir_id(dirpath)
        self._querycur.execute(
            "SELECT FsFileMetaData.name, FsDirMetaData.fullpath "
            "FROM FsFileMetaData, FsDirMetaData "
            "WHERE FsFileMetaData.parent=FsDirMetaData.ROWID "
            "AND FsFileMetaData.parent=?", (parentid,))
    filelist = [pathjoin(row[1], row[0]) for row in self._querycur]
    return filelist
def _make_package_fs(package_fs, output_fs, exclude_wildcards, auth_token=None):
    """Builds a package zip"""
    assert isinstance(exclude_wildcards, list), "wildcards must be a list"

    def match_wildcards(path):
        split_path = path.lstrip('/').split('/')
        for i in range(len(split_path)):
            p = '/'.join(split_path[i:])
            if any(fnmatch.fnmatchcase(p, w) for w in exclude_wildcards):
                return False
        return True

    manifest = []
    console = Console()
    paths = []
    for dirpath, files in package_fs.walk():
        output_fs.makedir(dirpath, allow_recreate=True)
        for filename in files:
            path = pathjoin(dirpath, filename)
            if not match_wildcards(path):
                continue
            paths.append(path)

    with console.progress("building package...", len(paths)) as progress:
        for path in paths:
            progress.step()
            data = package_fs.getcontents(path)
            m = hashlib.md5()
            m.update(data)
            file_hash = m.hexdigest()
            if auth_token is None:
                auth_hash = ""
            else:
                m.update(auth_token)
                auth_hash = m.hexdigest()
            output_fs.setcontents(path, data)
            manifest.append((path, file_hash, auth_hash))
    return manifest
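# --- Example: the exclusion test in _make_package_fs() matches each wildcard
# against every path *suffix*, so a pattern like '*.pyc' excludes matching
# files at any depth. Standalone sketch; the `excluded` helper is
# hypothetical and inverts the match_wildcards() return value for
# readability:
import fnmatch

def excluded(path, wildcards):
    parts = path.lstrip('/').split('/')
    return any(fnmatch.fnmatchcase('/'.join(parts[i:]), w)
               for i in range(len(parts))
               for w in wildcards)

assert excluded('/pkg/sub/mod.pyc', ['*.pyc'])
assert not excluded('/pkg/sub/mod.py', ['*.pyc'])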
def removedir(self, path, recursive=False, force=False):
    if path == '/':
        raise RemoveRootError(path)
    # Remove directory tree from the bottom upwards depending upon the
    # flags.
    if force:
        for (del_dir, del_files) in self.walk(path, search='depth',
                                              ignore_errors=True):
            for f in del_files:
                self.remove(pathjoin(del_dir, f))
            self.removedir(del_dir)
    elif recursive:
        paths = recursepath(path, reverse=True)[:-1]
        for p in paths:
            self._remove_dir(p)
    else:
        self._remove_dir(path)
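# --- Example: the `recursive` branch above removes ancestors deepest-first.
# recursepath(path, reverse=True) lists every ancestor down to '/', and the
# [:-1] slice drops the root itself. A minimal sketch, assuming fs.path from
# PyFilesystem 0.x.
from fs.path import recursepath

print(recursepath('/a/b/c', reverse=True))       # ['/a/b/c', '/a/b', '/a', '/']
print(recursepath('/a/b/c', reverse=True)[:-1])  # ['/a/b/c', '/a/b', '/a']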
def removedir(self, path, recursive=False, force=False):
    """Remove a directory from the filesystem.

    :param path: Path of the directory to remove.
    :type path: string
    :param recursive: Unsupported by XRootDFS implementation.
    :type recursive: bool
    :param force: If True, any directory contents will be removed
        (recursively). Note that this can be very expensive as the xrootd
        protocol does not support recursive deletes - i.e. the library
        will do a full recursive listing of the directory and send a
        network request per file/directory.
    :type force: bool

    :raises `fs.errors.DirectoryNotEmptyError`: If the directory is not
        empty and force is `False`.
    :raises `fs.errors.ResourceInvalidError`: If the path is not a
        directory.
    :raises `fs.errors.ResourceNotFoundError`: If the path does not exist.

    """
    if recursive:
        raise UnsupportedError("recursive parameter is not supported.")

    status, res = self._client.rmdir(self._p(path))
    if not status.ok:
        if force and status.errno == 3005:
            # xrootd does not support recursive removal, so we have to
            # do it ourselves.
            for d, filenames in self.walk(path, search="depth"):
                for filename in filenames:
                    relpath = pathjoin(d, filename)
                    status, res = self._client.rm(self._p(relpath))
                    if not status.ok:
                        self._raise_status(relpath, status)
                status, res = self._client.rmdir(self._p(d))
                if not status.ok:
                    self._raise_status(path, status)
            return True
        self._raise_status(path, status)
    return True
def logic(self, context):
    params = self.get_parameters(context)
    if self.has_parameter('fsobj'):
        walk_fs = params.fsobj
    else:
        walk_fs = self.archive.get_filesystem(params.fs)
    if self.has_parameter('files'):
        wildcard = lambda name: params.files(context, name=basename(name))
    else:
        wildcard = lambda name: True
    if self.has_parameter('dirs'):
        dir_wildcard = lambda name: params.dirs(context, name=basename(name))
    else:
        dir_wildcard = lambda name: True
    paths = []
    add_path = paths.append
    for dirname, dir_paths in walk_fs.walk(path=params.path,
                                           search=params.search,
                                           wildcard=wildcard,
                                           dir_wildcard=dir_wildcard,
                                           ignore_errors=True):
        for path in dir_paths:
            add_path(pathjoin(dirname, path))
    self.set_context(context, params.dst, paths)
def read(self, path, app=None, mime_type=None):
    """Read a file"""
    if not path.startswith('/'):
        if app is None:
            raise RelativePathError(
                "Can't use relative data paths with an application")
        path = pathjoin(app.data_directory, path)
    filename = basename(path)
    if mime_type is None:
        mime_type, encoding = mimetypes.guess_type(filename)
    _type, sub_type = mime_type.split('/', 1)
    try:
        if mime_type == "text/plain":
            data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
        elif mime_type == "application/json":
            with self.fs.open(path, 'rt', encoding="utf-8") as f:
                data = json.load(f)
        elif mime_type == "application/octet-stream":
            data = self.fs.getcontents(path, mode="rb")
        elif _type == "text":
            data = self.fs.getcontents(path, mode="rt", encoding="utf-8")
        else:
            raise UnknownFormat(
                "Moya doesn't know how to read file '{}' (in {!r})".format(
                    path, self.fs))
    except FSError as e:
        from .logic import MoyaException
        info = {"path": path, "mime_type": mime_type}
        raise MoyaException(
            "data.read-fail",
            "unable to read data from {path} ({e})".format(path=path, e=e),
            diagnosis="check the data exists with **moya fs data --tree**",
            info=info)
    return data
def logic(self, context):
    params = self.get_parameters(context)
    self.check_image(context, params.image)
    if params.fsobj is not None:
        fs = params.fsobj
    else:
        try:
            fs = self.archive.filesystems[params.fs]
        except KeyError:
            self.throw("image.no-fs",
                       "No filesystem called '{}'".format(params.fs))
            return
    path = pathjoin(params.dirpath, params.filename)
    with params.image._lock:
        img = params.image._img
        img_format = (params.format or
                      splitext(params.filename or '')[-1].lstrip('.') or
                      'jpeg')
        if img_format == 'jpeg':
            if img.mode != 'RGB':
                img = img.convert('RGB')
        save_params = self.get_let_map(context)
        try:
            with fs.makeopendir(params.dirpath, recursive=True) as dir_fs:
                with dir_fs.open(params.filename, 'wb') as f:
                    img.save(f, img_format, **save_params)
            log.debug("wrote '%s'", params.filename)
        except Exception as e:
            self.throw(
                'image.write-fail',
                "Failed to write {} to '{}' in {!r} ({})".format(
                    params.image, path, fs, e))
def _readdir(self, path):
    path = abspath(normpath(path))
    if self.dircache.count:
        cached_dirlist = self.dircache.get(path)
        if cached_dirlist is not None:
            return cached_dirlist
    dirlist = {}

    def _get_FEAT(ftp):
        features = dict()
        try:
            response = ftp.sendcmd("FEAT")
            if response[:3] == "211":
                for line in response.splitlines()[1:]:
                    if line[:3] == "211":
                        break
                    if line[0] != ' ':
                        break
                    parts = line[1:].partition(' ')
                    features[parts[0].upper()] = parts[2]
        except error_perm:
            # some FTP servers may not support FEAT
            pass
        return features

    def on_line(line):
        if not isinstance(line, unicode):
            line = line.decode('utf-8')
        info = parse_ftp_list_line(line, self.use_mlst)
        if info:
            info = info.__dict__
            if info['name'] not in ('.', '..'):
                dirlist[info['name']] = info

    try:
        encoded_path = _encode(path)
        ftp_features = _get_FEAT(self.ftp)
        if 'MLST' in ftp_features:
            self.use_mlst = True
            try:
                # only request the facts we need
                self.ftp.sendcmd("OPTS MLST type;unique;size;modify;")
            except error_perm:
                # some FTP servers don't support OPTS MLST
                pass
            # need to send MLST first to discover if it's file or dir
            response = self.ftp.sendcmd("MLST " + encoded_path)
            lines = response.splitlines()
            if lines[0][:3] == "250":
                list_line = lines[1]
                # MLST line is preceded by space
                if list_line[0] == ' ':
                    on_line(list_line[1:])
                else:
                    # Matrix FTP server has bug
                    on_line(list_line)
            # if it's a dir, then we can send a MLSD
            if dirlist[list(dirlist.keys())[0]]['try_cwd']:
                dirlist = {}
                self.ftp.retrlines("MLSD " + encoded_path, on_line)
        else:
            self.ftp.dir(encoded_path, on_line)
    except error_reply:
        pass
    self.dircache[path] = dirlist

    def is_symlink(info):
        return info['try_retr'] and info['try_cwd'] and 'target' in info

    def resolve_symlink(linkpath):
        linkinfo = self.getinfo(linkpath)
        if 'resolved' not in linkinfo:
            linkinfo['resolved'] = linkpath
        if is_symlink(linkinfo):
            target = linkinfo['target']
            base, fname = pathsplit(linkpath)
            return resolve_symlink(pathjoin(base, target))
        else:
            return linkinfo

    if self.follow_symlinks:
        for name in dirlist:
            if is_symlink(dirlist[name]):
                target = dirlist[name]['target']
                linkinfo = resolve_symlink(pathjoin(path, target))
                for key in linkinfo:
                    if key != 'name':
                        dirlist[name][key] = linkinfo[key]
                del dirlist[name]['target']

    return dirlist
def run(self):
    args = self.args
    application = WSGIApplication(self.location,
                                  self.get_settings(),
                                  disable_autoreload=True)
    archive = application.archive

    try:
        libs = [archive.libs[lib_name] for lib_name in args.libs]
    except KeyError:
        self.console.error('No lib with name "{}" installed'.format(lib_name))
        return -1

    table = []
    for lib in libs:
        template_text = set()
        extract_text = defaultdict(lambda: {"occurrences": []})
        if not lib.translations_location:
            table.append([
                lib.long_name,
                Cell("translations not enabled", fg="red", bold=True),
                ''
            ])
            continue
        filename = "{}.pot".format(lib.long_name.replace('.', '_'))
        translations_dir = lib.load_fs.getsyspath(lib.translations_location)

        def add_text(path, line, text, comment=None, plural=None,
                     attr=None, context=None):
            rel_path = os.path.relpath(path, translations_dir)
            entry = extract_text[(text, plural, attr, context)]
            if attr is not None and context is not None:
                context = "attribute '{}'".format(attr)
            if plural is not None:
                entry['msgid'] = text
                entry['msgid_plural'] = plural
                entry['msgstr_plural'] = {'0': '', '1': ''}
            else:
                entry['msgid'] = text
            if context is not None:
                entry['msgctxt'] = context
            entry['occurrences'].append((rel_path, line))
            if comment is not None:
                entry['comment'] = comment

        with self.console.progress("extracting {}".format(lib),
                                   len(lib.documents)) as progress:
            for doc in lib.documents:
                progress.step()
                for element in doc.elements.itervalues():
                    if element._translate_text:
                        text = element._text.strip()
                        if text:
                            add_text(element._location,
                                     element.source_line,
                                     text,
                                     comment=unicode(element))
                    for name, attribute in element._tag_attributes.items():
                        if attribute.translate or name in element._translatable_attrs:
                            text = element._attrs.get(name, '').strip()
                            if text:
                                add_text(
                                    element._location,
                                    element.source_line,
                                    text,
                                    attr=name,
                                    comment="attribute '{}' of {}".format(
                                        name, unicode(element)))

        if 'location' in lib.templates_info:
            engine = archive.get_template_engine('moya')
            with lib.load_fs.opendir(lib.templates_info['location']) as templates_fs:
                for path in templates_fs.walkfiles():
                    sys_path = templates_fs.getsyspath(path, allow_none=True) or path
                    contents = templates_fs.getcontents(path)
                    template = Template(contents, path)
                    template.parse(engine.env)
                    for trans_text in template.translatable_text:
                        line, start, end = trans_text.location
                        text = trans_text.text
                        comment = trans_text.comment
                        plural = trans_text.plural
                        translatable_text = (path, line, start, text, plural)
                        if translatable_text not in template_text:
                            add_text(sys_path, line, text, comment,
                                     plural=plural,
                                     context=trans_text.context)
                            template_text.add(translatable_text)

        now = pytz.UTC.localize(datetime.utcnow())
        po = polib.POFile()
        for text in extract_text.values():
            po.append(polib.POEntry(**text))
        po.metadata = {
            'POT-Creation-Date': now.strftime('%Y-%m-%d %H:%M%z'),
            'Project-Id-Version': lib.version,
            'Language': lib.default_language or 'en',
            'MIME-Version': '1.0',
            'Content-Type': 'text/plain; charset=utf-8',
            'Content-Transfer-Encoding': '8Bit',
            'Plural-Forms': 'nplurals=2; plural=(n != 1);'
        }

        if lib.translations_location:
            lib.load_fs.makedir(lib.translations_location, allow_recreate=True)
        translations_location = lib.load_fs.getsyspath(lib.translations_location)
        translation_path = os.path.join(translations_location, filename)

        if os.path.exists(translation_path) and not args.overwrite:
            if not args.merge:
                self.console.error(
                    'message file "{}" exists, '
                    'see --merge or --overwrite options'.format(filename))
                return -1
            existing_po = polib.pofile(translation_path)
            po.merge(existing_po)
            po.save(translation_path)
        else:
            po.save(translation_path)

        locale_fs = lib.load_fs.opendir(lib.translations_location)
        for lang in lib.languages:
            locale_fs.makeopendir("{}/LC_MESSAGES/".format(lang), recursive=True)

        table.append([
            lib.long_name,
            Cell(pathjoin(lib.translations_location, filename),
                 fg="green", bold=True),
            Cell(len(po), bold=True)
        ])

    self.console.table(table, header_row=["lib", "file", "no. strings"])
def logic(self, context):
    start = time()
    (name,
     _location,
     py,
     priority,
     template_priority) = self.get_parameters(context,
                                              'name',
                                              'location',
                                              'py',
                                              'priority',
                                              'templatepriority')
    if template_priority is None:
        template_priority = priority
    archive = self.document.archive

    absolute = False
    if _location is not None:
        location = _location
    else:
        if py in sys.modules:
            reload(sys.modules[py])
        try:
            __import__(py)
        except ImportError as e:
            raise errors.ElementError(
                "unable to import Python module '{}'".format(py),
                element=self,
                diagnosis=text_type(e))
        module = sys.modules[py]
        location = dirname(abspath(module.__file__))
        absolute = True

    if '::/' in location:
        import_fs = fsopendir(location)
    else:
        if absolute:
            import_fs = fsopendir(location)
        else:
            project_fs = context['fs']
            try:
                if project_fs.hassyspath('/'):
                    project_path = project_fs.getsyspath('/')
                    import_path = pathjoin(project_path, location)
                    import_fs = fsopendir(import_path)
                else:
                    import_fs = context['fs'].opendir(location)
            except FSError as e:
                self.throw("import.fail",
                           "unable to import library from {}".format(location),
                           diagnosis=text_type(e))

    lib = archive.load_library(import_fs,
                               priority=priority,
                               template_priority=template_priority,
                               long_name=name,
                               rebuild=context.root.get('_rebuild', False))
    if lib.failed_documents:
        if _location is not None:
            msg = "Failed to load library '{}' from location '{}'"
            raise errors.StartupFailedError(
                msg.format(name or lib.long_name, _location))
        elif py:
            msg = "Failed to load library '{}' from Python module '{}'"
            raise errors.StartupFailedError(
                msg.format(name or lib.long_name, py))
        else:
            raise errors.StartupFailedError(
                "Failed to load library '{}'".format(name or lib.long_name))

    startup_log.debug("%s imported %.1fms", lib, (time() - start) * 1000.0)
    if lib.priority:
        startup_log.debug("%s priority is %s", lib, lib.priority)
    if lib.template_priority:
        startup_log.debug("%s template priority is %s", lib, lib.template_priority)
def do_run(self, options, args):
    self.options = options
    if len(args) < 2:
        self.error("at least two filesystems required\n")
        return 1
    srcs = args[:-1]
    dst = args[-1]

    dst_fs, dst_path = self.open_fs(dst, writeable=True, create_dir=True)
    if dst_path is not None and dst_fs.isfile(dst_path):
        self.error('Destination must be a directory\n')
        return 1
    if dst_path:
        dst_fs = dst_fs.makeopendir(dst_path)
        dst_path = None

    copy_fs_paths = []

    progress = options.progress
    if progress:
        sys.stdout.write(self.progress_bar(len(srcs), 0, 'scanning...'))
        sys.stdout.flush()

    self.root_dirs = []
    for i, fs_url in enumerate(srcs):
        src_fs, src_path = self.open_fs(fs_url)
        if src_path is None:
            src_path = '/'

        if iswildcard(src_path):
            for file_path in src_fs.listdir(wildcard=src_path, full=True):
                copy_fs_paths.append((self.FILE, src_fs, file_path, file_path))
        else:
            if src_fs.isdir(src_path):
                self.root_dirs.append((src_fs, src_path))
                src_sub_fs = src_fs.opendir(src_path)
                for dir_path, file_paths in src_sub_fs.walk():
                    if dir_path not in ('', '/'):
                        copy_fs_paths.append((self.DIR, src_sub_fs, dir_path, dir_path))
                    sub_fs = src_sub_fs.opendir(dir_path)
                    for file_path in file_paths:
                        copy_fs_paths.append((self.FILE,
                                              sub_fs,
                                              file_path,
                                              pathjoin(dir_path, file_path)))
            else:
                if src_fs.exists(src_path):
                    copy_fs_paths.append((self.FILE, src_fs, src_path, src_path))
                else:
                    self.error('%s is not a file or directory\n' % src_path)
                    return 1

        if progress:
            sys.stdout.write(self.progress_bar(len(srcs), i + 1, 'scanning...'))
            sys.stdout.flush()

    if progress:
        sys.stdout.write(self.progress_bar(len(copy_fs_paths), 0, self.get_verb()))
        sys.stdout.flush()

    if self.options.threads > 1:
        copy_fs_dirs = [r for r in copy_fs_paths if r[0] == self.DIR]
        copy_fs_paths = [r for r in copy_fs_paths if r[0] == self.FILE]
        for path_type, fs, path, dest_path in copy_fs_dirs:
            dst_fs.makedir(path, allow_recreate=True, recursive=True)

    self.lock = threading.RLock()

    self.total_files = len(copy_fs_paths)
    self.done_files = 0

    file_queue = queue.Queue()
    threads = [FileOpThread(self.get_action(),
                            'T%i' % i,
                            dst_fs,
                            file_queue,
                            self.on_done,
                            self.on_error)
               for i in xrange(options.threads)]

    for thread in threads:
        thread.start()

    self.action_errors = []
    complete = False
    try:
        enqueue = file_queue.put
        for resource in copy_fs_paths:
            enqueue(resource)

        while not file_queue.empty():
            time.sleep(0)
            if self.any_error():
                raise SystemExit
        # Can't use queue.join here, or KeyboardInterrupt will not be
        # caught until the queue is finished
        #file_queue.join()
    except KeyboardInterrupt:
        options.progress = False
        self.output("\nCancelling...\n")
    except SystemExit:
        options.progress = False
    finally:
        sys.stdout.flush()
        for thread in threads:
            thread.finish_event.set()
        for thread in threads:
            thread.join()
        complete = True
        if not self.any_error():
            self.post_actions()

    dst_fs.close()

    if self.action_errors:
        for error in self.action_errors:
            self.error(self.wrap_error(unicode(error)) + '\n')
        sys.stdout.flush()
    else:
        if complete and options.progress:
            sys.stdout.write(self.progress_bar(self.total_files, self.done_files, ''))
            sys.stdout.write('\n')
            sys.stdout.flush()
def resolve_relative_path(self, path):
    document_dirname = dirname(self.path)
    new_path = pathjoin(document_dirname, path)
    return new_path
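# --- Example: pathjoin() normalizes '..' segments, which is what lets
# resolve_relative_path() resolve paths relative to the document. A minimal
# sketch, assuming fs.path from PyFilesystem 0.x; the paths are illustrative.
from fs.path import dirname, pathjoin

document_path = '/site/docs/index.xml'
print(pathjoin(dirname(document_path), '../img/logo.png'))
# expected: '/site/img/logo.png'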
def print_fs(fs, path='/', max_levels=5, file_out=None, terminal_colors=None,
             hide_dotfiles=False, dirs_first=False,
             files_wildcard=None, dirs_only=False):
    """Prints a filesystem listing to stdout (including sub directories).

    This is mostly useful as a debugging aid. Be careful about printing an
    OSFS, or any other large filesystem: without max_levels set, this
    function will traverse the entire directory tree.

    For example, the following will print a tree of the files under the
    current working directory::

        >>> from fs.osfs import *
        >>> from fs.utils import *
        >>> fs = OSFS('.')
        >>> print_fs(fs)

    :param fs: A filesystem object
    :param path: Path of a directory to list (default "/")
    :param max_levels: Maximum levels of dirs to list (default 5), set to
        None for no maximum
    :param file_out: File object to write output to (defaults to sys.stdout)
    :param terminal_colors: If True, terminal color codes will be written,
        set to False for non-console output. The default (None) will select
        an appropriate setting for the platform.
    :param hide_dotfiles: if True, files or directories beginning with '.'
        will be removed

    """
    if file_out is None:
        file_out = sys.stdout

    file_encoding = getattr(file_out, 'encoding', 'utf-8') or 'utf-8'
    file_encoding = file_encoding.upper()

    if terminal_colors is None:
        if sys.platform.startswith('win'):
            terminal_colors = False
        else:
            terminal_colors = hasattr(file_out, 'isatty') and file_out.isatty()

    def write(line):
        file_out.write(line.encode(file_encoding, 'replace') + b'\n')

    def wrap_prefix(prefix):
        if not terminal_colors:
            return prefix
        return '\x1b[32m%s\x1b[0m' % prefix

    def wrap_dirname(dirname):
        if not terminal_colors:
            return dirname
        return '\x1b[1;34m%s\x1b[0m' % dirname

    def wrap_error(msg):
        if not terminal_colors:
            return msg
        return '\x1b[31m%s\x1b[0m' % msg

    def wrap_filename(fname):
        if not terminal_colors:
            return fname
        if fname.startswith('.'):
            fname = '\x1b[33m%s\x1b[0m' % fname
        return fname

    dircount = [0]
    filecount = [0]

    def print_dir(fs, path, levels=[]):
        if file_encoding == 'UTF-8' and terminal_colors:
            char_vertline = u'│'
            char_newnode = u'├'
            char_line = u'──'
            char_corner = u'╰'
        else:
            char_vertline = '|'
            char_newnode = '|'
            char_line = '--'
            char_corner = '`'

        try:
            dirs = fs.listdir(path, dirs_only=True)
            if dirs_only:
                files = []
            else:
                files = fs.listdir(path, files_only=True, wildcard=files_wildcard)
            dir_listing = ([(True, p) for p in dirs] +
                           [(False, p) for p in files])
        except Exception as e:
            prefix = ''.join([(char_vertline + ' ', ' ')[last] for last in levels]) + ' '
            write(wrap_prefix(prefix[:-1] + ' ') +
                  wrap_error("unable to retrieve directory list (%s) ..." % str(e)))
            return 0

        if hide_dotfiles:
            dir_listing = [(isdir, p) for isdir, p in dir_listing
                           if not p.startswith('.')]

        if dirs_first:
            dir_listing.sort(key=lambda (isdir, p): (not isdir, p.lower()))
        else:
            dir_listing.sort(key=lambda (isdir, p): p.lower())

        for i, (is_dir, item) in enumerate(dir_listing):
            if is_dir:
                dircount[0] += 1
            else:
                filecount[0] += 1
            is_last_item = (i == len(dir_listing) - 1)
            prefix = ''.join([(char_vertline + ' ', ' ')[last] for last in levels])
            if is_last_item:
                prefix += char_corner
            else:
                prefix += char_newnode
            if is_dir:
                write('%s %s' % (wrap_prefix(prefix + char_line),
                                 wrap_dirname(item)))
                if max_levels is not None and len(levels) + 1 >= max_levels:
                    pass
                    #write(wrap_prefix(prefix[:-1] + ' ') + wrap_error('max recursion levels reached'))
                else:
                    print_dir(fs, pathjoin(path, item), levels[:] + [is_last_item])
            else:
                write('%s %s' % (wrap_prefix(prefix + char_line),
                                 wrap_filename(item)))

        return len(dir_listing)
def run_install(self):
    args = self.args
    console = self.console

    installed = []
    install_package = args.package
    install_select = package_select = self.call('package.select',
                                                package=install_package)
    install_notes = package_select['notes']

    if package_select['version'] is None:
        raise CommandError("no install candidate for '{}', "
                           "run 'moya-pm list' to see available packages".format(install_package))

    package_name = package_select['name']
    install_version = versioning.Version(package_select['version'])

    filename = package_select['md5']
    download_url = package_select['download']
    package_filename = download_url.rsplit('/', 1)[-1]

    libs = []
    output_fs = fsopendir(args.output)

    force = args.force
    installed_libs = {}

    archive = None
    if not args.download:
        try:
            application = WSGIApplication(self.location,
                                          args.settings,
                                          disable_autoreload=True)
            archive = application.archive
            if archive is None:
                console.text('unable to load project, use the --force switch to force installation')
                return -1
        except Exception as e:
            if not args.force:
                console.exception(e)
                console.text('unable to load project, use the --force switch to force installation')
                return -1
        else:
            libs = [(lib.long_name, lib.version, lib.install_location)
                    for lib in archive.libs.values()
                    if lib.long_name == package_name]
            installed_libs = archive.libs.copy()

    if not force:
        for name, version, location in libs:
            if name == package_name:
                if version > install_version:
                    if not args.force:
                        raise CommandError("a newer version ({}) is already installed, "
                                           "use --force to force installation".format(version))
                elif install_version == version:
                    if not args.force:
                        raise CommandError("version {} is already installed, "
                                           "use --force to force installation".format(version))
                else:
                    if not args.upgrade:
                        raise CommandError("an older version ({}) is installed, "
                                           "use --upgrade to force upgrade".format(version))
                    force = True

    username = self.settings.get('upload', 'username', None)
    password = self.settings.get('upload', 'password', None)
    if username and password:
        auth = (username, password)
    else:
        auth = None

    install_app = args.app or package_name.split('.')[-1]

    packages = dependencies.gather_dependencies(self.rpc,
                                                install_app,
                                                args.mount,
                                                install_package,
                                                console,
                                                no_deps=args.no_deps)

    if not args.no_add:
        for package_name, (app_name, mount, package_select) in packages.items():
            if package_select['version'] is None:
                raise CommandError("no install candidate for required package '{}', "
                                   "run 'moya-pm list {}' to see available packages".format(package_name, package_name))

    download_temp_fs = TempFS()

    for package_name, (app_name, mount, package_select) in packages.items():
        package_name = package_select['name']
        install_version = versioning.Version(package_select['version'])

        filename = "{}-{}.{}".format(package_name, install_version, package_select['md5'])
        download_url = package_select['download']
        package_filename = download_url.rsplit('/', 1)[-1]

        with download_temp_fs.open(filename, 'wb') as package_file:
            checksum = downloader.download(download_url,
                                           package_file,
                                           console=console,
                                           auth=auth,
                                           verify_ssl=False,
                                           msg="requesting {name}=={version}".format(**package_select))
        if checksum != package_select['md5']:
            raise CommandError("md5 checksum of download doesn't match server! "
                               "download={}, server={}".format(checksum, package_select['md5']))

        if args.download:
            with fsopendir(args.download) as dest_fs:
                fs.utils.copyfile(download_temp_fs, filename, dest_fs, package_filename)

    if args.download:
        return 0

    changed_server_xml = False
    for package_name, (app_name, mount, package_select) in packages.items():
        package_name = package_select['name']
        install_version = versioning.Version(package_select['version'])

        filename = "{}-{}.{}".format(package_name, install_version, package_select['md5'])
        download_url = package_select['download']
        package_filename = download_url.rsplit('/', 1)[-1]

        install_location = relativefrom(self.location,
                                        pathjoin(self.location, args.output, package_select['name']))
        package_select['location'] = install_location

        with download_temp_fs.open(filename, 'rb') as package_file:
            with ZipFS(package_file, 'r') as package_fs:
                with output_fs.makeopendir(package_select['name']) as lib_fs:
                    #if not lib_fs.isdirempty('/') and not force:
                    #    raise CommandError("install directory is not empty, use --force to erase and overwrite")
                    fs.utils.remove_all(lib_fs, '/')
                    fs.utils.copydir(package_fs, lib_fs)
                    installed.append((package_select, mount))

        if not args.no_add and archive:
            server_xml = archive.cfg.get('project', 'startup')
            changed_server_xml = \
                installer.install(project_path=self.location,
                                  server_xml_location=archive.cfg.get('project', 'location'),
                                  server_xml=server_xml,
                                  server_name=application.server_ref,
                                  lib_path=install_location,
                                  lib_name=package_name,
                                  app_name=app_name,
                                  mount=mount)

    table = []
    for _package, mount in installed:
        table.append([Cell("{name}=={version}".format(**_package), fg="magenta", bold=True),
                      Cell(_package['location'], fg="blue", bold=True),
                      Cell(mount or '', fg="cyan", bold=True)])
    if table:
        console.table(table, ['package', 'location', 'mount'])

    if install_notes:
        console.table([[install_notes]],
                      ['{} v{} release notes'.format(install_select['name'], install_select['version'])])

    if changed_server_xml:
        console.text("moya-pm modified '{}' -- please check changes".format(server_xml),
                     fg="green", bold="yes")
def walk(self, path='/', wildcard=None, dir_wildcard=None,
         search='breadth', ignore_errors=False):
    # Note: copy/pasted this to avoid walking into hidden directories.
    path = normpath(path)

    def listdir(path, *args, **kwargs):
        try:
            return self.listdir(path, *args, **kwargs)
        except Exception:
            if ignore_errors:
                return []
            else:
                raise

    if wildcard is None:
        wildcard = lambda f: True
    elif not callable(wildcard):
        wildcard_re = re.compile(fnmatch.translate(wildcard))
        wildcard = lambda fn: bool(wildcard_re.match(fn))

    if dir_wildcard is None:
        dir_wildcard = lambda f: True
    elif not callable(dir_wildcard):
        dir_wildcard_re = re.compile(fnmatch.translate(dir_wildcard))
        dir_wildcard = lambda fn: bool(dir_wildcard_re.match(fn))

    if search == 'breadth':
        dirs = [path]
        while dirs:
            current_path = dirs.pop()
            paths = []
            try:
                for filename in listdir(current_path):
                    path = pathjoin(current_path, filename)
                    if self.isdir(path):
                        if dir_wildcard(path) and self._should_show(path):
                            dirs.append(path)
                    else:
                        if wildcard(filename) and self._should_show(path):
                            paths.append(filename)
            except ResourceNotFoundError:
                # Could happen if another thread / process deletes
                # something whilst we are walking
                pass
            yield (current_path, paths)

    elif search == 'depth':

        def recurse(recurse_path):
            try:
                for path in listdir(recurse_path, wildcard=dir_wildcard,
                                    full=True, dirs_only=True):
                    for p in recurse(path):
                        yield p
            except ResourceNotFoundError:
                # Could happen if another thread / process deletes
                # something whilst we are walking
                pass
            filenames = listdir(recurse_path, wildcard=wildcard,
                                files_only=True)
            filenames = [filename for filename in filenames
                         if self._should_show(pathcombine(recurse_path, filename))]
            yield (recurse_path, filenames)

        for p in recurse(path):
            yield p
    else:
        raise ValueError("Search should be 'breadth' or 'depth'")