def generate_relative_mounts(pvc_param, files):
    """
    Maps a list of files as mounts, relative to the base volume mount.
    For example, given the pvc mount:
    {
        'name': 'my_pvc',
        'mountPath': '/galaxy/database/jobs',
        'subPath': 'data',
        'readOnly': False
    }

    and files: ['/galaxy/database/jobs/01/input.txt', '/galaxy/database/jobs/01/working']

    returns each file as a relative mount as follows:
    [
        {
            'name': 'my_pvc',
            'mountPath': '/galaxy/database/jobs/01/input.txt',
            'subPath': 'data/01/input.txt',
            'readOnly': False
        },
        {
            'name': 'my_pvc',
            'mountPath': '/galaxy/database/jobs/01/working',
            'subPath': 'data/01/working',
            'readOnly': False
        }
    ]

    :param pvc_param: the pvc claim dict
    :param files: a list of file or folder names
    :return: A list of volume mounts
    """
    if not pvc_param:
        return
    param_claim = parse_pvc_param_line(pvc_param)
    claim_name = param_claim['name']
    base_subpath = PurePath(param_claim.get('subPath', ""))
    base_mount = PurePath(param_claim["mountPath"])
    read_only = param_claim["readOnly"]
    volume_mounts = []
    for f in files:
        file_path = PurePath(str(f))
        if base_mount not in file_path.parents:
            # force relative directory, needed for the job working directory in particular
            file_path = base_mount.joinpath(
                file_path.relative_to("/") if file_path.is_absolute() else file_path)
        relpath = file_path.relative_to(base_mount)
        subpath = base_subpath.joinpath(relpath)
        volume_mounts.append({
            'name': claim_name,
            'mountPath': str(file_path),
            'subPath': str(subpath),
            'readOnly': read_only
        })
    return volume_mounts

def fmt_path(cwd: PurePath, path: PurePath, is_dir: bool) -> str:
    posfix = sep if is_dir else ""

    with suppress(ValueError):
        rel = path.relative_to(cwd)
        return f"{curdir}{sep}{normcase(rel)}{posfix}"

    with suppress(ValueError):
        rel = path.relative_to(_HOME)
        return f"~{sep}{normcase(rel)}{posfix}"

    return f"{normcase(path)}{posfix}"

def test_path(
    path_ref: pathlib.PurePath,
    force: bool = True,
    haz_bin: str = "HAZ",
    root_ref: str = "",
    root_test: str = "",
    rtol: float = 1e-3,
) -> bool:
    print(path_ref)
    path_test = pathlib.Path(root_test, path_ref.relative_to(root_ref))

    if not path_test.exists() or force:
        try:
            shutil.rmtree(path_test)
            # Wait for my slow computer :-/
            time.sleep(1)
        except FileNotFoundError:
            pass
        # Copy files over
        shutil.copytree(path_ref.joinpath("Input"), path_test)
        # Run HAZ and track the duration
        start = datetime.datetime.now()
        run_haz(path_test, haz_bin)
        time_diff = datetime.datetime.now() - start
        print("Calculation time: {} {}".format(path_ref.relative_to(root_ref), time_diff))

    ok = True
    for fpath_test in path_test.iterdir():
        ext = fpath_test.suffix
        fpath_ref = path_ref.joinpath("Output", fpath_test.name)
        if not fpath_ref.exists():
            continue

        if ext == ".out3":
            expected = io_tools.read_out3(str(fpath_ref))
            actual = io_tools.read_out3(str(fpath_test))
        elif ext == ".out4":
            expected = io_tools.read_out4(str(fpath_ref))
            actual = io_tools.read_out4(str(fpath_test))
        else:
            continue

        # Check for errors
        errors = check_value(actual, expected, rtol, atol=1e-08)
        ok &= not errors
        if errors:
            print("Errors in: %s" % fpath_test)
            print_errors("%s: " % fpath_test, errors)

    return ok

def packs(exclude=[]):
    import os
    import glob
    import re
    from pathlib import PurePath
    current_dir = os.path.join(os.path.dirname(__file__), "phanterpwa")
    packs = []
    for x in glob.glob(os.path.join(current_dir, "**"), recursive=True):
        if os.path.isdir(x) and not os.path.basename(x).startswith("_") and "." not in x:
            p = PurePath(x)
            p = p.relative_to(current_dir)
            result = os.path.join(os.path.basename(current_dir), *p.parts)
            if exclude:
                not_passed = []
                for e in exclude:
                    if callable(e):
                        if e(x):
                            not_passed.append(True)
                    elif isinstance(e, str):
                        if e == x or os.path.basename(x) == e or re.search(e, x):
                            not_passed.append(True)
                if not not_passed:
                    packs.append(result.replace("\\", "/"))
            else:
                packs.append(result.replace("\\", "/"))
    return packs

def outputs(storage: Storage) -> dict:
    """
    Get deployment outputs.

    :raises ParseError:
    :raises DataError:
    """
    if storage.exists("inputs"):
        inputs = storage.read_json("inputs")
    else:
        inputs = {}

    if storage.exists("root_file"):
        service_template_path = PurePath(storage.read("root_file"))
        if storage.exists("csars"):
            csar_dir = Path(storage.path) / "csars" / "csar"
            ast = tosca.load(Path(csar_dir), service_template_path.relative_to(csar_dir))
        else:
            ast = tosca.load(Path(service_template_path.parent), PurePath(service_template_path.name))
        template = ast.get_template(inputs)
        # We need to instantiate the template in order
        # to get access to the instance state.
        template.instantiate(storage)
        result: Dict = template.get_outputs()
        return result
    else:
        print("There is no root_file in storage.")
        return {}

def setup_config(self, path: str) -> None:
    if not self.course.extended:
        self.conffile: Optional[str] = None
        return
    self.conffile = path
    conf = EvalConf()
    conf.add(self.course.evalconf)
    conf["qdir"] = self.course.qdir
    conf["question_path"] = self.question

    qdir_p = PurePath(self.course.qdir)
    if self.question is not None:
        question_p = PurePath(self.question)
    else:
        question_p = qdir_p
    question_rel = question_p.relative_to(qdir_p).parts

    # note we are skipping the last element (file name)
    for i in range(len(question_rel)):
        conf_p = Path(qdir_p.joinpath(*question_rel[:i], "eval.conf"))
        if conf_p.exists():
            conf.load(str(conf_p))

    if self.question is not None:
        conf.from_source_file(self.question)
    conf.dump(self.conffile)

def path_to_manifest_name(root_dir: str, path: PurePath) -> str:
    """
    Helper function: given the name of a file (or a Path to that file), return
    the name as it would appear in MANIFEST.json.
    """
    # If we can figure out the path "relative to" the root_dir, that means we
    # can find just the names of the subdirectories. This will raise an exception
    # if it can't figure out the answer, thus the try block.

    # If there's no relationship between the root_dir and the path, then we
    # can't really do anything useful with the root_dir at all, so we'll
    # ignore it and instead do something totally hacky and just strip off
    # the leading element of the path (which is probably the name of the
    # tally directory) and use the rest.

    # Python 3.9 adds a new "is_relative_to" method that would let us avoid
    # the try block, but we're coding for 3.8.x, so we can't count on that.
    try:
        elems = list(path.relative_to(root_dir).parts)
        return compose_manifest_name(elems[-1], elems[0:-1])
    except ValueError:
        elems = list(path.parts)
        return compose_manifest_name(elems[-1], elems[1:-1])

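# A minimal sketch (not from the original project), assuming Python 3.9+:
# PurePath.is_relative_to() removes the need for the try/except dance the
# comments above describe. compose_manifest_name is the same helper the
# original function relies on.
def path_to_manifest_name_py39(root_dir: str, path: PurePath) -> str:
    if path.is_relative_to(root_dir):
        elems = list(path.relative_to(root_dir).parts)
        return compose_manifest_name(elems[-1], elems[0:-1])
    # No relationship to root_dir: fall back to stripping the leading element.
    elems = list(path.parts)
    return compose_manifest_name(elems[-1], elems[1:-1])
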
def create_instructions_to_remove_redundant_files_in_sync_folder(self):
    """ Remove files in the sync folder that are not in info_map.
        The sync folder is scanned and a list of files is created - the list has both
        the full path to each file and the partial path as it appears in the info_map db.
        The list is processed against the db, which returns the indexes of the redundant files.
        The full-path versions of the indexed files are used to create remove instructions.
    """
    self.instlObj.progress("removing redundant files from sync folder")
    pure_local_sync_dir = PurePath(self.local_sync_dir)
    files_to_check = list()
    for root, dirs, files in os.walk(self.local_sync_dir, followlinks=False):
        try:
            dirs.remove("bookkeeping")
        except Exception:
            pass  # todo: use FOLDER_EXCLUDE_REGEX
        try:
            files.remove(".DS_Store")
        except Exception:
            pass  # todo: use FILE_EXCLUDE_REGEX
        for disk_item in files:
            item_full_path = PurePath(root, disk_item)
            item_partial_path = item_full_path.relative_to(pure_local_sync_dir).as_posix()
            files_to_check.append(item_partial_path)
    files_to_check.sort()
    redundant_files = self.instlObj.info_map_table.get_files_that_should_be_removed_from_sync_folder(files_to_check)
    rm_commands = AnonymousAccum()
    for f in redundant_files:
        item_full_path = pure_local_sync_dir.joinpath(f)
        #log.info(f"remove redundant {item_full_path}")
        rm_commands += RmFile(f)
    return rm_commands

def get_base_filesystem_and_path(*path_parts: str, **kwargs) -> Tuple[fs.base.FS, str]:
    """Parses multiple strings that define a filesystem path and returns a filesystem object
    with a relative path on the filesystem.

    :param path_parts: One or more strings defining a filesystem path
    :param kwargs: Parameters passed to get_filesystem function
    :return: A filesystem object and a relative path
    """
    path_parts = tuple(str(part).rstrip("/") for part in path_parts if part is not None)
    base_path = path_parts[0]

    if "://" in base_path:
        base_path_parts = base_path.split("/", 3)
        filesystem_path = "/".join(base_path_parts[:-1])
        relative_path = "/".join([base_path_parts[-1], *path_parts[1:]])
        return get_filesystem(filesystem_path, **kwargs), relative_path

    entire_path = os.path.abspath(os.path.join(*path_parts))
    pure_path = PurePath(entire_path)
    posix_path = pure_path.relative_to(pure_path.anchor).as_posix()
    filesystem_path = base_path.split("\\")[0] if "\\" in base_path else "/"
    return get_filesystem(filesystem_path, **kwargs), posix_path

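# A small standalone check of the anchor-stripping trick used above
# (pure pathlib only, no get_filesystem involved); the sample paths are made up.
from pathlib import PurePosixPath, PureWindowsPath

win = PureWindowsPath("C:\\data\\folder\\file.txt")
print(win.anchor)                                  # C:\
print(win.relative_to(win.anchor).as_posix())      # data/folder/file.txt

posix = PurePosixPath("/data/folder/file.txt")
print(posix.anchor)                                # /
print(posix.relative_to(posix.anchor).as_posix())  # data/folder/file.txt
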
def test_case_relative_to_project(self, path: PurePath) -> PurePath:
    if path == self._project_root:
        test_case_relative_to_project = self._project_root
    else:
        test_case_relative_to_project = self._project_root / path.relative_to(self._project_root)
    return test_case_relative_to_project

def undeploy(storage: Storage, verbose_mode: bool, num_workers: int):
    """
    Undeploy a deployment.

    :raises ParseError:
    :raises DataError:
    """
    if storage.exists("inputs"):
        inputs = storage.read_json("inputs")
    else:
        inputs = {}

    if storage.exists("root_file"):
        service_template_path = PurePath(storage.read("root_file"))
        workdir = Path(service_template_path.parent)

        if storage.exists("csars"):
            csar_dir = Path(storage.path) / "csars" / "csar"
            workdir = csar_dir
            ast = tosca.load(workdir, service_template_path.relative_to(csar_dir))
        else:
            ast = tosca.load(workdir, PurePath(service_template_path.name))

        template = ast.get_template(inputs)
        topology = template.instantiate(storage)
        topology.undeploy(verbose_mode, workdir, num_workers)
    else:
        print("There is no root_file in storage.")

def on_created(self, event):
    if not event.is_directory:
        logging.debug("TaskHandlerAdapterEventHandler : on_created(), %s", event)
        pure_path = PurePath(event.src_path)
        relative = pure_path.relative_to(self.root_path)
        self.handler.handle_file_event('/' + str(relative))

def load(pathlike, relative_to=""):
    """
    Loads a basic doc dictionary from a file path. This dictionary contains
    the content string and some basic information about the file. Typically,
    you decorate the doc later with meta and other fields.

    Creates a doc dict, populating it with sensible defaults.

    Returns a dictionary.
    """
    # TODO need to grab date from meta
    file_created, file_modified = read_file_times(pathlike)
    with open(str(pathlike)) as f:
        meta, content = frontmatter.parse(f.read())
    input_path = PurePath(pathlike)
    id_path = input_path.relative_to(relative_to)
    output_path = pathtools.to_nice_path(id_path)
    section = pathtools.tld(id_path)
    title = meta.get("title", pathtools.to_title(input_path))
    created = meta.get("created", file_created)
    modified = meta.get("modified", file_modified)
    return doc(
        id_path=id_path,
        output_path=output_path,
        input_path=input_path,
        created=created,
        modified=modified,
        title=title,
        section=section,
        meta=meta,
        content=content
    )

def album_artist_from_path(filename, album, artist):
    """If album is not set, try to extract album and artist from path.

    Args:
        filename: The full file path
        album: Default album name
        artist: Default artist name

    Returns:
        A tuple (album, artist)
    """
    if not album:
        path = PurePath(filename)
        dirs = list(path.relative_to(path.anchor).parent.parts)
        # Strip disc subdirectory from list
        if dirs and re.search(r'\b(?:CD|DVD|Disc)\s*\d+\b', dirs[-1], re.I):
            del dirs[-1]
        if dirs:
            # For clustering assume %artist%/%album%/file or %artist% - %album%/file
            album = dirs[-1]
            if ' - ' in album:
                new_artist, album = album.split(' - ', 1)
                if not artist:
                    artist = new_artist
            elif not artist and len(dirs) >= 2:
                artist = dirs[-2]
    return album, artist

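# A quick pathlib-only illustration (the sample path is hypothetical) of the
# directory-splitting step above: relative_to(path.anchor) drops the root, and
# .parent.parts yields the directory names the heuristic inspects.
from pathlib import PurePosixPath

sample = PurePosixPath("/music/Some Artist - Some Album/01 Track.flac")
dirs = list(sample.relative_to(sample.anchor).parent.parts)
print(dirs)                               # ['music', 'Some Artist - Some Album']
artist, album = dirs[-1].split(" - ", 1)
print(album, "/", artist)                 # Some Album / Some Artist
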
def _target(file_path, relative_to, tgt_path, ext_src, ext_tgt):
    p = PurePath(file_path)
    p = p.relative_to(relative_to)
    target_file = join(tgt_path, *p.parts)
    if ext_tgt is not None:
        target_file = "".join([target_file[0:-len(ext_src)], ext_tgt])
    return [file_path, target_file]

def compile_sw(self, app):
    appConfig = self.config
    build_folder = appConfig.get('FRONTEND')[app]['build_folder']
    files = self.get_files_dir(build_folder, ignore_files=["__init__.py"])
    str_files = ""
    cont = 0
    for x in files:
        if x.startswith(join(build_folder, "static")):
            p = PurePath(x)
            p = p.relative_to(build_folder)
            l = [*p.parts]
            y = "/".join(l)
            if not cont:
                str_files = "'/{0}'".format(y)
            else:
                str_files = ",".join([str_files, "\n '/{0}'".format(y)])
            cont = 1
    sw = interpolate(ServiceWorker, {
        "files": str(str_files),
        "versioning": self.versioning
    })
    with open(join(build_folder, 'sw.js'), "w") as o:
        o.write(sw)

def get_base_filesystem_and_path(*path_parts, **kwargs):
    """ Parses multiple strings that define a filesystem path and returns a filesystem object
    with a relative path on the filesystem.

    :param path_parts: One or more strings defining a filesystem path
    :type path_parts: str
    :param kwargs: Parameters passed to get_filesystem function
    :return: A filesystem object and a relative path
    :rtype: (fs.FS, str)
    """
    path_parts = [str(part) for part in path_parts if part is not None]
    base_path = path_parts[0]

    if '://' in base_path:
        base_path_parts = base_path.split('/', 3)
        filesystem_path = '/'.join(base_path_parts[:-1])
        relative_path = '/'.join([base_path_parts[-1], *path_parts[1:]])
        return get_filesystem(filesystem_path, **kwargs), relative_path

    entire_path = os.path.abspath(os.path.join(*path_parts))
    pure_path = PurePath(entire_path)
    posix_path = pure_path.relative_to(pure_path.anchor).as_posix()
    return get_filesystem('/', **kwargs), posix_path

def _make_relative(self, prefix, subdir):
    prefix = PurePath(prefix)
    subdir = PurePath(subdir)
    try:
        return subdir.relative_to(prefix).as_posix()
    except ValueError:
        return subdir.as_posix()

def deploy_service_template(service_template_path: PurePath,
                            inputs: typing.Optional[dict], storage: Storage,
                            verbose_mode: bool, num_workers: int,
                            delete_existing_state: bool):
    if delete_existing_state:
        storage.remove("instances")

    if inputs is None:
        if storage.exists("inputs"):
            inputs = yaml.safe_load(storage.read("inputs"))
        else:
            inputs = {}
    storage.write_json(inputs, "inputs")
    storage.write(str(service_template_path), "root_file")

    # set workdir and check if service template/CSAR has been initialized
    workdir = Path(service_template_path.parent)
    if storage.exists("csars"):
        csar_dir = Path(storage.path) / "csars" / "csar"
        workdir = csar_dir
        ast = tosca.load(workdir, service_template_path.relative_to(csar_dir))
    else:
        ast = tosca.load(workdir, PurePath(service_template_path.name))

    # initialize service template and deploy
    template = ast.get_template(inputs)
    topology = template.instantiate(storage)
    topology.deploy(verbose_mode, workdir, num_workers)

def process(self, cursor, include_patterns):
    try:
        cwd = Path.cwd()
        filepath = PurePath(cursor.location.file.name)
        if filepath.is_relative_to(cwd):
            filepath = filepath.relative_to(cwd)
        filepath = str(filepath)
        if not any(pattern.search(filepath) for pattern in include_patterns):
            return
        if not self.skip_defines and filepath not in self.parsed_headers:
            self.mark_macros(filepath)
            self.parsed_headers.add(filepath)
    except AttributeError:
        return

    if cursor.kind == clang.CursorKind.VAR_DECL:
        new_definition = Variable(cursor)
        self.defs.append(new_definition)
    if cursor.kind in (clang.CursorKind.TYPEDEF_DECL,
                       clang.CursorKind.ENUM_DECL,
                       clang.CursorKind.STRUCT_DECL,
                       clang.CursorKind.UNION_DECL):
        self.process_type(cursor.type)
    elif cursor.kind == clang.CursorKind.FUNCTION_DECL:
        self.defs.append(Function(cursor))

def get(self, user_context: UserContext) -> UserContext:
    buffer_name = user_context.get('buffer_name', 'default')
    context = self._internal_options()
    context.update(self._vim.call('denite#init#_user_options'))
    context['custom'] = self._vim.call('denite#custom#_get')

    option = context['custom']['option']
    if '_' in option:
        context.update(option['_'])
    if buffer_name in option:
        context.update(option[buffer_name])
    context.update(user_context)

    if context['command'] == 'DeniteCursorWord':
        context['input'] = self._vim.call(
            'denite#util#escape_match',
            self._vim.call('expand', '<cword>'))
    elif context['command'] == 'DeniteBufferDir':
        context['path'] = self._vim.call('expand', '%:p:h')
    elif context['command'] == 'DeniteProjectDir':
        context['path'] = self._vim.call(
            'denite#project#path2project_directory',
            context['path'], context['root_markers'])

    context['filter_winrow'] = int(context['winrow'])
    context['max_candidate_width'] = int(context['max_candidate_width'])
    context['prev_winid'] = int(context['prev_winid'])
    context['preview_height'] = int(context['preview_height'])
    context['preview_width'] = int(context['preview_width'])
    context['wincol'] = int(context['wincol'])
    context['winheight'] = int(context['winheight'])
    context['winminheight'] = int(context['winminheight'])
    context['winrow'] = int(context['winrow'])
    context['winwidth'] = int(context['winwidth'])

    if (context['split'] in [
            'floating', 'floating_relative_cursor', 'floating_absolute_window'
    ] and not self._vim.call('exists', '*nvim_open_win')):
        context['split'] = 'no'
    if (context['filter_split_direction'] == 'floating'
            and not self._vim.call('exists', '*nvim_open_win')):
        context['filter_split_direction'] = 'botright'

    # Add buffer name to context
    bufname = PurePath(self._vim.current.buffer.name)
    try:
        context['bufname'] = str(bufname.relative_to(context['path']))
    except ValueError:
        context['bufname'] = bufname.name

    # For compatibility
    for [old_option, new_option] in [
            x for x in self._vim.call(
                'denite#init#_deprecated_options').items()
            if x[0] in context and x[1]
    ]:
        context[new_option] = context[old_option]

    return context

def target_static_file_by_source(self, src, app) -> str:
    relative_to = self.path_statics_folder(app)
    target_path = join(self.path_build_statics_folder(app), self.versioning)
    p = PurePath(src)
    p = p.relative_to(relative_to)
    target_file = join(target_path, *p.parts)
    return target_file

def target_template_file_by_source(self, src, app) -> str:
    relative_to = self.path_templates_folder(app)
    target_path = self.path_build_templates_folder(app)
    p = PurePath(src)
    p = p.relative_to(relative_to)
    target_file = join(target_path, *p.parts)
    target_file = "".join([target_file[0:-3], ".html"])
    return target_file

def _make_relative(self, prefix, subdir):
    prefix = PurePath(prefix)
    subdir = PurePath(subdir)
    try:
        libdir = subdir.relative_to(prefix)
    except ValueError:
        libdir = subdir
    # pathlib joining makes sure absolute libdir is not appended to '${prefix}'
    return ('${prefix}' / libdir).as_posix()

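# A short demonstration (illustrative values only) of the comment above:
# pathlib joining replaces the left-hand side when the right-hand path is
# absolute, so an absolute libdir never ends up nested under '${prefix}'.
from pathlib import PurePosixPath

print(('${prefix}' / PurePosixPath('lib/x86_64')).as_posix())  # ${prefix}/lib/x86_64
print(('${prefix}' / PurePosixPath('/opt/lib')).as_posix())    # /opt/lib
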
def get_ld_section(self):
    replace_ext = options.get("ld_o_replace_extension", True)
    sect_name = self.ld_name_override if self.ld_name_override else self.get_ld_section_name()
    vram_or_rom = self.rom_start if self.vram_start == 0 else self.vram_start
    subalign_str = f"SUBALIGN({self.subalign})"

    s = (
        f"SPLAT_BEGIN_SEG({sect_name}, 0x{self.rom_start:X}, 0x{vram_or_rom:X}, {subalign_str})\n"
    )

    i = 0
    do_next = False
    for subdir, path, obj_type, start in self.get_ld_files():
        # Manual linker segment creation
        if obj_type == "linker":
            s += (
                "}\n"
                f"SPLAT_BEGIN_SEG({path}, 0x{start:X}, 0x{self.rom_to_ram(start):X}, {subalign_str})\n"
            )

        # Create new sections for non-0x10 alignment (hack)
        if start % 0x10 != 0 and i != 0 or do_next:
            tmp_sect_name = path.replace(".", "_")
            tmp_sect_name = tmp_sect_name.replace("/", "_")
            s += (
                "}\n"
                f"SPLAT_BEGIN_SEG({tmp_sect_name}, 0x{start:X}, 0x{self.rom_to_ram(start):X}, {subalign_str})\n"
            )
            do_next = False

        if start % 0x10 != 0 and i != 0:
            do_next = True

        path_cname = re.sub(r"[^0-9a-zA-Z_]", "_", path)
        s += f" {path_cname} = .;\n"

        if subdir == options.get("assets_dir"):
            path = PurePath(path)
        else:
            path = PurePath(subdir) / PurePath(path)

        # Remove leading ..s
        while path.parts[0] == "..":
            path = path.relative_to("..")

        path = path.with_suffix(".o" if replace_ext else path.suffix + ".o")

        if obj_type != "linker":
            s += f" BUILD_DIR/{path}({obj_type});\n"

        i += 1

    s += (
        f"SPLAT_END_SEG({sect_name}, 0x{self.rom_end:X})\n"
    )

    return s

def app_name_from_relative_child(project_path, child_path):
    """Returns the application's folder name via a child folder

    :param project_path: Project path
    :param child_path: Child folder of the project folder
    """
    apps_list_basedir = os.path.join(project_path, "frontapps")
    p = PurePath(child_path)
    r = p.relative_to(apps_list_basedir)
    return r.parts[0]

def iter_launchers(self) -> Iterator[ROMLauncher]:
    file_list = []
    #rom_list: list[tuple[ROM, Sequence[str]]] = []
    for rom_dir in self.platform_config.paths:
        if not rom_dir.is_dir():
            print('Oh no', self.name, 'has invalid ROM dir', rom_dir)
            continue

        #used_m3u_filenames = []
        for root, dirs, files in os.walk(rom_dir):
            root_path = PurePath(root)
            if any(root_path.is_relative_to(ignored_directory) for ignored_directory in main_config.ignored_directories):
                continue

            subfolders = root_path.relative_to(rom_dir).parts
            if subfolders:
                if any(subfolder in main_config.skipped_subfolder_names for subfolder in subfolders):
                    continue

            folder_check = self.platform.folder_check
            if folder_check:
                remaining_subdirs = []  #The subdirectories of rom_dir that aren't folder ROMs
                for d in dirs:
                    folder_path = Path(root, d)
                    if not main_config.full_rescan:
                        if has_been_done('ROM', str(folder_path)):
                            continue
                    folder_rom = FolderROM(folder_path)
                    media_type = folder_check(folder_rom)
                    if media_type:
                        folder_rom.media_type = media_type
                        #rom_list.append((folder_rom, subfolders))
                        launcher = self._process_rom(folder_rom, subfolders)
                        if launcher:
                            yield launcher
                        #file_list.append((folder_path, subfolders))
                        #Avoid descending further, even if we get a NotARomException
                        #This will not work well if we have multiple emulators for these folder-having systems and one supports folders and one doesn't, but eh, worry about that later I think
                        continue
                    remaining_subdirs.append(d)
                dirs[:] = remaining_subdirs

            dirs.sort()
            for name in sorted(files):
                path = Path(root, name)
                #TODO: We might actually want to do something with associated documents later, but for now, we know we aren't doing anything with them
                if (not self.platform.is_valid_file_type(path.suffix[1:].lower())) and path.suffix[1:].lower() in {'txt', 'md', 'jpg', 'nfo', 'gif', 'bmp'}:
                    continue
                if not main_config.full_rescan:
                    if has_been_done('ROM', str(path)):
                        continue
                file_list.append((path, subfolders))

    yield from self._process_file_list(file_list)

def relativize_filename(filename, prefix):
    """
    Extract the relative path of the source code file.

    :param filename: absolute path included by the Lizard tool, e.g. '/abs_path/rel_path/d1.c'
    :param prefix: the prefix path to remove to make the path relative
    :return: filename relative to the temporary source directory, e.g. 'rel_path/d1.c'
    """
    p = PurePath(filename)
    return str(p.relative_to(prefix))

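# Hedged usage sketch for the helper above; the values mirror the docstring's
# example and assume a POSIX host (str() of a PurePath uses '/' there).
assert relativize_filename('/abs_path/rel_path/d1.c', '/abs_path') == 'rel_path/d1.c'
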
def get_relative_path():
    '''
    The provided argument must have the same root (whether relative or absolute)
    as the PurePath object.

    Example:
    - Original PurePath: <old>/<path>/<components>/<foo>
    - Argument: <old>/<path>
    - New, shorter PurePath: <components>/<foo>
    '''
    relative = PurePath('austinchang/tutorials/python/language/python_37/popular_modules/pathlib_/purepath/methods.r2d2.py')
    #print(relative.relative_to('python_37'))  # ValueError
    print(relative.relative_to('austinchang/tutorials/python/language/python_37'))  # popular_modules/pathlib_/purepath/methods.r2d2.py

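# Two hedged alternatives to the ValueError noted above (illustrative paths,
# output shown for a POSIX host): PurePath.relative_to() only strips a prefix
# and cannot walk upward, while os.path.relpath() can emit '..' segments;
# is_relative_to() requires Python 3.9+.
import os.path
from pathlib import PurePosixPath

p = PurePosixPath('austinchang/tutorials/python/language/python_37')
print(p.is_relative_to('austinchang/tutorials'))  # True (Python 3.9+)
print(p.is_relative_to('python_37'))              # False, instead of ValueError
print(os.path.relpath('/a/b/c', '/a/d'))          # ../b/c
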
def handle_import(self, name, compilation, rule):
    """
    Re-implementation of the core Sass import mechanism, which looks for
    files using the staticfiles storage and staticfiles finders.
    """
    original_path = PurePath(name)

    if original_path.suffix:
        search_exts = [original_path.suffix]
    else:
        search_exts = compilation.compiler.dynamic_extensions

    if original_path.is_absolute():
        # Remove the beginning slash
        search_path = original_path.relative_to('/').parent
    elif rule.source_file.origin:
        search_path = rule.source_file.origin
        if original_path.parent:
            search_path = search_path / original_path.parent
    else:
        search_path = original_path.parent

    basename = original_path.stem

    for prefix, suffix in product(('_', ''), search_exts):
        filename = PurePath(prefix + basename + suffix)

        full_filename, storage = get_file_and_storage(str(search_path / filename))

        if full_filename:
            with storage.open(full_filename) as f:
                return SourceFile.from_file(f, origin=search_path, relpath=filename)

def compile(self, *paths):
    compilation = self.make_compilation()
    for path in paths:
        path = PurePath(path)
        if path.is_absolute():
            path = path.relative_to('/')
        filename, storage = get_file_and_storage(str(path))
        with storage.open(filename) as f:
            source = SourceFile.from_file(f, origin=path.parent, relpath=PurePath(path.name))
        compilation.add_source(source)
    return self.call_and_catch_errors(compilation.run)

def remap_url(src, remap):
    """
    If src is in a subdirectory of any of the keys in `remap`, rewrite the
    path to point to a local file relative to the corresponding value.
    """
    path = PurePath(src)
    for url_path, file_path in remap.items():
        print(url_path, file_path)
        try:
            rel_path = path.relative_to(url_path)
            return PurePath(file_path).joinpath(rel_path)
        except ValueError:
            continue

def _make_zip(file, workdir, dirprefix):
    with zipfile.ZipFile(file, 'w', compression=zipfile.ZIP_DEFLATED) as zip:
        for root, dirs, files in os.walk(workdir):
            relroot = PurePath(root).relative_to(workdir)
            dirs[:] = [p for p in dirs if str(relroot / p) not in _IGNORED_DIRS]
            for f in files:
                abspath = PurePath(root) / f
                relpath = abspath.relative_to(workdir)
                if str(relpath) in _IGNORED_FILES:
                    continue
                # Python 3.5 zipfile does not support pathlib
                zip.write(str(abspath), str(dirprefix / relpath))

def get_absolute_path(self, path, with_base=False):
    path = PurePath(path)
    try:
        new_path = None
        if path.is_absolute():
            new_path = self.base_path / path.relative_to('/')
        else:
            new_path = self.base_path / self.cwd.relative_to('/') / path
        if new_path.exists():
            new_path = new_path.resolve()
    except ValueError:
        new_path = None

    if new_path:
        if with_base:
            return str(new_path)
        else:
            return str(PurePath('/') / new_path.relative_to(self.base_path))
    return None

class ShellManager(GObject.GObject):
    """ Manages the available commands for the shell """

    def __init__(self, shellname="zsh", user="", host=""):
        GObject.GObject.__init__(self)
        self.commands = {}
        self.shellname = shellname
        self.user = user
        self.host = host
        self.cwd = PurePath("/")
        self.base_path = Path(os.path.join(USER_PATH, self.host))
        if not os.path.isdir(str(self.base_path)):
            self.base_path.mkdir(parents=True)
            self.create_initial_state()

    def create_initial_state(self, remove_existing_content=False):
        if remove_existing_content:
            shutil.rmtree(str(self.base_path))
            self.base_path.mkdir(parents=True)
        initial_state_file = os.path.join(DATA_PATH, "{}.zip".format(self.host))
        if os.path.isfile(initial_state_file):
            with zipfile.ZipFile(initial_state_file) as z:
                z.extractall(str(self.base_path))

    @property
    def prompt(self):
        return "{}@{}{}> ".format(self.user, self.host, str(self.cwd))

    def add_command(self, command):
        self.commands[command.program_name] = command

    def find_command(self, command_string):
        parts = command_string.split()
        if not parts[0] in self.commands:
            return None
        else:
            return self.commands[parts[0]]

    def get_absolute_path(self, path, with_base=False):
        path = PurePath(path)
        try:
            new_path = None
            if path.is_absolute():
                new_path = self.base_path / path.relative_to('/')
            else:
                new_path = self.base_path / self.cwd.relative_to('/') / path
            if new_path.exists():
                new_path = new_path.resolve()
        except ValueError:
            new_path = None

        if new_path:
            if with_base:
                return str(new_path)
            else:
                return str(PurePath('/') / new_path.relative_to(self.base_path))
        return None

    def change_directory(self, path):
        absolute_path = Path(self.get_absolute_path(path, True))
        if absolute_path.is_dir():
            self.cwd = PurePath('/') / absolute_path.relative_to(self.base_path)
            return True
        return False

    def get_files(self, path):
        path = PurePath(path)
        try:
            actual_path = self.base_path / self.cwd.relative_to('/') / path
            actual_path = actual_path.resolve()
            actual_path.relative_to(self.base_path)
        except ValueError:
            return []
        else:
            return map(str, [
                child.relative_to(self.base_path / actual_path)
                for child in actual_path.glob('*')
            ])

def relative_to_sds_root(self, file_in_sub_dir: pathlib.PurePath) -> pathlib.PurePath:
    return file_in_sub_dir.relative_to(self.root_dir)