def find_tool(name, additional_paths=[], path_last=False):
    """ Attempts to find tool (binary) named 'name' in PATH and in
        'additional-paths'.  If found in path, returns 'name'.  If found in
        additional paths, returns full name.  If the tool is found in several
        directories, returns the first path found.  Otherwise, returns the
        empty string.  If 'path_last' is specified, path is checked after
        'additional_paths'.
    """
    assert isinstance(name, str)
    assert isinstance(additional_paths, list)
    assert isinstance(path_last, bool)

    programs = path.programs_path()
    match = path.glob(programs, [name, name + '.exe'])
    additional_match = path.glob(additional_paths, [name, name + '.exe'])

    result = []
    if path_last:
        result = additional_match
        if not result and match:
            result = match
    else:
        if match:
            result = match
        elif additional_match:
            result = additional_match

    if result:
        return path.native(result[0])
    else:
        return ''

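# A minimal usage sketch for find_tool above, assuming Boost.Build's 'path' module
# (path.programs_path / path.glob / path.native) is importable in this scope.
# 'gcc' and '/opt/toolchain/bin' are illustrative values, not taken from the original.
tool = find_tool('gcc', additional_paths=['/opt/toolchain/bin'])
if not tool:
    print('gcc not found in PATH or in the additional paths')
# Passing path_last=True would prefer the additional paths over PATH.
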
def count_lines_and_files(paths_lines=None, paths_files=None, line_glob=None, file_glob=None) -> dict:
    """Counts lines and files in the given paths."""
    result = {}

    for path in arg_to_iter(paths_lines):
        path = Path(path).resolve()
        if path.is_dir():
            files = path.glob(line_glob) if line_glob else path.iterdir()
        elif path.is_file():
            files = (path,)
        else:
            files = ()
        for file in files:
            LOGGER.info("Counting lines in <%s>...", file)
            name = os.path.splitext(file.name)[0]
            result[f"lc_{name}"] = count_lines(file)

    for path in arg_to_iter(paths_files):
        path = Path(path).resolve()
        if not path.is_dir():
            continue
        for subdir in path.glob("**"):
            LOGGER.info("Counting files in <%s>...", subdir)
            if path == subdir:
                name = path.name
            else:
                relative = subdir.relative_to(path)
                name = "_".join(relative.parts)
            result[f"fc_{name}"] = count_files(subdir, glob=file_glob)

    return result

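# Hedged example of calling count_lines_and_files above: line counts land under
# "lc_<file stem>" keys and per-directory file counts under "fc_<dir>" keys. The
# paths and glob values here are illustrative, not from the original.
stats = count_lines_and_files(
    paths_lines=["data/items.csv"],   # counted per file  -> key "lc_items"
    paths_files=["downloads"],        # counted per (sub)directory -> "fc_downloads", "fc_<subdir>"
    file_glob="*.json",
)
print(stats)
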
def handle(data_path, sess):
    path = pathlib.Path(data_path)
    files = (list(path.glob('*.jpg')) + list(path.glob('*.png'))
             + list(path.glob('*.tif')) + list(path.glob('*.tiff'))
             + list(path.glob('*.mha')))
    x = [np.array(imageio.imread(str(fn))) for fn in files]
    with sess.as_default():
        mean, std = get_inception_score(x)
    return [mean, std]

def _handle_path(path):
    if path.endswith('.npz'):
        f = np.load(path)
        m, s = f['mu'][:], f['sigma'][:]
        f.close()
    else:
        path = pathlib.Path(path)
        files = list(path.glob('*.jpg')) + list(path.glob('*.png'))
        x = [imread(str(fn)).astype(np.float32) for fn in files]
    return x

def _handle_path(path, sess):
    if path.endswith('.npz'):
        f = np.load(path)
        m, s = f['mu'][:], f['sigma'][:]
        f.close()
    else:
        path = pathlib.Path(path)
        files = list(path.glob('*.jpg')) + list(path.glob('*.png'))
        x = np.array([imread(str(fn)).astype(np.float32) for fn in files])
        # m, s = calculate_activation_statistics(x, sess, batch_size=args.batch_size)
        m, s = calculate_activation_statistics(x, sess, batch_size=32)
        del x
    return m, s

def _compute_statistics_of_path(path, model, batch_size, dims, cuda):
    if path.endswith('.npz'):
        f = np.load(path)
        m, s = f['mu'][:], f['sigma'][:]
        f.close()
        act = m
    else:
        path = pathlib.Path(path)
        files = list(path.glob('*.jpg')) + list(path.glob('*.png'))
        act, m, s = calculate_activation_statistics(files, model, batch_size, dims, cuda)
    return act, m, s

def _extract_sinergise_fields(path: Path, p: DatasetPrepare) -> Iterable[Path]:
    """Extract Sinergise metadata and return list of image offsets"""
    product_info_path = path / "productInfo.json"
    metadata_xml_path = path / "metadata.xml"

    if not product_info_path.exists():
        raise ValueError(
            "No productInfo.json file found. "
            "Are you sure the input is a sinergise dataset folder?")

    # Tile/Granule metadata
    p.properties.update(process_tile_metadata(metadata_xml_path.read_text()))
    p.note_accessory_file("metadata:s2_tile", metadata_xml_path)

    # Whole-product metadata
    for prop, value in process_sinergise_product_info(product_info_path).items():
        # We don't want to override properties that came from the (more-specific) tile metadata.
        if prop not in p.properties:
            p.properties[prop] = value
    p.note_accessory_file("metadata:sinergise_product_info", product_info_path)

    # TODO: sinergise folders could `process_datastrip_metadata()` in an outer directory?
    return list(path.glob("*.jp2"))

def Write(self):
    path = pathlib.Path(self.__args.output_dir)
    for ext in ['', '.md', '.txt', '.MD', '.TXT']:
        for filepath in path.glob('*' + ext):
            if not filepath.is_file():
                continue
            if 'ReadMe'.lower() == filepath.name.split('.')[0].lower():
                print('A ReadMe file already exists, so creation is aborted.: {}'.format(filepath))
                return
    """
    path = os.path.join(self.__args.output_dir, self.__filename)
    if os.path.isfile(path):
        print('ReadMe.md already exists, so creation is aborted.')
        return
    """
    self.__LoadConfig()
    tpl_var_dict = {}
    tpl_var_dict['Description'] = self.__args.description
    tpl_var_dict['Environment'] = self.__GetEnvironmentText()
    tpl_var_dict['License'] = self.__GetLicenseText()
    tpl_var_dict['UseLibLicense'] = self.__GetUseLibLicenseText()
    path_tpl = os.path.join(
        pathlib.Path(__file__).parent.parent / ('res/template/' + self.__filename))
    source = None
    with open(path_tpl) as f:
        source = f.read()
    res = TemplateRenderer().Render(source, **tpl_var_dict)
    path_out = os.path.join(self.__args.output_dir, self.__filename)
    with open(path_out, 'w') as f:
        f.write(res)

def genFileTree(widget, pathobj, expandAbovePathName=None):
    """
    Construct the file tree
    :param widget: Initial object is root TreeWidget
    :param pathobj: Root directory that contains files that show up in the file tree
    :param expandAbovePathName: Specifies path of a new file so directories can be expanded to reveal the file
    :return:
    """
    childrange = range(widget.childCount())
    for path in pathobj.iterdir():
        if str(path) in [widget.child(p).path for p in childrange]:  # check if tree item already exists
            for childind in childrange:
                if widget.child(childind).path == str(path):
                    if path.is_dir():
                        genFileTree(widget.child(childind), path, expandAbovePathName)
        else:  # otherwise make a new tree item.
            if path.parts[-1].split('.')[-1] == 'py':
                child = TreeItem()
                child.setText(0, str(path.parts[-1]))
                child.path = str(path)
                child.isdir = False
                widget.addChild(child)
                if expandAbovePathName is not None and path == Path(expandAbovePathName):
                    # expand directories containing a new file
                    expandAboveChild(widget)
            elif path.is_dir() and len(list(path.glob('**/*.py'))):
                child = TreeItem()
                child.setText(0, str(path.parts[-1]))
                child.path = str(path)
                widget.addChild(child)
                genFileTree(child, path, expandAbovePathName)
    widget.sortChildren(0, 0)

def load_scripts(path):
    if not isinstance(path, pathlib.Path):
        path = str(path)
        path = os.path.expanduser(path)
        path = pathlib.Path(path)
    if path.is_dir():
        l = path.glob('*.py')
        # sort b.50.py before a.80.py
        l = sorted(l, key=lambda p: [p.suffixes[-2][1:] if len(p.suffixes) > 1 else "", p])
        for p in l:
            load_scripts(p)
    elif path.is_file():
        with path.open("rt") as f:
            c = f.read()
        c = compile(c, str(path), "exec")
        try:
            env.Env['__FILE__'] = str(path.absolute())
            exec(c, env.Env.dict)
            del env.Env['__FILE__']
        except NotImplementedError:
            # Script wanted to be ignored
            pass
        SCRIPTS.append(path)
    else:
        raise Exception("neither file nor dir in load_scripts", path)

def _glob_resources(
    resource_path: pathlib.Path,
    subdir: str,
    ext: str,
) -> Iterable[str]:
    """Find resources with the given extension.

    Yields a resource name like "html/log.html" (as string).
    """
    assert '*' not in ext, ext
    assert ext.startswith('.'), ext
    path = resource_path / subdir

    if isinstance(resource_path, pathlib.Path):
        for full_path in path.glob(f'*{ext}'):  # . is contained in ext
            yield full_path.relative_to(resource_path).as_posix()
    else:  # zipfile.Path or importlib_resources compat object
        # Unfortunately, we can't tell mypy about resource_path being of type
        # Union[pathlib.Path, zipfile.Path] because we set "python_version = 3.6" in
        # .mypy.ini, but the zipfile stubs (correctly) only declare zipfile.Path with
        # Python 3.8...
        assert path.is_dir(), path  # type: ignore[unreachable]
        for subpath in path.iterdir():
            if subpath.name.endswith(ext):
                yield posixpath.join(subdir, subpath.name)

def match_directory(path, move_files=False, recursive=True):
    # type: (Path, bool, bool) -> None
    """
    Scans directory `path` for files known to QBittorrent and either adjusts
    the fastresume files to have the correct path, or moves the files to the
    path specified in the fastresume files.
    """
    qb = QBittorrentMeta()
    if recursive:
        it = path.rglob("*")
    else:
        it = path.glob("*")

    if move_files:
        for p in it:
            try:
                if qb.move_single_file(p):
                    logger.info("Moved file %s", p)
                else:
                    logger.info("File %s already in destination", p)
            except NotFound:
                logger.debug("Did not find torrent file for %s", p)
    else:
        for p in it:
            try:
                if qb.single_file_moved(p):
                    logger.info("Adjusted torrent path for %s", p)
                else:
                    logger.info("Torrent path already correct for %s", p)
            except NotFound:
                logger.debug("Did not find torrent file for %s", p)

def _process_path(path: str) -> Iterable[Path]:
    """
    Convert a path pattern into a path iterable.
    If a non-pattern path is given, return a tuple of one element: (path,)
    :param path:
    :return:
    """
    split_path = path_split(path)
    clean_path = list(
        itertools.takewhile(lambda part: "*" not in part and "?" not in part, split_path))
    pattern_path = split_path[len(clean_path):]

    if clean_path:
        clean_path = os.path.join(*clean_path)
    else:
        clean_path = "."

    if pattern_path:
        pattern_path = os.path.join(*pattern_path)
    else:
        pattern_path = None

    path = Path(clean_path)
    if pattern_path:
        return path.glob(pattern_path)
    else:
        return path,

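# Rough illustration of _process_path above (the paths are hypothetical). A pattern
# such as "src/**/*.py" is split into a literal prefix ("src") and a glob suffix
# ("**/*.py"), and the glob is evaluated relative to the prefix; a plain path comes
# back as a one-element tuple. Assumes path_split() splits a path into its components.
for p in _process_path("src/**/*.py"):
    print(p)
print(_process_path("README.md"))  # e.g. (PosixPath('README.md'),)
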
def _prepare_path(self):
    path = Path('signals').resolve()
    if not os.path.exists(path):
        err_path_str = 'There is no "signals" folder in: ' + str(Path.cwd())
        raise FileNotFoundError(err_path_str)
    filelist = list(path.glob('*.txt'))
    return filelist

def render_index(path):
    assert path.is_dir()
    return index_section_template.format(
        name=escape(path.name.capitalize()),
        entries='\n'.join(
            indent(lines=(render_index_entry(f) for f in sorted(
                path.glob('*.html'), key=lambda p: extract_numbers(p.stem))))))

def create_dataset(path, return_pairs=None):
    path = Path(path)
    ims_path_list = path.glob('*_m.png')
    pairs_path_list = []
    for i in ims_path_list:
        pair_name = i.parts[-1].split('_m.png')[0] + '.png'
        pair_path = path.glob(pair_name)
        pair_path = str(list(pair_path)[0])
        pair_path_mirror = str(i)
        pairs_path_list.append([pair_path, pair_path_mirror])
    random.seed(1984)
    [random.shuffle(pairs_path_list) for i in range(int(1e4))]
    if return_pairs is None:
        pairs_path_list = reduce(operator.add, pairs_path_list, [])
    return pairs_path_list

def isUntypedProject(path):
    files = {
        'pom.xml', 'build.xml', 'package.json', '.git', '.gitignore', '.svn',
        '.hg', '.editorconfig', '.settings', '.project', 'requirements.txt',
        'readme.txt', 'readme.md', 'readme.rst'
    }
    for file in path.glob('*.*'):
        return file.name in files

def convert_all(folder):
    """
    Converts all the notebooks in a given folder into html pages.
    """
    path = Path(folder)
    nb_files = path.glob('*.ipynb')
    print(nb_files)
    for file in nb_files:
        convert_nb(file)

def _iter_markdown_filepaths(input_paths: InputPaths) -> FilePaths:
    for path_str in input_paths:
        path = pl.Path(path_str)
        if path.is_file():
            yield path
        else:
            for ext in MARKDOWN_FILE_EXTENSIONS:
                for fpath in path.glob(f"**/*.{ext}"):
                    yield fpath

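# Minimal sketch of _iter_markdown_filepaths above: plain file paths pass through
# unchanged, while directories are searched recursively for every extension in
# MARKDOWN_FILE_EXTENSIONS (e.g. "md"). The argument values are illustrative.
for fpath in _iter_markdown_filepaths(["README.md", "docs"]):
    print(fpath)
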
def get_requirements(path: Path) -> List[str]:
    req = []
    for filename in path.glob("requirements*.txt"):
        LOG.info("Reading reqs from %s", filename)
        for line in filename.read_text().splitlines():
            line = line.split("#", 1)[0].strip()
            if not line:
                continue
            req.append(line)
    return req

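# Possible use of get_requirements above: collect dependency lines from every
# requirements*.txt directly inside a project directory, with inline comments and
# blank lines stripped. The directory name is illustrative.
from pathlib import Path
print(get_requirements(Path("my_project")))
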
def main():
    template = Path('service_worker.tpl.js').read_text(encoding='utf-8')
    cacheURLs = {str(p).replace('\\', '/') for p in Path('.').glob('**/*.*') if allow(p)}
    path = Path('.')
    for p in path.glob('**/*.html'):
        html = p.read_text(encoding='utf-8')
        cacheURLs |= set(linkiter(html))
    template = template.replace('$cacheURLs', json.dumps(sorted(cacheURLs), indent=4))
    Path('service_worker.js').write_text(template, encoding='utf-8')

def hash_dir(path, algorithm='md5'):
    pathlib = requires_package('pathlib')
    path = pathlib.Path(path)
    files = [i for i in sorted(path.glob('**/*')) if i.is_file()]
    hashes = ''
    for i in files:
        hash = hash_file(i, algorithm).hexdigest()
        file = i.relative_to(path)
        hashes += '{hash} ./{file}\n'.format(hash=hash, file=file)
    return hashes

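# Sketch of hash_dir above: the return value mimics md5sum-style output, one
# "<hexdigest> ./<relative path>" line per file under the directory. 'docs' is an
# illustrative directory, and hash_file/requires_package are helpers assumed to exist.
print(hash_dir('docs', algorithm='sha256'))
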
def stamp_dir(path, format='{file} {size} {mtime}\n'):
    pathlib = requires_package('pathlib')
    path = pathlib.Path(path)
    files = [i for i in sorted(path.glob('**/*')) if i.is_file()]
    stamp = ''
    for file in files:
        stat = file.stat()
        size = stat.st_size
        mtime = datetime.fromtimestamp(stat.st_mtime)
        stamp += format.format(file=file, stat=stat, mtime=mtime, size=size)
    return stamp

def search_upwards(path, glob):
    if path is not None:
        path = pathlib.Path(path)
    while path is not None:
        results = list(path.glob(glob))
        yield from results
        if path.parent == path:
            break
        else:
            path = path.parent

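# Usage sketch for search_upwards above: walk from a start directory up to the
# filesystem root, yielding every file that matches the glob along the way.
# 'setup.py' is an illustrative pattern, not from the original snippet.
import pathlib
for hit in search_upwards(pathlib.Path.cwd(), 'setup.py'):
    print(hit)
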
def convert_all(folder, dest_path='.'):
    "Convert all notebooks in `folder` to html pages in `dest_path`."
    path = Path(folder)
    fname_last_checked = path / ".last_checked"
    last_checked = os.path.getmtime(fname_last_checked) if fname_last_checked.exists() else None
    for fname in path.glob("*.ipynb"):
        # skip notebooks that haven't changed since the last conversion
        if last_checked:
            last_changed = os.path.getmtime(fname)
            if last_changed < last_checked:
                continue
        convert_nb(fname, dest_path=dest_path)

def calc_state_size(self):
    # Should work even for pre-7.4 versions, counting only files and folders related to that version
    result = 0
    for filename in STATE_FILES_TO_COPY:
        path = self.directory / filename
        if path.exists():
            result += path.stat().st_size
    for dirname in STATE_DIRS_TO_COPY:
        path = self.directory / dirname
        for f in path.glob('**/*'):
            result += f.stat().st_size
    return result

def get_filenames(subject, dtype='source', session='*', epoch='*'):
    if 'source' in dtype:
        path = preprocessed_path / 'source_space'
        globstr = 'S%i-SESS%s-*%s*HF*lcmv.hdf' % (subject, str(session), str(epoch))
    elif 'meta' in dtype:
        path = preprocessed_path / 'meta'
        globstr = 'down_sample_%s_meta_sub%i_sess%s_b*.hdf' % (
            epoch[:4], subject, str(session))
    else:
        raise ValueError('dtype can only be "source" or "meta" at the moment')
    return path.glob(globstr), str(path / globstr)

def on_new_topfolder(self):
    logger.info("User initiates top-level folder creation")
    message_box = QMessageBox(
        QMessageBox.Question, "Create Folder",
        "Create folder in the default location?",
        QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel, self.window())
    message_box.button(QMessageBox.No).setText("Create elsewhere")  # TODO: i18n
    result = message_box.exec_()

    self.window().app.monitor.suspend()
    if result == QMessageBox.Yes:
        logger.debug("User creates a new top-level folder.")
        self.__createFolder(None)
    elif result == QMessageBox.No:
        logger.debug("User creates a new folder and chose to create it elsewhere")
        QMessageBox.warning(
            self.window(), "Beware",
            "AutoKey will take the full ownership of the directory you are about to select or create. "
            "It is advisable to only choose empty directories or directories that contain data created by AutoKey "
            "previously.\n\nIf you delete or move the directory from within AutoKey "
            "(for example by using drag and drop), all files unknown to AutoKey will be deleted.",
            QMessageBox.Ok)
        path = QFileDialog.getExistingDirectory(
            self.window(), "Where should the folder be created?")
        if path != "":
            path = pathlib.Path(path)
            if list(path.glob("*")):
                result = QMessageBox.warning(
                    self.window(), "The chosen directory already contains files",
                    "The selected directory already contains files. "
                    "If you continue, AutoKey will take the ownership.\n\n"
                    "You may lose all files in '{}' that are not related to AutoKey if you select this directory.\n"
                    "Continue?".format(path),
                    QMessageBox.Yes | QMessageBox.No) == QMessageBox.Yes
            else:
                result = True
            if result:
                folder = model.Folder(path.name, path=str(path))
                new_item = ak_tree.FolderWidgetItem(None, folder)
                self.treeWidget.addTopLevelItem(new_item)
                self.configManager.folders.append(folder)
                self.window().app.config_altered(True)
        self.window().app.monitor.unsuspend()
    else:
        logger.debug("User canceled top-level folder creation.")
        self.window().app.monitor.unsuspend()

def glob_each(path, patterns):
    for pattern in patterns:
        matches = []
        for match in path.glob(pattern):
            matches.append(match)
            yield match
        if len(matches) == 0:
            raise ValueError(f'Failed, no match for "{pattern}".')
        if len(matches) == 1:
            raise ValueError(
                f'Failed, only one match for "{pattern}": {matches[0]}.')

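# Hedged example for glob_each above: each pattern is globbed in turn and every match
# is yielded, but a ValueError is raised if a pattern produces zero matches or exactly
# one match. The directory and patterns are hypothetical.
from pathlib import Path
for match in glob_each(Path('data'), ['*.csv', '*.json']):
    print(match)
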
def get_absolute_tool_path(command):
    """
        Given an invocation command, return the absolute path to the command. This
        works even if command has no path element and is present in PATH.
    """
    if os.path.dirname(command):
        return os.path.dirname(command)
    else:
        programs = path.programs_path()
        m = path.glob(programs, [command, command + '.exe'])
        if not len(m):
            print "Could not find:", command, "in", programs
        return os.path.dirname(m[0])

def get_data_files(self):
    data_files = []
    for package in find_packages():
        path = Path(package)
        if not path.is_dir():
            continue
        for child in path.glob('**/*'):
            if not child.is_file():
                continue
            if child.match('*.py'):
                continue
            if child.match('*.pyc'):
                continue
            data_files.append((str(child.parent), [str(child)]))
    return data_files

def create_dataset_from_dir2subdir(dir, nitems=None):
    """ Create a list of paths from a nested dir with two levels, selecting
    nitems from each dir of the last level """
    EXT_RECURSIVE = ['**/*.jpg', '**/*.JPG', '**/*.png', '**/*.ppm']
    from collections import OrderedDict
    path = Path(dir)
    id_names = [i.parts[-1] for i in list(path.glob('*')) if os.path.isdir(i)]
    n_items_per_last_level = nitems
    data_dict = OrderedDict({i: {} for i in sorted(id_names)})
    data_dict_nitems = OrderedDict({i: {} for i in sorted(id_names)})

    # INITIALISE
    for i in id_names:
        for j in os.listdir(path / i):
            data_dict[i][j] = None

    # FILLING
    import random
    random.seed()
    for i in data_dict.keys():
        for j in data_dict[i].keys():
            txt_pl = reduce(
                operator.add, [list((path / i / j).glob('**/*.isomap.png'))], [])
            # DICT WITH ALL PATHS
            data_dict[i][j] = txt_pl
            # DICT WITH MAX(N) PATHS
            random_idx = random.sample(range(len(txt_pl)),
                                       min(len(txt_pl), n_items_per_last_level))
            txt_pl_nitems = [str(txt_pl[i]) for i in random_idx]
            data_dict_nitems[i][j] = txt_pl_nitems

    print('Total found IDs in path %s: %d' % (path, len(data_dict_nitems)),
          '.. and selected %d per ID' % n_items_per_last_level)
    data_list_n_shuffled = reduce_and_shuffle_dict_values_nested1level(data_dict_nitems)
    return data_list_n_shuffled

def convert_all(folder, dest_path='.', force_all=False):
    "Convert modified notebooks in `folder` to html pages in `dest_path`."
    path = Path(folder)
    changed_cnt = 0
    for fname in path.glob("*.ipynb"):
        # only rebuild modified files
        fname_out = Path(dest_path) / fname.with_suffix('.html').name
        if not force_all and fname_out.exists():
            in_mod = os.path.getmtime(fname)
            out_mod = os.path.getmtime(fname_out)
            if in_mod < out_mod:
                continue
        print(f"converting: {fname} => {fname_out}")
        changed_cnt += 1
        convert_nb(fname, dest_path=dest_path)
    if not changed_cnt:
        print("No notebooks were modified")

def get_absolute_tool_path(command):
    """
        Given an invocation command, return the absolute path to the command. This
        works even if command has no path element and is present in PATH.
    """
    assert isinstance(command, basestring)
    if os.path.dirname(command):
        return os.path.dirname(command)
    else:
        programs = path.programs_path()
        m = path.glob(programs, [command, command + '.exe'])
        if not len(m):
            if __debug_configuration:
                print "Could not find:", command, "in", programs
            return None
        return os.path.dirname(m[0])

def get_hash(input_paths, init_string=""):
    """Get hash of files or directories."""
    hasher = hashlib.sha256()
    hasher.update(init_string.encode('utf-8'))
    for path in sorted(input_paths):
        path = Path(path)
        if path.is_file():
            file_paths = [path]
        elif path.is_dir():
            file_paths = sorted(p for p in path.glob('**/*') if p.is_file())
        else:
            raise ValueError(
                f"x-hash-paths: {path} is not a file or directory")
        for file_path in file_paths:
            print(f" - Hashing {file_path}")
            hasher.update(file_path.read_bytes())
    return hasher.hexdigest()[:32]

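# Illustrative call to get_hash above: files and directory trees are hashed in sorted
# order with SHA-256 and the first 32 hex digits are returned. The paths and the
# init_string value are made up for the example.
print(get_hash(['setup.py', 'src'], init_string='v1'))
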
def check_tool_aux(command):
    """ Checks if 'command' can be found either in path
        or is a full name to an existing file.
    """
    assert isinstance(command, str)
    dirname = os.path.dirname(command)
    if dirname:
        if os.path.exists(command):
            return command
        # Both NT and Cygwin will run .exe files by their unqualified names.
        elif on_windows() and os.path.exists(command + '.exe'):
            return command
        # Only NT will run .bat files by their unqualified names.
        elif os_name() == 'NT' and os.path.exists(command + '.bat'):
            return command
    else:
        paths = path.programs_path()
        if path.glob(paths, [command]):
            return command

def find_files(base_dir):
    path = Path(base_dir)
    yield from path.glob('**/*.png')

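# Usage sketch for find_files above: lazily yields every .png below the given
# directory. 'assets' is an illustrative directory name.
for png in find_files('assets'):
    print(png)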