def init_dir(adir: PathLike, exist_ok=False, parents=False, rmtree=False):
    """Create directory *adir* and return it as a :class:`Path`.

    :param adir: target directory (any path-like)
    :param exist_ok: passed through to :meth:`Path.mkdir`
    :param parents: passed through to :meth:`Path.mkdir`
    :param rmtree: if True, delete an existing directory tree first
    :returns: the created directory as a ``Path``
    """
    target = Path(adir)
    # Wipe an existing tree only when explicitly requested.
    if rmtree and target.is_dir():
        shutil.rmtree(target)
    target.mkdir(exist_ok=exist_ok, parents=parents)
    return target
def find_root_with_reason(
    criterion, start: _PathLike = None
) -> _typing.Tuple[_pathlib.Path, str]:
    """
    Find directory matching root criterion with reason.

    Recursively search parents of start path for directory
    matching root criterion with reason.
    """
    # TODO: Implement reasons
    # Normalize inputs.
    criterion = _as_root_criterion(criterion)
    start = as_start_path(start)

    # The start path itself may already satisfy the criterion.
    if start.is_dir() and criterion(start):
        return start, "Pass"

    # Walk upward through every ancestor directory.
    # TODO: Consider adding maximum depth
    # TODO: Consider limiting depth to path (e.g. "if p == stop: raise")
    for ancestor in start.parents:
        if criterion(ancestor):
            return ancestor, "Pass"

    raise RuntimeError("Project root not found.")
def write_csvs(
    dirname: PathLike, adata: AnnData, skip_data: bool = True, sep: str = ','
):
    """See :meth:`~anndata.AnnData.write_csvs`.

    Writes ``obs``, ``var``, ``obsm`` and ``varm`` (plus ``X`` unless
    *skip_data*) as ``.csv`` files under *dirname*; each ``uns`` entry goes
    into a ``uns/`` subdirectory. Sparse values are skipped with a warning.

    :param dirname: output directory (a trailing ``.csv`` suffix is stripped)
    :param adata: annotated data object to serialize
    :param skip_data: when True, the main matrix ``X`` is not written
    :param sep: field separator passed to :meth:`pandas.DataFrame.to_csv`
    """
    dirname = Path(dirname)
    if dirname.suffix == '.csv':
        dirname = dirname.with_suffix('')
    logger.info("writing '.csv' files to %s", dirname)
    if not dirname.is_dir():
        dirname.mkdir(parents=True, exist_ok=True)
    dir_uns = dirname / 'uns'
    if not dir_uns.is_dir():
        dir_uns.mkdir(parents=True, exist_ok=True)
    d = dict(
        obs=adata._obs,
        var=adata._var,
        obsm=adata._obsm.to_df(),
        varm=adata._varm.to_df(),
    )
    if not skip_data:
        d['X'] = pd.DataFrame(
            adata._X.toarray() if issparse(adata._X) else adata._X
        )
    d_write = {**d, **adata._uns}
    not_yet_raised_sparse_warning = True
    for key, value in d_write.items():
        if issparse(value):
            # Warn only once for the whole run, then skip all sparse entries.
            if not_yet_raised_sparse_warning:
                warnings.warn(
                    'Omitting to write sparse annotation.', WriteWarning
                )
                not_yet_raised_sparse_warning = False
            continue
        filename = dirname
        # Anything that is not one of the known slots came from `uns`.
        if key not in {'X', 'var', 'obs', 'obsm', 'varm'}:
            filename = dir_uns
        filename /= f'{key}.csv'
        df = value
        if not isinstance(value, pd.DataFrame):
            value = np.array(value)
            if np.ndim(value) == 0:
                value = value[None]  # promote scalars to 1-d so DataFrame accepts them
            try:
                df = pd.DataFrame(value)
            except Exception:
                # BUG FIX: report the type of the offending *value*, not the
                # exception class (`type(e)`), so the warning is actionable.
                warnings.warn(
                    f'Omitting to write {key!r} of type {type(value)}.',
                    WriteWarning,
                )
                continue
        df.to_csv(
            filename,
            sep=sep,
            header=key in {'obs', 'var', 'obsm', 'varm'},
            index=key in {'obs', 'var'},
        )
def write_csvs(dirname: PathLike, adata: AnnData, skip_data: bool = True, sep: str = ","):
    """See :meth:`~anndata.AnnData.write_csvs`.

    Writes ``obs``, ``var``, ``obsm`` and ``varm`` (plus ``X`` unless
    *skip_data*) as ``.csv`` files under *dirname*; each ``uns`` entry goes
    into a ``uns/`` subdirectory. Sparse values are skipped with a warning.

    :param dirname: output directory (a trailing ``.csv`` suffix is stripped)
    :param adata: annotated data object to serialize
    :param skip_data: when True, the main matrix ``X`` is not written
    :param sep: field separator passed to :meth:`pandas.DataFrame.to_csv`
    """
    dirname = Path(dirname)
    if dirname.suffix == ".csv":
        dirname = dirname.with_suffix("")
    logger.info(f"writing .csv files to {dirname}")
    if not dirname.is_dir():
        dirname.mkdir(parents=True, exist_ok=True)
    dir_uns = dirname / "uns"
    if not dir_uns.is_dir():
        dir_uns.mkdir(parents=True, exist_ok=True)
    d = dict(
        obs=adata._obs,
        var=adata._var,
        obsm=adata._obsm.to_df(),
        varm=adata._varm.to_df(),
    )
    if not skip_data:
        d["X"] = pd.DataFrame(
            adata._X.toarray() if issparse(adata._X) else adata._X)
    d_write = {**d, **adata._uns}
    not_yet_raised_sparse_warning = True
    for key, value in d_write.items():
        if issparse(value):
            # Warn only once for the whole run, then skip all sparse entries.
            if not_yet_raised_sparse_warning:
                warnings.warn("Omitting to write sparse annotation.", WriteWarning)
                not_yet_raised_sparse_warning = False
            continue
        filename = dirname
        # Anything that is not one of the known slots came from `uns`.
        if key not in {"X", "var", "obs", "obsm", "varm"}:
            filename = dir_uns
        filename /= f"{key}.csv"
        df = value
        if not isinstance(value, pd.DataFrame):
            value = np.array(value)
            if np.ndim(value) == 0:
                value = value[None]  # promote scalars to 1-d so DataFrame accepts them
            try:
                df = pd.DataFrame(value)
            except Exception:
                # BUG FIX: report the type of the offending *value*, not the
                # exception class (`type(e)`), so the warning is actionable.
                warnings.warn(
                    f"Omitting to write {key!r} of type {type(value)}.",
                    WriteWarning,
                )
                continue
        df.to_csv(
            filename,
            sep=sep,
            header=key in {"obs", "var", "obsm", "varm"},
            index=key in {"obs", "var"},
        )
def is_excluded(self, path: PathLike) -> bool:
    """Return True when *path*'s final component matches the exclude pattern.

    Patterns are applied with ``fullmatch``: the ``"d"`` pattern is used for
    existing directories, the ``"f"`` pattern for everything else.
    """
    if self.exclude is None:
        return False
    path = Path(path)
    # Select the directory or file pattern depending on what the path is.
    kind = "d" if path.is_dir() else "f"
    return self.exclude[kind].fullmatch(path.name) is not None
def _validate_confd(confd: os.PathLike) -> None: if not confd.exists(): raise NoConfigDirError(f"{confd} does not exist.") if not confd.is_dir(): raise ConfigDirNoDirError(f"{confd} is not a directory.") if stat.S_IMODE(confd.lstat().st_mode) > 0o550: oct_str = oct(stat.S_IMODE(confd.lstat().st_mode)) raise ConfigDirOwnershipError( f"{confd} ownership is {oct_str}, max allowed is `0o550`") for child in confd.iterdir(): if not child.is_file(): _log.warning( f"Config dir '{confd}' only contains files, not '{child}'!") continue if child.suffix != ".json": _log.warning( f"Config dir '{confd}' only contains json files, not '{child.name}'!" )
def write(self, filename: PathLike, overwrite: bool = False, include_version: bool = False) -> Path:
    """
    write this configuration to file

    :param filename: path to file
    :param overwrite: overwrite an existing file
    :param include_version: include NEMSpy version information
    :returns: path to written file
    """
    if not isinstance(filename, Path):
        filename = Path(filename)
    ensure_directory(filename.parent)

    # Render the configuration text, optionally prefixed by a version header.
    contents = f'{self}\n'
    if include_version:
        contents = f'{self.version_header}\n' f'{contents}'

    # A directory target means "write under the configuration's own name".
    if filename.is_dir():
        filename = filename / self.name
        logging.debug(
            f'creating new file "{os.path.relpath(filename.resolve(), Path.cwd())}"'
        )

    already_exists = filename.exists()
    if already_exists:
        logging.debug(
            f'{"overwriting" if overwrite else "skipping"} existing file "{os.path.relpath(filename.resolve(), Path.cwd())}"'
        )

    # Write only when the file is new or overwriting was requested.
    if overwrite or not already_exists:
        with open(filename, 'w', newline='\n') as stream:
            stream.write(contents)

    return filename
def is_dir(localpath: PathLike) -> bool:
    """Return True when *localpath* refers to an existing local directory."""
    return Path(localpath).is_dir()