def download_zip_folder(url: str, destination: os.PathLike,
                        show_size: bool = False, skip_if_exists: bool = True):
    """Download and extract a ZIP folder from a URL.

    The file is first downloaded to a temporary location and then extracted to
    the target folder.

    Args:
        url (str): the URL of the ZIP file
        destination (os.PathLike): the destination folder
        show_size (bool, optional): whether to display a progress bar. Defaults to False.
        skip_if_exists (bool, optional): if true, will do nothing when the destination
            path exists already. Defaults to True.
    """
    destination = URI(destination)
    if skip_if_exists and destination.exists():
        logger.info(
            f"Not downloading {url} to {destination} again because it already exists"
        )
        return
    with tempfile.TemporaryDirectory() as tmp_dir:
        zip_file = Path(tmp_dir) / f"{destination.name}.zip"
        download_file(url, zip_file, show_size)
        logger.info(f"Unzipping {zip_file} to {destination}")
        shutil.rmtree(destination, ignore_errors=True)
        with zipfile.ZipFile(zip_file, "r") as f:
            f.extractall(destination, _get_members(f))
    logger.info(f"Finished downloading {url} to {destination}")

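# Usage sketch (not from the original source): how download_zip_folder might be
# called. The URL and destination below are hypothetical; the function itself
# relies on its module's URI, logger, download_file and _get_members helpers.
download_zip_folder(
    "https://example.com/archives/dataset.zip",  # hypothetical URL
    "data/dataset",
    show_size=True,
    skip_if_exists=True,
)
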
def __init__(self, path: os.PathLike = pathlib.Path.home() / ".scidd_cache",
             name: str = "_SciDD_API_Cache.sqlite"):
    path = pathlib.Path(path)  # accept any os.PathLike, including plain strings
    self._dbFilepath = path / name  # the full path + filename for the cache

    # create database path if needed
    if not path.exists():
        try:
            os.makedirs(path)
        except FileExistsError as e:
            logger.debug(
                f"Path '{path}' appears not to exist, but 'os.makedirs(path)' is raising FileExistsError: {e}"
            )
        except OSError as e:
            raise OSError(f"Unable to create specified path '{path}'; error: {e}")

    if path.is_symlink():  # or os.path.islink(path)
        if not os.path.exists(os.readlink(path)):  # broken link
            raise Exception(
                f"The path where the SciDD cache is expected ('{path}') is a symlink pointing to a target that is no longer there. "
                + "Either remove the symlink or fix the destination.")

    self._initialize_database()

def put_license_in_file(
    spdx_identifier: str, root: PathLike = None, destination: PathLike = None
) -> None:
    """Download a license and put it in the correct file.

    This function exists solely for convenience.

    :param spdx_identifier: SPDX identifier of the license.
    :param root: The root of the project.
    :param destination: An override path for the destination of the license.
    :raises requests.RequestException: if the license could not be downloaded.
    :raises FileExistsError: if the license file already exists.
    """
    header = ""
    if destination is None:
        licenses_path = find_licenses_directory(root=root)
        licenses_path.mkdir(exist_ok=True)
        destination = licenses_path / "".join((spdx_identifier, ".txt"))
    else:
        is_exception = spdx_identifier in EXCEPTION_MAP
        header = (
            "Valid-{licexc}-Identifier: {identifier}\n"
            "{licexc}-Text:\n\n".format(
                identifier=spdx_identifier,
                licexc="Exception" if is_exception else "License",
            )
        )
        destination = Path(destination)

    if destination.exists():
        raise FileExistsError(errno.EEXIST, "File exists", str(destination))

    text = download_license(spdx_identifier)
    with destination.open("w") as fp:
        fp.write(header)
        fp.write(text)

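# Usage sketch (an assumption, not part of the original module): fetch the MIT
# license into the project's LICENSES directory, or into an explicit override
# path. Both target paths are hypothetical.
put_license_in_file("MIT")                                     # -> LICENSES/MIT.txt
put_license_in_file("MIT", destination=Path("legal/MIT.txt"))  # explicit target
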
def save_skeleton(path: PathLike, adjacency: List[List[int]],
                  coordinates: np.ndarray):
    path = Path(path)
    assert not path.exists()
    skeleton = {'adjacency': adjacency, 'coordinates': coordinates.tolist()}
    with path.open('wb') as file:
        pickle.dump(skeleton, file)

def write_loom(filename: PathLike, adata: AnnData, write_obsm_varm: bool = False):
    filename = Path(filename)
    row_attrs = {k: np.array(v) for k, v in adata.var.to_dict('list').items()}
    row_attrs['var_names'] = adata.var_names.values
    col_attrs = {k: np.array(v) for k, v in adata.obs.to_dict('list').items()}
    col_attrs['obs_names'] = adata.obs_names.values

    if adata.X is None:
        raise ValueError('loompy does not accept empty matrices as data')

    if write_obsm_varm:
        for key in adata.obsm.keys():
            col_attrs[key] = adata.obsm[key]
        for key in adata.varm.keys():
            row_attrs[key] = adata.varm[key]
    else:
        if len(adata.obsm.keys()) > 0 or len(adata.varm.keys()) > 0:
            # note: keys views cannot be concatenated with `+`; convert to lists
            logger.warning(
                'The loom file will lack these fields:\n{}\n'
                'Use write_obsm_varm=True to export multi-dimensional annotations'
                .format(list(adata.obsm.keys()) + list(adata.varm.keys())))

    layers = {'': adata.X.T}
    for key in adata.layers.keys():
        layers[key] = adata.layers[key].T

    from loompy import create
    if filename.exists():
        filename.unlink()
    create(fspath(filename), layers, row_attrs=row_attrs, col_attrs=col_attrs)

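# Usage sketch, assuming standard anndata conventions; the matrix and file name
# are made up for illustration.
import anndata
import numpy as np

adata = anndata.AnnData(np.random.rand(10, 5))
write_loom("example.loom", adata, write_obsm_varm=False)
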
def cue_soundfile(
    self,
    file_path: PathLike,
    channel_count: Optional[int] = None,
    duration: Optional[float] = None,
    frame_count: int = 1024 * 32,
    starting_frame: int = 0,
    offset: Optional[float] = None,
) -> "supriya.nonrealtime.buffers.Buffer":
    if isinstance(file_path, str):
        file_path = pathlib.Path(file_path)
    if isinstance(file_path, pathlib.Path):
        assert file_path.exists()
        soundfile = supriya.soundfiles.SoundFile(str(file_path))
        channel_count = channel_count or soundfile.channel_count
    elif isinstance(file_path, type(self)):
        channel_count = channel_count or len(file_path.audio_output_bus_group)
    elif hasattr(file_path, "__session__"):
        channel_count = channel_count or getattr(
            file_path, "output_bus_channel_count"
        )
    buffer_ = self.add_buffer(
        channel_count=channel_count,
        duration=duration,
        frame_count=frame_count,
        offset=offset,
    )
    buffer_.read(
        file_path,
        leave_open=True,
        starting_frame_in_file=starting_frame,
        offset=offset,
    )
    return buffer_

def download_zip_folder_from_google_drive(file_id: str, destination: os.PathLike,
                                          show_size: bool = False,
                                          skip_if_exists: bool = True):
    """Download and extract a ZIP file from Google Drive.

    Args:
        file_id (str): the Google Drive file ID
        destination (os.PathLike): the destination folder
        show_size (bool, optional): whether to display a progress bar. Defaults to False.
        skip_if_exists (bool, optional): if true, will do nothing when the destination
            path exists already. Defaults to True.
    """
    destination = URI(destination)
    if skip_if_exists and destination.exists():
        logger.info(
            f"Not downloading {file_id} to {destination} again because it already exists"
        )
        return
    with tempfile.TemporaryDirectory() as tmp_dir:
        zip_file = Path(tmp_dir) / f"{destination.name}.zip"
        logger.info(f"Downloading {file_id} to {zip_file}")
        gdd.download_file_from_google_drive(file_id=file_id, dest_path=zip_file,
                                            overwrite=True, showsize=show_size)
        logger.info(f"Unzipping {zip_file} to {destination}")
        shutil.rmtree(destination, ignore_errors=True)
        with zipfile.ZipFile(zip_file, "r") as f:
            f.extractall(destination, _get_members(f))
    logger.info(f"Finished downloading {file_id} to {destination}")

def write_loom(filename: PathLike, adata: AnnData, write_obsm_varm: bool = False):
    filename = Path(filename)
    row_attrs = {k: np.array(v) for k, v in adata.var.to_dict("list").items()}
    row_names = adata.var_names
    row_dim = row_names.name if row_names.name is not None else "var_names"
    row_attrs[row_dim] = row_names.values
    col_attrs = {k: np.array(v) for k, v in adata.obs.to_dict("list").items()}
    col_names = adata.obs_names
    col_dim = col_names.name if col_names.name is not None else "obs_names"
    col_attrs[col_dim] = col_names.values

    if adata.X is None:
        raise ValueError("loompy does not accept empty matrices as data")

    if write_obsm_varm:
        for key in adata.obsm.keys():
            col_attrs[key] = adata.obsm[key]
        for key in adata.varm.keys():
            row_attrs[key] = adata.varm[key]
    elif len(adata.obsm.keys()) > 0 or len(adata.varm.keys()) > 0:
        logger.warning(
            f"The loom file will lack these fields:\n"
            f"{adata.obsm.keys() | adata.varm.keys()}\n"
            f"Use write_obsm_varm=True to export multi-dimensional annotations"
        )

    layers = {"": adata.X.T}
    for key in adata.layers.keys():
        layers[key] = adata.layers[key].T

    from loompy import create
    if filename.exists():
        filename.unlink()
    create(fspath(filename), layers, row_attrs=row_attrs, col_attrs=col_attrs)

def extract_download(
    url: str,
    directory: PathLike,
    filenames: List[str] = None,
    known_hash: str = None,
    overwrite: bool = False,
):
    if not isinstance(directory, Path):
        directory = Path(directory)

    if filenames is None:
        filenames = []

    if not directory.exists():
        directory.mkdir(parents=True, exist_ok=True)

    temporary_filename = directory / 'temp.tar.gz'
    logging.debug(f'downloading {url} -> {temporary_filename}')

    # store the archive at directory/temp.tar.gz; pooch expects the file name
    # and target directory as separate arguments
    temporary_filename = pooch.retrieve(url, known_hash=known_hash,
                                        fname=temporary_filename.name,
                                        path=directory)

    logging.debug(f'extracting {temporary_filename} -> {directory}')
    with tarfile.open(temporary_filename) as local_file:
        if len(filenames) > 0:
            for filename in filenames:
                if filename in local_file.getnames():
                    path = directory / filename
                    if not path.exists() or overwrite:
                        if path.exists():
                            os.remove(path)
                        local_file.extract(filename, directory)
        else:
            local_file.extractall(directory)

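# Usage sketch with a hypothetical archive URL: extract only two members of a
# tarball into ./downloads, skipping hash verification.
extract_download(
    "https://example.com/archives/mesh.tar.gz",  # hypothetical URL
    "downloads",
    filenames=["fort.13", "fort.14"],
    known_hash=None,
    overwrite=False,
)
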
def write(self, path: PathLike, overwrite: bool = False):
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    if path.exists() and overwrite is False:
        raise Exception('File exists, set overwrite=True to allow overwrite.')
    with open(path, 'w') as f:
        f.write(str(self))

def to_file(self, filename: PathLike = None, overwrite: bool = True):
    if not isinstance(filename, Path):
        filename = Path(filename)
    if overwrite or not filename.exists():
        content = typepigeon.convert_to_json(self._Configuration__configuration)
        with open(filename, "w") as output_file:
            yaml.safe_dump(content, output_file)

def ResolveJitter(
    cubes: list,
    conf: dict,
    jitter_path: os.PathLike,
    temp_token: str,
    keep=False,
):
    set_gen = ccd_set_generator(conf["ResolveJitter"])

    while True:
        (resjit_cubes, common_cube) = determine_resjit_set(cubes, set_gen)
        flats = make_flats(
            resjit_cubes, common_cube, conf, temp_token, keep=keep
        )
        if len(flats) + 1 == len(resjit_cubes):
            break

    # keep_regdefs() writes 'KEEP' into the files status of the regdef files.
    # Not sure that this is required, so skipping.
    #
    # run resolveJitter3HiJACK.m or resolveJitter4HiJACK.cc
    # rj_args = [
    #     resolve_jitter_path,
    #     str(jitter_path.parent),
    #     str(common_cube.get_obsid()),
    #     str(conf["AutoRegistration"]["ControlNet"]["Control_Lines"]),
    # ]
    # for f in flats:
    #     rj_args.append(f.relative_to(jitter_path.parent))
    # rj_args.append("-1")
    # logger.info(rj_args)
    # subprocess.run(rj_args, check=True)

    rj.start(
        flats[0], False,
        flats[1], False,
        flats[2], False,
        line_interval=conf["AutoRegistration"]["ControlNet"]["Control_Lines"],
        outdir=jitter_path.parent,
        outprefix=common_cube.get_obsid(),
        plotshow=False,
        plotsave=True,
        writecsv=False
    )

    # Just double-check that the file we expect was created:
    if not jitter_path.exists():
        raise FileNotFoundError(jitter_path)

    return

def __init__(self, path: os.PathLike):
    path = Path(path)  # os.PathLike may be a plain string; ensure Path methods exist
    self._path = path
    self._update = False
    if path.exists():
        with path.open("r") as fh:
            self._state = json.load(fh)
    else:
        self._state = None

def backup(path: os.PathLike, list_flag: str, archive: bool):
    """Back up android device."""
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)
    if path.exists():
        click.echo("Back up path already exists")
        sys.exit(0)

    server = Adb(auto_start=True)
    if server.state is not server.ConnectionState.CONNECTED:
        click.echo("Device not connected.")
        server.stop_server()
        sys.exit(0)
    log.info("Device connected")

    # get user installed packages
    apks = server.list_device(list_flag)
    log.info("Discovering apk paths, this may take a while...")
    # get the full path on the android filesystem for each installed package;
    # materialize the result so len() works below
    parsed_paths = list(map_apk_paths(apks))
    log.info("Found %s installed packages", len(apks))

    try:
        os.makedirs(path)
    except FileExistsError:
        click.echo("Back up destination already exists", err=True)
        sys.exit(-1)

    for progress, item in enumerate(parsed_paths, 1):
        msg = f"[{progress:4}/{len(parsed_paths):4}] pulling ... {item.name}"
        log.info(msg)
        try:
            server.pull(item.fullpath)  # get apk from device
        except AdbError as error:
            click.echo("Error during pulling {0}\n{1}".format(item, str(error)),
                       err=True)
            server.stop_server()
            sys.exit(-1)
        # all apks retrieved with adb are saved under the same name, `base.apk`;
        # use the AbsPath named tuple to give each pulled apk its correct name
        # relative to the filesystem
        dest = os.path.join(path, f"{item.name}.apk")
        shutil.move("base.apk", dest)

    if archive:
        log.info(f"Creating zip archive: {path}.zip, this may take a while")
        zipify(path, path.parent / (path.name + ".zip"))
        shutil.rmtree(path)

    server.stop_server()
    log.info("Back up done.")

def create_opmap_from_file(file_path: os.PathLike) -> Dict[str, int]:
    """Return an opcode map dictionary of OPNAME : OPCODE from a JSON file.

    The JSON file must enumerate a complete opmap for the specified Python
    version. Even if only a few bytes have been swapped, all operations and
    opcodes must have a value for the version specified.

    Parameters
    ----------
    file_path : os.PathLike
        The path to the JSON remapping file. This file *must* follow this
        format.

        .. code-block::

            {
                "python_version": "<major>.<minor>(.<patch>)",
                "remapped_opcodes": [
                    {
                        "opcode": 1,
                        "opname": "POP_TOP",
                        "remapped_value": 5
                    },
                    {
                        "opcode": 2,
                        "opname": "ROT_TWO",
                        "remapped_value": 4
                    },
                    ...

    Returns
    -------
    Dict[str, int]
        A dictionary of OPNAME : OPCODE. For example::

            {
                'POP_TOP': 5,
                'ROT_TWO': 4,
                ...
            }
    """
    file_path = Path(file_path)  # os.PathLike may be a plain string
    if not file_path.exists():
        raise FileNotFoundError(file_path)

    remappings: Dict[str, int] = {}
    with file_path.open("r") as remapping_file:
        file_json = json.loads(remapping_file.read())  # parsed JSON dict
        version: str = file_json["python_version"]
        subdict: Dict[str, Union[str, int]]
        for subdict in file_json["remapped_opcodes"]:
            remappings[subdict["opname"]] = subdict["remapped_value"]

    if not validate_opmap(version, remappings):
        raise RuntimeError("[!] Opcode map is not valid!")

    return remappings

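# Usage sketch: loading a remapping file. The path is hypothetical; the JSON
# layout must match the format documented in the docstring above.
opmap = create_opmap_from_file(Path("remappings/3.9.json"))
print(opmap["POP_TOP"])
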
def download_mesh(url: str, directory: PathLike, overwrite: bool = False):
    if not isinstance(directory, Path):
        directory = Path(directory)
    if not directory.exists():
        directory.mkdir(parents=True, exist_ok=True)

    if not (directory / 'fort.14').exists() or overwrite:
        logging.info(f'downloading mesh files to {directory}')
        extract_download(url, directory, ['fort.13', 'fort.14'])

    return directory

def load_vol(filename: os.PathLike, dtype=np.float32, mmap_mode: str = None,
             shape: tuple = None) -> np.ndarray:
    """Load data from `.vol` file.

    The image shape is deduced from the `.vol.info` file. If this file is not
    present, the shape can be specified using the `shape` keyword.

    Parameters
    ----------
    filename : os.PathLike
        Path to the file.
    dtype : dtype, optional
        Numpy dtype of the data.
    mmap_mode : None, optional
        If not None, open the file using memory mapping. For more info on
        the modes, see:
        https://numpy.org/doc/stable/reference/generated/numpy.memmap.html
    shape : tuple, optional
        Tuple of three ints specifying the shape of the data (order: z, y, x).

    Returns
    -------
    result : np.ndarray
        Data stored in the file.
    """
    filename = Path(filename)

    if not filename.exists():
        raise IOError(f'No such file: {filename}')

    try:
        filename_info = filename.with_suffix(filename.suffix + '.info')
        if not shape:
            info = read_info(filename_info)
            shape = info['NUM_Z'], info['NUM_Y'], info['NUM_X']
    except FileNotFoundError:
        raise ValueError(
            f'Info file not found: {filename_info.name}, specify '
            'the volume shape using the `shape` parameter.') from None

    result: np.ndarray
    if mmap_mode:
        result = np.memmap(filename, dtype=dtype, shape=shape, mode=mmap_mode)
    else:
        result = np.fromfile(filename, dtype=dtype)
        result = result.reshape(shape)

    return result

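# Usage sketch with a hypothetical file: read a volume lazily via memory
# mapping, passing the shape explicitly in case no `.vol.info` file exists.
vol = load_vol("scan.vol", mmap_mode="r", shape=(128, 512, 512))
print(vol.shape, vol.dtype)
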
def to_file(self, filename: PathLike = None, overwrite: bool = True):
    if not isinstance(filename, Path):
        filename = Path(filename)
    if overwrite or not filename.exists():
        configuration = self._Configuration__configuration
        default = configuration["default"]
        # drop perturbation entries that merely repeat the default values
        for perturbation in configuration["perturbations"].values():
            for key in list(perturbation):
                if key in default and perturbation[key] == default[key]:
                    del perturbation[key]
        content = typepigeon.convert_to_json(configuration)
        with open(filename, "w") as output_file:
            yaml.safe_dump(content, output_file)

def write(self, filename: PathLike, overwrite: bool = False,
          include_version: bool = False) -> Path:
    """
    write this configuration to file

    :param filename: path to file
    :param overwrite: overwrite an existing file
    :param include_version: include NEMSpy version information
    :returns: path to written file
    """
    if not isinstance(filename, Path):
        filename = Path(filename)
    ensure_directory(filename.parent)

    output = f'{self}\n'
    if include_version:
        output = f'{self.version_header}\n' f'{output}'

    if filename.is_dir():
        filename = filename / self.name
        logging.debug(
            f'creating new file "{os.path.relpath(filename.resolve(), Path.cwd())}"'
        )

    if filename.exists():
        logging.debug(
            f'{"overwriting" if overwrite else "skipping"} existing file '
            f'"{os.path.relpath(filename.resolve(), Path.cwd())}"'
        )

    if not filename.exists() or overwrite:
        with open(filename, 'w', newline='\n') as output_file:
            output_file.write(output)

    return filename

def ensure_directory(directory: PathLike) -> Path:
    """
    ensure that a directory exists

    :param directory: directory path to ensure
    :returns: path to ensured directory
    """
    if not isinstance(directory, Path):
        directory = Path(directory)
    directory = directory.expanduser()
    if directory.is_file():
        directory = directory.parent
    if not directory.exists():
        directory.mkdir(parents=True, exist_ok=True)
    return directory

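# Usage sketch with a hypothetical path: if the given path points at an
# existing file, its parent directory is returned; otherwise the path itself
# is created as a directory.
log_dir = ensure_directory("~/project/logs")
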
def create(file_path: PathLike,
           exp_id: str,
           variables: Mapping[str, VarType],
           exp_title: str = '') -> ExperimentWriter:
    """
    Initializes an HDF5 file and constructs an experiment using it as the
    underlying structure.

    Parameters
    ----------
    file_path
        Path to the new HDF5 file. Should not exist, otherwise a
        FileExistsError will be raised.
    exp_id
        Unique ID for this experiment.
    variables
        A mapping, variable_name -> variable_type, indicating the desired
        variables to store under the experiment.
    exp_title
        The title, or one-line description, of this experiment.

    Returns
    -------
    exp_writer
        An ExperimentWriter instance.
    """
    # make sure parent folders exist, but file should be new
    file_path = Path(file_path)
    if file_path.exists():
        # file exists, don't delete but raise an error
        raise FileExistsError(file_path)
    file_path.parent.mkdir(exist_ok=True, parents=True)

    h5 = tables.open_file(str(file_path), mode='a', title='Experiment Data File')
    try:
        return _TopLevelExperimentWriter(h5, h5.root, exp_id, exp_title, variables)
    except ExperimentElementError:
        # error while creating experiment:
        # delete the file to avoid leaving junk around, then re-raise
        h5.close()
        file_path.unlink()
        raise

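# Usage sketch: create a fresh experiment file and write to it. The path, ID,
# title, and the `VarType.FLOAT` member are all hypothetical; consult the
# package's VarType definition for the real variable types.
writer = create(
    Path("experiments/run_001.h5"),
    exp_id="run_001",
    variables={"temperature": VarType.FLOAT},  # hypothetical VarType member
    exp_title="Thermal sweep",
)
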
def _validate_confd(confd: os.PathLike) -> None:
    confd = Path(confd)  # os.PathLike may be a plain string
    if not confd.exists():
        raise NoConfigDirError(f"{confd} does not exist.")
    if not confd.is_dir():
        raise ConfigDirNoDirError(f"{confd} is not a directory.")
    if stat.S_IMODE(confd.lstat().st_mode) > 0o550:
        oct_str = oct(stat.S_IMODE(confd.lstat().st_mode))
        raise ConfigDirOwnershipError(
            f"{confd} permissions are {oct_str}, max allowed is `0o550`")

    for child in confd.iterdir():
        if not child.is_file():
            _log.warning(
                f"Config dir '{confd}' should only contain files, not '{child}'!")
            continue
        if child.suffix != ".json":
            _log.warning(
                f"Config dir '{confd}' should only contain json files, not '{child.name}'!"
            )

def load(self, path: PathLike) -> None:
    """
    Load deck from a ydk file.

    Parameters
    ----------
    path : PathLike
        Deck will be loaded from this `path`.

    Raises
    ------
    FileNotFoundError
        raised when `path` does not exist
    ValueError
        raised when the suffix of `path` is not ydk
    """
    path = Path(path)
    if not path.exists():
        raise FileNotFoundError(f'File `{str(path)}` does not exist')
    if path.suffix != '.ydk':
        raise ValueError(
            f'The file extension is invalid. valid: ydk, got {path.suffix}'
        )

    box: List[int] = self.main
    with path.open() as f:
        for line in f.readlines():
            line = line.strip()
            if line == '#extra':
                box = self.extra
            elif line == '!side':
                box = self.side
            try:
                id = int(line)
                box.append(id)
            except ValueError:
                # non-numeric lines (comments and section markers) are ignored
                pass

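# Usage sketch: loading a deck from a hypothetical .ydk file. The class name
# `Deck` and its no-argument constructor are assumptions about the snippet's
# surrounding class.
deck = Deck()
deck.load("my_deck.ydk")
print(len(deck.main), len(deck.extra), len(deck.side))
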
def put_license_in_file(spdx_identifier: str, destination: PathLike) -> None:
    """Download a license and put it in the destination file.

    This function exists solely for convenience.

    :param spdx_identifier: SPDX License Identifier of the license.
    :param destination: Where to put the license.
    :raises requests.RequestException: if the license could not be downloaded.
    :raises FileExistsError: if the license file already exists.
    """
    header = ""
    destination = Path(destination)
    destination.parent.mkdir(exist_ok=True)

    if destination.exists():
        raise FileExistsError(errno.EEXIST, "File exists", str(destination))

    text = download_license(spdx_identifier)
    with destination.open("w", encoding="utf-8") as fp:
        fp.write(header)
        fp.write(text)

def _download_file(
    fname: Union[AVAILABLE_MODELS, AVAILABLE_DATA],
    branch: str,
    save_dir: PathLike,
    type: Literal["model", "data"],
) -> Path:
    """Download a file to disk if it does not already exist.

    Args:
        fname: The file name.
        branch: Which branch of the unlockNN repository to download from.
        save_dir: The directory to check for already-downloaded models and
            in which to save newly downloaded models.
        type: The type of file.

    Returns:
        The path to the downloaded file/folder.
    """
    save_dir = Path(save_dir)
    if not save_dir.exists():
        save_dir.mkdir()

    # Add the .parquet extension only if we're downloading data
    specific_dir = save_dir / (
        f"{fname}-{branch}" + (".parquet" if type == "data" else "")
    )
    url = MODELS_URL if type == "model" else DATA_URL
    download_url = url.format(branch=branch, fname=fname)

    if not specific_dir.exists():
        r = requests.get(download_url)
        if type == "model":
            tar_f = tarfile.open(fileobj=BytesIO(r.content))
            tar_f.extractall(specific_dir)
            tar_f.close()
        else:
            specific_dir.write_bytes(r.content)

    return specific_dir

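# Usage sketch with made-up arguments; the valid model and data names are
# defined by the unlockNN project's AVAILABLE_MODELS and AVAILABLE_DATA types,
# and "binary_e_form" is purely illustrative.
model_dir = _download_file(
    "binary_e_form",  # hypothetical model name
    branch="master",
    save_dir="models_cache",
    type="model",
)
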
def download_file(
    url: PathLike,
    filename: PathLike = None,
    directory: PathLike = None,
    show_progress: bool = True,
    silent: bool = False,
    timeout: int = 5,
) -> Path:
    """
    Download a file from a url and save it to the local filesystem. The file is saved to the
    current directory by default, or to `directory` if specified. If a filename is not given,
    the filename of the URL will be used.

    :param url: URL that points to the file to download
    :param filename: Name of the local file to save. Should point to the name of the file only,
                     not the full path. If None the filename from the url will be used
    :param directory: Directory to save the file to. Will be created if it doesn't exist.
                      If None the file will be saved to the current working directory
    :param show_progress: If True, show a TQDM progress bar
    :param silent: If True, do not print a message if the file already exists
    :param timeout: Number of seconds to wait for a server response before giving up
    :return: path to downloaded file
    """
    try:
        opener = urllib.request.build_opener()
        opener.addheaders = [("User-agent", "Mozilla/5.0")]
        urllib.request.install_opener(opener)
        urlobject = urllib.request.urlopen(url, timeout=timeout)
        if filename is None:
            filename = urlobject.info().get_filename() or Path(urllib.parse.urlparse(url).path).name
    # NOTE: HTTPError is a subclass of URLError, so it must be caught first
    except urllib.error.HTTPError as e:
        raise Exception(f"File downloading failed with error: {e.code} {e.msg}") from None
    except urllib.error.URLError as error:
        if isinstance(error.reason, socket.timeout):
            raise Exception(
                "Connection timed out. If you access the internet through a proxy server, please "
                "make sure the proxy is set in the shell from where you launched Jupyter. If your "
                "internet connection is slow, you can call `download_file(url, timeout=30)` to "
                "wait for 30 seconds before raising this error."
            ) from None
        raise  # re-raise any other URLError

    filename = Path(filename)
    if len(filename.parts) > 1:
        raise ValueError(
            "`filename` should refer to the name of the file, excluding the directory. "
            "Use the `directory` parameter to specify a target directory for the downloaded file."
        )

    # create the directory if it does not exist, and add the directory to the filename
    if directory is not None:
        directory = Path(directory)
        directory.mkdir(parents=True, exist_ok=True)
        filename = directory / Path(filename)

    # download the file if it does not exist, or if it exists with an incorrect file size
    urlobject_size = int(urlobject.info().get("Content-Length", 0))
    if not filename.exists() or (os.stat(filename).st_size != urlobject_size):
        progress_callback = DownloadProgressBar(
            total=urlobject_size,
            unit="B",
            unit_scale=True,
            unit_divisor=1024,
            desc=str(filename),
            disable=not show_progress,
        )
        urllib.request.urlretrieve(url, filename, reporthook=progress_callback.update_to)
        if os.stat(filename).st_size >= urlobject_size:
            progress_callback.update(urlobject_size - progress_callback.n)
            progress_callback.refresh()
    else:
        if not silent:
            print(f"'{filename}' already exists.")

    return filename.resolve()

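# Usage sketch: fetch a file into a target directory, letting the filename
# fall back to the name in the URL; the URL is hypothetical.
model_path = download_file(
    "https://example.com/models/model.onnx",
    directory="models",
    show_progress=True,
)
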
def write(mesh: {str: {str: (float, float)}}, path: PathLike, overwrite: bool = False):
    if not isinstance(path, pathlib.Path):
        path = pathlib.Path(path)

    triangles = mesh[MeshGeometryType.TRIANGLE.value]
    triangles.insert(0, 'type', MeshGeometryType.TRIANGLE.value)
    triangles.insert(1, 'id', triangles.index)

    quadrilaterals = mesh[MeshGeometryType.QUADRILATERAL.value]
    quadrilaterals.insert(0, 'type', MeshGeometryType.QUADRILATERAL.value)
    quadrilaterals.insert(1, 'id', quadrilaterals.index)

    nodes = mesh['ND']
    nodes.insert(0, 'type', 'ND')
    nodes.insert(1, 'id', nodes.index)

    if 'boundaries' in mesh:
        boundaries = mesh['boundaries']
        boundaries.insert(0, 'type', 'NS')
        boundaries.iloc[:, 2:] *= -1
    else:
        boundaries = None

    def float_format(value: float):
        return f'{value:<.16E}'

    if overwrite or not path.exists():
        with open(path, 'w') as f:
            f.write('MESH2D\n')
            if len(triangles) > 0:
                logging.debug('writing triangles')
                start_time = datetime.now()
                triangles.to_string(f, header=False, index=False, justify='left')
                f.write('\n')
                logging.debug(f'wrote triangles in {datetime.now() - start_time}')
            if len(quadrilaterals) > 0:
                logging.debug('writing quadrilaterals')
                start_time = datetime.now()
                quadrilaterals.to_string(f, header=False, index=False, justify='left')
                f.write('\n')
                logging.debug(f'wrote quadrilaterals in {datetime.now() - start_time}')

            logging.debug('writing nodes')
            start_time = datetime.now()
            nodes.to_string(f, header=False, index=False, justify='left',
                            float_format=float_format)
            f.write('\n')
            logging.debug(f'wrote nodes in {datetime.now() - start_time}')

            # `boundaries` is a DataFrame or None here, so test for None rather
            # than membership in `mesh` (DataFrames are unhashable)
            if boundaries is not None:
                logging.debug('writing boundaries')
                start_time = datetime.now()
                boundaries.to_string(f, header=False, index=False, justify='left')
                f.write('\n')
                logging.debug(f'wrote boundaries in {datetime.now() - start_time}')
        return 0  # for unittests
    else:
        logging.debug(f'skipping existing file "{path}"')
        return 1

def exists(localpath: PathLike) -> bool:
    localpath = Path(localpath)
    return localpath.exists()