def set_up(self) -> None:
    '''
    Per-test set-up: build the repository paths, a ConfigContext, inject
    required config values, and construct the FileTreeRepository under test.
    '''
    # ---
    # Paths
    # ---
    # Root of the on-disk test repository file tree.
    self.root = zpath.repository_file_tree()
    # Temp paths start unset; presumably populated later by the test
    # (e.g. via `_set_temp_paths()` — see `test_path_temp`).
    self.root_temp = None
    self.filename = 'saved.yaml'
    # Relative path of the saved-game dir, and the full path to the file.
    self.path_rel = paths.cast('saved', 'test-campaign', 'game')
    self.path_file = paths.cast(self.root, self.path_rel, self.filename)
    self.path_temp = None
    self.path_temp_file = None

    # ---
    # Create a Repo.
    # ---
    self.context = ConfigContext(self.root,
                                 self.dotted,
                                 id=zpath.config_id(self.type, None))

    # Finish set-up. Inject stuff repo needs to init proper - force them to
    # be this for this test.
    self.config.ut_inject(self.root,
                          Document.CONFIG,
                          'data', 'repository', 'directory')
    self.config.ut_inject('veredi.paths.sanitize.human',
                          Document.CONFIG,
                          'data', 'repository', 'sanitize')

    # Should be enough info to make our repo now.
    self.repo = FileTreeRepository(self.context)
def set_dotted(self,
               filename: Union[str, paths.Path],
               *dotted: label.LabelInput) -> None:
    '''
    Set test class's `dotted` class-level descriptor based on `filename`
    and `dotted`.
    '''
    # Normalize `filename` to a Path, then store it with the rest of the
    # dotted label pieces as one tuple.
    self.dotted = (paths.cast(filename), *dotted)
def _helper_test_path(self,
                      *unsafe: str,
                      expected: str = None,
                      context: DataSaveContext = None,
                      ensure: bool = False,
                      glob: bool = False) -> paths.Path:
    '''
    Do the test for repo._path() in a subTest() based on input params.

    `unsafe`: path pieces to be safed by `repo._path()`.
    `expected`: the path we expect `_path()` to produce.
    `context`: load/save context; its `action` decides the expectations.
    `ensure`: ask `_path()` to create parent directories (SAVE only).
    `glob`: ask `_path()` for a glob path (expected to raise on SAVE).

    Returns safe'd path (or None for the expected-error glob+SAVE case).
    '''
    with self.subTest(unsafe=unsafe,
                      expected=expected,
                      action=context.action,
                      ensure=ensure,
                      glob=glob):
        expected = paths.cast(expected)
        # Remember whether the parent existed before, so we can tell if
        # `ensure` actually created it.
        preexisting = expected.parent.exists()
        safe = None

        # ------------------------------
        # If it's a SAVE, then it should throw on glob=TRUE
        # ------------------------------
        # Does this safe a path?
        if glob and context.action == DataAction.SAVE:
            with log.LoggingManager.disabled():
                with self.assertRaises(SaveError):
                    safe = self.repo._path(*unsafe,
                                           context=context,
                                           ensure=ensure,
                                           glob=glob)

        # ------------------------------
        # If it's a LOAD or non-glob SAVE, it should just be ok.
        # ------------------------------
        else:
            safe = self.repo._path(*unsafe,
                                   context=context,
                                   ensure=ensure,
                                   glob=glob)
            self.assertNotEqual(unsafe, safe)
            self.assertEqual(safe, expected)

            # Did it ensure the parent(s) exist if it was asked to?
            if ensure and context.action == DataAction.SAVE:
                self.assertTrue(expected.parent.exists())
                self.assertTrue(safe.parent.exists())
                self.assertFalse(
                    preexisting,
                    "Cannot ensure parent's creation if "
                    "parent already exists... "
                    f"parent: {safe.parent}")

            return safe

    # glob+SAVE case: the call raised, so there is no path to return.
    return None
def test_path_safed(self) -> None:
    '''
    Very basic check that the repo can "safe" a path string.
    '''
    self._set_up_repo()

    # The '/' in the name should be sanitized away ('user/name' ->
    # 'user_name').
    raw = 'user/name'
    safed = self.repo._path_safed(raw)

    self.assertNotEqual(raw, safed)
    self.assertEqual(safed, paths.cast('user_name'))

    self._tear_down_repo()
def test_load(self):
    '''
    Fire a DataLoadRequest through the event system; verify a _LoadedEvent
    is published, the load context gets repo meta-data, and the loaded
    data matches the file on disk.
    '''
    self.set_all_events_external(True)
    self.set_up_events(clear_self=True, clear_manager=True)

    # Create our load request.
    load_ctx = self.context_load(self.TestLoad.PLAYER)
    # Any old id and type...
    event = DataLoadRequest(
        42,
        43,
        load_ctx)

    # No events should have been received yet.
    self.assertFalse(self.events)

    # Shouldn't have repo context yet - haven't given it to repo yet.
    repo_ctx = load_ctx.repo_data
    self.assertFalse(repo_ctx)

    # Trigger the load...
    self.trigger_events(event)
    self.assertEqual(len(self.events), 1)
    self.assertIsInstance(self.events[0], _LoadedEvent)

    # And now the repo context should be there.
    repo_ctx = load_ctx.repo_data
    self.assertTrue(repo_ctx)
    self.assertIn('meta', repo_ctx)
    self.assertTrue(repo_ctx['meta'])
    self.assertIn('path', repo_ctx['meta'])
    self.assertTrue(repo_ctx['meta']['path'])
    self.assertIn('root', repo_ctx['meta']['path'])
    self.assertTrue(repo_ctx['meta']['path']['root'])

    # ...and it should have the load path it used in it.
    self.assertIn('paths', repo_ctx)
    self.assertTrue(repo_ctx['paths'])
    self.assertIsInstance(repo_ctx['paths'], list)
    self.assertEqual(len(repo_ctx['paths']), 1)
    load_path = paths.cast(repo_ctx['paths'][0])
    self.assertTrue(load_path)
    self.assertTrue(load_path.exists())

    # read file directly, assert contents are same.
    with load_path.open(mode='r') as file_stream:
        self.assertIsNotNone(file_stream)
        file_data = file_stream.read(None)
        # Rewind the event's data stream before reading it.
        repo_data = self.events[0].data(seek_to=0)
        self.assertIsNotNone(file_data)
        self.assertIsNotNone(repo_data)
        self.assertEqual(repo_data, file_data)
def path_root(klass: Type['ConfigRegistration'],
              entry: Dict[str, Any],
              config: 'Configuration') -> Nullable[paths.Path]:
    '''
    Returns the PATH_ROOT entry of this registration entry.

    PATH_ROOT is the resolved (absolute) path to the root of the file tree
    we should search for registration.
    '''
    # Pull the raw PATH_ROOT field out of the entry; let the config turn
    # it into a path.
    root = config.path(klass._get(klass.PATH_ROOT, entry))

    # Nothing there? Hand back whatever falsy value we got.
    if not root:
        return root

    # Anchor relative paths at the current working directory...
    if not root.is_absolute():
        root = paths.cast(os.getcwd()) / root
    # ...and resolve to clean up "..", symlinks, etc.
    return root.resolve()
def do_load_test(
        self,
        load: Optional['Test_FileTreeRepo.TaxonCtx'] = None,
        min_len: int = 1024,
) -> None:
    '''
    Build a load context for `load`, run it through the repo, and verify
    the loaded stream plus the repo's context meta-data.

    `min_len`: minimum amount of data expected in the loaded stream and
    the file on disk.
    '''
    with log.LoggingManager.on_or_off(self.debugging):
        context = self.context_load(load)

        # Did we get something?
        self.assertTrue(context)
        self.assertIsInstance(context, DataLoadContext)
        self.assertTrue(context.taxon)
        self.assertIsInstance(context.taxon, Taxon)
        self.assertIsInstance(context.taxon, SavedTaxon)
        self.assertTrue(context.dotted)
        self.assertEqual(context.dotted, self.dotted)
        self.assertTrue(context.action)
        self.assertIsInstance(context.action, DataAction)
        self.assertEqual(context.action, DataAction.LOAD)

        # Shouldn't have repo context yet - haven't given it to repo yet.
        repo_ctx = context.repo_data
        self.assertFalse(repo_ctx)

        # Ok; give to repo to load...
        loaded_stream = self.repo.load(context)
        self.assertIsNotNone(loaded_stream)

        # And now the repo context should be there.
        repo_ctx = context.repo_data
        self.assertTrue(repo_ctx)
        self.assertTrue(repo_ctx['meta'])
        self.assertTrue(repo_ctx['paths'])
        self.assertIsInstance(repo_ctx['paths'], list)
        self.assertEqual(len(repo_ctx['paths']), 1)

        # ....and make sure the path exists.
        path = paths.cast(repo_ctx['paths'][0])
        self.assertTrue(path)

        # There should be a good amount of data in that file... whatever
        # it is.
        data = self._helper_data_stream(loaded_stream, min_len)

        # read file directly, assert contents are same.
        self._helper_file_contents(path, min_len, data)
def path(self, *path_input: paths.PathType) -> paths.Path:
    '''
    Takes input path pieces and returns a resolved path.

    If the input is Falsy, use the config file's directory as the path.
    If the input is absolute, use it as the path.
    If the input is relative, assume it is relative to the config file's
    path, make it absolute, and use it.

    Resolve path to get rid of "~", "..", etc and return.

    NOTE: the varargs parameter was renamed from `input` to avoid
    shadowing the builtin; varargs cannot be passed by keyword, so no
    caller is affected.
    '''
    # Nothing provided? Default to the config file's own directory.
    if not path_input:
        return self.path_config_dir()

    path = paths.cast(*path_input)
    if not path.is_absolute():
        # Relative paths are relative to the config file's directory.
        path = self.path_config_dir() / path

    # Clean up "~", "..", symlinks, etc.
    return path.resolve()
def _key(self, context: DataBareContext) -> paths.Path:
    '''
    Turns load/save meta-data in the context into a key we can use to
    retrieve the data.

    Raises (via `self._log_exception`) the repo's error type if the
    context has no key.

    Returns the safed path used as the key.
    '''
    self._log_data_processing(self.dotted,
                              "Getting data from context to "
                              "create key...")
    # A key is required; no key is a hard error.
    if not context.key:
        self._log_data_processing(self.dotted,
                                  "Context must have a key: {}",
                                  context.key,
                                  context=context,
                                  success=False)
        raise self._log_exception(
            self._error_type(context),
            "Context must have a key: {}",
            context.key,
            context=context)

    # Get the path to the file. Should be a full path for
    # FileBareRepository.
    key = paths.cast(context.key)

    # Make sure our 'key' (path) is safe to use and is in the temp dir if
    # needed.
    key = self._path(*key.parts,
                     context=context,
                     ensure=True,
                     glob=False)

    self._log_data_processing(self.dotted,
                              "Created key: {}",
                              key,
                              context=context,
                              success=True)
    return key
def _configure(self,
               context: Optional[ConfigContext],
               require_config: bool = True) -> None:
    '''
    Allows repos to grab anything from the config data that they need to
    set up themselves.

    `require_config`: if True, a missing/Null config raises; if False,
    the repo configures itself from the context alone.
    '''
    self._log_group_multi(self._LOG_INIT,
                          self.dotted,
                          "FileRepository(Base) configure...")

    # ------------------------------
    # Get Config.
    # ------------------------------
    config = background.config.config(self.klass,
                                      self.dotted,
                                      context,
                                      raises_error=require_config)
    if null_or_none(config):
        if not require_config:
            # Fine; carry on without one.
            self._log_group_multi(self._LOG_INIT,
                                  self.dotted,
                                  "Config not required and is Null/None.",
                                  log_minimum=log.Level.DEBUG)
        else:
            # Required but missing: log and raise.
            self._log_group_multi(self._LOG_INIT,
                                  self.dotted,
                                  "Config required and is Null/None!",
                                  log_minimum=log.Level.ERROR,
                                  log_success=False)
            msg = (f"{self.klass}: "
                   "Configuration required, but found Null/None!")
            raise background.config.exception(context, msg)

    # ------------------------------
    # Game ID
    # ------------------------------
    # Grab our primary id from the context too.
    self._primary_id = ConfigContext.id(context)
    self._log_group_multi(self._LOG_INIT,
                          self.dotted,
                          "Set primary-id to: {}",
                          self._primary_id,
                          log_minimum=log.Level.DEBUG)

    # ------------------------------
    # Paths
    # ------------------------------
    self._log_group_multi(self._LOG_INIT,
                          self.dotted,
                          "Setting up paths...",
                          self._primary_id,
                          log_minimum=log.Level.DEBUG)

    # ---
    # Path Safing
    # ---
    self._init_path_safing(context, require_config)

    # ---
    # Repo Paths
    # ---
    # Start at ConfigContext's path...
    self._root = ConfigContext.path(context)
    # ...and then it depends.
    if not require_config:
        # No config required. So the root is.... done.
        self._log_group_multi(self._LOG_INIT,
                              self.dotted,
                              "No config required; set root to "
                              "context path: {}",
                              self._root,
                              log_minimum=log.Level.DEBUG)
    else:
        # We have a config. So add config's repo path on top of it (in case
        # it's a relative path (pathlib is smart enough to correctly handle
        # when it's not)).
        self._root = self._root / paths.cast(
            config.get_data(*self._PATH_KEYCHAIN))
        # Resolve it to turn into absolute path and remove ".."s and stuff.
        self._root = self._root.resolve()
        self._log_group_multi(self._LOG_INIT,
                              self.dotted,
                              "Set root based on context and config: {}",
                              self._root,
                              log_minimum=log.Level.DEBUG)

    # Now we can set the temp root based on root.
    self._root_temp = self._path_temp()
    self._log_group_multi(self._LOG_INIT,
                          self.dotted,
                          "Set root-temp to: {}",
                          self._root_temp,
                          log_minimum=log.Level.DEBUG)

    # ------------------------------
    # Background
    # ------------------------------
    # Add our data to the background context.
    self._make_background()
    self._log_group_multi(self._LOG_INIT,
                          self.dotted,
                          "Made background data.",
                          log_minimum=log.Level.DEBUG)

    # ------------------------------
    # Done.
    # ------------------------------
    self._log_group_multi(self._LOG_INIT,
                          self.dotted,
                          "FileRepository._configure() completed.",
                          log_minimum=log.Level.DEBUG)
def _path_temp(
        self,
        path_non_temp: Optional[paths.PathType] = None,
        context: Optional['VerediContext'] = None,
        raise_errors: bool = True,
) -> Nullable[paths.Path]:
    '''
    Returns a path to either our temp directory, a path /in/ our temp
    directory, or Null().

    `path_non_temp`: optional path to translate into the temp dir.
    `context`: used for error/log data.
    `raise_errors`: if False, failures log a warning and return Null()
    instead of raising the repo's error type.

    FIX: the logging calls on the three successful outcomes previously
    passed `success=False`; they now pass `success=True` (matching e.g.
    `_key()`'s "Created key" log). Failure paths still log
    `success=False`.
    '''
    self._log_data_processing(self.dotted,
                              "Get temp path for non-temp: {}...",
                              paths.to_str(path_non_temp),
                              context=context)

    path_non_temp = (paths.cast(path_non_temp)
                     if path_non_temp else
                     None)
    path_temp = None

    # ------------------------------
    # No `self.root` Cases:
    # ------------------------------
    # No root is possible for some FileRepositories...
    # FileBareRepository was like that for a long time.
    if not self.root():
        # ------------------------------
        # Invalid.
        # ------------------------------
        # No root and no input? Gonna have a bad time.
        if not path_non_temp:
            msg = "Cannot make a temp path: no root and no path provided."
            self._log_data_processing(self.dotted,
                                      msg + "root: {}, path: {}",
                                      paths.to_str(self.root()),
                                      paths.to_str(path_non_temp),
                                      context=context,
                                      success=False)
            if raise_errors:
                error = self._error_type(context)(
                    msg,
                    data={
                        'root': paths.to_str(self.root()),
                        'path': paths.to_str(path_non_temp),
                    })
                raise self._log_exception(error, msg, context=context)
            self._log_warning(msg + "root: {}, path: {}",
                              paths.to_str(self.root()),
                              paths.to_str(path_non_temp),
                              context=context)
            return Null()

        # No root and input is relative? Can't be sure it's valid so don't
        # return anything.
        if not path_non_temp.is_absolute():
            msg = ("Cannot make a temp path: no root and provided path "
                   "is not absolute.")
            self._log_data_processing(self.dotted,
                                      msg + "root: {}, path: {}",
                                      str(self.root()),
                                      paths.to_str(path_non_temp),
                                      context=context,
                                      success=False)
            if raise_errors:
                error = self._error_type(context)(
                    msg,
                    data={
                        'root': self.root(),
                        'path': paths.to_str(path_non_temp),
                        'absolute?': path_non_temp.is_absolute(),
                    })
                raise self._log_exception(error, msg, context=context)
            else:
                self._log_warning(msg + "root: {}, path: {}",
                                  str(self.root()),
                                  paths.to_str(path_non_temp),
                                  context=context)
            return Null()

        # Otherwise, we have no root and an absolute path. Best we can do
        # is make sure the temp dir is in there somewhere? So... complain
        # as well.
        if self._TEMP_PATH not in path_non_temp.parts:
            msg = ("Cannot create a temp path when we have no repository "
                   f"root and '{self._TEMP_PATH}' is not in input: "
                   f"{path_non_temp}")
            self._log_data_processing(self.dotted,
                                      msg,
                                      context=context,
                                      success=False)
            if raise_errors:
                error = self._error_type(context)(
                    msg,
                    data={
                        'root': self.root(),
                        'path': paths.to_str(path_non_temp),
                        'absolute?': path_non_temp.is_absolute(),
                    })
                raise self._log_exception(error, msg, context=context)
            else:
                self._log_warning(msg, context=context)
            return Null()

        # ------------------------------
        # Valid?
        # ------------------------------
        # Ok; We have:
        #   1) No root.
        #   2) Absolute input path.
        #   3) Input path with one `parts` being `self._TEMP_PATH`.
        # So... just send it back?
        path_temp = path_non_temp
        self._log_data_processing(self.dotted,
                                  "Temp Path is (root-less): {}",
                                  paths.to_str(path_temp),
                                  context=context,
                                  success=True)

    # ------------------------------
    # Normal/Expected Cases (w/ `self.root()`):
    # ------------------------------
    # We do have a root. Use it if the provided path is relative.

    # Nothing requested?
    elif not path_non_temp:
        # Provide the temp dir itself...
        path_temp = self.root() / self._TEMP_PATH
        self._log_data_processing(self.dotted,
                                  "Temp Path is (rooted default): {}",
                                  paths.to_str(path_temp),
                                  context=context,
                                  success=True)

    # Specific path requested.
    else:
        path = path_non_temp
        # Make sure it's relative so it can be in our repo.
        if path.is_absolute():
            # Let this raise a ValueError if path isn't relative to root.
            path = path.relative_to(self.root())
        # It should have our `_TEMP_PATH` in it.
        path = (path
                if self._TEMP_PATH in path.parts else
                (self._TEMP_PATH / path))
        # And it should be rooted in the repo.
        path_temp = self.root() / path
        self._log_data_processing(self.dotted,
                                  "Temp Path is (rooted specific): {}",
                                  paths.to_str(path_temp),
                                  context=context,
                                  success=True)

    # ------------------------------
    # Done!
    # ------------------------------
    return path_temp
def _ext_glob(self, element: paths.PathType) -> paths.Path:
    '''Concatenates extensions glob onto paths.Path/str.'''
    # Cast to a Path, then swap whatever suffix it had for the wildcard.
    return paths.cast(element).with_suffix(".*")
def _ignore_dir(log_dotted: label.DotStr,
                path: paths.PathType,
                ignores: Set[Union[str, re.Pattern]]) -> bool:
    '''
    Checks if the directory `path_relative` (relative to `path_root`),
    should be ignored or not according to the ignore set and import lists.

    Don't call this for the root - you should not ignore that.
    '''
    # Only need the directory's name for string comparisons (and logs).
    dir_name = paths.cast(path).stem

    # What matched, if anything: match type ("string"/"regex"), the
    # `ignores` entry that matched, and what it matched against.
    matched_on = None
    matching = None
    matched = None
    ignore = False

    # ------------------------------
    # Check list of explicit ignores.
    # ------------------------------
    # Stop at the first entry (string or regex) that matches.
    for check in ignores:
        if isinstance(check, str):
            # Strings must match the directory name exactly.
            if check == dir_name:
                ignore = True
                matched_on, matching, matched = "string", check, dir_name
                break
        elif isinstance(check, re.Pattern):
            # Regexes are searched against the full path string.
            match = check.search(str(path))
            if match:
                ignore = True
                matched_on = "regex"
                matching, matched = check.pattern, match.groups()
                break

    # ------------------------------
    # Result?
    # ------------------------------
    if log.will_output(*_LOG_INIT):
        if ignore:
            log.group_multi(_LOG_INIT,
                            log_dotted,
                            "Ignoring Directory:\n"
                            " path: {}\n"
                            " directory: {}\n"
                            " ignore type: {}\n"
                            " ignore match: {}\n"
                            " matched on: {}",
                            path,
                            dir_name,
                            matched_on,
                            matching,
                            matched,
                            log_minimum=log.Level.DEBUG)
        else:
            log.group_multi(_LOG_INIT,
                            log_dotted,
                            "Directory To Scan:\n"
                            " path: {}\n"
                            " directory: {}\n",
                            path,
                            dir_name,
                            log_minimum=log.Level.DEBUG)

    return ignore
def _scan_tree(log_dotted: Optional[label.DotStr], root_name: str, root_path: paths.Path, import_registrars: List[str], import_registrees: List[str], ignore_files: Set[Union[str, re.Pattern]], ignore_dirs: Set[re.Pattern], find_ut: bool) -> Tuple[List[str], List[str], List[str]]: ''' Find the import modules using os's `scandir()`, which is much faster than `pathlib.iterdir()` and `os.walk()`. `iterdir()`: - Just took too long. - Had too many calls to `posix.stat()`. `os.walk()` uses `scandir()`, so it had potential... but: - No way to stop it from walking all of ".git/", or other 'ignore' dirs. - Doesn't return DirEntry, so had to do additional `posix.stat()` to figure out file/dir. ''' # Original idea from https://stackoverflow.com/a/5135444/425816 # But using os.walk, which uses os.scandir, which is much much more # performant than my original pathlib.iterdir attempt. export_registrars = [] export_registrees = [] # Files that somehow got past ignore checks but are also not matching # registrar/registree names. /Should/ never happen... unknowns = [] # Get module info from root path. log.group_multi( _LOG_INIT, log_dotted, "Finding modules...\n" " unit-testing?: {}\n" " module: {}\n" " path: {}\n" " find: \n" " registrars: {}\n" " registrees: {}", find_ut, root_name, root_path, import_registrars, import_registrees) # Start off with the root dir. Append more dir scans as we find valid ones. scans = [ root_path, ] scanned_paths = 0 # Pull the next directory string off of `scans` and do a scan of it for # files/dirs we want. for directory in scans: with os.scandir(directory) as entries: for entry in entries: scanned_paths += 1 # ------------------------------ # Directories # ------------------------------ if (entry.is_dir() and not _ignore_dir(log_dotted, entry.path, ignore_dirs)): # Add to our list of dirs to scan. scans.append(entry.path) continue # ------------------------------ # Files # ------------------------------ # --- # Set-up for checking files. 
# --- path_relative = paths.cast(entry.path).relative_to(root_path) # --- # Check each module file. # --- if _ignore(log_dotted, root_path, path_relative, ignore_files, import_registrars, import_registrees): continue # Alright; sort this guy into an import list. _sort(log_dotted, root_path, path_relative, import_registrars, import_registrees, export_registrars, export_registrees, unknowns) # --- # Done; log info and return. # --- if log.will_output(log.Group.START_UP): log.group_multi( _LOG_INIT, log_dotted, "Done scanning for modules.\n" " scanned: {}\n" " matches: {}\n", scanned_paths, len(export_registrars) + len(export_registrees)) if export_registrars and log.will_output(log.Group.START_UP): module_log = [] for module in export_registrars: module_log.append(" - " + module) log.group_multi( _LOG_INIT, log_dotted, "Done finding registrar modules.\n" " module: {}\n" " matches: {}\n" "{}", root_name, len(export_registrars), '\n'.join(module_log)) if export_registrees and log.will_output(log.Group.START_UP): module_log = [] for module in export_registrees: module_log.append(" - " + module) log.group_multi( _LOG_INIT, log_dotted, "Done finding registree modules.\n" " module: {}\n" " matches: {}\n" "{}", root_name, len(export_registrees), '\n'.join(module_log)) if unknowns: file_log = [] for file in unknowns: file_log.append(" - " + file) log.group_multi(_LOG_INIT, log_dotted, "Found unknown but matching files?!\n" " module: {}\n" " unknowns: {}\n" "{}", root_name, len(unknowns), '\n'.join(file_log), log_minimum=log.Level.WARNING) return (export_registrars, export_registrees, unknowns)
def do_save_test(
        self,
        save: Optional['Test_FileTreeRepo.TaxonCtx'] = None,
        min_len: int = 1024,
) -> None:
    '''
    Load data for `save`, save it back through the repo (to the temp
    dir), and verify the saved file's contents match what was loaded.

    `min_len`: minimum amount of data expected in the loaded stream and
    the saved file.
    '''
    # ------------------------------
    # Get data to save, first.
    # ------------------------------
    # `save` enum is also good for the load.
    load_context = self.context_load(save)

    # Ok; give to repo to load...
    loaded_stream = self.repo.load(load_context)
    self.assertIsNotNone(loaded_stream)
    self.assertTrue(loaded_stream)

    # There should be a good amount of data in that file...
    loaded_data = self._helper_data_stream(loaded_stream, min_len)

    # ------------------------------
    # Prepare for the save.
    # ------------------------------
    with log.LoggingManager.on_or_off(self.debugging):
        context = self.context_save(save, True)

        # Did we get something?
        self.assertTrue(context)
        self.assertIsInstance(context, DataSaveContext)
        self.assertTrue(context.taxon)
        self.assertIsInstance(context.taxon, Taxon)
        self.assertIsInstance(context.taxon, SavedTaxon)
        self.assertTrue(context.dotted)
        self.assertEqual(context.dotted, self.dotted)
        self.assertTrue(context.action)
        self.assertIsInstance(context.action, DataAction)
        self.assertEqual(context.action, DataAction.SAVE)
        # Assuming all saving done in temp for unit test.
        self.assertTrue(context.temp)

        # Shouldn't have repo context yet - haven't given it to repo yet.
        repo_ctx = context.repo_data
        self.assertFalse(repo_ctx)

        # ------------------------------
        # Save data to root_temp.
        # ------------------------------
        # Ok; give to repo to save...
        saved = self.repo.save(loaded_stream, context)
        self.assertTrue(saved)

        # And now the repo context should be there.
        repo_ctx = context.repo_data
        self.assertTrue(repo_ctx)
        self.assertTrue(repo_ctx['meta'])
        self.assertTrue(repo_ctx['paths'])
        self.assertIsInstance(repo_ctx['paths'], list)
        self.assertEqual(len(repo_ctx['paths']), 1)

        # ....and make sure the path exists.
        path = paths.cast(repo_ctx['paths'][0])
        self.assertTrue(path)

        # ------------------------------
        # Verify against loaded data.
        # ------------------------------
        # Read our newly saved file directly, assert contents are same as
        # what we loaded.
        self._helper_file_contents(path, min_len, loaded_data)
def test_path_temp(self) -> None:
    '''
    Exercise FileTreeRepository._path_temp() for rooted success cases,
    root-less success cases, and root-less error cases.
    '''
    # Test out our FileTreeRepository._path_temp() function that we'll be
    # using in test_save().
    self.assertTrue(self.repo)

    # Temp paths shouldn't be set-up just yet...
    self.assertFalse(self.root_temp)
    self.assertFalse(self.path_temp_file)
    self.assertFalse(self.path_temp)

    # So set the up.
    self._set_temp_paths()
    self.assertTrue(self.root_temp)
    self.assertTrue(self.path_temp_file)
    self.assertTrue(self.path_temp)

    # Don't care about this file name... Anything will do.
    path_in = "jeff.file-does-not-exist.txt"

    # Tree repo does have a root to start with now, and it should be our
    # root.
    self.assertTrue(self.repo.root())
    self.assertEqual(self.repo.root(), self.root)

    # ------------------------------
    # Success Cases: Rooted
    # ------------------------------
    # ---
    # The more expected use-case: when the repo actually has a root.
    # ---

    # Ask for the temp dir.
    temp_path = self.repo._path_temp()
    expected_path = self.root / self.repo._TEMP_PATH
    self.assertEqual(temp_path, expected_path)

    # Ask for a path to be converted.
    temp_path = self.repo._path_temp(path_in)
    expected_path = self.root / self.repo._TEMP_PATH / path_in
    self.assertEqual(temp_path, expected_path)

    # Ask for a temp path when you happen to already have one.
    path_in_temp = paths.cast(self.repo._TEMP_PATH) / path_in
    temp_path = self.repo._path_temp(path_in_temp)
    self.assertEqual(temp_path, expected_path)

    # ---
    # `test_save()` troubles check:
    # ---
    # Ask for temp paths that were giving us trouble in `test_save()`.

    # Was getting ".../<temp_dir>/<temp_dir>/..."
    path_in_temp = self.repo._path_temp(self.filename)
    no_inception_path = self.repo._path_temp(path_in_temp)
    # Already in temp, so shouldn't've been changed.
    self.assertEqual(path_in_temp, no_inception_path)

    # Was getting non-temp filepath when asking to save non-temp filepath
    # as temp.
    path_in_repo = self.path_file
    # Expect it to be redirected to temp.
    expected = self.root_temp / path_in_repo.relative_to(self.root)
    path_in_temp = self.repo._path_temp(path_in_repo)
    self.assertNotEqual(path_in_repo, path_in_temp)
    self.assertEqual(path_in_temp, expected)

    # ------------------------------
    # Success Cases: NO ROOT!
    # ------------------------------
    # Tree repo does have a root...
    # But we want to pretend it doesn't for some error cases.
    self.assertTrue(self.repo.root())
    self.assertEqual(self.repo.root(), self.root)
    # NOTE: reaching into the repo to fake "no root" for the rest of the
    # test; the repo is rebuilt in set_up() so this doesn't leak.
    self.repo._root = None

    # An absolute path that just so happens to have some directory named
    # the right thing? Ok...
    root_in = paths.cast("/somewhere/with/a", self.repo._TEMP_PATH, "dir")
    temp_path = self.repo._path_temp(root_in / path_in)
    self.assertEqual(temp_path, root_in / path_in)

    # ------------------------------
    # Error Cases: NO ROOT!
    # ------------------------------
    # Don't print all the log exceptions, please.
    with log.LoggingManager.disabled():
        with self.assertRaises((VerediError, LoadError, SaveError)):
            # No root, no input... yes exception.
            self.repo._path_temp()

        with self.assertRaises((VerediError, LoadError, SaveError)):
            # No root, not absolute - exception
            self.repo._path_temp(path_in)

        with self.assertRaises((VerediError, LoadError, SaveError)):
            # No root, absolute w/o temp-dir in it - exception
            root_in = paths.cast("/no/dir/which/shall/not/be/named")
            self.repo._path_temp(root_in / path_in)