def test_strict_build_path(self):
    """Strict matching: entities absent from every pattern must cause failure."""
    patterns = [
        '[{session}/]{task}/r-{run}.nii.gz',
        't-{task}/{subject}-{run}.nii.gz',
    ]
    # All entities appear in at least one pattern -> a path is produced.
    matching = {'subject': 1, 'task': "A", 'run': 2}
    assert build_path(matching, patterns, True)
    # 'age' is mentioned in no pattern, so strict mode must reject the set.
    unmatched = {'subject': 1, 'task': "A", 'age': 22}
    assert not build_path(unmatched, patterns, True)
def test_build_path(self, writable_file):
    """Exercise build_path over simple, optional, and conditional patterns."""
    writable_file.tags = {
        'task': Tag(None, 'rest'),
        'run': Tag(None, '2'),
        'subject': Tag(None, '3'),
    }
    dirname = writable_file.dirname

    # Calling without any pattern argument is a TypeError.
    with pytest.raises(TypeError):
        build_path(writable_file.entities)

    # Single simple pattern.
    pattern = join(dirname, '{task}/sub-{subject}/run-{run}.nii.gz')
    expected = join(dirname, 'rest/sub-3/run-2.nii.gz')
    assert build_path(writable_file.entities, pattern) == expected

    # Each case: (raw pattern list, expected path relative to dirname).
    cases = [
        # Multiple simple patterns -- first fully matched one wins.
        (['{session}/{task}/r-{run}.nii.gz',
          't-{task}/{subject}-{run}.nii.gz',
          '{subject}/{task}.nii.gz'],
         't-rest/3-2.nii.gz'),
        # Optional entity in square brackets may simply be omitted.
        (['[{session}/]{task}/r-{run}.nii.gz',
          't-{task}/{subject}-{run}.nii.gz'],
         'rest/r-2.nii.gz'),
        # Conditional values that don't match fall through to the next pattern.
        (['{task<func|acq>}/r-{run}.nii.gz',
          't-{task}/{subject}-{run}.nii.gz'],
         't-rest/3-2.nii.gz'),
        # Conditional values that do match are used directly.
        (['{task<func|rest>}/r-{run}.nii.gz',
          't-{task}/{subject}-{run}.nii.gz'],
         'rest/r-2.nii.gz'),
        # Optional entity whose conditional values don't match is dropped.
        (['[{task<func|acq>}/]r-{run}.nii.gz',
          't-{task}/{subject}-{run}.nii.gz'],
         'r-2.nii.gz'),
    ]
    for raw_patterns, rel_target in cases:
        patterns = [join(dirname, p) for p in raw_patterns]
        target = join(dirname, rel_target)
        assert build_path(writable_file.entities, patterns) == target
def build_path(self, source, path_patterns=None, strict=False):
    ''' Construct a target filename for a file or dictionary of entities.

    Args:
        source (str, File, dict): The source data used to construct the
            new file path. One of:

            - A File object
            - A string giving the path of a File contained within the
              current Layout.
            - A dict of entities, with entity names in keys and values
              in values
        path_patterns (list): Optional path patterns to use when building
            the new file path. If None, the Layout-defined patterns are
            used.
        strict (bool): If True, all entities must be matched inside a
            pattern for it to be a valid match. If False, extra entities
            are ignored as long as all mandatory entities are found.
    '''
    # Normalize the source down to a plain entity dictionary.
    if isinstance(source, six.string_types):
        source = self.files[source]
    if isinstance(source, File):
        source = source.entities
    patterns = self.path_patterns if path_patterns is None else path_patterns
    return build_path(source, patterns, strict)
def copy(self, path_patterns, symbolic_link=False, root=None,
         conflicts='fail'):
    ''' Copy the contents of a file to a new location, with the target
    filename defined by the current File's entities and the specified
    path_patterns.

    Args:
        path_patterns (list): Patterns used to build the new filename
            from this File's entities.
        symbolic_link (bool): If True, create a symlink to the source
            file instead of copying its contents.
        root (str): Optional root directory that a relative source path
            is resolved against.
        conflicts (str): Action to take when the target already exists
            (passed through to write_contents_to_file).
    '''
    new_filename = build_path(self.entities, path_patterns)
    if not new_filename:
        return None

    # A pattern ending in a separator yields a directory; keep the
    # original filename inside it.
    if new_filename[-1] == os.sep:
        new_filename += self.filename

    # Resolve the source path against root (when relative) and fail early
    # with a clear message if it doesn't exist, instead of erroring deep
    # inside open()/symlinking a dangling path. Mirrors the sibling
    # copy() implementation in this codebase.
    if os.path.isabs(self.path) or root is None:
        path = self.path
    else:
        path = os.path.join(root, self.path)

    if not os.path.exists(path):
        raise ValueError("Target filename to copy/symlink (%s) doesn't "
                         "exist." % path)

    if symbolic_link:
        contents = None
        link_to = path
    else:
        # Context manager guarantees the handle is closed.
        with open(path, 'r') as f:
            contents = f.read()
        link_to = None

    write_contents_to_file(new_filename, contents=contents,
                           link_to=link_to, content_mode='text', root=root,
                           conflicts=conflicts)
def write_contents_to_file(self, entities, path_patterns=None,
                           contents=None, link_to=None,
                           content_mode='text', conflicts='fail',
                           strict=False):
    """ Write arbitrary data to a file defined by the passed entities and
    path patterns.

    Args:
        entities (dict): A dictionary of entities, with Entity names in
            keys and values for the desired file in values.
        path_patterns (list): Optional path patterns to use when building
            the filename. If None, the Layout-defined patterns will
            be used.
        contents (object): Contents to write to the generated file path.
            Can be any object serializable as text or binary data (as
            defined in the content_mode argument).
        link_to (str): Optional path to an existing file to link to
            instead of writing contents.
        content_mode (str): Either 'text' or 'binary'; how contents are
            serialized.
        conflicts (str): One of 'fail', 'skip', 'overwrite', or 'append'
            that defines the desired action when the output path already
            exists. 'fail' raises an exception; 'skip' does nothing;
            'overwrite' overwrites the existing file; 'append' adds a
            suffix to each file copy, starting with 1. Default is 'fail'.
        strict (bool): If True, all entities must be matched inside a
            pattern in order to be a valid match. If False, extra
            entities will be ignored so long as all mandatory entities
            are found.

    Raises:
        ValueError: If no path pattern can be satisfied by the passed
            entities.
    """
    if not path_patterns:
        path_patterns = self.path_patterns
    path = build_path(entities, path_patterns, strict)
    # Guard against a failed match: without this, None would be passed
    # straight through to the file writer. Matches the behavior of the
    # domains-aware variant of this method.
    if path is None:
        raise ValueError("Cannot construct any valid filename for "
                         "the passed entities given available path "
                         "patterns.")
    write_contents_to_file(path, contents=contents, link_to=link_to,
                           content_mode=content_mode, conflicts=conflicts,
                           root=self.root)
    self._index_file(self.root, path)
def build(self, type, extension):
    """Build the local output path and remote report path for a report file.

    Returns a (local_path, remote_path) tuple derived from this object's
    entities plus the given type and extension.
    """
    entities = dict(self.entities)
    entities['type'] = type
    entities['extension'] = extension
    filename = build_path(entities, path_patterns=REPORT_PATHS)
    local_path = str(self.outdir / filename)
    remote_path = '{}/reports/{}/{}'.format(self.domain, self.hash, filename)
    return local_path, remote_path
def writeout_events(analysis, pes, outdir):
    """ Write out predictor_events into BIDS event files.

    Args:
        analysis (dict): Analysis description; must provide 'hash_id',
            'predictors', 'task_name', and 'runs'.
        pes (iterable): Predictor-event records convertible to a DataFrame
            with 'predictor_id', 'run_id', 'onset', 'duration', 'value'.
        outdir (Path): Root output directory.

    Returns:
        list of (absolute path, relative label) tuples for every file
        written.
    """
    desc = {
        'Name': analysis['hash_id'],
        'BIDSVersion': '1.1.1',
        'PipelineDescription': {
            'Name': 'Neuroscout Events'
        }
    }
    desc_path = (outdir / 'dataset_description.json')
    paths = [(str(desc_path), 'dataset_description.json')]
    # Use a context manager so the handle is closed deterministically
    # (the original left the file object from .open('w') to the GC).
    with desc_path.open('w') as desc_file:
        json.dump(desc, desc_file)

    outdir = outdir / "func"
    outdir.mkdir(exist_ok=True)

    # Load events and rename columns to human-readable predictor names.
    pes = pd.DataFrame(pes)
    predictor_names = {p['id']: p['name'] for p in analysis['predictors']}
    pes.predictor_id = pes.predictor_id.map(predictor_names)

    # Write out event files
    for run in analysis.get('runs'):
        # Write out event files for each run_id
        run_events = pes[pes.run_id == run['id']].drop('run_id', axis=1)
        entities = get_entities(run)
        entities['task'] = analysis['task_name']

        out_cols = {}
        if not run_events.empty:
            for name, df in run_events.groupby('predictor_id'):
                # Collapse duplicate (onset, duration) rows, keeping max value.
                df_col = df.groupby(['onset', 'duration'])['value'].max()
                df_col = df_col.reset_index().rename(columns={'value': name})
                out_cols[name] = df_col

        # For any predictors that have no events in this run, output an
        # 'n/a' stub so every predictor gets a file.
        for name in set(predictor_names.values()) - out_cols.keys():
            df_col = pd.DataFrame([[0, 0, 'n/a']],
                                  columns=['onset', 'duration', name])
            out_cols[name] = df_col

        # Write out files
        for name, df_col in out_cols.items():
            # Write out BIDS path; parents=True tolerates patterns that
            # introduce nested directories.
            fname = outdir / name / build_path(entities, path_patterns=PATHS)
            fname.parent.mkdir(parents=True, exist_ok=True)
            paths.append(
                (str(fname), 'events/{}/{}'.format(name, fname.name)))
            df_col.to_csv(fname, sep='\t', index=False)

    return paths
def build_path(self, source, path_patterns=None, strict=False,
               domains=None):
    ''' Construct a target filename for a file or dictionary of entities.

    Args:
        source (str, File, dict): The source data used to construct the
            new file path. One of:

            - A File object
            - A string giving the path of a File contained within the
              current Layout.
            - A dict of entities, with entity names in keys and values
              in values
        path_patterns (list): Optional path patterns to use when building
            the new file path. If None, the Layout-defined patterns are
            used.
        strict (bool): If True, all entities must be matched inside a
            pattern for it to be a valid match. If False, extra entities
            are ignored as long as all mandatory entities are found.
        domains (str, list): Optional name(s) of domain(s) to scan for
            path patterns. If None, all domains are scanned. If two or
            more domains are provided, the order determines the
            precedence of path patterns (i.e., earlier domains have
            higher precedence).
    '''
    # Resolve a string argument to a File, trying the path as given and
    # then relative to the Layout root.
    if isinstance(source, six.string_types):
        if source not in self.files:
            source = join(self.root, source)
        source = self.get_file(source)
    if isinstance(source, File):
        source = source.entities

    if path_patterns is None:
        # Collect patterns from the requested domains (or all of them),
        # preserving the precedence order.
        if domains is None:
            selected = list(self.domains.keys())
        else:
            selected = listify(domains)
        path_patterns = []
        for dom in selected:
            path_patterns.extend(self.domains[dom].path_patterns)

    return build_path(source, path_patterns, strict)
def copy(self, path_patterns, symbolic_link=False, root=None,
         conflicts='fail'):
    ''' Copy the contents of a file to a new location, with the target
    filename defined by the current File's entities and the specified
    path_patterns. '''
    target = build_path(self.entities, path_patterns)
    if not target:
        return None
    # A pattern ending in a separator names a directory; keep the
    # original filename inside it.
    if target.endswith(os.sep):
        target += self.filename

    # Resolve the source path against root when it is relative.
    if isabs(self.path) or root is None:
        source = self.path
    else:
        source = join(root, self.path)
    if not exists(source):
        raise ValueError("Target filename to copy/symlink (%s) doesn't "
                         "exist." % source)

    if symbolic_link:
        contents, link_to = None, source
    else:
        with open(source, 'r') as fh:
            contents = fh.read()
        link_to = None

    write_contents_to_file(target, contents=contents, link_to=link_to,
                           content_mode='text', root=root,
                           conflicts=conflicts)
def write_contents_to_file(self, entities, path_patterns=None,
                           contents=None, link_to=None,
                           content_mode='text', conflicts='fail',
                           strict=False, domains=None, index=False,
                           index_domains=None):
    """ Write arbitrary data to a file defined by the passed entities and
    path patterns.

    Args:
        entities (dict): A dictionary of entities, with Entity names in
            keys and values for the desired file in values.
        path_patterns (list): Optional path patterns to use when building
            the filename. If None, the Layout-defined patterns will
            be used.
        contents (object): Contents to write to the generated file path.
            Can be any object serializable as text or binary data (as
            defined in the content_mode argument).
        link_to (str): Optional path to an existing file to link to
            instead of writing contents.
        content_mode (str): Either 'text' or 'binary'; how contents are
            serialized.
        conflicts (str): One of 'fail', 'skip', 'overwrite', or 'append'
            that defines the desired action when the output path already
            exists. 'fail' raises an exception; 'skip' does nothing;
            'overwrite' overwrites the existing file; 'append' adds a
            suffix to each file copy, starting with 1. Default is 'fail'.
        strict (bool): If True, all entities must be matched inside a
            pattern in order to be a valid match. If False, extra
            entities will be ignored so long as all mandatory entities
            are found.
        domains (list): List of Domains to scan for path_patterns. Order
            determines precedence (i.e., earlier Domains will be scanned
            first). If None, all available domains are included.
        index (bool): If True, adds the generated file to the current
            index using the domains specified in index_domains.
        index_domains (list): List of domain names to attach the
            generated file to when indexing. Ignored if index == False.
            If None, all available domains are used.

    Raises:
        ValueError: If no pattern could be satisfied by the passed
            entities.
    """
    if path_patterns:
        # Explicit patterns take absolute precedence.
        path = build_path(entities, path_patterns, strict)
    else:
        # Fall back to the Layout's own patterns first, then each
        # domain's patterns in precedence order. Note each element of
        # path_patterns here is itself a pattern list, tried as a group.
        path_patterns = [self.path_patterns]
        if domains is None:
            domains = list(self.domains.keys())
        for dom in domains:
            path_patterns.append(self.domains[dom].path_patterns)
        # First pattern group that yields a path wins.
        for pp in path_patterns:
            path = build_path(entities, pp, strict)
            if path is not None:
                break
    if path is None:
        raise ValueError("Cannot construct any valid filename for "
                         "the passed entities given available path "
                         "patterns.")
    write_contents_to_file(path, contents=contents, link_to=link_to,
                           content_mode=content_mode, conflicts=conflicts,
                           root=self.root)
    if index:
        if index_domains is None:
            # Default: attach the new file to every known domain.
            index_domains = list(self.domains.keys())
        self._index_file(self.root, path, index_domains)