def _resolve_bids(self, fileobj):
    """Expand a BIDS dataset spec entry into individually resolved file objects.

    Indexes the dataset rooted at ``fileobj.path`` with pybids (validation
    disabled), converts each indexed file's entities into tags, merges any
    override metadata carried by the spec entry, and loads the result through
    the marshmallow file schema. Entries that fail schema validation are
    logged and skipped.

    :param fileobj: spec file object pointing at a BIDS dataset root; may
        carry a ``metadata`` dict whose values act as per-file defaults.
    :return: list of successfully resolved file objects.
    """
    bids_layout = BIDSLayout(fileobj.path, absolute_paths=True, validate=False)

    # Metadata attached to the spec entry serves as a baseline that the
    # per-file sidecar metadata can override.
    override_metadata = dict()
    if hasattr(fileobj, "metadata") and isinstance(fileobj.metadata, dict):
        override_metadata = fileobj.metadata

    collected = []
    for bids_path, bids_file in bids_layout.get_files().items():
        entity_values = bids_file.get_entities()

        # Translate pybids entity names to their short forms and keep only
        # the entities this application knows about.
        tag_map = {
            short: str(value)
            for key, value in entity_values.items()
            if (short := entity_shortnames.get(key, key)) in entities
        }

        record = {
            "datatype": entity_values.get("datatype"),
            "suffix": entity_values.get("suffix"),
            "extension": entity_values.get("extension"),
            "path": bids_path,
            "tags": tag_map,
            "metadata": {**override_metadata, **bids_file.get_metadata()},
        }

        # Normalize the extension to always carry a leading dot.
        extension = record["extension"]
        if extension is not None and not extension.startswith("."):
            record["extension"] = f".{extension}"

        if record["datatype"] is None:
            continue  # catch README and dataset_description.json etc

        # TODO IntendedFor
        try:
            resolved = file_schema.load(record, unknown=EXCLUDE)
        except marshmallow.exceptions.ValidationError as e:
            logging.getLogger("halfpipe.ui").warning(
                f'Ignored validation error for "{bids_path}": %s', e, stack_info=False
            )
            continue

        self.fileobj_by_filepaths[bids_path] = resolved
        self.specfileobj_by_filepaths[resolved.path] = fileobj
        collected.append(resolved)

    return collected
def _resolve_bids(self, fileobj: File) -> list[File]:
    """Resolve a BIDS dataset spec entry into individual ``File`` objects.

    Indexes the dataset at ``fileobj.path`` with pybids, converts every
    indexed file via ``to_fileobj`` (merging any override metadata carried
    by the spec entry), then scans the field map sidecars' ``IntendedFor``
    entries to infer tag-based mapping rules between func and fmap files.

    :param fileobj: spec file object pointing at a BIDS dataset root; may
        carry a ``metadata`` dict used as per-file metadata defaults.
    :return: list of resolved ``File`` objects; fmap files additionally get
        their ``intended_for`` attribute set to the inferred rules.
    """
    # Load using pybids. Validation is skipped to save time.
    validate = False
    layout = BIDSLayout(
        root=fileobj.path,
        reset_database=True,  # force reindex in case files have changed
        absolute_paths=True,
        validate=validate,
        indexer=BIDSLayoutIndexer(
            validate=validate,
            index_metadata=False,  # save time
        ),
    )

    # Metadata attached to the spec entry acts as a baseline that is merged
    # into every resolved file's metadata.
    basemetadata: dict = dict()
    metadata = getattr(fileobj, "metadata", None)  # getattr default makes the hasattr check redundant
    if isinstance(metadata, dict):
        basemetadata.update(metadata)

    resolved_files: list[File] = []
    for obj in layout.get_files().values():
        file: File | None = to_fileobj(obj, basemetadata)
        if file is None:
            continue
        self.fileobj_by_filepaths[file.path] = file
        # Map each resolved path back to the spec entry it came from.
        # Bug fix: this previously stored the resolved file itself, which
        # made the spec-entry lookup table useless.
        self.specfileobj_by_filepaths[file.path] = fileobj
        resolved_files.append(file)

    # Collect IntendedFor targets: for every fmap file, remember its tag set
    # for each functional path its sidecar says it applies to.
    intended_for: dict[str, frozenset[tuple[str, str]]] = dict()
    for file in resolved_files:
        if file.datatype != "fmap":
            continue
        metadata = SidecarMetadataLoader.load(file.path)
        if metadata is None:
            continue
        intended_for_paths = metadata.get("intended_for")
        if intended_for_paths is None:
            continue
        linked_fmap_tags = frozenset(file.tags.items())
        for intended_for_path in intended_for_paths:
            intended_for[intended_for_path] = linked_fmap_tags

    # For each resolved file, find the fmap tag sets whose sidecars claim it.
    informed_by: dict[
        frozenset[tuple[str, str]], list[frozenset[tuple[str, str]]]
    ] = defaultdict(list)
    for file in resolved_files:
        file_tags = frozenset(file.tags.items())
        for file_path, linked_fmap_tags in intended_for.items():
            if file.path.endswith(file_path):  # slow performance
                informed_by[file_tags].append(linked_fmap_tags)

    # Build pairwise (func tag, fmap tag) associations, skipping the subject
    # entity and same-entity pairs.
    mappings: set[tuple[tuple[str, str], tuple[str, str]]] = set()
    for func_tags, linked_fmap_tags_list in informed_by.items():
        for linked_fmap_tags in linked_fmap_tags_list:
            for func_tag, linked_fmap_tag in product(func_tags, linked_fmap_tags):
                if func_tag[0] == "sub" or linked_fmap_tag[0] == "sub":
                    continue
                if func_tag[0] == linked_fmap_tag[0]:
                    continue  # only map between different entities
                mappings.add((func_tag, linked_fmap_tag))

    # Convert the tag pairs into "entity.value" rule strings keyed by the
    # fmap side.
    intended_for_rules: dict[str, list[str]] = defaultdict(list)
    for func_tag, fmap_tag in mappings:
        entity, value = func_tag
        func_str = f"{entity}.{value}"
        entity, value = fmap_tag
        fmap_str = f"{entity}.{value}"
        intended_for_rules[fmap_str].append(func_str)

    if intended_for:
        logger.info(
            "Inferred mapping between func and fmap files to be %s",
            pformat(intended_for_rules),
        )

    # Attach the (shared) rule mapping to every fmap file.
    for file in resolved_files:
        if file.datatype != "fmap":
            continue
        file.intended_for = intended_for_rules

    return resolved_files