def testUpdateEntities(validBidsI):
    # Ensure an append updates the run's entities to the full set
    entities = {key: validBidsI.getEntities()[key]
                for key in ['subject', 'task']}
    run = BidsRun(**entities)

    assert run._entities != validBidsI.getEntities()
    run.appendIncremental(validBidsI)
    assert run._entities == validBidsI.getEntities()

    # Ensure minimally supplied entities are still sufficient to block a
    # non-matching run and don't update the run's entities
    entities = {key: validBidsI.getEntities()[key]
                for key in ['subject', 'task']}
    run = BidsRun(**entities)

    preAppendEntities = run._entities
    assert preAppendEntities != validBidsI.getEntities()

    validBidsI.setMetadataField('subject', 'nonValidSubject')
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(validBidsI)
    assert run._entities == preAppendEntities
def testAppendConflictingMetadata(oneImageBidsI):
    bidsInc2 = BidsIncremental(oneImageBidsI.image,
                               oneImageBidsI.getImageMetadata())
    bidsInc2.setMetadataField('subject', 'definitely_invalid_name')

    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(bidsInc2)

    # Append should work now with validateAppend turned off
    numIncrementalsBefore = run.numIncrementals()
    run.appendIncremental(bidsInc2, validateAppend=False)
    assert run.numIncrementals() == (numIncrementalsBefore + 1)
def testEq(oneImageBidsI):
    run1 = BidsRun()
    run2 = BidsRun()

    run1.appendIncremental(oneImageBidsI)
    assert run1 != run2

    run2.appendIncremental(oneImageBidsI)
    assert run1 == run2

    run2._entities['subject'] = "new_subject"
    assert run1 != run2
def testGetAppendIncremental(oneImageBidsI):
    run = BidsRun()

    run.appendIncremental(oneImageBidsI)
    assert run.getIncremental(0) == oneImageBidsI

    NUM_APPENDS = 20
    for i in range(1, NUM_APPENDS):
        run.appendIncremental(oneImageBidsI)
        assert run.getIncremental(i) == oneImageBidsI
def testAppendConflictingNiftiHeaders(oneImageBidsI, imageMetadata):
    # Change the pixel dimensions (zooms) to make the image append-incompatible
    image2 = nib.Nifti1Image(oneImageBidsI.image.dataobj,
                             oneImageBidsI.image.affine,
                             oneImageBidsI.image.header)
    newZooms = tuple(i * 2 for i in image2.header.get_zooms())
    image2.header.set_zooms(newZooms)
    bidsInc2 = BidsIncremental(image2, imageMetadata)

    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(bidsInc2)

    # Append should work now with validateAppend turned off
    numIncrementalsBefore = run.numIncrementals()
    run.appendIncremental(bidsInc2, validateAppend=False)
    assert run.numIncrementals() == (numIncrementalsBefore + 1)
def testAppendConflictingReadme(oneImageBidsI, sampleBidsEntities):
    run = BidsRun()
    run.appendIncremental(oneImageBidsI)

    oneImageBidsI.readme += "totally new data"

    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(oneImageBidsI)
    assert oneImageBidsI != run.getIncremental(0)
def testAppendConflictingDatasetDescription(oneImageBidsI, sampleBidsEntities):
    run = BidsRun()
    run.appendIncremental(oneImageBidsI)

    oneImageBidsI.datasetDescription["newKey"] = "totally new data"

    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(oneImageBidsI)
    assert oneImageBidsI != run.getIncremental(0)
def testAppendBidsRun(tmpdir, bidsArchive4D, bidsArchiveMultipleRuns,
                      sampleBidsEntities):
    archivePath = Path(tmpdir, "appendBidsRunArchive")
    archive = BidsArchive(archivePath)
    emptyRun = BidsRun()
    archive.appendBidsRun(emptyRun)

    run = bidsArchive4D.getBidsRun(**sampleBidsEntities)
    archive.appendBidsRun(run)

    assert archive.getBidsRun(**sampleBidsEntities) == run
def doRuns(cfg, bidsInterface, subjInterface, webInterface):
    """
    Process a run of a bids dataset. The subject and run configuration
    values will be supplied by the cfg parameter.
    Args:
        cfg: configurations parsed from the project toml config file
        bidsInterface: client interface to bids data
        subjInterface: client interface for subject feedback (not used in
            this function)
        webInterface: client interface to user web page
    Returns: no return value
    """
    subject = cfg.subjectName
    run = cfg.runNum[0]
    entities = {'subject': subject, 'run': run, 'suffix': 'bold',
                'datatype': 'func'}
    webInterface.clearRunPlot(run)

    if cfg.writeBidsArchive is True:
        # Create a new bids archive from the incrementals
        bidsArchivePath = os.path.join(tmpDir,
                                       'bids_archive_' + uuid.uuid4().hex)
        print(f'BIDS Archive will be written to {bidsArchivePath}')
        newArchive = BidsArchive(bidsArchivePath)
        newRun = BidsRun(**entities)

    # Initialize the bids stream
    streamId = bidsInterface.initOpenNeuroStream(cfg.dsAccessionNumber,
                                                 **entities)
    numVols = bidsInterface.getNumVolumes(streamId)

    for idx in range(numVols):
        bidsIncremental = bidsInterface.getIncremental(streamId, idx)
        if cfg.writeBidsArchive is True:
            newRun.appendIncremental(bidsIncremental)
        imageData = bidsIncremental.getImageData()
        avg_niftiData = numpy.mean(imageData)
        print("| average activation value for TR %d is %f" %
              (idx, avg_niftiData))
        webInterface.plotDataPoint(run, idx, float(avg_niftiData))

    if cfg.writeBidsArchive is True:
        newArchive.appendBidsRun(newRun)
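# A minimal sketch (not part of the code above) of how doRuns() might be
# driven from a project's entry point. The cfg attribute names mirror the
# ones doRuns() reads (subjectName, runNum, writeBidsArchive,
# dsAccessionNumber); the ClientInterface import path, the dataset accession
# number, and the interface wiring are assumptions modeled on rt-cloud style
# sample projects.
from types import SimpleNamespace

from rtCommon.clientInterface import ClientInterface  # assumed import path


def runExperiment():
    # In the real project, cfg would be parsed from the project toml config
    # file; a SimpleNamespace stands in for it here.
    cfg = SimpleNamespace(subjectName='01',
                          runNum=[1],
                          writeBidsArchive=True,
                          dsAccessionNumber='ds000001')  # hypothetical dataset

    # The client interfaces supply the bids, subject, and web connections
    # that doRuns() expects.
    clientInterfaces = ClientInterface()
    doRuns(cfg,
           clientInterfaces.bidsInterface,
           clientInterfaces.subjInterface,
           clientInterfaces.webInterface)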
def testConstruction(oneImageBidsI, sampleBidsEntities):
    runWithoutEntities = BidsRun()
    assert runWithoutEntities is not None
    assert len(runWithoutEntities.getRunEntities()) == 0

    runWithEntities = BidsRun(**sampleBidsEntities)
    assert runWithEntities is not None
    assert runWithEntities.getRunEntities() == sampleBidsEntities
def testNumIncrementals(oneImageBidsI):
    run = BidsRun()
    assert run.numIncrementals() == 0

    NUM_APPENDS = 20
    for i in range(NUM_APPENDS):
        run.appendIncremental(oneImageBidsI)
        assert run.numIncrementals() == i + 1
def appendBidsRun(self, run: BidsRun) -> None:
    """
    Append a BIDS Run to this archive.

    Args:
        run: Run to append to the archive.

    Examples:
        >>> archive1 = BidsArchive('/tmp/dataset1')
        >>> archive2 = BidsArchive('/tmp/dataset2')
        >>> archive1.getRuns()
        [1, 2]
        >>> archive2.getRuns()
        [1]
        >>> run2 = archive1.getBidsRun(subject='01', task='test', run=2)
        >>> archive2.appendBidsRun(run2)
        >>> archive2.getRuns()
        [1, 2]
    """
    if run.numIncrementals() == 0:
        return

    self._appendIncremental(run.asSingleIncremental())
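# Usage sketch for appendBidsRun() (hypothetical paths and helper name). A run
# with zero incrementals is silently ignored; otherwise the run is collapsed
# into a single 4-D BidsIncremental via asSingleIncremental() before being
# appended to the archive.
def copyRunBetweenArchives(sourcePath: str, destPath: str, **entities) -> None:
    sourceArchive = BidsArchive(sourcePath)
    destArchive = BidsArchive(destPath)
    run = sourceArchive.getBidsRun(**entities)  # raises NoMatchError if absent
    destArchive.appendBidsRun(run)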
def testAppendConflictingEntities(oneImageBidsI):
    differentBidsInc = BidsIncremental(oneImageBidsI.image,
                                       oneImageBidsI.getImageMetadata())
    differentBidsInc.setMetadataField("subject", "new-subject")

    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(differentBidsInc)
def testAsSingleIncremental(oneImageBidsI):
    run = BidsRun()
    assert run.asSingleIncremental() is None

    NUM_APPENDS = 5
    for i in range(NUM_APPENDS):
        run.appendIncremental(oneImageBidsI)

    oldImage = oneImageBidsI.image
    imageData = getNiftiData(oldImage)
    newDataShape = imageData.shape[:3] + (NUM_APPENDS,)
    newData = np.zeros(newDataShape, dtype=imageData.dtype)
    for i in range(NUM_APPENDS):
        newData[..., i] = imageData[..., 0]

    newImage = oldImage.__class__(newData, oldImage.affine, oldImage.header)
    consolidatedBidsI = BidsIncremental(newImage,
                                        oneImageBidsI.getImageMetadata())

    assert run.asSingleIncremental() == consolidatedBidsI
def testAppendSetEntities(oneImageBidsI, sampleBidsEntities):
    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    assert run.getRunEntities() == sampleBidsEntities
def testAppendEmptyIncrementals(oneImageBidsI, sampleBidsEntities):
    run = BidsRun(**sampleBidsEntities)
    run.appendIncremental(oneImageBidsI)
    assert run.numIncrementals() == 1
def testGetOutOfBounds(oneImageBidsI):
    run = BidsRun()

    NUM_APPENDS = 10
    for i in range(NUM_APPENDS):
        run.appendIncremental(oneImageBidsI)

    # This is in bounds due to how negative indexing works
    assert run.getIncremental(0) == run.getIncremental(-1 * NUM_APPENDS)

    with pytest.raises(IndexError):
        run.getIncremental(NUM_APPENDS)
    with pytest.raises(IndexError):
        run.getIncremental(NUM_APPENDS + 1)
    with pytest.raises(IndexError):
        run.getIncremental(-1 * NUM_APPENDS - 1)
def testAppendConflictingEvents(oneImageBidsI, sampleBidsEntities):
    run = BidsRun()
    run.appendIncremental(oneImageBidsI)

    # This should work, as the previous events file is empty so any new data
    # shouldn't have a conflict
    newEventsData = [{key: data for key in DEFAULT_EVENTS_HEADERS}
                     for data in range(1)]
    oneImageBidsI.events = oneImageBidsI.events.append(newEventsData,
                                                       ignore_index=True)
    run.appendIncremental(oneImageBidsI)

    # This should also work, as they share the same starting row, and the new
    # DataFrame just has 5 additional rows
    newRowCount = 5
    newEventsData = [{key: data for key in DEFAULT_EVENTS_HEADERS}
                     for data in range(1, newRowCount + 1)]
    oneImageBidsI.events = oneImageBidsI.events.append(newEventsData,
                                                       ignore_index=True)
    run.appendIncremental(oneImageBidsI)

    # This should fail, as rows for same onset times have different values now
    with pytest.raises(MetadataMismatchError):
        oneImageBidsI.events.iloc[int(newRowCount / 2):] += 1
        run.appendIncremental(oneImageBidsI)
    assert oneImageBidsI != run.getIncremental(0)
def getBidsRun(self, **entities) -> BidsRun:
    """
    Get a BIDS Run from the archive.

    Args:
        entities: Entities defining a run in the archive.

    Returns:
        A BidsRun containing all the BidsIncrementals in the specified run.

    Raises:
        NoMatchError: If the entities don't match any runs in the archive.
        QueryError: If the entities match more than one run in the archive.

    Examples:
        >>> archive = BidsArchive('/tmp/dataset')
        >>> run = archive.getBidsRun(subject='01', session='02', task='testTask', run=1)
        >>> print(run.numIncrementals())
        53
    """
    images = self.getImages(**entities)
    if len(images) == 0:
        raise NoMatchError(f"Found no runs matching entities {entities}")
    if len(images) > 1:
        entities = [img.get_entities() for img in images]
        raise QueryError("Provided entities were not unique to one run; "
                         "try specifying more entities "
                         f"(got runs with these entities: {entities})")
    else:
        bidsImage = images[0]
        niftiImage = bidsImage.get_image()

        # TODO: Add inheritance processing for higher-level metadata JSON
        # files, in the style of the below events file inheritance
        metadata = self.getSidecarMetadata(bidsImage)
        metadata.pop('extension')  # only used in PyBids

        # This incremental will typically have a 4th (time) dimension > 1
        incremental = BidsIncremental(niftiImage, metadata)

        # Get dataset description, set
        incremental.datasetDescription = self.getDatasetDescription()

        # Get README, set
        with open(self.getReadme().path) as readmeFile:
            incremental.readme = readmeFile.read()

        # Get events file, set
        # Due to inheritance, must find and process all events files the
        # target image inherits from to create the final events file for
        # this run

        # Parse out the events files that the image file inherits from
        inheritedFiles = []
        searchEntities = bidsImage.get_entities()
        # only want to compare entities, not file type
        searchEntities.pop('extension', None)
        searchEntities.pop('suffix', None)

        allEventsFiles = self.getEvents()
        for eventFile in allEventsFiles:
            fileEntities = eventFile.get_entities()
            # only want to compare entities, not file type
            fileEntities.pop('extension', None)
            fileEntities.pop('suffix', None)

            if all(item in searchEntities.items()
                   for item in fileEntities.items()):
                inheritedFiles.append(eventFile)

        # Sort the files by their position in the hierarchy.
        # Metric: Files with shorter path lengths are higher in the
        # inheritance hierarchy.
        inheritedFiles.sort(key=lambda eventsFile: len(eventsFile.path))

        # Merge every subsequent events file's DataFrame, in order of
        # inheritance (from top level to bottom level).
        # Using a dictionary representation of the DataFrame gives access to
        # the dict.update() method, which has exactly the desired combination
        # behavior for inheritance (replace conflicting values with the new
        # values, keep any non-conflicting values)
        def mergeEventsFiles(base: dict, eventsFile: BIDSDataFile):
            # Set DataFrame to be indexed by 'onset' column to ensure
            # dictionary update changes rows when onsets match
            dfToAdd = eventsFile.get_df()
            dfToAdd.set_index('onset', inplace=True, drop=False)
            base.update(dfToAdd.to_dict(orient='index'))
            return base

        eventsDFDict = functools.reduce(mergeEventsFiles, inheritedFiles, {})
        eventsDF = pd.DataFrame.from_dict(eventsDFDict, orient='index')

        # If there's no data in the DataFrame, create the default empty
        # events file DataFrame
        if eventsDF.empty:
            eventsDF = pd.DataFrame(columns=DEFAULT_EVENTS_HEADERS)

        # Ensure the events file order is the same as presentation/onset order
        eventsDF.sort_values(by='onset', inplace=True, ignore_index=True)
        incremental.events = correctEventsFileDatatypes(eventsDF)

        run = BidsRun()
        # appendIncremental will take care of splitting the BidsIncremental
        # into its component 3-D images
        run.appendIncremental(incremental, validateAppend=False)

        return run
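# Standalone illustration (not part of the archive API) of the dict.update()
# merge used in getBidsRun() for events file inheritance. Two toy DataFrames
# stand in for a top-level and a run-level events.tsv: rows are keyed by
# 'onset', so the run-level row replaces the top-level row that shares onset
# 0.0, while non-conflicting rows from both levels are kept.
def _demoEventsInheritanceMerge():
    import pandas as pd  # pandas is also imported at module level above

    topLevelEvents = pd.DataFrame({'onset': [0.0, 10.0],
                                   'duration': [5.0, 5.0],
                                   'trial_type': ['rest', 'rest']})
    runLevelEvents = pd.DataFrame({'onset': [0.0],
                                   'duration': [5.0],
                                   'trial_type': ['task']})

    merged = {}
    # Ordered from the top of the inheritance hierarchy to the bottom,
    # matching the sort-by-path-length ordering used in getBidsRun()
    for eventsDF in (topLevelEvents, runLevelEvents):
        indexed = eventsDF.set_index('onset', drop=False)
        merged.update(indexed.to_dict(orient='index'))

    combinedDF = pd.DataFrame.from_dict(merged, orient='index')
    combinedDF.sort_values(by='onset', inplace=True, ignore_index=True)
    # combinedDF: onset 0.0 -> 'task' (run level wins), onset 10.0 -> 'rest'
    return combinedDF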