def testAppendConflictingEntities(oneImageBidsI):
    """Appending an incremental whose entities differ must be rejected."""
    mismatched = BidsIncremental(oneImageBidsI.image,
                                 oneImageBidsI.getImageMetadata())
    mismatched.setMetadataField("subject", "new-subject")

    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(mismatched)
def testMetadataDictionaryIndependence(sample4DNifti1, imageMetadata):
    """Mutating the source metadata dict must not affect the incremental."""
    incremental = BidsIncremental(sample4DNifti1, imageMetadata)

    key = 'subject'
    assert incremental.getMetadataField(key) == imageMetadata[key]
    snapshot = incremental.getMetadataField(key)

    # Change the caller's dict; the incremental must keep its own copy
    imageMetadata[key] = 'a brand-new subject'
    assert incremental.getMetadataField(key) == snapshot
    assert incremental.getMetadataField(key) != imageMetadata[key]
def testConstructionMetadataPrecedence(sample4DNifti1, imageMetadata):
    """Explicit metadata must take precedence over ProtocolName-derived
    values at construction time."""
    assert imageMetadata.get('ProtocolName', None) is not None
    derived = metadataFromProtocolName(imageMetadata['ProtocolName'])
    assert len(derived) > 0
    assert derived.get('run', None) is not None

    # Make the explicit metadata disagree with the protocol-derived value
    newRunNumber = int(derived['run']) + 1
    imageMetadata['run'] = newRunNumber
    assert derived['run'] != imageMetadata['run']

    incremental = BidsIncremental(sample4DNifti1, imageMetadata)
    assert incremental.getMetadataField('run') == newRunNumber
def testAppendConflictingMetadata(oneImageBidsI):
    """Metadata conflicts block an append unless validation is disabled."""
    conflicting = BidsIncremental(oneImageBidsI.image,
                                  oneImageBidsI.getImageMetadata())
    conflicting.setMetadataField('subject', 'definitely_invalid_name')

    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(conflicting)

    # Append should work now with validateAppend turned off
    countBefore = run.numIncrementals()
    run.appendIncremental(conflicting, validateAppend=False)
    assert run.numIncrementals() == (countBefore + 1)
def testInvalidConstruction(sample2DNifti1, samplePseudo2DNifti1,
                            sample4DNifti1, imageMetadata):
    """Exercise every rejected-construction path of BidsIncremental."""
    # Test empty image
    with pytest.raises(TypeError):
        BidsIncremental(image=None, imageMetadata=imageMetadata)

    # Test 2-D image
    with pytest.raises(ValueError) as err:
        BidsIncremental(image=sample2DNifti1, imageMetadata=imageMetadata)
    assert "Image must have at least 3 dimensions" in str(err.value)

    # Test 2-D image masquerading as 4-D image
    with pytest.raises(ValueError) as err:
        BidsIncremental(image=samplePseudo2DNifti1,
                        imageMetadata=imageMetadata)
    assert ("Image's 3rd (and any higher) dimensions are <= 1, which means "
            "it is a 2D image; images must have at least 3 dimensions" in
            str(err.value))

    # Test incomplete metadata: removing any required key must fail
    protocolName = imageMetadata.pop("ProtocolName")
    for requiredKey in BidsIncremental.REQUIRED_IMAGE_METADATA:
        savedValue = imageMetadata.pop(requiredKey)
        assert not BidsIncremental.isCompleteImageMetadata(imageMetadata)
        with pytest.raises(MissingMetadataError):
            BidsIncremental(image=sample4DNifti1, imageMetadata=imageMetadata)
        imageMetadata[requiredKey] = savedValue
    imageMetadata["ProtocolName"] = protocolName

    # Test too-large repetition and echo times
    for timingKey in ["RepetitionTime", "EchoTime"]:
        savedValue = imageMetadata[timingKey]
        imageMetadata[timingKey] = 10**6
        with pytest.raises(ValueError):
            BidsIncremental(image=sample4DNifti1, imageMetadata=imageMetadata)
        imageMetadata[timingKey] = savedValue

    # Test non-image object
    with pytest.raises(TypeError) as err:
        notImage = "definitely not an image"
        BidsIncremental(image=notImage, imageMetadata=imageMetadata)
    assert ("Image must be one of [nib.Nifti1Image, nib.Nifti2Image, "
            f"BIDSImageFile (got {type(notImage)})" in str(err.value))

    # Test non-functional data
    with pytest.raises(NotImplementedError) as err:
        savedValue = imageMetadata['datatype']
        invalidType = 'anat'
        imageMetadata['datatype'] = invalidType
        BidsIncremental(image=sample4DNifti1, imageMetadata=imageMetadata)
    imageMetadata['datatype'] = savedValue
    assert ("BIDS Incremental for BIDS datatypes other than 'func' is not "
            f"yet implemented (got '{invalidType}')") in str(err.value)
def testConflictingMetadataAppend(bidsArchive4D, sample4DNifti1,
                                  imageMetadata):
    """An archive append with mismatched metadata must be rejected."""
    # Modify metadata in a critical way (change the ProtocolName)
    imageMetadata['ProtocolName'] = 'not the same'
    with pytest.raises(MetadataMismatchError):
        bidsArchive4D._appendIncremental(
            BidsIncremental(sample4DNifti1, imageMetadata))
def getIncremental(self, index: int) -> BidsIncremental:
    """
    Returns the incremental in the run at the provided index.

    Arguments:
        index: Which image of the run to get (0-indexed)

    Returns:
        Incremental at provided index.

    Raises:
        IndexError: If index is out of bounds for this run.

    Examples:
        >>> print(run.numIncrementals())
        5
        >>> inc = run.getIncremental(1)
        >>> inc2 = run.getIncremental(5)
        IndexError
    """
    # Keep the try body to the indexing operation only: the original
    # wrapped image/incremental construction too, so an IndexError raised
    # inside those constructors would have been mis-reported as an
    # out-of-bounds run access.
    try:
        dataArray = self._dataArrays[index]
    except IndexError:
        raise IndexError(f"Index {index} out of bounds for run with "
                         f"{self.numIncrementals()} incrementals")

    image = self._imageKlass(dataArray, self._imageAffine, self._imageHeader)
    return BidsIncremental(image, self._imageMetadata)
def testConflictingNiftiHeaderAppend(bidsArchive4D, sample4DNifti1,
                                     imageMetadata):
    """An append with an incompatible NIfTI header must be rejected."""
    # Modify NIfTI header in critical way (change the datatype)
    sample4DNifti1.header['datatype'] = 32  # 32=complex, should be uint16=512
    with pytest.raises(MetadataMismatchError):
        bidsArchive4D.appendIncremental(
            BidsIncremental(sample4DNifti1, imageMetadata))
def testDatasetMetadata(sample4DNifti1, imageMetadata):
    """Dataset description must contain the required BIDS fields."""
    # A description without "Name"/"BIDSVersion" is invalid
    with pytest.raises(MissingMetadataError):
        BidsIncremental(image=sample4DNifti1,
                        imageMetadata=imageMetadata,
                        datasetDescription={"random_field": "doesnt work"})

    # A complete description is accepted and queryable
    datasetName = "Test dataset"
    incremental = BidsIncremental(image=sample4DNifti1,
                                  imageMetadata=imageMetadata,
                                  datasetDescription={"Name": datasetName,
                                                      "BIDSVersion": "1.0"})
    assert incremental.getDatasetName() == datasetName
def testDiskOutput(validBidsI, tmpdir):
    """Round-trip a BIDS-Incremental through an on-disk archive."""
    # Write the archive
    datasetRoot = os.path.join(tmpdir, "bids-pytest-dataset")
    validBidsI.writeToDisk(datasetRoot)

    # Validate the output can be opened by BidsArchive and verified against
    # the source BIDS-Incremental
    archive = BidsArchive(datasetRoot)
    archiveImage = archive.getImages()[0]

    # Remove pseudo entities to avoid conflict with the validBidsI
    metadata = archive.getSidecarMetadata(archiveImage, includeEntities=True)
    for pseudoEntity in PYBIDS_PSEUDO_ENTITIES:
        metadata.pop(pseudoEntity)

    rereadIncremental = BidsIncremental(archiveImage, metadata)
    assert rereadIncremental == validBidsI
    assert isValidBidsArchive(archive.rootPath)

    # Try only writing data: no README or dataset description should appear
    datasetRoot = os.path.join(tmpdir, "bids-pytest-dataset-2")
    validBidsI.writeToDisk(datasetRoot, onlyData=True)
    assert not os.path.exists(os.path.join(datasetRoot, "README"))
    assert not os.path.exists(
        os.path.join(datasetRoot, "dataset_description.json"))
def testSerialization(validBidsI, sample4DNifti1, imageMetadata, tmpdir):
    """A pickled incremental must survive removal of its backing file."""
    # Copy the NIfTI source image to a different location
    sourceFileName = 'test.nii'
    sourceFilePath = os.path.join(tmpdir, sourceFileName)
    nib.save(sample4DNifti1, sourceFilePath)
    sourceNifti = nib.load(sourceFilePath)
    incremental = BidsIncremental(sourceNifti, imageMetadata)

    # validBidsI is derived from a file elsewhere on disk, so we can use it
    # as a reference once the file 'incremental' is derived from is removed.
    # Transitive property gives us:
    #   IF incremental == validBidsI AND validBidsI == deserialized
    #   THEN incremental == deserialized
    assert incremental == validBidsI

    # Serialize the object, then remove the image file so the deserialized
    # object can't access it
    pickledBytes = pickle.dumps(incremental)
    del incremental
    os.remove(sourceFilePath)

    # Deserialize the object and compare equality
    deserialized = pickle.loads(pickledBytes)
    assert validBidsI == deserialized

    # Check there's no file mapping
    assert deserialized.image.file_map['image'].filename is None
def getIncremental(self, volIdx=-1) -> BidsIncremental:
    """
    Get a BIDS incremental for the indicated index in the current
    subject/run.

    volIdx acts similar to a file_seek pointer: if a volIdx >= 0 is
    supplied, the volume pointer is advanced to that position. If no volIdx
    or a volIdx < 0 is supplied, the next image volume after the previous
    position is returned and the pointer is incremented.

    Args:
        volIdx: The volume index (or TR) within the run to retrieve.

    Returns:
        BidsIncremental of that volume index within this subject/run, or
        None once the end of the run is reached.
    """
    # TODO - when we have BidsRun
    # return self.bidsRun.getIncremental(volIdx)
    if volIdx >= 0:
        # reset the next volume to the user specified volume
        self.nextVol = volIdx
    # else: use the default next volume

    if self.nextVol >= self.numVolumes:
        # past the end of the run
        return None

    incremental = BidsIncremental(self.imgVolumes[self.nextVol],
                                  self.metadata)
    self.nextVol += 1
    return incremental
def getIncremental(self, volIdx=-1) -> BidsIncremental:
    """
    Get the BIDS incremental for the corresponding DICOM image indicated
    by volIdx, where volIdx is equivalent to the TR id.

    volIdx acts similar to a file_seek pointer: if a volIdx >= 0 is
    supplied, the volume pointer is advanced to that position. If no volIdx
    or a volIdx < 0 is supplied, the next image volume after the previous
    position is returned and the pointer is incremented.

    Args:
        volIdx: The volume index (or TR) within the run to retrieve.

    Returns:
        BidsIncremental for the matched DICOM for the run/volume
    """
    if volIdx >= 0:
        # reset the next volume to the user specified volume
        self.nextVol = volIdx
    # else: use the default next volume

    # wait for the dicom and create a bidsIncremental
    dicomImg = self.dataInterface.getImageData(self.dicomStreamId,
                                               self.nextVol)
    volumeMetadata = getDicomMetadata(dicomImg)
    volumeMetadata.update(self.entities)
    niftiImg = convertDicomImgToNifti(dicomImg)
    incremental = BidsIncremental(niftiImg, volumeMetadata)
    self.nextVol += 1
    return incremental
def incrementAcquisitionValues(incremental: BidsIncremental) -> None:
    """
    Increment the acquisition values in an image metadata dictionary to
    prepare for appending an incremental to an archive built with the same
    source image.
    """
    repetitionTime = incremental.getMetadataField("RepetitionTime")
    repetitionTime = 1.0 if repetitionTime is None else float(repetitionTime)

    # AcquisitionTime advances by one TR; AcquisitionNumber by one
    increments = {'AcquisitionTime': repetitionTime,
                  'AcquisitionNumber': 1.0}
    for field, delta in increments.items():
        currentValue = incremental.getMetadataField(field)
        if currentValue is None:
            # Field absent from the metadata; nothing to advance
            continue
        incremental.setMetadataField(field, float(currentValue) + delta)
def readLocalDicomIncremental(volIdx, entities):
    """Read a local sample DICOM and wrap it as a BidsIncremental for test
    comparison."""
    dicomPath = os.path.join(test_sampleProjectDicomPath,
                             "001_000013_{TR:06d}.dcm".format(TR=volIdx))
    dicomImg = readDicomFromFile(dicomPath)
    dicomMetadata = getDicomMetadata(dicomImg)
    dicomMetadata.update(entities)
    niftiImg = convertDicomImgToNifti(dicomImg)
    return BidsIncremental(niftiImg, dicomMetadata)
def appendDataMatches(archive: BidsArchive, reference: BidsIncremental,
                      startIndex: int = 0, endIndex: int = -1):
    """Check that the data appended to the archive over the given volume
    range matches the reference incremental."""
    entities = filterEntities(reference.getImageMetadata())
    images = archive.getImages(**entities)
    assert len(images) == 1
    archiveImage = images[0].get_image()
    fullImageData = getNiftiData(archiveImage)

    if endIndex == -1:
        endIndex = len(fullImageData)

    appendedData = fullImageData[..., startIndex:endIndex]
    appendedImage = nib.Nifti1Image(appendedData,
                                    archiveImage.affine,
                                    archiveImage.header)
    return BidsIncremental(appendedImage,
                           reference.getImageMetadata()) == reference
def dataset_Bidsinc(self):
    """
    Build a BidsIncremental from the first image volume of this dataset.

    :return: BidsIncremental wrapping the first image volume
    """
    image = self.get_image()
    conf = self.get_conf()  # NOTE(review): assumed to also refresh config
    subject = self.get_bids_required_info()[0]
    task = self.get_bids_required_info()[1]
    suffix = self.get_bids_required_info()[2]
    datasetMetadata = self.get_metadata()
    # BidsIncremental takes (image, imageMetadata[, datasetDescription]) at
    # every other call site in this file (see the sliceIndex variant of this
    # method); passing subject/task/suffix as loose positional arguments put
    # them in the wrong parameters. Build the image-metadata dict instead.
    imageMetadata = {'subject': subject,
                     'session': conf['sessionId'],
                     'task': task,
                     'suffix': suffix,
                     'datatype': conf['imageType']}
    return BidsIncremental(image[0], imageMetadata, datasetMetadata)
def testImageMetadataDictCreation(imageMetadata):
    """createImageMetadataDict output must match the source metadata and
    stay in sync with the required-metadata list."""
    createdDict = BidsIncremental.createImageMetadataDict(
        subject=imageMetadata["subject"],
        task=imageMetadata["task"],
        suffix=imageMetadata["suffix"],
        repetitionTime=imageMetadata["RepetitionTime"],
        datatype='func')
    for key in createdDict:
        assert createdDict.get(key) == imageMetadata.get(key)

    # Ensure that the method is in sync with the required metadata
    # Get all required fields as lowerCamelCase for passing as kwargs
    camelCaseFields = [key[0].lower() + key[1:]
                       for key in BidsIncremental.REQUIRED_IMAGE_METADATA]
    dummyValue = 'n/a'
    createdDict = BidsIncremental.createImageMetadataDict(
        **{field: dummyValue for field in camelCaseFields})
    for field in BidsIncremental.REQUIRED_IMAGE_METADATA:
        assert createdDict[field] == dummyValue
def bidsArchiveMultipleRuns(tmpdir, sample4DNifti1, imageMetadata):
    """Build an archive holding the same image under two run numbers."""
    firstRunMeta = imageMetadata.copy()
    adjustTimeUnits(firstRunMeta)
    archive = archiveWithImage(sample4DNifti1, firstRunMeta, tmpdir)

    # Append the same image again under the next run number
    secondRunMeta = imageMetadata.copy()
    adjustTimeUnits(secondRunMeta)
    secondRunMeta['run'] = int(secondRunMeta['run']) + 1
    archive.appendIncremental(BidsIncremental(sample4DNifti1, secondRunMeta))
    return archive
def dataset_Bidsinc(self, sliceIndex: int = 0):
    """
    :param sliceIndex: index of the image volume to wrap
    :return: tuple of (BidsIncremental for that volume, total volume count)
    """
    image = self.get_image()
    conf = self.get_conf()  # NOTE(review): assumed to also refresh self.conf
    subject = self.get_bids_required_info()[0]
    task = self.get_bids_required_info()[1]
    suffix = self.get_bids_required_info()[2]
    datasetMetadata = self.get_metadata()
    imageMetadata = {'subject': subject,
                     'session': self.conf['sessionId'],
                     'task': task,
                     'suffix': suffix,
                     'datatype': self.conf['imageType']}
    incremental = BidsIncremental(image[sliceIndex], imageMetadata,
                                  datasetMetadata)
    return incremental, len(image)
def dicomToBidsinc(dicomFile, requiredMetadata: dict) -> BidsIncremental:
    """
    Convert a DICOM file on disk into a BIDS-Incremental.

    :param dicomFile: path to the DICOM file to convert
    :param requiredMetadata: metadata required for a valid BIDS-Incremental
        that the DICOM itself may lack (overrides on key collision)
    :return: BidsIncremental wrapping the converted NIfTI image and the
        combined metadata
    """
    # TODO(spolcyn): Do this all in memory -- dicom2nifti is promising
    # Put extra metadata in sidecar JSON file
    #
    # NOTE: This is not the final version of this method.
    # The conversion from DICOM to BIDS-I and gathering all required metadata
    # can be complex, as DICOM doesn't necessarily have the metadata required
    # for BIDS in it by default. Thus, another component will handle the logic
    # and error handling surrounding this.
    dicomImg = readDicomFromFile(dicomFile)
    niftiImage = convertDicomImgToNifti(dicomImg)
    # logger.debug("Nifti header after conversion is: %s", niftiImage.header)

    publicMeta, privateMeta = getDicomMetadata(dicomImg)
    publicMeta.update(privateMeta)  # combine metadata dictionaries
    publicMeta.update(requiredMetadata)
    # Pass the fully merged dict as the image metadata. The previous version
    # passed only `requiredMetadata` as image metadata (discarding the
    # DICOM-derived sidecar metadata merged above) and passed the merged dict
    # as datasetDescription — a dataset description lacking the required
    # "Name"/"BIDSVersion" fields, which BidsIncremental rejects.
    return BidsIncremental(niftiImage, publicMeta)
def testAppendConflictingNiftiHeaders(oneImageBidsI, imageMetadata):
    """Incompatible NIfTI headers block an append unless validation is
    disabled."""
    # Change the pixel dimensions (zooms) to make the image
    # append-incompatible
    alteredImage = nib.Nifti1Image(oneImageBidsI.image.dataobj,
                                   oneImageBidsI.image.affine,
                                   oneImageBidsI.image.header)
    doubledZooms = tuple(zoom * 2 for zoom in
                         alteredImage.header.get_zooms())
    alteredImage.header.set_zooms(doubledZooms)
    conflicting = BidsIncremental(alteredImage, imageMetadata)

    run = BidsRun()
    run.appendIncremental(oneImageBidsI)
    with pytest.raises(MetadataMismatchError):
        run.appendIncremental(conflicting)

    # Append should work now with validateAppend turned off
    countBefore = run.numIncrementals()
    run.appendIncremental(conflicting, validateAppend=False)
    assert run.numIncrementals() == (countBefore + 1)
def stream(self):
    """
    Stream every image volume of this dataset as a BidsIncremental,
    printing each one with a short delay between volumes.
    """
    image = self.get_image()
    conf = self.get_conf()  # NOTE(review): assumed to also refresh self.conf
    subject = self.get_bids_required_info()[0]
    task = self.get_bids_required_info()[1]
    suffix = self.get_bids_required_info()[2]
    datasetMetadata = self.get_metadata()
    imageMetadata = {'subject': subject,
                     'session': self.conf['sessionId'],
                     'task': task,
                     'suffix': suffix,
                     'datatype': self.conf['imageType']}
    for volume in image:
        incremental = BidsIncremental(volume, imageMetadata, datasetMetadata)
        print(incremental)
        time.sleep(.2)
def dicomToBidsInc(dicomImg: pydicom.dataset.Dataset,
                   extraMetadata: dict = None) -> BidsIncremental:
    """
    Convert an in-memory DICOM image to a BIDS-Incremental.

    :param dicomImg: the DICOM dataset to convert
    :param extraMetadata: optional additional metadata merged over the
        DICOM-derived metadata (overrides on key collision)
    :return: BidsIncremental wrapping the converted NIfTI image
    """
    # TODO(spolcyn): Do this all in memory -- dicom2nifti is promising
    # Currently, there are 4 disk operations:
    # 1) Read DICOM (by dcm2niix)
    # 2) Write NIfTI
    # 3) Read NIfTI
    # 4) Read DICOM (for metadata)

    # NOTE: This is not the final version of this method.
    # The conversion from DICOM to BIDS-I and gathering all required metadata
    # can be complex, as DICOM doesn't necessarily have the metadata required
    # for BIDS in it by default. Thus, another component should handle the
    # logic and error handling surrounding this.

    # Use a None sentinel instead of a mutable default argument ({}), which
    # would be shared across all calls of this function.
    if extraMetadata is None:
        extraMetadata = {}

    niftiImage = convertDicomImgToNifti(dicomImg)
    metadata = getDicomMetadata(dicomImg)
    metadata.update(extraMetadata)

    return BidsIncremental(image=niftiImage, imageMetadata=metadata)
def testAsSingleIncremental(oneImageBidsI):
    """asSingleIncremental must coalesce repeated appends into a single
    4-D incremental (and return None for an empty run)."""
    run = BidsRun()
    assert run.asSingleIncremental() is None

    NUM_APPENDS = 5
    for _ in range(NUM_APPENDS):
        run.appendIncremental(oneImageBidsI)

    # Build the expected 4-D image by repeating the single source volume
    sourceImage = oneImageBidsI.image
    sourceData = getNiftiData(sourceImage)
    expectedData = np.zeros(sourceData.shape[:3] + (NUM_APPENDS,),
                            dtype=sourceData.dtype)
    for volumeIdx in range(NUM_APPENDS):
        expectedData[..., volumeIdx] = sourceData[..., 0]

    expectedImage = sourceImage.__class__(expectedData, sourceImage.affine,
                                          sourceImage.header)
    expectedIncremental = BidsIncremental(expectedImage,
                                          oneImageBidsI.getImageMetadata())
    assert run.asSingleIncremental() == expectedIncremental
def asSingleIncremental(self) -> BidsIncremental:
    """
    Coalesces the entire run into a single BIDS-I that can be sent over a
    network, written to disk, or added to an archive.

    Returns:
        BidsIncremental with all image data and metadata represented by the
        incrementals composing the run, or None if the run is empty.

    Examples:
        >>> incremental = run.asSingleIncremental()
        >>> incremental.writeToDisk('/tmp/new_dataset')
    """
    incrementalCount = self.numIncrementals()
    if incrementalCount == 0:
        return None

    coalescedShape = (self._imageHeader.get_data_shape()[:3] +
                      (incrementalCount,))

    # It is critical to set the dtype of the array according to the source
    # image's dtype. Without doing so, int data may be cast to float (the
    # numpy default type for a new array), which Nibabel will then write
    # float data to disk using the NIfTI scl_scope header scaling field.
    # This procedure almost always results in less precision than offered
    # by the original ints, which means images at either end of a
    # round-trip (read image data/put image data in numpy array/save image
    # data/read image from disk) will have arrays with slightly different
    # values.
    #
    # Also, note that pre-declaring the array and then using it as the out
    # array is substantially faster than just letting np.stack create and
    # return a new array, as of this writing (~60% faster on one test)
    coalescedData = np.empty(coalescedShape, order='F',
                             dtype=self._dataArrays[0].dtype)
    np.stack(self._dataArrays, axis=3, out=coalescedData)

    coalescedImage = self._imageKlass(coalescedData, self._imageAffine,
                                      self._imageHeader)
    return BidsIncremental(coalescedImage, self._imageMetadata)
def testGetIncremental(bidsArchive4D, sample3DNifti1, sample4DNifti1,
                       imageMetadata):
    """
    TODO(spolcyn): Support anatomical archives
    # 3D Case
    reference = BidsIncremental(sample3DNifti1, imageMetadata)
    incremental = bidsArchive3D.getIncremental(
        subject=imageMetadata["subject"],
        task=imageMetadata["task"],
        suffix=imageMetadata["suffix"],
        datatype="anat",
        session=imageMetadata["session"])

    # 3D image still results in 4D incremental
    assert len(incremental.imageDimensions) == 4
    assert incremental.imageDimensions[3] == 1
    assert incremental == reference
    """
    # 4D Case
    # Both the first and second image in the 4D archive should be identical
    reference = BidsIncremental(sample3DNifti1, imageMetadata)
    for imageIndex in range(2):
        incremental = bidsArchive4D.getIncremental(
            subject=imageMetadata["subject"],
            task=imageMetadata["task"],
            suffix=imageMetadata["suffix"],
            datatype="func",
            imageIndex=imageIndex,
            session=imageMetadata["session"])
        assert len(incremental.imageDimensions) == 4
        assert incremental.imageDimensions[3] == 1
        assert incremental == reference
def testValidConstruction(sample3DNifti1, sample3DNifti2, sample4DNifti1,
                          sampleNifti2, bidsArchive4D, imageMetadata):
    """All supported image inputs must construct successfully."""
    # 3-D should be promoted to 4-D
    assert BidsIncremental(sample3DNifti1, imageMetadata) is not None
    assert BidsIncremental(sample3DNifti2, imageMetadata) is not None

    # Both Nifti1 and Nifti2 images should work
    assert BidsIncremental(sample4DNifti1, imageMetadata) is not None
    assert BidsIncremental(sampleNifti2, imageMetadata) is not None

    # If the metadata provides a RepetitionTime or EchoTime that works
    # without adjustment, the construction should still work
    repetitionTimeKey = "RepetitionTime"
    savedValue = imageMetadata[repetitionTimeKey]
    imageMetadata[repetitionTimeKey] = 1.5
    assert BidsIncremental(sample4DNifti1, imageMetadata) is not None
    imageMetadata[repetitionTimeKey] = savedValue

    # Passing a BIDSImageFile is also valid
    archiveImage = bidsArchive4D.getImages()[0]
    assert type(archiveImage) is BIDSImageFile
    assert BidsIncremental(archiveImage, imageMetadata) is not None
def validBidsI(sample4DNifti1, imageMetadata):
    """
    Constructs and returns a known-valid BIDS-Incremental using known
    metadata.
    """
    return BidsIncremental(image=sample4DNifti1,
                           imageMetadata=imageMetadata)
def testEquals(sample4DNifti1, sample3DNifti1, imageMetadata):
    """Inequality must be detected for every differing component of a
    BIDS-Incremental."""
    # Test images with different headers
    assert BidsIncremental(sample4DNifti1, imageMetadata) != \
        BidsIncremental(sample3DNifti1, imageMetadata)

    # Test images with the same header, but different data
    scaledData = 2 * getNiftiData(sample4DNifti1)
    scaledNifti1 = nib.Nifti1Image(scaledData, sample4DNifti1.affine,
                                   header=sample4DNifti1.header)
    assert BidsIncremental(sample4DNifti1, imageMetadata) != \
        BidsIncremental(scaledNifti1, imageMetadata)

    # Test different image metadata
    alteredImageMetadata = deepcopy(imageMetadata)
    alteredImageMetadata["subject"] = "newSubject"
    assert BidsIncremental(sample4DNifti1, imageMetadata) != \
        BidsIncremental(sample4DNifti1, alteredImageMetadata)

    # Test different dataset metadata
    datasetMeta1 = {"Name": "Dataset_1", "BIDSVersion": "1.0"}
    datasetMeta2 = {"Name": "Dataset_2", "BIDSVersion": "2.0"}
    assert BidsIncremental(sample4DNifti1, imageMetadata, datasetMeta1) != \
        BidsIncremental(sample4DNifti1, imageMetadata, datasetMeta2)

    # Test different readme
    incremental1 = BidsIncremental(sample4DNifti1, imageMetadata)
    incremental2 = BidsIncremental(sample4DNifti1, imageMetadata)
    incremental1.readme = "README 1"
    incremental2.readme = "README 2"
    assert incremental1 != incremental2

    # Test different events file
    incremental1 = BidsIncremental(sample4DNifti1, imageMetadata)
    incremental2 = BidsIncremental(sample4DNifti1, imageMetadata)
    events1 = {'onset': [1, 25, 50],
               'duration': [10, 10, 10],
               'response_time': [15, 36, 70]}
    events2 = {key: [value + 5 for value in values]
               for key, values in events1.items()}
    incremental1.events = pd.DataFrame(data=events1)
    incremental2.events = pd.DataFrame(data=events2)
    assert incremental1 != incremental2