def testGetImages(bidsArchive4D, sample4DNifti1, bidsArchiveMultipleRuns,
                  imageMetadata):
    entities = ['subject', 'task', 'session']
    dataDict = {key: imageMetadata[key] for key in entities}
    archiveImages = bidsArchive4D.getImages(**dataDict, matchExact=False)

    assert len(archiveImages) == 1
    archiveImage = archiveImages[0].get_image()
    assert archiveImage.header == sample4DNifti1.header
    assert np.array_equal(getNiftiData(archiveImage),
                          getNiftiData(sample4DNifti1))

    # An exact match requires the set of provided entities and the set of
    # entities in a filename to be exactly the same (a 1-1 mapping). Since
    # 'run' isn't provided, an exact match will fail for the multiple-runs
    # archive, whose files have the 'run' entity, but non-exact matching will
    # succeed, as the provided entities match a subset of the file entities.
    archiveImages = bidsArchiveMultipleRuns.getImages(**dataDict,
                                                      matchExact=True)
    assert archiveImages == []

    matchingDict = dataDict.copy()
    matchingDict.update({'datatype': 'func', 'suffix': 'bold', 'run': 1})
    archiveImages = bidsArchiveMultipleRuns.getImages(**matchingDict,
                                                      matchExact=True)
    assert archiveImages != []

    archiveImages = bidsArchiveMultipleRuns.getImages(**dataDict,
                                                      matchExact=False)
    assert archiveImages != []
    assert len(archiveImages) == 2
def testQueryNifti(validBidsI):
    # Image data
    queriedData = validBidsI.getImageData()
    exactData = getNiftiData(validBidsI.image)
    assert np.array_equal(queriedData, exactData), \
        "{} elements not equal".format(np.sum(queriedData != exactData))

    # Header data
    queriedHeader = validBidsI.getImageHeader()
    exactHeader = validBidsI.image.header

    # Compare full image header
    assert queriedHeader.keys() == exactHeader.keys()
    for (field, queryValue) in queriedHeader.items():
        exactValue = exactHeader.get(field)
        if queryValue.dtype.char == 'S':
            assert queryValue == exactValue
        else:
            assert np.allclose(queryValue, exactValue, atol=0.0,
                               equal_nan=True)

    # Compare header field: dimensions
    FIELD = "dim"
    assert np.array_equal(queriedHeader.get(FIELD), exactHeader.get(FIELD))
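# Aside (illustrative sketch, not part of the original suite): the
# equal_nan=True in testQueryNifti matters because NIfTI headers commonly
# hold NaN in float fields (e.g., scl_slope when scaling is unset), and NaN
# never compares equal under ordinary elementwise comparison. The test name
# below is hypothetical.
def testNanComparisonSketch():
    # NaN != NaN under the default comparison rules
    assert not np.allclose(np.nan, np.nan, equal_nan=False)
    # equal_nan=True treats NaNs in matching positions as equal
    assert np.allclose(np.nan, np.nan, equal_nan=True)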
@pytest.fixture
def sample2DNifti1():
    nifti = readNifti(test_3DNifti1Path)
    newData = getNiftiData(nifti)
    # NIfTI headers store each dimension as a 2-byte signed short, so any
    # single dimension is capped at 32767; 100x100 fits comfortably
    newData = newData.flatten()[:10000].reshape((100, 100))
    return nib.Nifti1Image(newData, nifti.affine)
def testEquals(sample4DNifti1, sample3DNifti1, imageMetadata):
    # Test images with different headers
    assert BidsIncremental(sample4DNifti1, imageMetadata) != \
        BidsIncremental(sample3DNifti1, imageMetadata)

    # Test images with the same header, but different data
    newData = 2 * getNiftiData(sample4DNifti1)
    doubledNifti1 = nib.Nifti1Image(newData, sample4DNifti1.affine,
                                    header=sample4DNifti1.header)
    assert BidsIncremental(sample4DNifti1, imageMetadata) != \
        BidsIncremental(doubledNifti1, imageMetadata)

    # Test different image metadata
    modifiedImageMetadata = deepcopy(imageMetadata)
    modifiedImageMetadata["subject"] = "newSubject"
    assert BidsIncremental(sample4DNifti1, imageMetadata) != \
        BidsIncremental(sample4DNifti1, modifiedImageMetadata)

    # Test different dataset metadata
    datasetMeta1 = {"Name": "Dataset_1", "BIDSVersion": "1.0"}
    datasetMeta2 = {"Name": "Dataset_2", "BIDSVersion": "2.0"}
    assert BidsIncremental(sample4DNifti1, imageMetadata, datasetMeta1) != \
        BidsIncremental(sample4DNifti1, imageMetadata, datasetMeta2)

    # Test different readme
    incremental1 = BidsIncremental(sample4DNifti1, imageMetadata)
    incremental2 = BidsIncremental(sample4DNifti1, imageMetadata)
    readme1 = "README 1"
    readme2 = "README 2"
    incremental1.readme = readme1
    incremental2.readme = readme2
    assert incremental1 != incremental2

    # Test different events file
    incremental1 = BidsIncremental(sample4DNifti1, imageMetadata)
    incremental2 = BidsIncremental(sample4DNifti1, imageMetadata)
    events1 = {
        'onset': [1, 25, 50],
        'duration': [10, 10, 10],
        'response_time': [15, 36, 70]
    }
    events2 = {key: [v + 5 for v in events1[key]] for key in events1.keys()}
    incremental1.events = pd.DataFrame(data=events1)
    incremental2.events = pd.DataFrame(data=events2)
    assert incremental1 != incremental2
def appendDataMatches(archive: BidsArchive, reference: BidsIncremental,
                      startIndex: int = 0, endIndex: int = -1):
    entities = filterEntities(reference.imageMetadata)
    images = archive.getImages(**entities)
    assert len(images) == 1
    imageFromArchive = images[0].get_image()

    fullImageData = getNiftiData(imageFromArchive)
    if endIndex == -1:
        # Default to the full extent of the time axis; len() would measure
        # the first (spatial) axis instead
        endIndex = fullImageData.shape[-1]
    appendedData = fullImageData[..., startIndex:endIndex]

    appendedImage = nib.Nifti1Image(appendedData,
                                    imageFromArchive.affine,
                                    imageFromArchive.header)
    return BidsIncremental(appendedImage, reference.imageMetadata) == reference
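# A minimal usage sketch for appendDataMatches (hypothetical test, not from
# the suite): append a single-volume incremental into an empty archive under
# pytest's tmp_path, then confirm the archived data round-trips. Assumes the
# 'oneImageBidsI' fixture used elsewhere in these tests and that BidsArchive
# accepts the path of an empty directory.
def testAppendDataMatchesSketch(tmp_path, oneImageBidsI):
    archive = BidsArchive(str(tmp_path))
    archive._appendIncremental(oneImageBidsI)
    # The full archived image should reconstruct the appended incremental
    assert appendDataMatches(archive, oneImageBidsI)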
def testAsSingleIncremental(oneImageBidsI):
    run = BidsRun()
    assert run.asSingleIncremental() is None

    NUM_APPENDS = 5
    for i in range(NUM_APPENDS):
        run.appendIncremental(oneImageBidsI)

    oldImage = oneImageBidsI.image
    imageData = getNiftiData(oldImage)
    newDataShape = imageData.shape[:3] + (NUM_APPENDS, )
    newData = np.zeros(newDataShape, dtype=imageData.dtype)
    for i in range(NUM_APPENDS):
        newData[..., i] = imageData[..., 0]

    newImage = oldImage.__class__(newData, oldImage.affine, oldImage.header)
    consolidatedBidsI = BidsIncremental(newImage,
                                        oneImageBidsI.getImageMetadata())

    assert run.asSingleIncremental() == consolidatedBidsI
def __init__(self, image: nib.Nifti1Image, imageMetadata: dict,
             datasetDescription: dict = None):
    """
    Initializes a BIDS Incremental object with provided image and metadata.

    Args:
        image: NIfTI image as a NiBabel NiftiImage or PyBids BIDSImageFile
        imageMetadata: Metadata for image, which must include all variables
            in BidsIncremental.REQUIRED_IMAGE_METADATA.
        datasetDescription: Top-level dataset metadata for the BIDS dataset
            to be placed in a dataset_description.json. Defaults to None,
            in which case a default description is used.

    Raises:
        MissingMetadataError: If any required metadata is missing.
        TypeError: If the image is not a NiBabel Nifti1Image or Nifti2Image
            or a PyBids BIDSImageFile.

    Examples:
        >>> import nibabel as nib
        >>> imageMetadata = {'subject': '01', 'task': 'test',
                             'suffix': 'bold', 'datatype': 'func',
                             'RepetitionTime': 1.5}
        >>> image = nib.load('/tmp/testfile.nii')
        >>> datasetDescription = {'Name': 'Example Dataset',
                                  'BIDSVersion': '1.5.1',
                                  'Authors': 'The RT-Cloud Authors'}
        >>> incremental = BidsIncremental(image, imageMetadata,
                                          datasetDescription)
        >>> print(incremental)
        "Image shape: (64, 64, 27, 1); Metadata Key Count: 6; BIDS-I
            Version: 1"
    """
    # TODO(spolcyn): Enable a BIDS incremental to store an index that
    # specifies where the image should be inserted into the archive. This
    # would extend capabilities beyond just appending.

    """ Do basic input validation """
    # IMAGE
    validTypes = [nib.Nifti1Image, nib.Nifti2Image, BIDSImageFile]
    if image is None or type(image) not in validTypes:
        raise TypeError("Image must be one of " +
                        str([typ.__name__ for typ in validTypes]) +
                        f" (got {type(image)})")
    if type(image) is BIDSImageFile:
        image = image.get_image()

    # DATASET DESCRIPTION
    if datasetDescription is not None:
        missingFields = [field for field in DATASET_DESC_REQ_FIELDS
                         if datasetDescription.get(field, None) is None]
        if missingFields:
            raise MissingMetadataError(
                f"Dataset description needs: {str(missingFields)}")

    """ Process, validate, and store image metadata """
    imageMetadata = self._preprocessMetadata(imageMetadata)
    self._exceptIfMissingMetadata(imageMetadata)
    self._imgMetadata = self._postprocessMetadata(imageMetadata)

    """ Store dataset description """
    if datasetDescription is None:
        self.datasetDescription = deepcopy(DEFAULT_DATASET_DESC)
    else:
        self.datasetDescription = deepcopy(datasetDescription)

    """ Validate and store image """
    # Remove singleton dimensions past the 3rd dimension
    # Note: this function does not remove trailing 1's if the image is 3-D
    # (i.e., a 160x160x1 image retains that shape), so a later check is
    # needed to ensure that the 3rd dimension is > 1
    image = nib.funcs.squeeze_image(image)

    # BIDS-I is currently used for BOLD data, and according to the BIDS
    # Standard, BOLD data must be in 4-D NIfTI files. Thus, upgrade 3-D
    # images to 4-D images with a singleton final dimension, if necessary.
    imageShape = image.shape
    if len(imageShape) < 3:
        raise ValueError("Image must have at least 3 dimensions")
    elif len(imageShape) == 3:
        if imageShape[2] <= 1:
            raise ValueError("Image's 3rd (and any higher) dimensions are "
                             "<= 1, which means it is a 2-D image; images "
                             "must have at least 3 dimensions")

        newData = np.expand_dims(getNiftiData(image), -1)
        image = image.__class__(newData, image.affine, image.header)
        correct3DHeaderTo4D(image, self._imgMetadata['RepetitionTime'])

    assert len(image.shape) == 4

    self.image = image

    # Configure README
    self.readme = DEFAULT_README

    # Configure events file
    self.events = pd.DataFrame(columns=DEFAULT_EVENTS_HEADERS)
    self.events = correctEventsFileDatatypes(self.events)

    # BIDS-I version for serialization
    self.version = 1
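# An illustrative sketch of the 3-D to 4-D upgrade performed in the
# constructor above: a 3-D BOLD volume comes back out of BidsIncremental as
# a 4-D image with a singleton time dimension. It assumes the metadata
# fields from the docstring example satisfy REQUIRED_IMAGE_METADATA (real
# datasets may require more fields):
#
#   meta = {'subject': '01', 'task': 'test', 'suffix': 'bold',
#           'datatype': 'func', 'RepetitionTime': 1.5}
#   vol3D = nib.Nifti1Image(np.zeros((64, 64, 27), dtype=np.int16),
#                           np.eye(4))
#   incremental = BidsIncremental(vol3D, meta)
#   assert incremental.image.shape == (64, 64, 27, 1)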
def getImageData(self) -> np.ndarray:
    return getNiftiData(self.image)
@pytest.fixture
def samplePseudo2DNifti1(sample2DNifti1):
    data = getNiftiData(sample2DNifti1)
    data = data.reshape((data.shape[0], data.shape[1], 1, 1))
    return nib.Nifti1Image(data, sample2DNifti1.affine)
def testGetBidsRun(bidsArchiveMultipleRuns, sampleBidsEntities,
                   sample4DNifti1, bidsArchive4D, validBidsI):
    # Entities that aren't present in the archive won't match
    with pytest.raises(NoMatchError) as err:
        bidsArchive4D.getBidsRun(subject='notARealSubject')
    assert "Found no runs matching entities" in str(err.value)

    # Just one entity is not specific enough
    with pytest.raises(QueryError) as err:
        bidsArchiveMultipleRuns.getBidsRun(
            subject=sampleBidsEntities['subject'])
    assert "Provided entities were not unique to one run" in str(err.value)

    run = bidsArchive4D.getBidsRun(**sampleBidsEntities)
    runData = getNiftiData(run.getIncremental(0).image).flatten()
    incrementalData = getNiftiData(validBidsI.image)[..., 0].flatten()
    assert runData.shape == incrementalData.shape
    assert np.array_equal(runData, incrementalData)

    # Ensure that the run has the expected readme, events, and dataset
    # description in it
    assert run._readme == DEFAULT_README
    assert run._datasetDescription == DEFAULT_DATASET_DESC
    for column in DEFAULT_EVENTS_HEADERS:
        assert column in run._events.columns

    # Now change the archive and ensure the values for a new run are correct
    # Change readme
    readmeFile = Path(bidsArchive4D.getReadme().path)
    newReadmeText = 'new pytest readme'
    readmeFile.write_text(newReadmeText)

    # Change dataset description
    datasetDescriptionFile = Path(bidsArchive4D.rootPath,
                                  'dataset_description.json')
    with open(datasetDescriptionFile, 'w') as f:
        newDatasetDescription = DEFAULT_DATASET_DESC.copy()
        newDatasetDescription['newField'] = 'this is some new data'
        json.dump(newDatasetDescription, f)

    # Change events
    eventsFile = bidsArchive4D.getEvents(**sampleBidsEntities)[0]
    eventsDF = correctEventsFileDatatypes(eventsFile.get_df())
    newEventsRow = [1, 2]
    eventsDF.loc[len(eventsDF)] = newEventsRow
    writeDataFrameToEvents(eventsDF, eventsFile.path)

    # Get new run and test it
    newRun = bidsArchive4D.getBidsRun(**sampleBidsEntities)
    assert newRun._readme == newReadmeText
    assert newRun._datasetDescription == newDatasetDescription
    pd.testing.assert_frame_equal(eventsDF, newRun._events)

    # With multiple runs, not specifying run isn't good enough
    entities = sampleBidsEntities.copy()
    del entities['run']
    with pytest.raises(QueryError) as err:
        bidsArchiveMultipleRuns.getBidsRun(**entities)
    assert "Provided entities were not unique to one run" in str(err.value)

    run = bidsArchiveMultipleRuns.getBidsRun(**sampleBidsEntities)
    assert run is not None
    assert run.numIncrementals() == sample4DNifti1.header.get_data_shape()[3]
def testGetNiftiData(sample4DNifti1):
    extracted = getNiftiData(sample4DNifti1)
    fromRawDataobj = np.asanyarray(sample4DNifti1.dataobj,
                                   dtype=sample4DNifti1.dataobj.dtype)

    assert np.array_equal(extracted, fromRawDataobj)
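# Aside (illustrative sketch, not part of the original suite): reading via
# dataobj as above preserves the on-disk dtype, whereas NiBabel's
# get_fdata() always promotes to a floating-point type (float64 by
# default). The test name below is hypothetical.
def testDtypePreservationSketch():
    img = nib.Nifti1Image(np.zeros((2, 2, 2), dtype=np.int16), np.eye(4))
    # dataobj keeps the stored integer dtype
    assert np.asanyarray(img.dataobj).dtype == np.int16
    # get_fdata() promotes to float64
    assert img.get_fdata().dtype == np.float64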
def _appendIncremental(self,
                       incremental: BidsIncremental,
                       makePath: bool = True,
                       validateAppend: bool = True) -> bool:
    """
    Appends a BIDS Incremental's image data and metadata to the archive,
    creating new directories if necessary (this behavior can be
    overridden). For internal use only.

    Args:
        incremental: BIDS Incremental to append
        makePath: Create a new directory path for the BIDS-I data if needed
            (default: True).
        validateAppend: Compares image metadata and NIfTI headers to check
            that the images being appended are part of the same sequence
            and don't conflict with each other (default: True).

    Raises:
        DimensionError: If the image to append to in the archive is neither
            3-D nor 4-D.
        StateError: If the image path within the BIDS-I would result in
            directory creation and makePath is set to False.
        MetadataMismatchError: If the data to append is incompatible with
            existing data in the archive.

    Returns:
        True if the append succeeded, False otherwise.

    Examples:
        Assume we have a NIfTI image 'image' and a metadata dictionary
        'metadata' with all required metadata for a BIDS Incremental.

        >>> archive = BidsArchive('.')
        >>> incremental = BidsIncremental(image, metadata)
        >>> archive._appendIncremental(incremental)

        If we don't want to create any new files/directories in the
        archive, makePath can be set to False.

        >>> archive = BidsArchive('/tmp/emptyDirectory')
        >>> archive._appendIncremental(incremental, makePath=False)
        False
    """
    # 1) Create target paths for image in archive
    dataDirPath = incremental.getDataDirPath()
    imgPath = incremental.getImageFilePath()

    # 2) Verify we have a valid way to append the image to the archive.
    # There are 4 cases:
    # 2.0) Archive is empty and must be created
    # 2.1) Image already exists within archive; append this NIfTI to it
    # 2.2) Image doesn't exist in archive, but the rest of the path is
    # valid for the archive; create a new NIfTI file within the archive
    # 2.3) No image append possible and no creation possible; fail append

    # Write the specified part of an incremental, taking appropriate
    # actions for the layout update
    def writeIncremental(onlyData=False):
        incremental.writeToDisk(self.rootPath, onlyData=onlyData)
        self._updateLayout()

    # 2.0) Archive is empty and must be created
    if self.isEmpty():
        if makePath:
            writeIncremental()
            return True
        # If we can't create new files in an empty archive, no valid
        # append is possible
        else:
            return False

    # 2.1) Image already exists within archive; append this NIfTI to it
    imageFile = self.tryGetFile(imgPath)
    if imageFile is not None:
        logger.debug("Image exists in archive, appending")
        archiveImg = imageFile.get_image()

        # Validate header match
        if validateAppend:
            compatible, errorMsg = niftiImagesAppendCompatible(
                incremental.image, archiveImg)
            if not compatible:
                raise MetadataMismatchError(
                    "NIfTI headers not append compatible: " + errorMsg)

            compatible, errorMsg = metadataAppendCompatible(
                incremental.getImageMetadata(),
                self.getSidecarMetadata(imageFile))
            if not compatible:
                raise MetadataMismatchError(
                    "Image metadata not append compatible: " + errorMsg)

        # Ensure archive image is 4-D, expanding if not
        archiveData = getNiftiData(archiveImg)
        nDimensions = len(archiveData.shape)
        if nDimensions < 3 or nDimensions > 4:
            # RT-Cloud assumes 3-D or 4-D NIfTI images; other sizes have
            # unknown interpretations
            raise DimensionError(
                "Expected image to have 3 or 4 dimensions "
                f"(got {nDimensions})")

        if nDimensions == 3:
            archiveData = np.expand_dims(archiveData, 3)
            correct3DHeaderTo4D(
                archiveImg, incremental.getMetadataField("RepetitionTime"))

        # Create the new, combined image to replace the old one
        # TODO(spolcyn): Replace this with NiBabel's concat_images function
        # when the dtype issue with the save/load cycle is fixed
        # https://github.com/nipy/nibabel/issues/986
        newArchiveData = np.concatenate(
            (archiveData, getNiftiData(incremental.image)), axis=3)
        newImg = nib.Nifti1Image(newArchiveData,
                                 affine=archiveImg.affine,
                                 header=archiveImg.header)
        newImg.update_header()

        # Since the NIfTI image is only being appended to, no additional
        # files are being added, so the BIDSLayout's file index remains
        # accurate. Thus, avoid the expensive layout update.
        self._addImage(newImg, imgPath, updateLayout=False)
        return True

    # 2.2) Image doesn't exist in archive, but the rest of the path is
    # valid for the archive; create a new NIfTI file within the archive
    if self.dirExistsInArchive(dataDirPath) or makePath:
        logger.debug("Image doesn't exist in archive, creating")
        writeIncremental(onlyData=True)
        return True

    # 2.3) No image append possible and no creation possible; fail append
    return False
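# An illustrative sketch of the time-axis concatenation used in case 2.1
# above: two 4-D images with matching spatial shapes combine along axis 3,
# which is what grows the archive image by the incremental's volumes. All
# names below are local to the sketch:
#
#   a = nib.Nifti1Image(np.zeros((4, 4, 4, 2)), np.eye(4))
#   b = nib.Nifti1Image(np.ones((4, 4, 4, 3)), np.eye(4))
#   combined = np.concatenate(
#       (np.asanyarray(a.dataobj), np.asanyarray(b.dataobj)), axis=3)
#   assert combined.shape == (4, 4, 4, 5)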