def appendDataMatches(archive: BidsArchive, reference: BidsIncremental,
                      startIndex: int = 0, endIndex: int = -1):
    """
    Check whether data appended to an archive matches a reference Incremental.

    Finds the single archive image matching the reference's BIDS entities,
    slices volumes [startIndex, endIndex) along the time (last) axis, wraps
    them in a new BIDS Incremental, and compares that to the reference.

    Args:
        archive: Archive that was appended to.
        reference: Incremental whose data is expected to be in the archive.
        startIndex: First time-axis volume to compare (default 0).
        endIndex: One-past-last time-axis volume to compare; -1 means
            "through the end of the time axis" (default -1).

    Returns:
        True if the sliced archive data forms an Incremental equal to the
        reference, False otherwise.
    """
    entities = filterEntities(reference.getImageMetadata())
    images = archive.getImages(**entities)
    assert len(images) == 1
    imageFromArchive = images[0].get_image()

    fullImageData = getNiftiData(imageFromArchive)
    if endIndex == -1:
        # Use shape[-1], not len(fullImageData): len() returns the size of
        # the FIRST axis, but the slice below operates on the LAST (time)
        # axis. The old len() form only worked when the first spatial
        # dimension happened to be >= the number of time points.
        endIndex = fullImageData.shape[-1]
    appendedData = fullImageData[..., startIndex:endIndex]

    appendedImage = nib.Nifti1Image(appendedData,
                                    imageFromArchive.affine,
                                    imageFromArchive.header)

    newIncremental = BidsIncremental(appendedImage,
                                     reference.getImageMetadata())
    return newIncremental == reference
def appendIncremental(self, incremental: BidsIncremental,
                      validateAppend: bool = True) -> None:
    """
    Appends an incremental to this run's data, setting the run's entities if
    the run is empty.

    Arguments:
        incremental: The incremental to add to the run.
        validateAppend: Validate the incremental matches the current run's
            data (default True). Turning off is useful for efficiently
            creating a whole run at once from an existing image volume, where
            all data is known to match already.

    Raises:
        MetadataMismatchError: If either the incremental's entities, its
            image's NIfTI header, or its metadata doesn't match the existing
            run's data.

    Examples:
        Suppose a NIfTI image and metadata dictionary are available in the
        environment.

        >>> incremental = BidsIncremental(image, metadata)
        >>> run = BidsRun()
        >>> run.appendIncremental(incremental)
        >>> metadata['subject'] = 'new_subject'
        >>> incremental2 = BidsIncremental(image, metadata)
        >>> run.appendIncremental(incremental2)
        MetadataMismatchError
    """
    # pandas is used below for the events-file concatenation; imported
    # locally because this file's top-of-file import block is managed
    # elsewhere.
    import pandas as pd

    # Set this run's entities if not already present
    if len(self._entities) == 0:
        self._entities = incremental.getEntities()

    # First append: seed all cached run state from the incremental.
    if self._imageMetadata is None:
        self._imageMetadata = incremental.getImageMetadata()
    if self._imageHeader is None:
        self._imageHeader = incremental.image.header.copy()
    if self._imageAffine is None:
        self._imageAffine = incremental.image.affine.copy()
    if self._imageKlass is None:
        self._imageKlass = incremental.image.__class__
    if self._readme is None:
        self._readme = incremental.readme
    if self._datasetDescription is None:
        self._datasetDescription = deepcopy(incremental.datasetDescription)
    if self._events is None:
        self._events = incremental.events.copy(deep=True)

    if validateAppend:
        entityDifference = \
            symmetricDictDifference(self._entities,
                                    incremental.getEntities())
        if len(entityDifference) != 0:
            # Two cases:
            # 1) New incremental matches all existing entities, and just
            # adds new, more specific ones (update run)
            # 2) New incremental doesn't match some existing entities (fail)
            mismatchKeys = [key for key in entityDifference.keys()
                            if key in self._entities]
            if len(mismatchKeys) == 0:
                # Add new, more specific entities
                self._entities.update(incremental.getEntities())
            else:
                errorMsg = ("Incremental's BIDS entities do not match this "
                            "run's entities (difference: "
                            f"{entityDifference})")
                raise MetadataMismatchError(errorMsg)

        if self.numIncrementals() > 0:
            canAppend, niftiErrorMsg = \
                niftiHeadersAppendCompatible(incremental.image.header,
                                             self._imageHeader)
            if not canAppend:
                errorMsg = ("Incremental's NIfTI header not compatible "
                            f" with this run's images ({niftiErrorMsg})")
                raise MetadataMismatchError(errorMsg)

        canAppend, metadataErrorMsg = metadataAppendCompatible(
            incremental.getImageMetadata(),
            self._imageMetadata)
        if not canAppend:
            errorMsg = ("Incremental's metadata not compatible "
                        f" with this run's images ({metadataErrorMsg})")
            raise MetadataMismatchError(errorMsg)

        # Verify readme
        if not incremental.readme == self._readme:
            errorMsg = ("Incremental's readme doesn't match run's readme "
                        "(incremental: {}, run: {})"
                        .format(incremental.readme, self._readme))
            raise MetadataMismatchError(errorMsg)

        # Verify dataset description
        datasetDescriptionDifference = \
            symmetricDictDifference(self._datasetDescription,
                                    incremental.datasetDescription)
        if len(datasetDescriptionDifference) != 0:
            errorMsg = ("Incremental's dataset description doesn't match "
                        "run's dataset description {}"
                        .format(datasetDescriptionDifference))
            raise MetadataMismatchError(errorMsg)

        # Verify first part of new events file matches all rows in existing
        # events file
        incrementalSubset = incremental.events.iloc[0:len(self._events)]
        if not incrementalSubset.equals(self._events):
            errorMsg = ("Run's existing events must be found in first part "
                        "of incremental's events file, weren't: "
                        "\nexisting:\n{existing}\n"
                        "\nnew:\n{new}\n".format(
                            existing=self._events, new=incrementalSubset))
            raise MetadataMismatchError(errorMsg)

    # Update events file with new events.
    # NOTE: DataFrame.append was deprecated in pandas 1.4 and removed in
    # pandas 2.0; pd.concat is the supported equivalent and also works on
    # older pandas versions.
    newRowSubset = incremental.events.iloc[len(self._events):]
    self._events = pd.concat([self._events, newRowSubset],
                             ignore_index=True)

    # Slice up the incremental into smaller component images if it has
    # multiple images in its image volume
    imagesInVolume = incremental.getImageDimensions()[3]

    try:
        import indexed_gzip  # noqa
    except ImportError:
        warnings.warn("Package 'indexed_gzip' not available: appending BIDS"
                      " Incremental that uses a gzipped NIfTI file as its "
                      "underlying data source will be very slow. Install "
                      "the 'indexed_gzip' package with 'conda install "
                      "indexed_gzip' to improve performance.")

    # Slice the dataobj so we ensure that data is read into memory
    newArrays = [incremental.image.dataobj[..., imageIdx] for imageIdx in
                 range(imagesInVolume)]
    if len(self._dataArrays) == 0:
        self._dataArrays = newArrays
    else:
        self._dataArrays.extend(newArrays)
def _appendIncremental(self,
                       incremental: BidsIncremental,
                       makePath: bool = True,
                       validateAppend: bool = True) -> bool:
    """
    Appends a BIDS Incremental's image data and metadata to the archive,
    creating new directories if necessary (this behavior can be overridden).
    For internal use only.

    Args:
        incremental: BIDS Incremental to append
        makePath: Create new directory path for BIDS-I data if needed.
            (default: True).
        validateAppend: Compares image metadata and NIfTI headers to check
            that the images being appended are part of the same sequence and
            don't conflict with each other (default: True).

    Raises:
        RuntimeError: If the image to append to in the archive is not either
            3D or 4D.
        StateError: If the image path within the BIDS-I would result in
            directory creation and makePath is set to False.
            NOTE(review): in the code below, the makePath=False paths return
            False rather than raising — presumably StateError can come from
            writeToDisk; confirm against that helper.
        ValidationError: If the data to append is incompatible with existing
            data in the archive.

    Returns:
        True if the append succeeded, False otherwise.

    Examples:
        Assume we have a NIfTI image 'image' and a metadata dictionary
        'metadata' with all required metadata for a BIDS Incremental.

        >>> archive = BidsArchive('.')
        >>> incremental = BidsIncremental(image, metadata)
        >>> archive._appendIncremental(incremental)

        If we don't want to create any new files/directories in the archive,
        makePath can be set to false.

        >>> archive = BidsArchive('/tmp/emptyDirectory')
        >>> archive._appendIncremental(incremental, makePath=False)
        False
    """
    # 1) Create target paths for image in archive
    dataDirPath = incremental.getDataDirPath()
    imgPath = incremental.getImageFilePath()

    # 2) Verify we have a valid way to append the image to the archive.
    # 4 cases:
    # 2.0) Archive is empty and must be created
    # 2.1) Image already exists within archive, append this NIfTI to it
    # 2.2) Image doesn't exist in archive, but rest of the path is valid for
    # the archive; create new Nifti file within the archive
    # 2.3) No image append possible and no creation possible; fail append

    # Write the specified part of an incremental, taking appropriate actions
    # for the layout update
    def writeIncremental(onlyData=False):
        incremental.writeToDisk(self.rootPath, onlyData=onlyData)
        self._updateLayout()

    # 2.0) Archive is empty and must be created
    if self.isEmpty():
        if makePath:
            writeIncremental()
            return True
        # If can't create new files in an empty archive, no valid append
        else:
            return False

    # 2.1) Image already exists within archive, append this NIfTI to it
    imageFile = self.tryGetFile(imgPath)
    if imageFile is not None:
        logger.debug("Image exists in archive, appending")
        archiveImg = imageFile.get_image()

        # Validate header match
        if validateAppend:
            compatible, errorMsg = niftiImagesAppendCompatible(
                incremental.image, archiveImg)
            if not compatible:
                raise MetadataMismatchError(
                    "NIfTI headers not append compatible: " + errorMsg)

            compatible, errorMsg = metadataAppendCompatible(
                incremental.getImageMetadata(),
                self.getSidecarMetadata(imageFile))
            if not compatible:
                raise MetadataMismatchError(
                    "Image metadata not append compatible: " + errorMsg)

        # Ensure archive image is 4D, expanding if not
        archiveData = getNiftiData(archiveImg)
        nDimensions = len(archiveData.shape)
        if nDimensions < 3 or nDimensions > 4:
            # RT-Cloud assumes 3D or 4D NIfTI images, other sizes have
            # unknown interpretations
            raise DimensionError(
                "Expected image to have 3 or 4 dimensions "
                f"(got {nDimensions})")

        if nDimensions == 3:
            # Promote 3D to 4D (single time point) so concatenation along
            # axis 3 below is well-defined; fix up the header to match.
            archiveData = np.expand_dims(archiveData, 3)
            correct3DHeaderTo4D(
                archiveImg, incremental.getMetadataField("RepetitionTime"))

        # Create the new, combined image to replace the old one
        # TODO(spolcyn): Replace this with Nibabel's concat_images function
        # when the dtype issue with save/load cycle is fixed
        # https://github.com/nipy/nibabel/issues/986
        newArchiveData = np.concatenate(
            (archiveData, getNiftiData(incremental.image)), axis=3)
        newImg = nib.Nifti1Image(newArchiveData,
                                 affine=archiveImg.affine,
                                 header=archiveImg.header)
        newImg.update_header()
        # Since the NIfTI image is only being appended to, no additional
        # files are being added, so the BIDSLayout's file index remains
        # accurate. Thus, avoid the expensive layout update.
        self._addImage(newImg, imgPath, updateLayout=False)
        return True

    # 2.2) Image doesn't exist in archive, but rest of the path is valid for
    # the archive; create new Nifti file within the archive
    if self.dirExistsInArchive(dataDirPath) or makePath:
        logger.debug("Image doesn't exist in archive, creating")
        writeIncremental(onlyData=True)
        return True

    # 2.3) No image append possible and no creation possible; fail append
    return False