Example #1
def testIsEmpty(tmpdir, bidsArchive4D):
    datasetRoot = Path(tmpdir, "bids-archive")
    archive = BidsArchive(datasetRoot)
    assert archive is not None
    assert archive.isEmpty()

    assert not bidsArchive4D.isEmpty()
Example #2
def testGetBidsRunInheritedEvents(tmpdir, validBidsI, sampleBidsEntities):
    # Add an events file on the top level with data
    rootPath = os.path.join(tmpdir, 'dataset')
    incrementalDFDict = {col: [4] for col in DEFAULT_EVENTS_HEADERS}
    incrementalDF = pd.DataFrame.from_dict(incrementalDFDict)
    validBidsI.events = correctEventsFileDatatypes(incrementalDF)

    validBidsI.writeToDisk(rootPath)
    archive = BidsArchive(rootPath)

    newDFDict = {col: [1, 2, 3] for col in DEFAULT_EVENTS_HEADERS}
    newDF = pd.DataFrame.from_dict(newDFDict)
    newDF = correctEventsFileDatatypes(newDF)
    writeDataFrameToEvents(
        newDF, '{dirPath}/task-{taskName}_events.tsv'.format(
            dirPath=rootPath, taskName=sampleBidsEntities['task']))
    archive._updateLayout()

    # Get the BIDS run
    run = archive.getBidsRun(**sampleBidsEntities)

    # Ensure that the BIDS run has all of the data added at the top level
    # pandas removed DataFrame.append in 2.0; pd.concat is the equivalent
    combinedDF = pd.concat([newDF, incrementalDF])
    combinedDF.sort_values(by='onset', inplace=True, ignore_index=True)
    assert combinedDF.equals(run._events)
Example #3
def testDiskOutput(validBidsI, tmpdir):
    # Write the archive
    datasetRoot = os.path.join(tmpdir, "bids-pytest-dataset")
    validBidsI.writeToDisk(datasetRoot)

    # Validate the output can be opened by BidsArchive and verified against the
    # source BIDS-Incremental
    archive = BidsArchive(datasetRoot)
    archiveImage = archive.getImages()[0]

    # Remove pseudo entities to avoid conflict with the validBidsI
    metadata = archive.getSidecarMetadata(archiveImage, includeEntities=True)
    for entity in PYBIDS_PSEUDO_ENTITIES:
        metadata.pop(entity)

    incrementalFromArchive = BidsIncremental(archiveImage, metadata)
    assert incrementalFromArchive == validBidsI

    assert isValidBidsArchive(archive.rootPath)

    # Try only writing data
    datasetRoot = os.path.join(tmpdir, "bids-pytest-dataset-2")
    validBidsI.writeToDisk(datasetRoot, onlyData=True)
    assert not os.path.exists(os.path.join(datasetRoot, "README"))
    assert not os.path.exists(
        os.path.join(datasetRoot, "dataset_description.json"))
Example #4
def testEmptyArchiveAppend(validBidsI, imageMetadata, tmpdir):
    # Create in root with no BIDS-I, then append to make non-empty archive
    datasetRoot = Path(tmpdir, testEmptyArchiveAppend.__name__)
    archive = BidsArchive(datasetRoot)
    archive.appendIncremental(validBidsI)

    assert not archive.isEmpty()
    assert archiveHasMetadata(archive, imageMetadata)
    assert appendDataMatches(archive, validBidsI)
    assert isValidBidsArchive(datasetRoot)
Example #5
def __init__(self, archivePath, **entities):
    """
    Args:
        archivePath: Absolute path of the BIDS archive.
        entities: BIDS entities (subject, session, task, run, suffix, datatype)
            that define the particular subject/run of the data to stream
    """
    self.bidsArchive = BidsArchive(archivePath)
    self.bidsRun = self.bidsArchive.getBidsRun(**entities)
    self.numVolumes = self.bidsRun.numIncrementals()
    self.nextVol = 0
Example #6
def testGetEvents(validBidsI, imageMetadata, tmpdir):
    archive = BidsArchive(tmpdir)
    archive.appendIncremental(validBidsI)

    # Get the events from the archive as a pandas data frame
    events = archive.getEvents()[0].get_df()
    assert events is not None

    # Check the required columns are present in the events file data frame
    for column in ['onset', 'duration', 'response_time']:
        assert column in events.columns
Example #7
def testMetadataValidation(imageMetadata, caplog):
    metadataCopy = imageMetadata.copy()

    # Test failure on sample of fields that must be the same
    matchFields = ["Modality", "MagneticFieldStrength", "ImagingFrequency",
                   "Manufacturer", "ManufacturersModelName", "InstitutionName",
                   "InstitutionAddress", "DeviceSerialNumber", "StationName",
                   "BodyPartExamined", "PatientPosition", "EchoTime",
                   "ProcedureStepDescription", "SoftwareVersions",
                   "MRAcquisitionType", "SeriesDescription", "ProtocolName",
                   "ScanningSequence", "SequenceVariant", "ScanOptions",
                   "SequenceName", "SpacingBetweenSlices", "SliceThickness",
                   "ImageType", "RepetitionTime", "PhaseEncodingDirection",
                   "FlipAngle", "InPlanePhaseEncodingDirectionDICOM",
                   "ImageOrientationPatientDICOM", "PartialFourier"]

    for field in matchFields:
        oldValue = metadataCopy.get(field, None)

        # If field not present, append should work
        if oldValue is None:
            # The method returns a (bool, str) tuple, which is always truthy;
            # unpack it so the assert actually checks compatibility
            compatible, _ = BidsArchive._metadataAppendCompatible(imageMetadata,
                                                                  metadataCopy)
            assert compatible
        # If field is present, modify and ensure failure
        else:
            metadataCopy[field] = "not a valid value by any stretch of the word"
            assert metadataCopy[field] != oldValue

            compatible, errorMsg = BidsArchive._metadataAppendCompatible(
                imageMetadata,
                metadataCopy)
            assert not compatible
            assert f"Metadata doesn't match on field: {field}" in errorMsg

            metadataCopy[field] = oldValue

    # Test append-compatible when only one side has a particular metadata value
    for field in matchFields:
        for metadataDict in [imageMetadata, metadataCopy]:
            oldValue = metadataDict.pop(field, None)
            if oldValue is None:
                continue

            compatible, errorMsg = BidsArchive._metadataAppendCompatible(
                imageMetadata,
                metadataCopy)
            assert compatible

            metadataDict[field] = oldValue
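
The checks above rely on _metadataAppendCompatible returning a (compatible, errorMsg) pair: fields present in both dictionaries must match exactly, while fields present on only one side are ignored. A simplified stand-in illustrating that convention (a sketch, not the library's actual implementation):

def metadataAppendCompatibleSketch(meta1: dict, meta2: dict):
    # Compare only the fields present on both sides; one-sided fields pass.
    for field in meta1.keys() & meta2.keys():
        if meta1[field] != meta2[field]:
            return False, f"Metadata doesn't match on field: {field}"
    return True, ""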
Example #8
def testGetEvents(validBidsI, imageMetadata, tmpdir):
    archive = BidsArchive(tmpdir)
    archive._appendIncremental(validBidsI)

    # Get the events from the archive as a pandas data frame
    events = archive.getEvents()[0].get_df()
    events = correctEventsFileDatatypes(events)
    assert events is not None

    # Check the required columns are present in the events file data frame
    for column in DEFAULT_EVENTS_HEADERS:
        assert column in events.columns

    # Check the columns are of the proper types
    for column, dtype in BIDS_EVENT_COL_TO_DTYPE.items():
        if column in events.columns:
            assert events[column].dtype == dtype
Example #9
def doRuns(cfg, bidsInterface, subjInterface, webInterface):
    """
    Process a run of a bids dataset. The subject and run configuration
    values will be supplied by the cfg parameter.
    Args:
        cfg: configurations parsed from the project toml config file
        bidsInterface: client interface to bids data
        subjInterface: client interface for sending feedback to the subject
        webInterface: client interface to user web page
    Returns: no return value
    """
    subject = cfg.subjectName
    run = cfg.runNum[0]
    entities = {
        'subject': subject,
        'run': run,
        'suffix': 'bold',
        'datatype': 'func'
    }
    webInterface.clearRunPlot(run)
    if cfg.writeBidsArchive is True:
        # Create a new bids archive from the incrementals
        # tmpDir is assumed to be defined at module scope in the original script
        bidsArchivePath = os.path.join(tmpDir,
                                       'bids_archive_' + uuid.uuid4().hex)
        print(f'BIDS Archive will be written to {bidsArchivePath}')
        newArchive = BidsArchive(bidsArchivePath)
        newRun = BidsRun(**entities)
    # Initialize the bids stream
    streamId = bidsInterface.initOpenNeuroStream(cfg.dsAccessionNumber,
                                                 **entities)
    numVols = bidsInterface.getNumVolumes(streamId)
    for idx in range(numVols):
        bidsIncremental = bidsInterface.getIncremental(streamId, idx)
        if cfg.writeBidsArchive is True:
            newRun.appendIncremental(bidsIncremental)
        imageData = bidsIncremental.getImageData()
        avg_niftiData = numpy.mean(imageData)
        print("| average activation value for TR %d is %f" %
              (idx, avg_niftiData))
        webInterface.plotDataPoint(run, idx, float(avg_niftiData))
    if cfg.writeBidsArchive is True:
        newArchive.appendBidsRun(newRun)
Example #10
def testAppendNoMakePath(bidsArchive4D, validBidsI, tmpdir):
    # Append to empty archive specifying not to make any files or directories
    datasetRoot = Path(tmpdir, testAppendNoMakePath.__name__)
    assert not BidsArchive(datasetRoot).appendIncremental(validBidsI,
                                                          makePath=False)

    # Append to populated archive in a way that would require new directories
    # and files without allowing it
    validBidsI.setMetadataField('subject', 'invalidSubject')
    validBidsI.setMetadataField('run', 42)

    assert not bidsArchive4D.appendIncremental(validBidsI, makePath=False)
Example #11
def testAppendBidsRun(tmpdir, bidsArchive4D, bidsArchiveMultipleRuns,
                      sampleBidsEntities):
    archivePath = Path(tmpdir, "appendBidsRunArchive")
    archive = BidsArchive(archivePath)
    emptyRun = BidsRun()
    archive.appendBidsRun(emptyRun)

    run = bidsArchive4D.getBidsRun(**sampleBidsEntities)
    archive.appendBidsRun(run)

    assert archive.getBidsRun(**sampleBidsEntities) == run
Example #12
def __init__(self, archivePath, **entities):
    """
    Args:
        archivePath: Absolute path of the BIDS archive.
        entities: BIDS entities (subject, session, task, run, suffix, datatype)
            that define the particular subject/run of the data to stream
    """
    self.bidsArchive = BidsArchive(archivePath)
    # TODO - when we have BidsRun
    # self.bidsRun = self.bidsArchive.getBidsRun(**entities)
    images = self.bidsArchive.getImages(**entities)
    if len(images) == 0:
        raise ValidationError('No matching images found')
    if len(images) > 1:
        raise ValidationError('More than one match, please give more '
                              'specific subject/session/task/run')
    self.bidsImage = images[0]
    self.niftiImage = self.bidsImage.get_image()
    self.filename = self.niftiImage.get_filename()
    self.imgVolumes = nib.four_to_three(self.niftiImage)
    self.metadata = self.bidsArchive.getSidecarMetadata(self.filename,
                                                        includeEntities=True)
    self.metadata.pop('extension')
    self.numVolumes = len(self.imgVolumes)
    self.nextVol = 0
Example #13
class BidsStream:
    """
    A class that opens a BIDS archive and prepares to stream the data as
    BIDS incrementals.
    """
    def __init__(self, archivePath, **entities):
        """
        Args:
            archivePath: Absolute path of the BIDS archive.
            entities: BIDS entities (subject, session, task, run, suffix, datatype) that
                define the particular subject/run of the data to stream
        """
        self.bidsArchive = BidsArchive(archivePath)
        self.bidsRun = self.bidsArchive.getBidsRun(**entities)
        self.numVolumes = self.bidsRun.numIncrementals()
        self.nextVol = 0

    def getNumVolumes(self) -> int:
        """Return the number of brain volumes in the run"""
        return self.numVolumes

    def getIncremental(self, volIdx=-1) -> BidsIncremental:
        """
        Get a BIDS incremental for the indicated index in the current subject/run
        VolIdx acts similar to a file_seek pointer. If a volIdx >= 0 is supplied
            the volume pointer is advanced to that position. If no volIdx or
            a volIdx < 0 is supplied, then the next image volume after the previous
            position is returned and the pointer is incremented.
        Args:
            volIdx: The volume index (or TR) within the run to retrieve.
        Returns:
            BidsIncremental of that volume index within this subject/run
        """
        if volIdx >= 0:
            # Seek: advance the pointer to the user-specified volume
            self.nextVol = volIdx

        if self.nextVol < self.numVolumes:
            incremental = self.bidsRun.getIncremental(self.nextVol)
            self.nextVol += 1
            return incremental
        else:
            return None
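
Taken together, BidsStream exposes a simple consumption pattern. A minimal usage sketch for the class above; the archive path and entity values are hypothetical placeholders, not taken from the examples:

stream = BidsStream('/tmp/my-bids-archive', subject='01', task='faces',
                    run=1, suffix='bold', datatype='func')
for _ in range(stream.getNumVolumes()):
    incremental = stream.getIncremental()   # advances the internal pointer
    imageData = incremental.getImageData()  # one 3D volume per iteration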
Example #14
def appendDataMatches(archive: BidsArchive, reference: BidsIncremental,
                      startIndex: int = 0, endIndex: int = -1):
    entities = filterEntities(reference.imageMetadata)
    images = archive.getImages(**entities)
    assert len(images) == 1
    imageFromArchive = images[0].get_image()

    fullImageData = getNiftiData(imageFromArchive)
    if endIndex == -1:
        endIndex = len(fullImageData)
    appendedData = fullImageData[..., startIndex:endIndex]

    appendedImage = nib.Nifti1Image(appendedData,
                                    imageFromArchive.affine,
                                    imageFromArchive.header)

    return BidsIncremental(appendedImage, reference.imageMetadata) == reference
Example #15
def archiveWithImage(image, metadata: dict, tmpdir):
    """
    Create an archive on disk by hand with the provided image and metadata
    """
    # Create a directory that is guaranteed to be empty
    while True:
        # randint needs integer bounds, and 'id' would shadow the builtin
        suffix = str(randint(0, 10**6))
        rootPath = Path(tmpdir, f"dataset-{suffix}/")
        if not rootPath.exists():
            rootPath.mkdir()
            break

    # Create the archive by hand, with default readme and dataset description
    Path(rootPath, 'README').write_text("README for pytest")
    Path(rootPath, 'dataset_description.json') \
        .write_text(json.dumps(DEFAULT_DATASET_DESC))

    # Write the nifti image & metadata
    dataPath = Path(rootPath, bids_build_path(metadata, BIDS_DIR_PATH_PATTERN))
    dataPath.mkdir(parents=True)

    filenamePrefix = bids_build_path(metadata, BIDS_FILE_PATTERN)
    imagePath = Path(dataPath, filenamePrefix + '.nii')
    metadataPath = Path(dataPath, filenamePrefix + '.json')

    nib.save(image, str(imagePath))

    # A BIDS-I takes care of this automatically, but it must be done manually here
    metadata['TaskName'] = metadata['task']
    metadataPath.write_text(json.dumps(metadata))
    del metadata['TaskName']

    # A BIDS-I takes care of events file creation automatically, but it must be
    # done manually here
    metadata['suffix'] = 'events'
    metadata['extension'] = '.tsv'

    eventsPath = Path(dataPath, bids_build_path(metadata, BIDS_FILE_PATTERN))
    with open(eventsPath, mode='w') as eventsFile:
        eventDefaultHeaders = ['onset', 'duration', 'response_time']
        pd.DataFrame(columns=eventDefaultHeaders).to_csv(eventsFile, sep='\t')

    # Create an archive from the directory and return it
    return BidsArchive(rootPath)
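
archiveWithImage leans on pybids' build_path to turn an entity dictionary into BIDS-style paths. A small illustration of that call, assuming pybids is installed; the pattern string is a simplified stand-in for BIDS_FILE_PATTERN:

from bids.layout.writing import build_path

entities = {'subject': '01', 'task': 'faces', 'run': 1, 'suffix': 'bold'}
pattern = 'sub-{subject}_task-{task}_run-{run}_{suffix}'
print(build_path(entities, pattern))  # sub-01_task-faces_run-1_bold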
Example #16
def testAppendNoOverwriteDatasetMetadata(tmpdir, validBidsI):
    rootPath = Path(tmpdir, "new-dataset")
    archive = BidsArchive(rootPath)

    EXPECTED_README = "The readme we expect"
    validBidsI.readme = EXPECTED_README
    archive._appendIncremental(validBidsI)

    NEW_README = "The readme we don't expect"
    validBidsI.readme = NEW_README
    validBidsI.setMetadataField('subject', 'newSubject')
    archive._appendIncremental(validBidsI)

    with open(os.path.join(rootPath, 'README')) as readme:
        readmeText = readme.readlines()
        assert len(readmeText) == 1
        assert readmeText[0] == EXPECTED_README
Example #17
def openNeuroStreamTest(bidsInterface):
    dsAccessionNumber = 'ds002338'
    dsSubject = 'xp201'
    datasetDir = tmpDownloadOpenNeuro(dsAccessionNumber, dsSubject, 1)
    localEntities = {'subject': dsSubject, 'run': 1, 'suffix': 'bold', 'datatype': 'func'}
    remoteEntities = {'subject': dsSubject, 'run': 1}
    localBidsArchive = BidsArchive(datasetDir)
    streamId = bidsInterface.initOpenNeuroStream(dsAccessionNumber, **remoteEntities)
    for idx in range(3):
        streamIncremental = bidsInterface.getIncremental(streamId)
        localIncremental = localBidsArchive._getIncremental(idx, **localEntities)
        print(f"OpenNeuro stream check: image {idx}")
        assert streamIncremental == localIncremental

    for idx in [5, 2, 7]:
        streamIncremental = bidsInterface.getIncremental(streamId, volIdx=idx)
        localIncremental = localBidsArchive._getIncremental(idx, **localEntities)
        print(f"OpenNeuro stream check: image {idx}")
        assert streamIncremental == localIncremental

    # Resume without specifying volumes
    for idx in range(8, 10):
        streamIncremental = bidsInterface.getIncremental(streamId)
        localIncremental = localBidsArchive._getIncremental(idx, **localEntities)
        print(f"OpenNeuro stream check: image {idx}")
        assert streamIncremental == localIncremental

    numVols = bidsInterface.getNumVolumes(streamId)
    assert 0 < numVols < 1000

    # Check with local bidsRun
    localBidsRun = localBidsArchive.getBidsRun(**localEntities)
    assert numVols == localBidsRun.numIncrementals()
    assert numVols > 10
    for idx in range(6, 10):
        streamIncremental = bidsInterface.getIncremental(streamId, volIdx=idx)
        localIncremental = localBidsRun.getIncremental(idx)
        print(f"OpenNeuro bidsRun check: image {idx}")
        assert streamIncremental == localIncremental
Example #18
def testNiftiHeaderValidation(sample4DNifti1, sample3DNifti1, sample2DNifti1,
                              caplog):
    # Prepare test infrastructure
    original3DHeader = sample3DNifti1.header.copy()
    original4DHeader = sample4DNifti1.header.copy()

    other3D = nib.Nifti1Image(sample3DNifti1.dataobj,
                              sample3DNifti1.affine,
                              sample3DNifti1.header)
    assert other3D.header == original3DHeader

    other4D = nib.Nifti1Image(sample4DNifti1.dataobj,
                              sample4DNifti1.affine,
                              sample4DNifti1.header)
    assert other4D.header == original4DHeader

    """ Test field values """
    # Test equal headers
    # _imagesAppendCompatible returns a (bool, str) tuple, which is always
    # truthy; unpack it so the assert actually checks compatibility
    compatible, _ = BidsArchive._imagesAppendCompatible(sample4DNifti1, other4D)
    assert compatible

    # Test unequal headers on variety of fields that must match
    fieldsToModify = ["intent_code", "dim_info", "scl_slope", "sform_code"]

    for field in fieldsToModify:
        fieldArray = other4D.header[field]
        oldValue = fieldArray.copy()

        if np.sum(np.isnan(fieldArray)) > 0:
            fieldArray = np.zeros(1)
        else:
            fieldArray = fieldArray + 1
        other4D.header[field] = fieldArray

        compatible, error = \
            BidsArchive._imagesAppendCompatible(sample4DNifti1, other4D)
        assert not compatible
        assert "NIfTI headers don't match on field: " + field in error

        other4D.header[field] = oldValue

    """ Test special cases for dimensions and pixel dimensions being non-equal
    but still append compatible """
    # First three dimensions and pixel dimensions equal
    compatible, _ = BidsArchive._imagesAppendCompatible(sample3DNifti1,
                                                        sample4DNifti1)
    assert compatible

    # Dimension 4 of the 3D image should not matter
    for i in range(0, 100):
        sample3DNifti1.header["dim"][4] = i
        compatible, _ = BidsArchive._imagesAppendCompatible(sample3DNifti1,
                                                            sample4DNifti1)
        assert compatible

    sample3DNifti1.header["dim"] = np.copy(original3DHeader["dim"])
    assert sample3DNifti1.header == original3DHeader

    """ Test special cases for dimensions and pixel dimensions being non-equal
    and not append compatible """
    # Ensure all headers are in their original states
    assert sample4DNifti1.header == original4DHeader
    assert other4D.header == original4DHeader
    assert sample3DNifti1.header == original3DHeader
    assert other3D.header == original3DHeader

    # 4D with non-matching first 3 dimensions should fail
    other4D.header["dim"][1:4] = other4D.header["dim"][1:4] * 2
    compatible, errorMsg = BidsArchive._imagesAppendCompatible(sample4DNifti1,
                                                               other4D)
    assert not compatible
    assert "NIfTI headers not append compatible due to mismatch in dimensions "\
        "and pixdim fields." in errorMsg
    # Reset
    other4D.header["dim"][1:4] = original4DHeader["dim"][1:4]
    assert other4D.header == original4DHeader

    # 3D and 4D in which first 3 dimensions don't match
    other3D.header["dim"][1:3] = other3D.header["dim"][1:3] * 2
    compatible, errorMsg = BidsArchive._imagesAppendCompatible(sample4DNifti1,
                                                               other3D)
    assert not compatible

    # Reset
    other3D.header["dim"][1:3] = original3DHeader["dim"][1:3]
    assert other3D.header == original3DHeader

    # 2D and 4D are one too many dimensions apart
    other4D.header['dim'][0] = 2
    compatible, errorMsg = BidsArchive._imagesAppendCompatible(other4D,
                                                               sample4DNifti1)
    assert not compatible
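
The dimension checks above hinge on the NIfTI dim header array, where dim[0] holds the number of dimensions and dim[1:] the extent of each dimension. A quick self-contained illustration with nibabel, using synthetic data independent of BidsArchive:

import numpy as np
import nibabel as nib

img = nib.Nifti1Image(np.zeros((4, 5, 6, 7), dtype=np.float32), np.eye(4))
dim = img.header['dim']
assert dim[0] == 4                      # number of dimensions
assert tuple(dim[1:5]) == (4, 5, 6, 7)  # three spatial extents, then volumes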
Example #19
def testFailEmpty(tmpdir):
    datasetRoot = Path(tmpdir, "bids-archive")
    emptyArchive = BidsArchive(datasetRoot)

    # Each method should independently raise StateError on an empty archive.
    # pytest.raises only checks the first raising statement in its block, so
    # each call needs its own context manager.
    with pytest.raises(StateError):
        emptyArchive.dirExistsInArchive("will fail anyway")
    with pytest.raises(StateError):
        emptyArchive.getImages("will fail anyway")
    with pytest.raises(StateError):
        emptyArchive.addImage(None, "will fail anyway")
    with pytest.raises(StateError):
        emptyArchive.getSidecarMetadata("will fail anyway")
    with pytest.raises(StateError):
        emptyArchive.addMetadata({"will": "fail"}, "will fail anyway")
    with pytest.raises(StateError):
        emptyArchive.getIncremental(subject="will fail anyway",
                                    session="will fail anyway",
                                    task="will fail anyway",
                                    suffix="will fail anyway",
                                    datatype="will fail anyway")
Example #20
def testEmptyArchiveCreation(tmpdir):
    datasetRoot = Path(tmpdir, "bids-archive")
    assert BidsArchive(datasetRoot) is not None
Example #21
class BidsStream:
    """
    A class that opens a BIDS archive and prepares to stream the data as
    BIDS incrementals.
    """
    def __init__(self, archivePath, **entities):
        """
        Args:
            archivePath: Absolute path of the BIDS archive.
            entities: BIDS entities (subject, session, task, run, suffix, datatype) that
                define the particular subject/run of the data to stream
        """
        self.bidsArchive = BidsArchive(archivePath)
        # TODO - when we have BidsRun
        # self.bidsRun = self.bidsArchive.getBidsRun(**entities)
        images = self.bidsArchive.getImages(**entities)
        if len(images) == 0:
            raise ValidationError('No matching images found')
        if len(images) > 1:
            raise ValidationError('More than one match, please give more specific subject/session/task/run')
        self.bidsImage = images[0]
        self.niftiImage = self.bidsImage.get_image()
        self.filename = self.niftiImage.get_filename()
        self.imgVolumes = nib.four_to_three(self.niftiImage)
        self.metadata = self.bidsArchive.getSidecarMetadata(self.filename, includeEntities=True)
        self.metadata.pop('extension')
        self.numVolumes = len(self.imgVolumes)
        self.nextVol = 0

    def getNumVolumes(self) -> int:
        """Return the number of brain volumes in the run"""
        # TODO - when we have BidsRun
        # return self.bidsRun.getNumVolumes()
        return self.numVolumes

    def getIncremental(self, volIdx=-1) -> BidsIncremental:
        """
        Get a BIDS incremental for the indicated index in the current subject/run
        VolIdx acts similar to a file_seek pointer. If a volIdx >= 0 is supplied
            the volume pointer is advanced to that position. If no volIdx or
            a volIdx < 0 is supplied, then the next image volume after the previous
            position is returned and the pointer is incremented.
        Args:
            volIdx: The volume index (or TR) within the run to retrieve.
        Returns:
            BidsIncremental of that volume index within this subject/run
        """
        # TODO - when we have BidsRun
        # return self.bidsRun.getIncremental(volIdx)
        if volIdx >= 0:
            # Seek: advance the pointer to the user-specified volume
            self.nextVol = volIdx

        if self.nextVol < self.numVolumes:
            incremental = BidsIncremental(self.imgVolumes[self.nextVol], self.metadata)
            self.nextVol += 1
            return incremental
        else:
            return None
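
The constructors in Examples #12 and #21 pre-split the 4D series with nibabel's four_to_three. A small sketch of that step on synthetic data, assuming nothing beyond nibabel itself:

import numpy as np
import nibabel as nib

# Build a synthetic 4D image (4x4x4 voxels, 3 volumes) and split it into a
# list of 3D volumes, one per TR, the same way the constructor above does.
img4D = nib.Nifti1Image(np.zeros((4, 4, 4, 3), dtype=np.float32), np.eye(4))
volumes = nib.four_to_three(img4D)
assert len(volumes) == 3
assert volumes[0].shape == (4, 4, 4)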