def test_nifti_dataset_from3_d():
    """Test NiftiDataset based on 3D volume(s)
    """
    tssrc = os.path.join(pymvpa_dataroot, 'bold')
    masrc = os.path.join(pymvpa_dataroot, 'mask')

    # Test loading of 3D volumes
    # by default we are enforcing 4D, testing here with the demo 3D mask
    ds = fmri_dataset(masrc, mask=masrc, targets=1)
    assert_equal(len(ds), 1)

    plain_data = NiftiImage(masrc).data
    # Let's check if mapping back works as well
    assert_array_equal(plain_data,
                       map2nifti(ds).data.reshape(plain_data.shape))

    # test loading from a list of filenames
    # for now we should fail if trying to load a mix of 4D and 3D volumes
    assert_raises(ValueError, fmri_dataset, (masrc, tssrc),
                  mask=masrc, targets=1)

    # Let's prepare some custom NiftiImage
    dsfull = fmri_dataset(tssrc, mask=masrc, targets=1)
    ds_selected = dsfull[3]
    nifti_selected = map2nifti(ds_selected)

    # Load dataset from a mix of 3D volumes
    # (given by filenames and NiftiImages)
    labels = [123, 2, 123]
    ds2 = fmri_dataset((masrc, masrc, nifti_selected),
                       mask=masrc, targets=labels)
    assert_equal(ds2.nsamples, 3)
    assert_array_equal(ds2.samples[0], ds2.samples[1])
    assert_array_equal(ds2.samples[2], dsfull.samples[3])
    assert_array_equal(ds2.targets, labels)

def test_nifti_mapper():
    """Basic testing of map2Nifti
    """
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    assert_equal(vol.data.shape, (24, 96, 128))
    assert_true((vol.data == 1).all())
    # test mapping of the dataset
    vol = map2nifti(data)
    assert_equal(vol.data.shape, (2, 24, 96, 128))

def test_nifti_dataset_from3_d():
    """Test NiftiDataset based on 3D volume(s)
    """
    tssrc = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    masrc = os.path.join(pymvpa_dataroot, 'mask.nii.gz')

    # Test loading of 3D volumes
    # by default we are enforcing 4D, testing here with the demo 3D mask
    ds = fmri_dataset(masrc, mask=masrc, targets=1)
    assert_equal(len(ds), 1)

    if externals.exists('nibabel'):
        import nibabel
        plain_data = nibabel.load(masrc).get_data()
        # Let's check if mapping back works as well
        assert_array_equal(plain_data,
                           map2nifti(ds).get_data().reshape(plain_data.shape))
    else:
        import nifti
        plain_data = nifti.NiftiImage(masrc).data
        # Let's check if mapping back works as well
        assert_array_equal(plain_data,
                           map2nifti(ds).data.reshape(plain_data.shape))

    # test loading from a list of filenames
    # for now we should fail if trying to load a mix of 4D and 3D volumes
    assert_raises(ValueError, fmri_dataset, (masrc, tssrc),
                  mask=masrc, targets=1)

    # Let's prepare some custom NiftiImage
    dsfull = fmri_dataset(tssrc, mask=masrc, targets=1)
    ds_selected = dsfull[3]
    nifti_selected = map2nifti(ds_selected)

    # Load dataset from a mix of 3D volumes
    # (given by filenames and NiftiImages)
    labels = [123, 2, 123]
    ds2 = fmri_dataset((masrc, masrc, nifti_selected),
                       mask=masrc, targets=labels)
    assert_equal(ds2.nsamples, 3)
    assert_array_equal(ds2.samples[0], ds2.samples[1])
    assert_array_equal(ds2.samples[2], dsfull.samples[3])
    assert_array_equal(ds2.targets, labels)

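# A minimal sketch of the backend-agnostic access pattern exercised in the
# test above (a hypothetical helper, not part of PyMVPA): nibabel images
# expose the voxel array via get_data(), while the legacy pynifti
# NiftiImage keeps it in a .data attribute.
def _image_array(img):
    """Return the voxel data of either a nibabel or a pynifti image."""
    if hasattr(img, 'get_data'):
        # nibabel image (e.g. Nifti1Image)
        return img.get_data()
    # pynifti NiftiImage
    return img.data
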
def test_nifti_self_mapper():
    """Test the map2Nifti facility run without arguments
    """
    example_path = os.path.join(pymvpa_dataroot, 'example4d')
    example = NiftiImage(example_path)
    data = fmri_dataset(samples=example_path, targets=[1, 2])

    # Map the read data back onto itself
    vol = map2nifti(data)

    assert_equal(vol.data.shape, example.data.shape)
    assert_array_equal(vol.data, example.data)

    data.samples[:] = 1
    vol = map2nifti(data)
    assert_true((vol.data == 1).all())

def test_nifti_mapper(filename):
    """Basic testing of map2Nifti
    """
    # `filename` is expected to be a writable temporary output path supplied
    # by the caller (e.g. a tempfile fixture of the test harness).
    import nibabel
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot,
                                             'example4d.nii.gz'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    assert_equal(vol.get_shape(), (128, 96, 24))
    assert_true((vol.get_data() == 1).all())
    # test mapping of the dataset
    vol = map2nifti(data)
    assert_equal(vol.get_shape(), (128, 96, 24, 2))
    ok_(isinstance(vol, data.a.imgtype))

    # test providing custom imgtypes
    vol = map2nifti(data, imgtype=nibabel.Nifti1Pair)
    ok_(isinstance(vol, nibabel.Nifti1Pair))

    # Let's generate a dataset using an alternative format (MINC)
    # and see if the type persists
    volminc = nibabel.MincImage(vol.get_data(),
                                vol.get_affine(),
                                vol.get_header())
    ok_(isinstance(volminc, nibabel.MincImage))
    dsminc = fmri_dataset(volminc, targets=1)
    ok_(dsminc.a.imgtype is nibabel.MincImage)
    ok_(isinstance(dsminc.a.imghdr, nibabel.minc.MincImage.header_class))

    # Let's test if we can now save/load into an Analyze volume/dataset
    if externals.versions['nibabel'] < '1.1.0':
        raise SkipTest('nibabel prior to 1.1.0 had an issue with types comprehension')
    # MINC has no 'save' capability, so write out as Analyze instead
    volanal = map2nifti(dsminc, imgtype=nibabel.AnalyzeImage)
    ok_(isinstance(volanal, nibabel.AnalyzeImage))
    volanal.to_filename(filename)
    dsanal = fmri_dataset(filename, targets=1)
    # this one is tricky since it might become Spm2AnalyzeImage
    ok_('AnalyzeImage' in str(dsanal.a.imgtype))
    ok_('AnalyzeHeader' in str(dsanal.a.imghdr.__class__))
    volanal_ = map2nifti(dsanal)
    ok_(isinstance(volanal_, dsanal.a.imgtype))  # type got preserved

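# Design note on the test above: map2nifti() builds its output from the image
# class recorded in ds.a.imgtype by default, and an explicit imgtype= argument
# overrides that choice. This is why the MINC-derived dataset can be written
# out as an Analyze image and reloaded with a consistent image type.
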
def test_nifti_mapper():
    """Basic testing of map2Nifti
    """
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot,
                                             'example4d.nii.gz'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    if externals.exists('nibabel'):
        assert_equal(vol.get_shape(), (128, 96, 24))
        assert_true((vol.get_data() == 1).all())
        # test mapping of the dataset
        vol = map2nifti(data)
        assert_equal(vol.get_shape(), (128, 96, 24, 2))
    else:
        assert_equal(vol.data.shape, (24, 96, 128))
        assert_true((vol.data == 1).all())
        # test mapping of the dataset
        vol = map2nifti(data)
        assert_equal(vol.data.shape, (2, 24, 96, 128))

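# Note on the branch above: nibabel reports image shape in (x, y, z[, t])
# order, whereas the pynifti .data array is laid out as [t,]z, y, x; the
# mirrored shape tuples therefore describe the very same 128x96x24-voxel
# volume (with 2 timepoints in the 4D case).
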
def test_er_nifti_dataset():
    # setup data sources
    tssrc = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    evsrc = os.path.join(pymvpa_dataroot, 'fslev3.txt')
    masrc = os.path.join(pymvpa_dataroot, 'mask.nii.gz')
    evs = FslEV3(evsrc).to_events()
    # load timeseries
    ds_orig = fmri_dataset(tssrc)
    # segment into events
    ds = eventrelated_dataset(ds_orig, evs, time_attr='time_coords')

    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * np.prod((1, 20, 40)) == 3200 features
    assert_equal(ds.nfeatures, 3200)
    assert_equal(len(ds), len(evs))
    # the voxel indices are reflattened after boxcaring, but still 3D
    assert_equal(ds.fa.voxel_indices.shape, (ds.nfeatures, 3))
    # and they have been broadcast through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:800],
                       ds.fa.voxel_indices[800:1600])
    # each feature got an event offset value
    assert_array_equal(ds.fa.event_offsetidx, np.repeat([0, 1, 2, 3], 800))
    # check for all event attributes
    assert_true('onset' in ds.sa)
    assert_true('duration' in ds.sa)
    assert_true('features' in ds.sa)
    # check samples
    origsamples = _load_anyimg(tssrc)[0]
    for i, onset in \
            enumerate([value2idx(e['onset'], ds_orig.sa.time_coords, 'floor')
                       for e in evs]):
        assert_array_equal(ds.samples[i],
                           origsamples[onset:onset + 4].ravel())
        assert_array_equal(ds.sa.time_indices[i],
                           np.arange(onset, onset + 4))
        assert_array_equal(ds.sa.time_coords[i],
                           np.arange(onset, onset + 4) * 2.5)
        for evattr in [a for a in ds.sa
                       if a.count("event_attrs")
                       and not a.count('event_attrs_event')]:
            assert_array_equal(evs[i]['_'.join(evattr.split('_')[2:])],
                               ds.sa[evattr].value[i])
    # check offset: only the last one exactly matches the tr
    assert_array_equal(ds.sa.orig_offset, [1, 1, 0])

    # map back into voxel space, should ignore additional features
    nim = map2nifti(ds)
    # origsamples has t,x,y,z
    assert_equal(nim.get_shape(), origsamples.shape[1:] + (len(ds) * 4,))
    # check shape of a single sample
    nim = map2nifti(ds, ds.samples[0])
    # pynifti image has [t,]z,y,x
    assert_equal(nim.get_shape(), (40, 20, 1, 4))

    # and now with masking
    ds = fmri_dataset(tssrc, mask=masrc)
    ds = eventrelated_dataset(ds, evs, time_attr='time_coords')
    nnonzero = len(_load_anyimg(masrc)[0].nonzero()[0])
    assert_equal(nnonzero, 530)
    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * 530 == 2120 features within the mask
    assert_equal(ds.nfeatures, 4 * 530)
    assert_equal(len(ds), len(evs))
    # and they have been broadcast through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:nnonzero],
                       ds.fa.voxel_indices[nnonzero:2 * nnonzero])

def test_er_nifti_dataset():
    # setup data sources
    tssrc = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    evsrc = os.path.join(pymvpa_dataroot, 'fslev3.txt')
    masrc = os.path.join(pymvpa_dataroot, 'mask.nii.gz')
    evs = FslEV3(evsrc).to_events()
    # load timeseries
    ds_orig = fmri_dataset(tssrc)
    # segment into events
    ds = eventrelated_dataset(ds_orig, evs, time_attr='time_coords')

    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * np.prod((1, 20, 40)) == 3200 features
    assert_equal(ds.nfeatures, 3200)
    assert_equal(len(ds), len(evs))
    # the voxel indices are reflattened after boxcaring, but still 3D
    assert_equal(ds.fa.voxel_indices.shape, (ds.nfeatures, 3))
    # and they have been broadcast through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:800],
                       ds.fa.voxel_indices[800:1600])
    # each feature got an event offset value
    assert_array_equal(ds.fa.event_offsetidx, np.repeat([0, 1, 2, 3], 800))
    # check for all event attributes
    assert_true('onset' in ds.sa)
    assert_true('duration' in ds.sa)
    assert_true('features' in ds.sa)
    # check samples
    origsamples = _load_anyimg(tssrc)[0]
    for i, onset in \
            enumerate([value2idx(e['onset'], ds_orig.sa.time_coords, 'floor')
                       for e in evs]):
        assert_array_equal(ds.samples[i],
                           origsamples[onset:onset + 4].ravel())
        assert_array_equal(ds.sa.time_indices[i],
                           np.arange(onset, onset + 4))
        assert_array_equal(ds.sa.time_coords[i],
                           np.arange(onset, onset + 4) * 2.5)
        for evattr in [a for a in ds.sa
                       if a.count("event_attrs")
                       and not a.count('event_attrs_event')]:
            assert_array_equal(evs[i]['_'.join(evattr.split('_')[2:])],
                               ds.sa[evattr].value[i])
    # check offset: only the last one exactly matches the tr
    assert_array_equal(ds.sa.orig_offset, [1, 1, 0])

    # map back into voxel space, should ignore additional features
    nim = map2nifti(ds)
    if externals.exists('nibabel'):
        # origsamples has t,x,y,z
        assert_equal(nim.get_shape(),
                     origsamples.shape[1:] + (len(ds) * 4,))
        # check shape of a single sample
        nim = map2nifti(ds, ds.samples[0])
        assert_equal(nim.get_shape(), (40, 20, 1, 4))
    else:
        # origsamples has t,x,y,z but the pynifti image has [t,]z,y,x
        assert_equal(nim.data.T.shape,
                     origsamples.shape[1:] + (len(ds) * 4,))
        # check shape of a single sample
        nim = map2nifti(ds, ds.samples[0])
        assert_equal(nim.data.T.shape, (40, 20, 1, 4))

    # and now with masking
    ds = fmri_dataset(tssrc, mask=masrc)
    ds = eventrelated_dataset(ds, evs, time_attr='time_coords')
    nnonzero = len(_load_anyimg(masrc)[0].nonzero()[0])
    assert_equal(nnonzero, 530)
    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0/2.5) * 530 == 2120 features within the mask
    assert_equal(ds.nfeatures, 4 * 530)
    assert_equal(len(ds), len(evs))
    # and they have been broadcast through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:nnonzero],
                       ds.fa.voxel_indices[nnonzero:2 * nnonzero])

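# Worked check of the feature-count arithmetic referenced in the comments
# above (a sketch using the stated assumptions: 9 s boxcars, a TR of 2.5 s,
# an unmasked volume shape of (1, 20, 40), and a 530-voxel mask):
import numpy as np

nvols_per_event = int(round(9.0 / 2.5))                # 4 volumes per boxcar
assert nvols_per_event * np.prod((1, 20, 40)) == 3200  # unmasked feature count
assert nvols_per_event * 530 == 2120                   # masked feature count (4 * 530)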