def test_nifti_dataset_from3_d():
    """Test NiftiDataset based on 3D volume(s)
    """
    tssrc = os.path.join(pymvpa_dataroot, 'bold')
    masrc = os.path.join(pymvpa_dataroot, 'mask')

    # Test loading of 3D volumes
    # by default we are enforcing 4D, testing here with the demo 3d mask
    ds = fmri_dataset(masrc, mask=masrc, targets=1)
    assert_equal(len(ds), 1)
    plain_data = NiftiImage(masrc).data
    # Lets check if mapping back works as well
    assert_array_equal(plain_data,
                       map2nifti(ds).data.reshape(plain_data.shape))

    # test loading from a list of filenames
    # for now we should fail if trying to load a mix of 4D and 3D volumes
    assert_raises(ValueError, fmri_dataset, (masrc, tssrc),
                  mask=masrc, targets=1)

    # Lets prepare some custom NiftiImage
    dsfull = fmri_dataset(tssrc, mask=masrc, targets=1)
    ds_selected = dsfull[3]
    nifti_selected = map2nifti(ds_selected)

    # Load dataset from a mix of 3D volumes
    # (given by filenames and NiftiImages)
    labels = [123, 2, 123]
    ds2 = fmri_dataset((masrc, masrc, nifti_selected),
                       mask=masrc, targets=labels)
    assert_equal(ds2.nsamples, 3)
    assert_array_equal(ds2.samples[0], ds2.samples[1])
    assert_array_equal(ds2.samples[2], dsfull.samples[3])
    assert_array_equal(ds2.targets, labels)
def test_multiple_calls():
    """Test if doing exactly the same operation twice yields the same result
    """
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot,
                                             'example4d.nii.gz'),
                        targets=1, sprefix='abc')
    data2 = fmri_dataset(samples=os.path.join(pymvpa_dataroot,
                                              'example4d.nii.gz'),
                         targets=1, sprefix='abc')
    assert_array_equal(data.a.abc_eldim, data2.a.abc_eldim)
def test_er_nifti_dataset_mapping():
    """Some mapping testing -- more tests is better
    """
    # z,y,x
    sample_size = (4, 3, 2)
    # t,z,y,x
    samples = np.arange(120).reshape((5,) + sample_size)
    dsmask = np.arange(24).reshape(sample_size) % 2
    if externals.exists('nibabel'):
        import nibabel
        tds = fmri_dataset(nibabel.Nifti1Image(samples.T, None),
                           mask=nibabel.Nifti1Image(dsmask.T, None))
    else:
        import nifti
        tds = fmri_dataset(nifti.NiftiImage(samples),
                           mask=nifti.NiftiImage(dsmask))
    ds = eventrelated_dataset(
            tds,
            events=[Event(onset=0, duration=2, label=1,
                          chunk=1, features=[1000, 1001]),
                    Event(onset=1, duration=2, label=2,
                          chunk=1, features=[2000, 2001])])
    nfeatures = tds.nfeatures
    mask = np.zeros(dsmask.shape, dtype='bool')
    mask[0, 0, 0] = mask[1, 0, 1] = mask[0, 0, 1] = 1
    fmask = ds.a.mapper.forward1(mask.T)
    # select using mask in volume and all features in the other part
    ds_sel = ds[:, fmask]

    # now tests
    assert_array_equal(mask.reshape(24).nonzero()[0], [0, 1, 7])
    # two events, 2 orig features at 2 timepoints
    assert_equal(ds_sel.samples.shape, (2, 4))
    assert_array_equal(ds_sel.sa.features,
                       [[1000, 1001], [2000, 2001]])
    assert_array_equal(ds_sel.samples,
                       [[1, 7, 25, 31], [25, 31, 49, 55]])
    # reproducability
    assert_array_equal(ds_sel.samples,
                       ds_sel.a.mapper.forward(np.rollaxis(samples.T, -1)))

    # reverse-mapping
    rmapped = ds_sel.a.mapper.reverse1(np.arange(10, 14))
    assert_equal(np.rollaxis(rmapped, 0, 4).T.shape, (2,) + sample_size)
    expected = np.zeros((2,) + sample_size, dtype='int')
    expected[0, 0, 0, 1] = 10
    expected[0, 1, 0, 1] = 11
    expected[1, 0, 0, 1] = 12
    expected[1, 1, 0, 1] = 13
    assert_array_equal(np.rollaxis(rmapped, 0, 4).T, expected)
def test_fmridataset():
    # full-blown fmri dataset testing
    maskimg = NiftiImage(os.path.join(pymvpa_dataroot, 'mask.nii.gz'))
    # assign some values we can check later on
    maskimg.data[maskimg.data > 0] = np.arange(1, np.sum(maskimg.data) + 1)
    attr = SampleAttributes(os.path.join(pymvpa_dataroot, 'attributes.txt'))
    ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'bold'),
                      targets=attr.targets, chunks=attr.chunks,
                      mask=maskimg,
                      sprefix='subj1',
                      add_fa={'myintmask': maskimg})
    # content
    assert_equal(len(ds), 1452)
    assert_true(ds.nfeatures, 530)
    assert_array_equal(sorted(ds.sa.keys()),
                       ['chunks', 'targets', 'time_coords', 'time_indices'])
    assert_array_equal(sorted(ds.fa.keys()),
                       ['myintmask', 'subj1_indices'])
    assert_array_equal(sorted(ds.a.keys()),
                       ['imghdr', 'mapper', 'subj1_dim', 'subj1_eldim'])
    # vol extent
    assert_equal(ds.a.subj1_dim, (1, 20, 40))
    # check time
    assert_equal(ds.sa.time_coords[-1], 3627.5)
    # non-zero mask values
    assert_array_equal(ds.fa.myintmask, np.arange(1, ds.nfeatures + 1))
def test_fmridataset():
    # full-blown fmri dataset testing
    import nibabel
    maskimg = nibabel.load(os.path.join(pymvpa_dataroot, 'mask.nii.gz'))
    data = maskimg.get_data().copy()
    data[data > 0] = np.arange(1, np.sum(data) + 1)
    maskimg = nibabel.Nifti1Image(data, None, maskimg.get_header())
    attr = SampleAttributes(os.path.join(pymvpa_dataroot, 'attributes.txt'))
    ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'bold.nii.gz'),
                      targets=attr.targets, chunks=attr.chunks,
                      mask=maskimg,
                      sprefix='subj1',
                      add_fa={'myintmask': maskimg})
    # content
    assert_equal(len(ds), 1452)
    assert_true(ds.nfeatures, 530)
    assert_array_equal(sorted(ds.sa.keys()),
                       ['chunks', 'targets', 'time_coords', 'time_indices'])
    assert_array_equal(sorted(ds.fa.keys()),
                       ['myintmask', 'subj1_indices'])
    assert_array_equal(sorted(ds.a.keys()),
                       ['imghdr', 'imgtype', 'mapper', 'subj1_dim', 'subj1_eldim'])
    # vol extent
    assert_equal(ds.a.subj1_dim, (40, 20, 1))
    # check time
    assert_equal(ds.sa.time_coords[-1], 3627.5)
    # non-zero mask values
    assert_array_equal(ds.fa.myintmask, np.arange(1, ds.nfeatures + 1))
    # we know that imgtype must be:
    ok_(ds.a.imgtype is nibabel.Nifti1Image)
def test_nifti_dataset_from3_d():
    """Test NiftiDataset based on 3D volume(s)
    """
    tssrc = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    masrc = os.path.join(pymvpa_dataroot, 'mask.nii.gz')

    # Test loading of 3D volumes
    # by default we are enforcing 4D, testing here with the demo 3d mask
    ds = fmri_dataset(masrc, mask=masrc, targets=1)
    assert_equal(len(ds), 1)

    if externals.exists('nibabel'):
        import nibabel
        plain_data = nibabel.load(masrc).get_data()
        # Lets check if mapping back works as well
        assert_array_equal(plain_data,
                           map2nifti(ds).get_data().reshape(plain_data.shape))
    else:
        import nifti
        plain_data = nifti.NiftiImage(masrc).data
        # Lets check if mapping back works as well
        assert_array_equal(plain_data,
                           map2nifti(ds).data.reshape(plain_data.shape))

    # test loading from a list of filenames
    # for now we should fail if trying to load a mix of 4D and 3D volumes
    assert_raises(ValueError, fmri_dataset, (masrc, tssrc),
                  mask=masrc, targets=1)

    # Lets prepare some custom NiftiImage
    dsfull = fmri_dataset(tssrc, mask=masrc, targets=1)
    ds_selected = dsfull[3]
    nifti_selected = map2nifti(ds_selected)

    # Load dataset from a mix of 3D volumes
    # (given by filenames and NiftiImages)
    labels = [123, 2, 123]
    ds2 = fmri_dataset((masrc, masrc, nifti_selected),
                       mask=masrc, targets=labels)
    assert_equal(ds2.nsamples, 3)
    assert_array_equal(ds2.samples[0], ds2.samples[1])
    assert_array_equal(ds2.samples[2], dsfull.samples[3])
    assert_array_equal(ds2.targets, labels)
def test_nifti_dataset():
    """Basic testing of NiftiDataset
    """
    ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d.nii.gz'),
                      targets=[1, 2], sprefix='voxel')
    assert_equal(ds.nfeatures, 294912)
    assert_equal(ds.nsamples, 2)

    assert_array_equal(ds.a.voxel_eldim, ds.a.imghdr['pixdim'][1:4])
    assert_true(ds.a['voxel_dim'].value == (128, 96, 24))

    # XXX move elsewhere
    #check that mapper honours elementsize
    #nb22 = np.array([i for i in data.a.mapper.getNeighborIn((1, 1, 1), 2.2)])
    #nb20 = np.array([i for i in data.a.mapper.getNeighborIn((1, 1, 1), 2.0)])
    #self.failUnless(nb22.shape[0] == 7)
    #self.failUnless(nb20.shape[0] == 5)

    merged = ds.copy()
    merged.append(ds)
    assert_equal(merged.nfeatures, 294912)
    assert_equal(merged.nsamples, 4)

    # check that the header survives
    for k in merged.a.imghdr.keys():
        assert_array_equal(merged.a.imghdr[k], ds.a.imghdr[k])

    # throw away old dataset and see if new one survives
    del ds
    assert_array_equal(merged.samples[3], merged.samples[1])

    # check whether we can use a plain ndarray as mask
    mask = np.zeros((128, 96, 24), dtype='bool')
    mask[40, 20, 12] = True
    nddata = fmri_dataset(samples=os.path.join(pymvpa_dataroot,
                                               'example4d.nii.gz'),
                          targets=[1, 2], mask=mask)
    assert_equal(nddata.nfeatures, 1)
    rmap = nddata.a.mapper.reverse1(np.array([44]))
    assert_equal(rmap.shape, (128, 96, 24))
    assert_equal(np.sum(rmap), 44)
    assert_equal(rmap[40, 20, 12], 44)
def test_nifti_mapper(filename):
    """Basic testing of map2Nifti
    """
    import nibabel
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d.nii.gz'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    assert_equal(vol.get_shape(), (128, 96, 24))
    assert_true((vol.get_data() == 1).all())
    # test mapping of the dataset
    vol = map2nifti(data)
    assert_equal(vol.get_shape(), (128, 96, 24, 2))
    ok_(isinstance(vol, data.a.imgtype))

    # test providing custom imgtypes
    vol = map2nifti(data, imgtype=nibabel.Nifti1Pair)
    ok_(isinstance(vol, nibabel.Nifti1Pair))

    # Lets generate a dataset using an alternative format (MINC)
    # and see if type persists
    volminc = nibabel.MincImage(vol.get_data(),
                                vol.get_affine(),
                                vol.get_header())
    ok_(isinstance(volminc, nibabel.MincImage))
    dsminc = fmri_dataset(volminc, targets=1)
    ok_(dsminc.a.imgtype is nibabel.MincImage)
    ok_(isinstance(dsminc.a.imghdr, nibabel.minc.MincImage.header_class))

    # Lets test if we could save/load now into Analyze volume/dataset
    if externals.versions['nibabel'] < '1.1.0':
        raise SkipTest('nibabel prior 1.1.0 had an issue with types comprehension')
    volanal = map2nifti(dsminc, imgtype=nibabel.AnalyzeImage)  # MINC has no 'save' capability
    ok_(isinstance(volanal, nibabel.AnalyzeImage))
    volanal.to_filename(filename)
    dsanal = fmri_dataset(filename, targets=1)
    # this one is tricky since it might become Spm2AnalyzeImage
    ok_('AnalyzeImage' in str(dsanal.a.imgtype))
    ok_('AnalyzeHeader' in str(dsanal.a.imghdr.__class__))
    volanal_ = map2nifti(dsanal)
    ok_(isinstance(volanal_, dsanal.a.imgtype))  # type got preserved
def load_example_fmri_dataset():
    """Load minimal fMRI dataset that is shipped with PyMVPA."""
    from mvpa.datasets.mri import fmri_dataset
    from mvpa.misc.io import SampleAttributes

    attr = SampleAttributes(os.path.join(pymvpa_dataroot, 'attributes.txt'))
    ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'bold.nii.gz'),
                      targets=attr.targets, chunks=attr.chunks,
                      mask=os.path.join(pymvpa_dataroot, 'mask.nii.gz'))

    return ds
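# Illustrative usage sketch for load_example_fmri_dataset() above -- not part of
# the original sources. It only assumes the PyMVPA demo data referenced above and
# the default 'voxel' sprefix of fmri_dataset().
def _example_inspect_demo_dataset():
    ds = load_example_fmri_dataset()
    # samples are (volumes x in-mask voxels); targets/chunks come from attributes.txt
    print(ds.shape)
    print(ds.sa.targets[:5], ds.sa.chunks[:5])
    # voxel indices of the masked features allow mapping results back into volumes
    print(ds.fa.voxel_indices[:3])
    return ds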
def test_nifti_mapper():
    """Basic testing of map2Nifti
    """
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    assert_equal(vol.data.shape, (24, 96, 128))
    assert_true((vol.data == 1).all())

    # test mapping of the dataset
    vol = map2nifti(data)
    assert_equal(vol.data.shape, (2, 24, 96, 128))
def test_nifti_self_mapper():
    """Test map2Nifti facility ran without arguments
    """
    example_path = os.path.join(pymvpa_dataroot, 'example4d')
    example = NiftiImage(example_path)
    data = fmri_dataset(samples=example_path, targets=[1, 2])

    # Map read data to itself
    vol = map2nifti(data)

    assert_equal(vol.data.shape, example.data.shape)
    assert_array_equal(vol.data, example.data)

    data.samples[:] = 1
    vol = map2nifti(data)
    assert_true((vol.data == 1).all())
def load_datadb_tutorial_data(path=os.path.join(
        pymvpa_datadbroot, 'tutorial_data', 'tutorial_data', 'data'),
        roi='brain'):
    """Loads the block-design demo dataset from the PyMVPA dataset DB.

    Parameters
    ----------
    path : str
      Path of the directory containing the dataset files.
    roi : str or int or tuple or None
      Region Of Interest to be used for masking the dataset. If a string is
      given, a corresponding mask image from the demo dataset will be used
      (mask_<str>.nii.gz). If an int value is given, the corresponding ROI
      is determined from the atlas image (mask_hoc.nii.gz). If a tuple is
      provided it may contain int values that are processed as explained
      before, but the union of those ROIs is taken to produce the final mask.
      If None, no masking is performed.
    """
    import nibabel as nb
    from mvpa.datasets.mri import fmri_dataset
    from mvpa.misc.io import SampleAttributes

    if roi is None:
        mask = None
    elif isinstance(roi, str):
        mask = os.path.join(path, 'mask_' + roi + '.nii.gz')
    elif isinstance(roi, int):
        nimg = nb.load(os.path.join(path, 'mask_hoc.nii.gz'))
        tmpmask = nimg.get_data() == roi
        mask = nb.Nifti1Image(tmpmask.astype(int), nimg.get_affine(),
                              nimg.get_header())
    elif isinstance(roi, tuple) or isinstance(roi, list):
        nimg = nb.load(os.path.join(path, 'mask_hoc.nii.gz'))
        tmpmask = np.zeros(nimg.get_shape(), dtype='bool')
        for r in roi:
            tmpmask = np.logical_or(tmpmask, nimg.get_data() == r)
        mask = nb.Nifti1Image(tmpmask.astype(int), nimg.get_affine(),
                              nimg.get_header())
    else:
        raise ValueError("Got something as mask that I cannot handle.")

    attr = SampleAttributes(os.path.join(path, 'attributes.txt'))
    ds = fmri_dataset(samples=os.path.join(path, 'bold.nii.gz'),
                      targets=attr.targets, chunks=attr.chunks,
                      mask=mask)
    return ds
def _run_core(self):
    """Core routine for detecting outliers

    Parameters
    ----------
    imgfile :
    motionfile :
    """
    attr = SampleAttributes(self.inputs.attributes_file)
    dataset = fmri_dataset(samples=self.inputs.samples_file,
                           labels=attr.labels,
                           chunks=attr.chunks,
                           mask=self.inputs.mask_file)
    if 'rest' in dataset.uniquelabels:
        dataset = dataset[dataset.sa.labels != 'rest']

    # zscore dataset relative to baseline ('rest') mean
    zscore(dataset, chunks_attr=True, dtype='float32')

    # choose classifier
    clf = LinearCSVMC()

    # setup measure to be computed by Searchlight
    # cross-validated mean transfer using an N-fold dataset splitter
    cv = CrossValidatedTransferError(TransferError(clf), NFoldSplitter())

    sl = sphere_searchlight(cv, radius=self.inputs.radius,
                            space='voxel_indices',
                            nproc=2, mapper=mean_sample())
    ds = dataset.copy(deep=False,
                      sa=['labels', 'chunks'],
                      fa=['voxel_indices'],
                      a=[])
    sl_map = sl(ds)

    # map sensitivity map into original dataspace
    orig_sl_map = dataset.map2nifti(sl_map)
    orig_sl_map.save(self._get_output_filename())
def load_datadb_tutorial_data(path=os.path.join(
        pymvpa_datadbroot, 'tutorial_data', 'tutorial_data', 'data'),
        roi='brain'):
    """Loads the block-design demo dataset from the PyMVPA dataset DB.

    Parameters
    ----------
    path : str
      Path of the directory containing the dataset files.
    roi : str or int or tuple or None
      Region Of Interest to be used for masking the dataset. If a string is
      given, a corresponding mask image from the demo dataset will be used
      (mask_<str>.nii.gz). If an int value is given, the corresponding ROI
      is determined from the atlas image (mask_hoc.nii.gz). If a tuple is
      provided it may contain int values that are processed as explained
      before, but the union of those ROIs is taken to produce the final mask.
      If None, no masking is performed.
    """
    from nifti import NiftiImage
    from mvpa.datasets.mri import fmri_dataset
    from mvpa.misc.io import SampleAttributes

    if roi is None:
        mask = None
    elif isinstance(roi, str):
        mask = os.path.join(path, 'mask_' + roi + '.nii.gz')
    elif isinstance(roi, int):
        nimg = NiftiImage(os.path.join(path, 'mask_hoc.nii.gz'))
        tmpmask = nimg.data == roi
        mask = NiftiImage(tmpmask.astype(int), nimg.header)
    elif isinstance(roi, tuple) or isinstance(roi, list):
        nimg = NiftiImage(os.path.join(path, 'mask_hoc.nii.gz'))
        tmpmask = np.zeros(nimg.data.shape, dtype='bool')
        for r in roi:
            tmpmask = np.logical_or(tmpmask, nimg.data == r)
        mask = NiftiImage(tmpmask.astype(int), nimg.header)
    else:
        raise ValueError("Got something as mask that I cannot handle.")

    attr = SampleAttributes(os.path.join(path, 'attributes.txt'))
    ds = fmri_dataset(samples=os.path.join(path, 'bold.nii.gz'),
                      targets=attr.targets, chunks=attr.chunks,
                      mask=mask)
    return ds
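# Hypothetical call patterns for load_datadb_tutorial_data() above (a sketch, not
# from the original sources); the atlas label values 2 and 3 are placeholders.
def _example_tutorial_data_rois():
    ds_brain = load_datadb_tutorial_data(roi='brain')    # uses mask_brain.nii.gz
    ds_single = load_datadb_tutorial_data(roi=2)         # single ROI from mask_hoc.nii.gz
    ds_union = load_datadb_tutorial_data(roi=(2, 3))     # union of several atlas ROIs
    ds_nomask = load_datadb_tutorial_data(roi=None)      # no masking at all
    return ds_brain, ds_single, ds_union, ds_nomask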
def test_nifti_mapper():
    """Basic testing of map2Nifti
    """
    data = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'example4d.nii.gz'),
                        targets=[1, 2])

    # test mapping of ndarray
    vol = map2nifti(data, np.ones((294912,), dtype='int16'))
    if externals.exists('nibabel'):
        assert_equal(vol.get_shape(), (128, 96, 24))
        assert_true((vol.get_data() == 1).all())
        # test mapping of the dataset
        vol = map2nifti(data)
        assert_equal(vol.get_shape(), (128, 96, 24, 2))
    else:
        assert_equal(vol.data.shape, (24, 96, 128))
        assert_true((vol.data == 1).all())
        # test mapping of the dataset
        vol = map2nifti(data)
        assert_equal(vol.data.shape, (2, 24, 96, 128))
def test_fmridataset():
    # full-blown fmri dataset testing
    if externals.exists('nibabel'):
        import nibabel
        maskimg = nibabel.load(os.path.join(pymvpa_dataroot, 'mask.nii.gz'))
        data = maskimg.get_data().copy()
        data[data > 0] = np.arange(1, np.sum(data) + 1)
        maskimg = nibabel.Nifti1Image(data, None, maskimg.get_header())
    else:
        import nifti
        maskimg = nifti.NiftiImage(os.path.join(pymvpa_dataroot, 'mask.nii.gz'))
        # assign some values we can check later on
        maskimg.data.T[maskimg.data.T > 0] = np.arange(1,
                                                       np.sum(maskimg.data) + 1)
    attr = SampleAttributes(os.path.join(pymvpa_dataroot, 'attributes.txt'))
    ds = fmri_dataset(samples=os.path.join(pymvpa_dataroot, 'bold.nii.gz'),
                      targets=attr.targets, chunks=attr.chunks,
                      mask=maskimg,
                      sprefix='subj1',
                      add_fa={'myintmask': maskimg})
    # content
    assert_equal(len(ds), 1452)
    assert_true(ds.nfeatures, 530)
    assert_array_equal(sorted(ds.sa.keys()),
                       ['chunks', 'targets', 'time_coords', 'time_indices'])
    assert_array_equal(sorted(ds.fa.keys()),
                       ['myintmask', 'subj1_indices'])
    assert_array_equal(sorted(ds.a.keys()),
                       ['imghdr', 'mapper', 'subj1_dim', 'subj1_eldim'])
    # vol extent
    assert_equal(ds.a.subj1_dim, (40, 20, 1))
    # check time
    assert_equal(ds.sa.time_coords[-1], 3627.5)
    # non-zero mask values
    assert_array_equal(ds.fa.myintmask, np.arange(1, ds.nfeatures + 1))
###################### Piero #########################################
# assumed imports for this snippet (nibabel as ni, PyMVPA's fmri_dataset,
# and nitime's TimeSeries); they were not part of the original fragment
import nibabel as ni
from mvpa.datasets.mri import fmri_dataset
from nitime.timeseries import TimeSeries

mask = '/media/DATA/fmri/ica_classification/ICA/mask.nii.gz'

# shuffled ICA component maps
img_s00 = ni.load('/media/DATA/fmri/ica_classification/shuffled_IC/shuffled_dr_stage2_subject00000_Z.nii.gz')
img_s01 = ni.load('/media/DATA/fmri/ica_classification/shuffled_IC/shuffled_dr_stage2_subject00001_Z.nii.gz')
img_s02 = ni.load('/media/DATA/fmri/ica_classification/shuffled_IC/shuffled_dr_stage2_subject00002_Z.nii.gz')
img_s03 = ni.load('/media/DATA/fmri/ica_classification/shuffled_IC/shuffled_dr_stage2_subject00003_Z.nii.gz')

# original (unshuffled) ICA component maps
img_00 = ni.load('/media/DATA/fmri/ica_classification/ICA/dr_stage2_subject00000_Z.nii.gz')
img_01 = ni.load('/media/DATA/fmri/ica_classification/ICA/dr_stage2_subject00001_Z.nii.gz')
img_02 = ni.load('/media/DATA/fmri/ica_classification/ICA/dr_stage2_subject00002_Z.nii.gz')
img_03 = ni.load('/media/DATA/fmri/ica_classification/ICA/dr_stage2_subject00003_Z.nii.gz')

# datasets from the shuffled maps
ds0 = fmri_dataset(img_s00, mask=mask)
ds1 = fmri_dataset(img_s01, mask=mask)
ds2 = fmri_dataset(img_s02, mask=mask)
ds3 = fmri_dataset(img_s03, mask=mask)

# datasets from the original maps
d0 = fmri_dataset(img_00, mask=mask)
d1 = fmri_dataset(img_01, mask=mask)
d2 = fmri_dataset(img_02, mask=mask)
d3 = fmri_dataset(img_03, mask=mask)

ts0 = TimeSeries(ds0.samples, sampling_interval=1)
ts1 = TimeSeries(ds1.samples, sampling_interval=1)
def test_er_nifti_dataset():
    # setup data sources
    tssrc = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    evsrc = os.path.join(pymvpa_dataroot, 'fslev3.txt')
    masrc = os.path.join(pymvpa_dataroot, 'mask.nii.gz')
    evs = FslEV3(evsrc).to_events()
    # load timeseries
    ds_orig = fmri_dataset(tssrc)
    # segment into events
    ds = eventrelated_dataset(ds_orig, evs, time_attr='time_coords')

    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0 / 2.5) * np.prod((1, 20, 40)) == 3200 features
    assert_equal(ds.nfeatures, 3200)
    assert_equal(len(ds), len(evs))
    # the voxel indices are reflattened after boxcaring, but still 3D
    assert_equal(ds.fa.voxel_indices.shape, (ds.nfeatures, 3))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:800],
                       ds.fa.voxel_indices[800:1600])
    # each feature got an event offset value
    assert_array_equal(ds.fa.event_offsetidx, np.repeat([0, 1, 2, 3], 800))
    # check for all event attributes
    assert_true('onset' in ds.sa)
    assert_true('duration' in ds.sa)
    assert_true('features' in ds.sa)
    # check samples
    origsamples = _load_anyimg(tssrc)[0]
    for i, onset in \
            enumerate([value2idx(e['onset'], ds_orig.sa.time_coords, 'floor')
                       for e in evs]):
        assert_array_equal(ds.samples[i],
                           origsamples[onset:onset + 4].ravel())
        assert_array_equal(ds.sa.time_indices[i], np.arange(onset, onset + 4))
        assert_array_equal(ds.sa.time_coords[i],
                           np.arange(onset, onset + 4) * 2.5)
        for evattr in [a for a in ds.sa
                       if a.count("event_attrs")
                       and not a.count('event_attrs_event')]:
            assert_array_equal(evs[i]['_'.join(evattr.split('_')[2:])],
                               ds.sa[evattr].value[i])
    # check offset: only the last one exactly matches the tr
    assert_array_equal(ds.sa.orig_offset, [1, 1, 0])

    # map back into voxel space, should ignore additional features
    nim = map2nifti(ds)
    # origsamples has t,x,y,z
    assert_equal(nim.get_shape(), origsamples.shape[1:] + (len(ds) * 4,))
    # check shape of a single sample
    nim = map2nifti(ds, ds.samples[0])
    # nibabel image shape is x,y,z[,t]
    assert_equal(nim.get_shape(), (40, 20, 1, 4))

    # and now with masking
    ds = fmri_dataset(tssrc, mask=masrc)
    ds = eventrelated_dataset(ds, evs, time_attr='time_coords')
    nnonzero = len(_load_anyimg(masrc)[0].nonzero()[0])
    assert_equal(nnonzero, 530)
    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0 / 2.5) * 530 == 2120 features
    assert_equal(ds.nfeatures, 4 * 530)
    assert_equal(len(ds), len(evs))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:nnonzero],
                       ds.fa.voxel_indices[nnonzero:2 * nnonzero])
def test_er_nifti_dataset():
    # setup data sources
    tssrc = os.path.join(pymvpa_dataroot, 'bold.nii.gz')
    evsrc = os.path.join(pymvpa_dataroot, 'fslev3.txt')
    masrc = os.path.join(pymvpa_dataroot, 'mask.nii.gz')
    evs = FslEV3(evsrc).to_events()
    # load timeseries
    ds_orig = fmri_dataset(tssrc)
    # segment into events
    ds = eventrelated_dataset(ds_orig, evs, time_attr='time_coords')

    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0 / 2.5) * np.prod((1, 20, 40)) == 3200 features
    assert_equal(ds.nfeatures, 3200)
    assert_equal(len(ds), len(evs))
    # the voxel indices are reflattened after boxcaring, but still 3D
    assert_equal(ds.fa.voxel_indices.shape, (ds.nfeatures, 3))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:800],
                       ds.fa.voxel_indices[800:1600])
    # each feature got an event offset value
    assert_array_equal(ds.fa.event_offsetidx, np.repeat([0, 1, 2, 3], 800))
    # check for all event attributes
    assert_true('onset' in ds.sa)
    assert_true('duration' in ds.sa)
    assert_true('features' in ds.sa)
    # check samples
    origsamples = _load_anyimg(tssrc)[0]
    for i, onset in \
            enumerate([value2idx(e['onset'], ds_orig.sa.time_coords, 'floor')
                       for e in evs]):
        assert_array_equal(ds.samples[i],
                           origsamples[onset:onset + 4].ravel())
        assert_array_equal(ds.sa.time_indices[i], np.arange(onset, onset + 4))
        assert_array_equal(ds.sa.time_coords[i],
                           np.arange(onset, onset + 4) * 2.5)
        for evattr in [a for a in ds.sa
                       if a.count("event_attrs")
                       and not a.count('event_attrs_event')]:
            assert_array_equal(evs[i]['_'.join(evattr.split('_')[2:])],
                               ds.sa[evattr].value[i])
    # check offset: only the last one exactly matches the tr
    assert_array_equal(ds.sa.orig_offset, [1, 1, 0])

    # map back into voxel space, should ignore additional features
    nim = map2nifti(ds)
    if externals.exists('nibabel'):
        # origsamples has t,x,y,z
        assert_equal(nim.get_shape(), origsamples.shape[1:] + (len(ds) * 4,))
        # check shape of a single sample
        nim = map2nifti(ds, ds.samples[0])
        # nibabel image shape is x,y,z[,t]
        assert_equal(nim.get_shape(), (40, 20, 1, 4))
    else:
        # origsamples has t,x,y,z but pynifti image has [t,]z,y,x
        assert_equal(nim.data.T.shape, origsamples.shape[1:] + (len(ds) * 4,))
        # check shape of a single sample
        nim = map2nifti(ds, ds.samples[0])
        # pynifti image has [t,]z,y,x
        assert_equal(nim.data.T.shape, (40, 20, 1, 4))

    # and now with masking
    ds = fmri_dataset(tssrc, mask=masrc)
    ds = eventrelated_dataset(ds, evs, time_attr='time_coords')
    nnonzero = len(_load_anyimg(masrc)[0].nonzero()[0])
    assert_equal(nnonzero, 530)
    # we ask for boxcars of 9s length, and the tr in the file header says 2.5s
    # hence we should get round(9.0 / 2.5) * 530 == 2120 features
    assert_equal(ds.nfeatures, 4 * 530)
    assert_equal(len(ds), len(evs))
    # and they have been broadcasted through all boxcars
    assert_array_equal(ds.fa.voxel_indices[:nnonzero],
                       ds.fa.voxel_indices[nnonzero:2 * nnonzero])