def test_dataset_initializes(self):
    """ Test whether dataset initializes properly.

    Builds a Dataset from the test activation/feature files and checks
    that the core attributes (masker, image table, mappables, radius)
    are populated.
    """
    # BUG FIX: the constructed Dataset was previously discarded, so the
    # assertions below exercised a pre-existing fixture rather than the
    # instance this test claims to verify. Bind it to self.dataset.
    self.dataset = Dataset(get_test_data_path() + 'test_dataset.txt',
                           get_test_data_path() + 'test_features.txt')
    self.assertIsNotNone(self.dataset.masker)
    self.assertIsNotNone(self.dataset.image_table)
    self.assertEqual(len(self.dataset.mappables), 5)
    self.assertIsNotNone(self.dataset.r)
    # BUG FIX: the original passed 'field' as the *msg* argument of
    # assertIsNotNone, which never compared the value at all; the intent
    # (per the literal) is an equality check against 'field'.
    self.assertEqual(
        self.dataset.mappables[0].data['extra_field'].iloc[2], 'field')
def test_add_and_remove_masks(self):
    """ Add two mask layers (one auto-named, one explicitly named),
    verify the masker's bookkeeping, then remove one and re-verify. """
    data_path = get_test_data_path()
    self.masker.add(data_path + 'sgacc_mask.nii.gz')
    self.masker.add({'motor': data_path + 'medial_motor.nii.gz'})
    # Both the layer dict and the stack should track the two additions.
    for tracked in (self.masker.layers, self.masker.stack):
        self.assertEqual(len(tracked), 2)
    self.assertEqual(set(self.masker.layers.keys()),
                     set(['layer_0', 'motor']))
    # Voxel count inside the 'motor' layer is fixed by the test data.
    self.assertEqual(np.sum(self.masker.layers['motor']), 1419)
    self.masker.remove('motor')
    for tracked in (self.masker.layers, self.masker.stack):
        self.assertEqual(len(tracked), 1)
def test_roi_averaging(self):
    """ Test averaging within region labels in a mask. """
    mask_file = get_test_data_path() + 'sgacc_mask.nii.gz'
    averaged = reduce.average_within_regions(self.dataset, mask_file)
    # The study axis must be preserved by the reduction.
    self.assertEqual(self.dataset.image_table.data.shape[1],
                     averaged.shape[1])
    # The averaged values should not be (near-)all zero.
    self.assertGreater(averaged.sum(), 0.05)
def test_unmask(self):
    """ Test unmasking on 1d and 2d vectors (going back to 3d and 4d).

    TODO: test directly on Mask class and its functions, and on some
    smaller example data. But then it should get into a separate TestCase
    to not 'reload' the same Dataset. So for now let's just reuse the
    loaded Dataset and provide rudimentary testing.
    """
    dataset = self.dataset
    # Exercised for its side of the API; the ids themselves are unused here.
    ids = dataset.get_ids_by_mask(
        get_test_data_path() + 'sgacc_mask.nii.gz')
    nvoxels = dataset.volume.in_mask[0].shape[0]
    nvols = 2
    data2d = np.arange(nvoxels * nvols).reshape((nvoxels, -1))
    # FIX: xrange is Python-2-only; range() behaves identically here under
    # both Python 2 and 3.
    data2d_unmasked_separately = [
        dataset.volume.unmask(data2d[:, i]) for i in range(nvols)]
    data2d_unmasked = dataset.volume.unmask(data2d)
    # Stacked unmask grows one trailing axis of length nvols.
    self.assertEqual(data2d_unmasked.shape,
                     data2d_unmasked_separately[0].shape + (nvols,))
    # Each volume of the stacked result must equal its per-column unmask.
    for i in range(nvols):
        self.assertTrue(np.all(
            data2d_unmasked[..., i] == data2d_unmasked_separately[i]))
def test_selection_by_mask(self):
    """ Test mask-based Mappable selection.

    Only one peak in the test dataset (in study5) should be within
    the sgACC.
    """
    ids = self.dataset.get_studies(
        mask=get_test_data_path() + 'sgacc_mask.nii.gz')
    # FIX: assertEquals is a long-deprecated alias (removed in Python
    # 3.12); use assertEqual.
    self.assertEqual(len(ids), 1)
    self.assertEqual('study5', ids[0])
def test_unmask(self):
    """ Test unmasking on 1d and 2d vectors (going back to 3d and 4d).

    TODO: test directly on Masker class and its functions, and on some
    smaller example data. But then it should get into a separate TestCase
    to not 'reload' the same Dataset. So for now let's just reuse the
    loaded Dataset and provide rudimentary testing.
    """
    dataset = self.dataset
    # Exercised for its side of the API; the ids themselves are unused here.
    ids = dataset.get_ids_by_mask(get_test_data_path() + 'sgacc_mask.nii.gz')
    nvoxels = dataset.masker.n_vox_in_vol
    nvols = 2
    data2d = np.arange(nvoxels * nvols).reshape((nvoxels, -1))
    # FIX: xrange is Python-2-only; range() behaves identically here under
    # both Python 2 and 3.
    data2d_unmasked_separately = [
        dataset.masker.unmask(data2d[:, i]) for i in range(nvols)]
    data2d_unmasked = dataset.masker.unmask(data2d)
    # Stacked unmask grows one trailing axis of length nvols.
    self.assertEqual(data2d_unmasked.shape,
                     data2d_unmasked_separately[0].shape + (nvols,))
    # Each volume of the stacked result must equal its per-column unmask.
    for i in range(nvols):
        self.assertTrue(np.all(
            data2d_unmasked[..., i] == data2d_unmasked_separately[i]))
def test_clustering(self):
    """ Exercise cluster.magic in its three modes: coactivation-based,
    study-based (with user-supplied reducer/clusterer), and feature-based.
    """
    roi_mask = os.path.join(get_test_data_path(), 'sgacc_mask.nii.gz')

    # Standard coactivation-based clustering: 3 clusters + background = 4
    # unique values in the output image.
    coact = cluster.magic(self.real_dataset, roi_mask=roi_mask,
                          reduce_reference='pca', n_components=5,
                          min_studies_per_voxel=1, n_clusters=3)
    self.assertEqual(len(np.unique(coact.get_data())), 4)

    # Study-based clustering with explicit sklearn estimators, written to
    # a scratch directory on disk.
    scratch = tempfile.mkdtemp()
    from sklearn.decomposition import PCA
    from sklearn.cluster import KMeans
    cluster.magic(
        self.real_dataset, method='studies', roi_mask=roi_mask,
        features=['emotion', 'pain'], feature_threshold=0.0,
        reduce_reference=PCA(20, svd_solver='randomized'),
        clustering_algorithm=KMeans(3), distance_metric='jaccard',
        output_dir=scratch, filename='test.nii.gz')
    written = nb.load(os.path.join(scratch, 'test.nii.gz'))
    self.assertEqual(len(np.unique(written.get_data())), 4)

    # Feature-based clustering.
    feat = cluster.magic(self.real_dataset, method='features', n_clusters=3)
    self.assertEqual(len(np.unique(feat.get_data())), 4)

    shutil.rmtree(scratch)
def test_img_to_json(self):
    """ Round-trip the sgACC mask through img_to_json and verify the
    decoded fields against known properties of the test mask. """
    mask_path = get_test_data_path() + 'sgacc_mask.nii.gz'
    decoded = json.loads(imageutils.img_to_json(mask_path))
    self.assertEqual(decoded['max'], 1)
    self.assertEqual(decoded['dims'], [91, 109, 91])
    # Binary mask: a single distinct nonzero value...
    self.assertEqual(decoded['values'], [1.0])
    # ...spanning a fixed number of voxel indices.
    self.assertEqual(len(decoded['indices'][0]), 1142)
def test_coactivation(self):
    """ Test seed-based coactivation.

    Runs network.coactivation into a temp directory and checks that the
    expected number of output images was written.
    """
    tempdir = tempfile.mkdtemp()
    try:
        seed_img = get_test_data_path() + 'sgacc_mask.nii.gz'
        network.coactivation(self.dataset, seed_img, output_dir=tempdir,
                             prefix='test', r=20)
        # FIX: renamed local 'filter' (it shadowed the builtin) and
        # replaced the deprecated assertEquals alias with assertEqual.
        pattern = os.path.join(tempdir, 'test*.nii.gz')
        files = glob(pattern)
        self.assertEqual(len(files), 9)
    finally:
        # ROBUSTNESS: clean up even if an assertion above fails, so the
        # temp directory is never leaked.
        shutil.rmtree(tempdir)
def test_decoder(self):
    """ Test Decoder.decode() both with and without saving to disk. """
    # FIX: tempfile.mktemp() is deprecated and race-prone; allocate a
    # private temp directory and place the output file inside it. The
    # file still does not exist until decode() writes it, so the
    # os.path.exists assertion keeps its original meaning.
    out_dir = tempfile.mkdtemp()
    out_file = os.path.join(out_dir, 'decode_output.txt')
    test_data_path = get_test_data_path()
    dec = decode.Decoder(self.real_dataset, features=['pain', 'emotion'])
    img = os.path.join(test_data_path, 'sgacc_mask.nii.gz')
    try:
        dec.decode(img, save=out_file)
        self.assertTrue(os.path.exists(out_file))
        results = dec.decode(img)
        # Two requested features x one input image.
        self.assertEqual(results.shape, (2, 1))
    finally:
        # ROBUSTNESS: clean up even if an assertion fails.
        shutil.rmtree(out_dir)
def test_unmask(self):
    """ Test unmasking on 1d and 2d vectors (going back to 3d and 4d).

    TODO: test directly on Masker class and its functions, and on some
    smaller example data. But then it should get into a separate TestCase
    to not 'reload' the same Dataset. So for now let's just reuse the
    loaded Dataset and provide rudimentary testing.
    """
    dataset = self.dataset
    # Exercised for its side of the API; the ids themselves are unused here.
    ids = dataset.get_studies(
        mask=get_test_data_path() + 'sgacc_mask.nii.gz')
    nvols = 2
    nvoxels = dataset.masker.n_vox_in_vol
    stacked = np.arange(nvoxels * nvols).reshape((nvoxels, -1))
    # Unmask to a plain 4d array...
    as_array = dataset.masker.unmask(stacked, output='array')
    self.assertEqual(as_array.shape, (91, 109, 91, 2))
    # ...and to an image object with the same dimensions.
    as_image = dataset.masker.unmask(stacked, output='image')
    self.assertEqual(as_image.shape, (91, 109, 91, 2))
    self.assertTrue(hasattr(as_image, 'get_data'))
def test_clustering(self):
    """ Exercise cluster.magic in its three modes: coactivation-based,
    study-based (with user-supplied reducer/clusterer), and feature-based.
    """
    # Test standard coactivation-based clustering: 3 clusters plus
    # background = 4 unique values in the output image.
    roi_mask = os.path.join(get_test_data_path(), 'sgacc_mask.nii.gz')
    clusters = cluster.magic(self.real_dataset, roi_mask=roi_mask,
                             reduce_reference='pca', n_components=5,
                             min_studies_per_voxel=1, n_clusters=3)
    n_unique = len(np.unique(clusters.get_data()))
    self.assertEqual(n_unique, 4)

    # Test study-based clustering
    d = tempfile.mkdtemp()
    # FIX: sklearn.decomposition.RandomizedPCA was deprecated in
    # scikit-learn 0.18 and removed in 0.20; PCA(svd_solver='randomized')
    # is the documented drop-in replacement (and matches the sibling
    # test_clustering implementation in this file).
    from sklearn.decomposition import PCA
    from sklearn.cluster import KMeans
    pca = PCA(20, svd_solver='randomized')
    clust = KMeans(3)
    cluster.magic(
        self.real_dataset, method='studies', roi_mask=roi_mask,
        features=['emotion', 'pain'], feature_threshold=0.0,
        reduce_reference=pca, clustering_algorithm=clust,
        distance_metric='jaccard', output_dir=d, filename='test.nii.gz')
    img = nb.load(os.path.join(d, 'test.nii.gz'))
    self.assertEqual(len(np.unique(img.get_data())), 4)

    # Test feature-based clustering
    clusters = cluster.magic(self.real_dataset, method='features',
                             n_clusters=3)
    n_unique = len(np.unique(clusters.get_data()))
    self.assertEqual(n_unique, 4)
    shutil.rmtree(d)