def test_decoder(self):
    t = tempfile.mktemp()  # temporary path for the saved decoding results
    test_data_path = get_test_data_path()
    # dataset = Dataset(test_data_path + 'test_real_dataset.txt')
    # dataset.add_features(test_data_path + 'test_real_features.txt')
    dec = decode.Decoder(self.real_dataset, features=['pain', 'emotion'])
    img = os.path.join(test_data_path, 'sgacc_mask.nii.gz')
    # With save=, decode() writes the results table to disk
    dec.decode(img, save=t)
    self.assertTrue(os.path.exists(t))
    # Without save=, decode() returns a features x images matrix
    results = dec.decode(img)
    self.assertEqual(results.shape, (2, 1))
    os.unlink(t)
Example #2
  def decode(self, images, outfile, mrs=None, round=4):  # 'round' shadows the builtin
    if not self.decoder:
      self.decoder = decode.Decoder(self.db)

    # If mrs is not specified, decode against the Neurosynth database
    if not mrs:
      result = self.decoder.decode(images, save=outfile)

    # If mrs is specified, decode against a custom set of images
    else:
      # This is akin to the traditional Neurosynth method: Pearson's r correlation
      imgs_to_compare = imageutils.load_imgs(mrs, self.masker)
      imgs_to_decode = imageutils.load_imgs(images, self.masker)
      # Standardize each image (zero mean, unit norm), so the dot product
      # below yields Pearson correlations
      x, y = imgs_to_compare.astype(float), imgs_to_decode.astype(float)
      x, y = x - x.mean(0), y - y.mean(0)
      x, y = x / np.sqrt((x ** 2).sum(0)), y / np.sqrt((y ** 2).sum(0))
      result = np.around(x.T.dot(y).T, round)
      features = [os.path.basename(m) for m in mrs]
      rownames = [os.path.basename(m) for m in images]
      df = pd.DataFrame(result, columns=features, index=rownames)
      df.to_csv(outfile, sep="\t")
    return result
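# The standardize-and-dot-product block above is just a vectorized Pearson
# correlation. A minimal self-contained check of that equivalence (the random
# arrays below are stand-ins for the masked image matrices, not part of the
# original code):
import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(size=(1000, 3))  # 1000 voxels x 3 comparison images
y = rng.normal(size=(1000, 2))  # 1000 voxels x 2 images to decode

xs = x - x.mean(0)
ys = y - y.mean(0)
xs /= np.sqrt((xs ** 2).sum(0))
ys /= np.sqrt((ys ** 2).sum(0))
r = xs.T.dot(ys).T  # shape (2, 3): images-to-decode x comparison images

# Each entry equals the ordinary Pearson correlation of the two columns
assert np.isclose(r[0, 0], np.corrcoef(y[:, 0], x[:, 0])[0, 1])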
Example #3
# 1) Apply the inverse transformation matrix to the image header and write it out - Matlab
dat.volInfo.mat = inv(t)*dat.volInfo.mat;
dat.fullpath = '/Users/lukechang/Research/Trust_Friend/Analyses/NeurosynthDecode/Friend.nii';
write(dat)

# 2) Reorient using FSL - Unix
fslreorient2std Friend Friend_Or

# 3) Coregister to 2mm MNI space - Unix
/usr/local/fsl/bin/flirt -in /Users/lukechang/Research/Trust_Friend/Analyses/NeurosynthDecode/Friend_Or.nii.gz -ref /usr/local/fsl/data/standard/MNI152_T1_2mm_brain -out /Users/lukechang/Research/Trust_Friend/Analyses/NeurosynthDecode/Friend_Or_Mni.nii.gz -omat /Users/lukechang/Research/Trust_Friend/Analyses/NeurosynthDecode/Friend_Or_Mni.mat -bins 256 -cost corratio -searchrx -90 90 -searchry -90 90 -searchrz -90 90 -dof 12  -interp trilinear

# 4) Decode - Python
DATASET_FILE = '/Users/lukechang/Dropbox/Github/neurosynth/topics.pkl'
PREFIX = '/Users/lukechang/Research/Trust_Friend/Analyses/NeurosynthDecode/'
INFILE = 'Friend_Or_Mni.nii.gz'
dataset = Dataset.load(DATASET_FILE)
decoder = decode.Decoder(dataset)  # takes a while to load; only do this once
img = imageutils.load_imgs(PREFIX + INFILE, decoder.mask)
result = decoder.decode(img)
np.savetxt(PREFIX + 'Friend_Decoded.txt', result)

# 5) Threshold at z > 3 (roughly p < .001) - Unix
fslmaths Friend_Or_Mni -thr 3 Friend_Or_Mni_001

# 6) Decode the thresholded map - Python
DATASET_FILE = '/Users/lukechang/Dropbox/Github/neurosynth/topics.pkl'
PREFIX = '/Users/lukechang/Research/Trust_Friend/Analyses/NeurosynthDecode/'
INFILE = 'Friend_Or_Mni_001.nii.gz'
dataset = Dataset.load(DATASET_FILE)
decoder = decode.Decoder(dataset)  # takes a while to load; only do this once
img = imageutils.load_imgs(PREFIX + INFILE, decoder.mask)
result = decoder.decode(img)
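# As in step 4, you would typically save the result; the output filename
# below is an assumption, not from the original script
np.savetxt(PREFIX + 'Friend_Decoded_001.txt', result)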
Example #4
import pickle
from neurosynth.analysis import decode

# Load a pickled Dataset instance. This example will work with the
# file saved in the create_a_new_dataset_and_load_features example.
with open('dataset.pkl', 'rb') as f:
    dataset = pickle.load(f)

# Initialize a new Decoder instance with a few features. Note that
# if you don't specify a subset of features, ALL features in the
# Dataset will be loaded, which will take a long time because
# meta-analysis images for each feature need to be generated.
decoder = decode.Decoder(
    dataset, features=['emotion', 'pain', 'somatosensory', 'wm', 'inhibition'])

# Decode three images. The sample images here are coactivation
# maps for ventral, dorsal, and posterior insula clusters,
# respectively. Maps are drawn from data reported in
# Chang, Yarkoni, Khaw, & Sanfey (2012); see paper for details.
# We save the output--an image x features matrix--to a file.
# By default, the decoder will use Pearson correlation, i.e.,
# each value in our results table indicates the correlation
# between the input image and each feature's meta-analysis image.
result = decoder.decode(['vIns.nii.gz', 'dIns.nii.gz', 'pIns.nii.gz'],
                        save='decoding_results.txt')
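# Sanity check (an inference, not part of the original example): decode()
# returns a features x images matrix, per the unit test at the top of this
# page, so with 5 features and 3 images we expect shape (5, 3)
print(result.shape)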
Example #5

print('loading dataset...')
tds = datetime.now()
dataset = Dataset('/Users/Katie/Dropbox/Data/neurosynth-v0.7/database.txt')
dataset.add_features('/Users/Katie/Dropbox/Data/neurosynth-v0.7/features.txt')
tdf = datetime.now()

print('dataset loaded! only took {0}'.format((tdf - tds)))

for i in range(len(mask_names)):
    print('{0}\nmeta-analyzing {1}...'.format(datetime.now(), mask_names[i]))
    tmas = datetime.now()
    ids = dataset.get_studies(mask=roi_files[i])
    ma = meta.MetaAnalysis(dataset, ids)
    ma.save_results(
        output_dir=sink_dir,
        prefix=mask_names[i],
        image_list=['association-test_z', 'association-test_z_FDR_0.01'])
    tmaf = datetime.now()
    print('meta-analysis took {0}\ndecoding {1}...'.format((tmaf - tmas),
                                                           mask_names[i]))
    # NOTE: this rebuilds the Decoder on every iteration; it depends only on
    # the dataset, so constructing it once before the loop would be faster
    decoder = decode.Decoder(dataset, image_type='association-test_z')
    result = decoder.decode(
        [join(sink_dir, '{0}_association-test_z.nii.gz'.format(mask_names[i]))],
        save=join(sink_dir, 'decoded_{0}.txt'.format(mask_names[i])))
    tdcf = datetime.now()
    print('decoding {0} took {1}'.format(mask_names[i], (tdcf - tmaf)))
Example #6
# <codecell>

# Seed-based coactivation
network.coactivation(dataset, [[0, 20, 28]], threshold=0.1, outroot='coactivation_from_coords', r=10)

# <markdowncell>

# Here we're generating a coactivation map for a sphere with radius 10 mm centered on an anterior cingulate cortex (ACC) voxel. The threshold argument indicates what proportion of voxels within the ACC sphere have to be activated for a study to be considered 'active'.
# 
# In general, meta-analytic coactivation produces results quite similar to--but substantially less spatially specific than--time series-based functional connectivity. Note that if you're only interested in individual points in the brain, you can find precomputed coactivation maps for spheres centered on every gray matter voxel in the brain on the Neurosynth website.
# 
# ### Decoding your own images
# 
# One of the most useful features of Neurosynth is the ability to 'decode' arbitrary images by assessing their similarity to the reverse inference meta-analysis maps generated for different terms. For example, you might wonder whether a group-level z-score map for some experimental contrast is more consistent with recollection or with recognition. You could even use Neurosynth as a simple (but often effective) classifier by running a series of individual subjects through the decoder and picking the class (i.e., term) with the highest similarity. Perhaps the most powerful--though somewhat more computationally intensive--use is open-ended decoding: take the entire set of features included in the base Neurosynth data download and rank-order them by similarity to each of our input images.
# 
# In this example, we'll decode three insula-based coactivation networks drawn from Chang, Yarkoni, Khaw, & Sanfey (2012). You should substitute your own images into the list below. We assess the similarity of each map with respect to 9 different terms and save the results to a file. Note that if we left the features argument unspecified, the decoder would default to using the entire set of 500+ features (which will take a few minutes on most machines unless you've pregenerated the feature maps--but that's for a different tutorial).

# <codecell>

# Decode images
decoder = decode.Decoder(dataset, features=['taste', 'disgust', 'emotion', 'auditory', 'pain', 'somatosensory', 'conflict', 'switching', 'inhibition'])
data = decoder.decode(['pIns.nii.gz', 'vIns.nii.gz', 'dIns.nii.gz'], save='decoding_results.txt')

# <markdowncell>

# In decoding_results.txt, we have features in rows and input images in columns. By default, each cell reflects the Pearson correlation between the corresponding input image (i.e., the column) and reverse inference meta-analysis map (i.e., the row). Sort the columns in descending order and you've got a crude but potentially quite useful open-ended decoding of your images. Mind you, if you're lazy, you can achieve the same thing by uploading your images to [NeuroVault](http://neurovault.org) and then using the (currently experimental) [decode](http://neurosynth.org) function on the [Neurosynth website](http://neurosynth.org).
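# <markdowncell>

# As a concrete sketch of that rank-ordering (assuming the saved file is tab-delimited, as in the custom decoder in Example #2 above), load the table with pandas, sort each column, and pick the top match per image:

# <codecell>

import pandas as pd

# Features in rows, input images in columns (tab delimiter is an assumption)
results = pd.read_csv('decoding_results.txt', sep='\t', index_col=0)

# Open-ended decoding: top 5 most similar features for each image
for image in results.columns:
    print(results[image].sort_values(ascending=False).head())

# Crude classifier: the single best-matching feature per image
print(results.idxmax(axis=0))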

Example #7
print "Starting neurosynth decoding..."

from neurosynth import Dataset
from neurosynth.analysis import meta
from neurosynth.analysis import decode

neurosynth_data = "%s/neurosynth-data" % base

# You will need to open the features file in a text editor and add "pmid" as
# the first column name, so that it appears before the "action" column.
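# Rather than editing by hand, a small sketch that prepends the missing
# header field programmatically (the features-file path and tab delimiter
# are assumptions)
features_path = '%s/features.txt' % neurosynth_data
with open(features_path) as f:
    lines = f.readlines()
if not lines[0].startswith('pmid'):
    lines[0] = 'pmid\t' + lines[0]
    with open(features_path, 'w') as f:
        f.writelines(lines)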
dataset = Dataset('%s/database.txt' % neurosynth_data, normalized_features_file)

# Create a decoder to decode our images

from nilearn.image import resample_img
decoder = decode.Decoder(dataset)  # select all 211 features from our set
with open("%s/decoder.pkl" % output_folder, "wb") as f:
    pickle.dump(decoder, f)
concept_maps = glob("%s/*.nii.gz" % output_folder)

# Generate maps for the features
neurosynth_feature_maps = "%s/feature_maps" %neurosynth_data
if not os.path.exists(neurosynth_feature_maps):
    os.mkdir(neurosynth_feature_maps)

meta.analyze_features(dataset, output_dir=neurosynth_feature_maps, prefix='cog_atlas')

#...and boom, you should have a full set of images. (-Tal Yarkoni) :)
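# A quick check that the feature maps were actually written (glob is used
# above, so it's assumed to be imported earlier in the script)
feature_maps = glob("%s/*.nii.gz" % neurosynth_feature_maps)
print("Generated %d feature maps" % len(feature_maps))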

# Decoder needs 2mm images
brain_2mm = get_standard_mask(2)
concept_maps_2mm = []
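# The snippet is truncated here; a sketch of the likely next step, resampling
# each concept map to the 2mm template with the resample_img imported above
# (the loop body and interpolation choice are assumptions, and brain_2mm is
# assumed to be a nibabel image)
for cmap in concept_maps:
    img_2mm = resample_img(cmap,
                           target_affine=brain_2mm.affine,
                           target_shape=brain_2mm.shape[:3],
                           interpolation='continuous')
    concept_maps_2mm.append(img_2mm)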
Example #8
from neurosynth.base.dataset import Dataset
from neurosynth.analysis import decode, meta
from os.path import join

parent_dir = '/Users/Katie/Dropbox/Data/neurosynth_current_data/'
roi_dir = '/Users/Katie/Dropbox/Data/NSvBM-decoding/ROIs/'
sink_dir = '/Users/Katie/Dropbox/Data/NSvBM-decoding/'
paracentral = join(roi_dir, 'l-paracentral.nii.gz')
ofc = join(roi_dir, 'r-ofc.nii.gz')
uncus = join(roi_dir, 'r-ofc.nii.gz')  # NOTE: points at the same file as ofc above
visual = join(roi_dir, 'smith-rsn70-1.nii.gz')
mask_list = [paracentral, ofc, uncus, visual]
mask_names = ['paracentral', 'orbitofrontal', 'uncus', 'visual']
dataset = Dataset.load(join(parent_dir, 'dataset.pkl'))
# Constructing the Decoder is expensive, so build it once outside the loop
decoder = decode.Decoder(dataset)
for i, mask in enumerate(mask_list):
    ids = dataset.get_studies(mask=mask)
    ma = meta.MetaAnalysis(dataset, ids)
    ma.save_results(output_dir='.', prefix=mask_names[i])
    result = decoder.decode(['{0}_pFgA_z.nii.gz'.format(mask_names[i])],
                            save=join(sink_dir,
                                      'decoded_{0}.txt'.format(mask_names[i])))