Example #1
    def save_results(self, output_dir='.', prefix='', prefix_sep='_',
                     image_list=None):
        """ Write out any images generated by the meta-analysis.
        Args:
            output_dir (str): folder to write images to
            prefix (str): all image files will be prepended with this string
            prefix_sep (str): glue between the prefix and rest of filename
            image_list (list): optional list of images to save--e.g.,
                ['pFgA_z', 'pAgF']. If image_list is None (default), will save
                all images.
        """

        if prefix == '':
            prefix_sep = ''

        if not exists(output_dir):
            makedirs(output_dir)

        logger.debug("Saving results...")
        if image_list is None:
            image_list = self.images.keys()
        for suffix, img in self.images.items():
            if suffix in image_list:
                filename = prefix + prefix_sep + suffix + '.nii.gz'
                outpath = join(output_dir, filename)
                imageutils.save_img(img, outpath, self.dataset.masker)
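A usage sketch for context (the dataset path and feature name are illustrative; get_ids_by_features and MetaAnalysis are used as in the later examples):

from neurosynth.base.dataset import Dataset
from neurosynth.analysis import meta

dataset = Dataset.load('dataset.pkl')  # illustrative path
ids = dataset.get_ids_by_features('emotion', threshold=0.001)
ma = meta.MetaAnalysis(dataset, ids)
# Write only two of the generated maps, prefixed with 'emotion_'
ma.save_results(output_dir='results', prefix='emotion',
                image_list=['pFgA_z', 'pAgF'])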
Example #2
def make_mask_map(data, infile, outfile, index=None):
    import numpy as np
    from neurosynth.base.mask import Masker
    from neurosynth.base import imageutils

    # Load image with masker
    masker = Masker(infile)
    img = imageutils.load_imgs(infile, masker)

    data = list(data)

    if index is None:
        index = np.arange(0, len(data))
        rev_index = None
    else:
        all_reg = np.arange(0, img.max())
        rev_index = all_reg[np.invert(np.in1d(all_reg, index))]

    min_val = img.min()

    for num, value in enumerate(data):
        n = index[num]
        np.place(img, img == n + min_val, [value])

    if rev_index is not None:
        for value in rev_index:
            np.place(img, img == value + min_val, 0)

    img = img.astype('float32')

    imageutils.save_img(img, outfile, masker)
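A hedged usage sketch: assuming infile is an integer-labeled atlas whose background value is 0, this writes one new value per region and zeroes the rest (file names and values are illustrative):

# Fill regions 1, 2 and 3 of an atlas with new values, zeroing other regions
make_mask_map([0.2, 0.5, 0.9], 'atlas.nii.gz', 'atlas_stats.nii.gz',
              index=[1, 2, 3])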
Example #3
def make_mask_map(data, infile, outfile, index=None):
    import numpy as np
    from neurosynth.base.mask import Masker
    from neurosynth.base import imageutils

    # Load image with masker
    masker = Masker(infile)
    img = imageutils.load_imgs(infile, masker)

    img = np.round(img)

    data = list(data)

    if index is None:
        index = np.unique(img)
        rev_index = None
    else:
        all_reg = np.arange(0, img.max())
        rev_index = all_reg[np.invert(np.in1d(all_reg, index))]

    min_val = img.min()

    for num, value in enumerate(data):
        ix = index[num]

        np.place(img, img == ix, [value])

    if rev_index is not None:
        for value in rev_index:
            np.place(img, img == value + min_val, 0)

    img = img.astype('float32')

    imageutils.save_img(img, outfile, masker)
Example #4
    def _create_cluster_images(self, labels, output_dir=None):
        ''' Creates a Nifti image of reconstructed cluster labels. 
        Args:
            labels: A vector of cluster labels
            output_dir: A string indicating folder to output images to. If None, 
                creates a "ClusterImages" directory below the Clusterer instance's
                output directory.
        Outputs:
            Cluster_k.nii.gz: Will output a nifti image with cluster labels
        '''

        labels += 1

        # Reconstruct grid into original space
        # TODO: replace with masker.unmask()
        if hasattr(self, 'grid'):
            regions = self.masker.mask(self.grid)
            unique_regions = np.unique(regions)
            n_regions = unique_regions.size
            m = np.zeros(regions.size)
            for i in range(n_regions):
                m[regions == unique_regions[i]] = labels[i] + 1

            labels = m

        if output_dir is None:
            output_dir = os.path.join(self.output_dir, 'ClusterImages')

        if not os.path.isdir(output_dir):
            os.makedirs(output_dir)

        outfile = os.path.join(output_dir,
                               'Cluster_k%d.nii.gz' % len(np.unique(labels)))
        imageutils.save_img(labels, outfile, self.masker)
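The TODO aside, the region loop can also be vectorized; a minimal sketch of an equivalent reconstruction (a drop-in for the loop inside the method, not verified against this codebase):

import numpy as np

# Look up each voxel's position in the sorted unique_regions array,
# then index the label vector directly instead of looping per region.
regions = self.masker.mask(self.grid)
unique_regions = np.unique(regions)
pos = np.searchsorted(unique_regions, regions)
labels = np.asarray(labels)[pos] + 1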
Example #5
    def save_results(self,
                     output_dir='.',
                     prefix='',
                     prefix_sep='_',
                     image_list=None):
        """ Write out any images generated by the meta-analysis.
        Args:
            output_dir (str): folder to write images to
            prefix (str): all image files will be prepended with this string
            prefix_sep (str): glue between the prefix and rest of filename
            image_list (list): optional list of images to save--e.g.,
                ['pFgA_z', 'pAgF']. If image_list is None (default), will save
                all images.
        """

        if prefix == '':
            prefix_sep = ''

        if not exists(output_dir):
            makedirs(output_dir)

        logger.debug("Saving results...")
        if image_list is None:
            image_list = self.images.keys()
        for suffix, img in self.images.items():
            if suffix in image_list:
                filename = prefix + prefix_sep + suffix + '.nii.gz'
                outpath = join(output_dir, filename)
                imageutils.save_img(img, outpath, self.dataset.masker)
Example #6
    def dimension_reduction(self, reducer, n_components=100, save=False):
        """ Reduces the dimensionalty of the currently loaded reference data.
        Args:
            reducer (str or sklearn object): The reduction method to use. Can be
            either a string (one of 'pca', 'ica', or 'ward') or an object that
            follows sklearn's TransformerMixin pattern.
            n_components: Number of components to extract, if applicable. When
                method is an object, this argument is ignored.
            save: if True, writes out images of the components.
        """
        if isinstance(reducer, basestring):

            _valid = {
                'pca': RandomizedPCA,
                'ica': FastICA,
            }
            if reducer not in _valid:
                raise ValueError("Dimensionality reduction method must be one "
                    "of %s; '%s' is not a valid value." %
                    (str(list(_valid.keys())), reducer))

            reducer = _valid[reducer](n_components=n_components)

        self.reference_data = reducer.fit_transform(self.reference_data.T).T

        if save:
            print "Saving components..."
            for i in range(n_components):
                comp = reducer.components_[i, :]
                outf = join(self.output_dir, 'ICA_component_%d.nii.gz' % i)
                print comp.min(), comp.max(), comp.dtype, type(comp), comp.shape
                imageutils.save_img((comp-comp.mean())/comp.std(), outf, self.masker)
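Because any object following sklearn's TransformerMixin pattern is accepted, a custom reducer can be passed directly; a sketch (TruncatedSVD is a real sklearn class, the clusterable instance is hypothetical):

from sklearn.decomposition import TruncatedSVD

# n_components is taken from the object itself in this case
svd = TruncatedSVD(n_components=50)
clusterable.dimension_reduction(svd)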
Example #7
    def dimension_reduction(self, reducer, n_components=100, save=False):
        """ Reduces the dimensionalty of the currently loaded reference data.
        Args:
            reducer (str or sklearn object): The reduction method to use. Can be
            either a string (one of 'pca', 'ica', or 'ward') or an object that
            follows sklearn's TransformerMixin pattern.
            n_components: Number of components to extract, if applicable. When
                method is an object, this argument is ignored.
            save: if True, writes out images of the components.
        """
        if isinstance(reducer, basestring):

            _valid = {
                'pca': RandomizedPCA,
                'ica': FastICA,
            }
            if reducer not in _valid:
                raise ValueError("Dimensionality reduction method must be one "
                                 "of %s; '%s' is not a valid value." %
                                 (str(list(_valid.keys())), reducer))

            reducer = _valid[reducer](n_components=n_components)

        self.reference_data = reducer.fit_transform(self.reference_data.T).T

        if save:
            print "Saving components..."
            for i in range(n_components):
                comp = reducer.components_[i, :]
                outf = join(self.output_dir, 'ICA_component_%d.nii.gz' % i)
                print comp.min(), comp.max(), comp.dtype, type(
                    comp), comp.shape
                imageutils.save_img((comp - comp.mean()) / comp.std(), outf,
                                    self.masker)
Example #8
    def _create_cluster_images(self, labels, output_dir=None):
        ''' Creates a Nifti image of reconstructed cluster labels. 
        Args:
            labels: A vector of cluster labels
            output_dir: A string indicating folder to output images to. If None, 
                creates a "ClusterImages" directory below the Clusterer instance's
                output directory.
        Outputs:
            Cluster_k.nii.gz: Will output a nifti image with cluster labels
        '''

        labels += 1

        # Reconstruct grid into original space
        # TODO: replace with masker.unmask()
        if hasattr(self, 'grid'):
            regions = self.masker.mask(self.grid)
            unique_regions = np.unique(regions)
            n_regions = unique_regions.size
            m = np.zeros(regions.size)
            for i in range(n_regions):
                m[regions == unique_regions[i]] = labels[i] + 1

            labels = m

        if output_dir is None:
            output_dir = os.path.join(self.output_dir, 'ClusterImages')

        if not os.path.isdir(output_dir):
            os.makedirs(output_dir)

        outfile = os.path.join(output_dir,
                               'Cluster_k%d.nii.gz' % (len(np.unique(labels))))
        imageutils.save_img(labels, outfile, self.masker)
Example #9
 def save_results(self, outroot, image_list=None):
   """ Write out any images generated by the meta-analysis. The outroot argument is prepended 
   to all file names. Optionally, a restricted list of images to save can be passed; otherwise, 
   all images currently stored in self.images will be saved. """
   print "Saving results..."
   if image_list is None: image_list = self.images.keys()
   for suffix, img in self.images.items():
     if suffix in image_list:
       imageutils.save_img(img, '%s_%s.nii.gz' % (outroot, suffix), self.dataset.volume)
Example #10
def neurosynthMatch(db,papers,author,outdir=None,outprefix=None):
    """Match neurosynth id with papers id"""

    # Get all IDs in neuroSynth
    neurosynth_ids = getIDs(db)
    
    # LIST OF IDS ---------------------------------------------------------
    # Input is DOI with list of papers
    if bool(re.search("[/]",papers[0])):
      # NeuroSynth is also DOI
      if bool(re.search("[/]",neurosynth_ids[0])):
        print "Search for " + str(len(papers)) + " ids in NeuroSynth database..."
        # Find intersection
        valid_ids = [x for x in papers if x in neurosynth_ids]
      # Neurosynth is PMID
      else:
        print "ERROR: Please provide doi to use the 525 database!"
        sys.exit()
    # Input is pmid with list of papers
    else:
      # NeuroSynth is also pmid
      if not bool(re.search("[/]",neurosynth_ids[0])):
        print "Search for " + str(len(papers)) + " ids in NeuroSynth database..."
        # Find intersection
        valid_ids = [x for x in papers if x in neurosynth_ids]
      # Neurosynth is doi
      else:
        print "ERROR: Please provide pmid to use the 3000 database!"
        sys.exit()

    if (len(valid_ids) > 0):
      # Do meta analysis
      ma = meta.MetaAnalysis(db,valid_ids)
      # 1) the z score map corresponding to the probability that a study in the database is tagged with a particular feature given that activation is present at a particular voxel, FDR corrected .05
      dataFDR = ma.images[ma.images.keys()[1]]
      # 2) the probability of feature given activation with uniform prior imposed
      dataPRIOR = ma.images[ma.images.keys()[6]]
      # 3) the probability of feature given activation
      data = ma.images[ma.images.keys()[7]]
      # 4) the probability of feature given activation, Z score
      dataZ = ma.images[ma.images.keys()[8]]
      # 5) z score map corresponding to the probability of activation given that a study is tagged with the feature (author)
      datapAgF = ma.images[ma.images.keys()[4]]
      # If user specifies an output directory
      if outdir:
        print "Saving results to output directory " + outdir + "..."
        if not outprefix:
          outprefix = author.replace(" ","")
        imageutils.save_img(datapAgF, '%s/%s_pAgF_z_FDR_0.05.nii.gz' % (outdir, outprefix), db.volume)
        imageutils.save_img(dataFDR, '%s/%s_pFgA_z_FDR_0.05.nii.gz' % (outdir, outprefix), db.volume)
        imageutils.save_img(dataPRIOR, '%s/%s_pFgA_given_pF=0.50.nii.gz' % (outdir, outprefix), db.volume)
        imageutils.save_img(data, '%s/%s_pFgA.nii.gz' % (outdir, outprefix), db.volume)
        imageutils.save_img(dataZ, '%s/%s_pFgA_z.nii.gz' % (outdir, outprefix), db.volume)
      return ma.images
    else:
      print "No overlapping studies found in database for author " + author + "."
Example #11
def remove_value(infile, vals_rm, outfile):
    import numpy as np
    from neurosynth.base import mask, imageutils

    masker = mask.Masker(infile)
    img = imageutils.load_imgs(infile, masker)

    # Remove value
    for val in vals_rm:
        np.place(img, img == val, [0])

    # Save
    imageutils.save_img(img, outfile, masker)
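A usage sketch (labels and file names are illustrative):

# Zero out regions 3 and 7 of an atlas, keep everything else
remove_value('atlas.nii.gz', [3, 7], 'atlas_pruned.nii.gz')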
Example #12
 def save_results(self, outroot, image_list=None):
     """ Write out any images generated by the meta-analysis. The outroot argument is prepended
 to all file names. Optionally, a restricted list of images to save can be passed; otherwise,
 all images currently stored in self.images will be saved. """
     print "Saving results..."
     if image_list is None: image_list = self.images.keys()
     for suffix, img in self.images.items():
         if suffix in image_list:
             imageutils.save_img(img, '%s_%s.nii.gz' % (outroot, suffix),
                                 self.dataset.volume)
Example #13
def remove_value(infile, vals_rm, outfile):
    import numpy as np
    from neurosynth.base import mask, imageutils

    masker = mask.Masker(infile)
    img = imageutils.load_imgs(infile, masker)
    img = np.round(img)

    # Remove value
    for val in vals_rm:
        np.place(img, img == val, [0])

    # Save
    imageutils.save_img(img, outfile, masker)
Example #14
    def _create_cluster_images(self, labels, coactivation_maps):
        ''' Creates a Nifti image of reconstructed cluster labels.
        Args:
            labels: A vector of cluster labels
            coactivation_maps: If True, also runs a meta-analysis for each
                cluster and saves the resulting coactivation maps.
        Outputs:
            cluster_labels.nii.gz: A nifti image with cluster labels
        '''
        # Reconstruct grid into original space
        # TODO: replace with masker.unmask()
        if hasattr(self, 'grid'):
            regions = self.masker.mask(self.grid)
            unique_regions = np.unique(regions)
            n_regions = unique_regions.size
            m = np.zeros(regions.size)
            for i in range(n_regions):
                m[regions == unique_regions[i]] = labels[i] + 1

            labels = m

        clusters = np.unique(labels)
        n_clusters = len(clusters)

        prefix = '' if self.prefix is None else self.prefix + '_'
        output_dir = join(self.output_dir,
                          prefix + self.algorithm + '_k' + str(n_clusters))

        if not isdir(output_dir):
            os.makedirs(output_dir)

        outfile = join(output_dir, 'cluster_labels.nii.gz')
        imageutils.save_img(labels, outfile, self.masker)

        # Generate a coactivation map for each cluster
        if coactivation_maps:
            coact_dir = join(output_dir, 'coactivation')
            if not isdir(coact_dir):
                os.makedirs(coact_dir)
            for c in clusters:
                img = np.zeros_like(labels)
                img[labels == c] = 1
                img = self.masker.unmask(img)
                ids = self.dataset.get_ids_by_mask(img, 0.25)
                ma = meta.MetaAnalysis(self.dataset, ids)
                ma.save_results(coact_dir, 'cluster_%d' % c)
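To inspect the saved label image afterwards, a sketch using nibabel directly (the path depends on prefix, algorithm and k; the one shown is illustrative):

import nibabel as nib
import numpy as np

img = nib.load('ward_k30/cluster_labels.nii.gz')
data = img.get_data()
labels, counts = np.unique(data[data > 0], return_counts=True)
print(dict(zip(labels.astype(int), counts)))  # voxels per cluster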
Example #15
    def _create_cluster_images(self, labels, coactivation_maps):
        ''' Creates a Nifti image of reconstructed cluster labels.
        Args:
            labels: A vector of cluster labels
            coactivation_maps: If True, also runs a meta-analysis for each
                cluster and saves the resulting coactivation maps.
        Outputs:
            A nifti image with cluster labels, named after the prefix,
            algorithm, and number of clusters.
        '''
        # Reconstruct grid into original space
        # TODO: replace with masker.unmask()
        if hasattr(self, 'grid'):
            regions = self.masker.mask(self.grid)
            unique_regions = np.unique(regions)
            n_regions = unique_regions.size
            m = np.zeros(regions.size)
            for i in range(n_regions):
                m[regions == unique_regions[i]] = labels[i] + 1

            labels = m

        clusters = np.unique(labels)
        n_clusters = len(clusters)

        prefix = '' if self.prefix is None else self.prefix + '_'
        output_dir = join(self.output_dir, prefix + self.algorithm + '_k' + str(n_clusters))

        if not isdir(output_dir):
            os.makedirs(output_dir)

        outfile = join(output_dir, prefix + self.algorithm + '_k' +
                       str(n_clusters) + '_cluster_labels.nii.gz')
        imageutils.save_img(labels, outfile, self.masker)

        # Generate a coactivation map for each cluster
        if coactivation_maps:
            coact_dir = join(output_dir, 'coactivation')
            if not isdir(coact_dir):
                os.makedirs(coact_dir)
            for c in clusters:
                img = np.zeros_like(labels)
                img[labels == c] = 1
                img = self.masker.unmask(img)
                ids = self.dataset.get_ids_by_mask(img, 0.25)
                ma = meta.MetaAnalysis(self.dataset, ids)
                ma.save_results(coact_dir, 'cluster_%d' % c)
Example #16
def neurosynthQuery(searchTerm, thresh, dataset, outdir=None):
    thresh = float(thresh)
    query = dataset.get_ids_by_features('*' + searchTerm + '*',
                                        threshold=thresh)
    ma = meta.MetaAnalysis(dataset, query)
    # This gets the absolute value FDR corrected at threshold
    # 'pAgF_z_FDR_0.05'
    # the z score map corresponding to the map of the probability of activation given that a study is tagged with the feature, FDR corrected .05
    data = ma.images[ma.images.keys()[4]]
    # Print this image to file, to look at later
    if outdir:
      imageutils.save_img(data, '%s/%s.nii.gz' % (outdir, searchTerm), dataset.volume)
    img = dataset.volume.unmask(data)
    # These are x, y, z coordinates of nonzero voxels
    idx = np.nonzero(img)
    affine = dataset.volume.volume.get_affine()
    coords = np.dot(affine, [idx[0], idx[1], idx[2], np.ones(len(idx[0]))])
    coords = np.transpose(coords)
    # Get rid of 4th column
    coords = np.delete(coords, -1, 1)
    # These are MNI coordinates for non-zero voxels
    return coords
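A usage sketch (the pickle path, feature and threshold are illustrative):

from neurosynth.base.dataset import Dataset

dataset = Dataset.load('dataset.pkl')
coords = neurosynthQuery('emotion', 0.001, dataset, outdir='results')
print(coords.shape)  # (n_voxels, 3) MNI coordinates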
Example #17
def neurosynthMatch(db, papers, author, outdir=None, outprefix=None):
    """Match neurosynth id with papers id"""

    # Get all IDs in neuroSynth
    neurosynth_ids = getIDs(db)

    # LIST OF IDS ---------------------------------------------------------
    # Input is DOI with list of papers
    if bool(re.search("[/]", papers[0])):
        # NeuroSynth is also DOI
        if bool(re.search("[/]", neurosynth_ids[0])):
            print "Search for " + str(
                len(papers)) + " ids in NeuroSynth database..."
            # Find intersection
            valid_ids = [x for x in papers if x in neurosynth_ids]
        # Neurosynth is PMID
        else:
            print "ERROR: Please provide doi to use the 525 database!"
            sys.exit()
    # Input is pmid with list of papers
    else:
        # NeuroSynth is also pmid
        if not bool(re.search("[/]", neurosynth_ids[0])):
            print "Search for " + str(
                len(papers)) + " ids in NeuroSynth database..."
            # Find intersection
            valid_ids = [x for x in papers if x in neurosynth_ids]
        # Neurosynth is doi
        else:
            print "ERROR: Please provide pmid to use the 3000 database!"
            sys.exit()

    if (len(valid_ids) > 0):
        # Do meta analysis
        ma = meta.MetaAnalysis(db, valid_ids)
        # 1) the z score map corresponding to the probability that a study in the database is tagged with a particular feature given that activation is present at a particular voxel, FDR corrected .05
        dataFDR = ma.images[ma.images.keys()[1]]
        # 2) the probability of feature given activation with uniform prior imposed
        dataPRIOR = ma.images[ma.images.keys()[6]]
        # 3) the probability of feature given activation
        data = ma.images[ma.images.keys()[7]]
        # 4) the probability of feature given activation, Z score
        dataZ = ma.images[ma.images.keys()[8]]
        # 5) z score map corresponding to the probability of activation given that a study is tagged with the feature (author)
        datapAgF = ma.images[ma.images.keys()[4]]
        # If user specifies an output directory
        if outdir:
            print "Saving results to output directory " + outdir + "..."
            if not outprefix:
                outprefix = author.replace(" ", "")
            imageutils.save_img(
                datapAgF, '%s/%s_pAgF_z_FDR_0.05.nii.gz' % (outdir, outprefix),
                db.volume)
            imageutils.save_img(
                dataFDR, '%s/%s_pFgA_z_FDR_0.05.nii.gz' % (outdir, outprefix),
                db.volume)
            imageutils.save_img(
                dataPRIOR,
                '%s/%s_pFgA_given_pF=0.50.nii.gz' % (outdir, outprefix),
                db.volume)
            imageutils.save_img(data,
                                '%s/%s_pFgA.nii.gz' % (outdir, outprefix),
                                db.volume)
            imageutils.save_img(dataZ,
                                '%s/%s_pFgA_z.nii.gz' % (outdir, outprefix),
                                db.volume)
        return ma.images
    else:
        print "No overlapping studies found in database for author " + author + "."
Example #18
from neurosynth.base.dataset import Dataset
import neurosynth.base.imageutils as it

dataset = Dataset.load("../data/datasets/abs_topics_filt.pkl")

print "Filtering voxels..."

data = dataset.image_table.data.toarray()

voxel_mask = data.mean(axis=1) > 0.005

img = it.load_imgs('../masks/ward/30.nii.gz', dataset.masker)

good_voxels = img[voxel_mask]

it.save_img(good_voxels, "../masks/ward/30_masked.nii.gz", dataset.masker)
Example #19
import numpy as np
from scipy import sparse
from sklearn.cluster import KMeans
from neurosynth.base.dataset import Dataset
from neurosynth.base.imageutils import save_img

dataset = Dataset.load("../data/datasets/abs_topics_filt.pkl")

print "Filtering voxels..."

data = dataset.image_table.data.toarray()

voxel_mask = data.mean(axis=1) > 0.0135

good_voxels = data[voxel_mask]

good_voxels = sparse.csr_matrix(good_voxels)

for i in [20, 30, 40, 50]:
	print "Clustering..."

	print i

	k_means = KMeans(init='k-means++', n_clusters=i, n_jobs=16)
	k_means.fit(good_voxels)

	# ward = Ward(n_clusters=30)
	# ward.fit(good_voxels)

	print "Stretching clustering results..."
	cluster_voxels = np.zeros(voxel_mask.shape)
	cluster_voxels[voxel_mask] = k_means.labels_ + 1

	print "Saving image..."
	save_img(cluster_voxels, "k_means_" + str(i) + "_0135.nii.gz", dataset.masker)
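The commented-out Ward lines hint at a hierarchical alternative; in later scikit-learn releases Ward was folded into AgglomerativeClustering, so an equivalent sketch (reusing the variables above) would be:

from sklearn.cluster import AgglomerativeClustering

ward = AgglomerativeClustering(n_clusters=30, linkage='ward')
ward.fit(good_voxels.toarray())  # ward linkage needs a dense array
cluster_voxels = np.zeros(voxel_mask.shape)
cluster_voxels[voxel_mask] = ward.labels_ + 1
save_img(cluster_voxels, "ward_30_0135.nii.gz", dataset.masker)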
Example #20
# Remove those not in a good community
bad = list(set(zero_ix))

# Remove
for value in bad:
	np.place(img, img == value, [0])

non_0_ix.sort()

# Translate numbers to continuous
translated = zip(range(1, non_0_ix.shape[0]+1), list(non_0_ix))

for pair in translated:
	np.place(img, img == pair[1], pair[0])

print "New shape:"
print np.bincount([int(vox) for vox in img]).shape
imageutils.save_img(img, outfile, masker)


# Write key
with open('../masks/Andy/parcels_' + str(min_vox) + '_key.csv', 'w') as file:
	writer = csv.writer(file)
	writer.writerow(['new', 'original'])
	for pair in translated:
		writer.writerow(list(pair))




Example #21
import numpy as np
from scipy import sparse
from sklearn.cluster import KMeans
from neurosynth.base.dataset import Dataset
from neurosynth.base.imageutils import save_img

dataset = Dataset.load("../data/datasets/abs_topics_filt.pkl")

print "Filtering voxels..."

data = dataset.image_table.data.toarray()

voxel_mask = data.mean(axis=1) > 0.0135

good_voxels = data[voxel_mask]

good_voxels = sparse.csr_matrix(good_voxels)

for i in [20, 30, 40, 50]:
    print "Clustering..."

    print i

    k_means = KMeans(init='k-means++', n_clusters=i, n_jobs=16)
    k_means.fit(good_voxels)

    # ward = Ward(n_clusters=30)
    # ward.fit(good_voxels)

    print "Stretching clustering results..."
    cluster_voxels = np.zeros(voxel_mask.shape)
    cluster_voxels[voxel_mask] = k_means.labels_ + 1

    print "Saving image..."
    save_img(cluster_voxels, "k_means_" + str(i) + "_0135.nii.gz",
             dataset.masker)