Example no. 1
0
File: macm.py Project: NBCLab/cALE
def macm_workflow(ns_data_dir, output_dir, prefix, mask_fn):
    """Run a meta-analytic coactivation modeling (MACM) workflow.

    Ensures a cached NiMARE-format Neurosynth dataset exists (downloading
    and converting it if necessary), selects the studies with activation
    inside ``mask_fn``, runs an ALE meta-analysis on them, applies
    permutation-based FWE correction, and saves the corrected maps.

    Parameters
    ----------
    ns_data_dir : str
        Directory holding (or to receive) the Neurosynth data files.
    output_dir : str
        Directory where the corrected statistical maps are written.
    prefix : str
        Filename prefix for the saved maps.
    mask_fn : str
        Path to the mask image used to select studies.
    """
    dataset_file = op.join(ns_data_dir, 'neurosynth_dataset.pkl.gz')

    # Download and convert the Neurosynth database only when the cached
    # NiMARE dataset file is absent.
    if not op.isfile(dataset_file):
        # makedirs(exist_ok=True) also creates missing parents and avoids
        # the isdir/mkdir race that plain os.mkdir has.
        os.makedirs(ns_data_dir, exist_ok=True)
        download(ns_data_dir, unpack=True)
        ###############################################################################
        # Convert Neurosynth database to NiMARE dataset file
        # --------------------------------------------------
        dset = convert_neurosynth_to_dataset(
            op.join(ns_data_dir, 'database.txt'),
            op.join(ns_data_dir, 'features.txt'))
        dset.save(dataset_file)

    dset = Dataset.load(dataset_file)

    # Split studies by whether they activate within the mask.
    mask_ids = dset.get_studies_by_mask(mask_fn)
    maskdset = dset.slice(mask_ids)
    # sorted() accepts a set directly; no intermediate list needed.
    nonmask_ids = sorted(set(dset.ids) - set(mask_ids))
    # NOTE(review): nonmaskdset is computed but never used below — kept for
    # parity with the original; confirm whether it can be dropped.
    nonmaskdset = dset.slice(nonmask_ids)

    # ALE meta-analysis on the within-mask studies.
    ale = ALE(kernel__fwhm=15)
    ale.fit(maskdset)

    # Permutation-based family-wise error correction.
    # NOTE(review): n_iters=10 is very low for real inference — presumably a
    # speed setting; confirm before production use.
    corr = FWECorrector(method='permutation',
                        n_iters=10,
                        n_cores=-1,
                        voxel_thresh=0.001)
    cres = corr.transform(ale.results)
    cres.save_maps(output_dir=output_dir, prefix=prefix)
Example no. 2
0
def neurosynth_download(ns_data_dir):
    """Download the Neurosynth database and convert it to a NiMARE dataset.

    The converted dataset is saved to
    ``<ns_data_dir>/neurosynth_dataset.pkl.gz``. If that file already
    exists, the (slow) download and conversion are skipped.

    Parameters
    ----------
    ns_data_dir : str
        Directory in which to download and store the Neurosynth data.
    """
    dataset_file = op.join(ns_data_dir, "neurosynth_dataset.pkl.gz")

    # BUG FIX: the original computed dataset_file but never checked it, so
    # every call re-downloaded and re-converted the database.
    if op.isfile(dataset_file):
        return

    os.makedirs(ns_data_dir, exist_ok=True)

    download(ns_data_dir, unpack=True)
    ###############################################################################
    # Convert Neurosynth database to NiMARE dataset file
    # --------------------------------------------------
    dset = convert_neurosynth_to_dataset(op.join(ns_data_dir, "database.txt"),
                                         op.join(ns_data_dir, "features.txt"))
    dset.save(dataset_file)
Example no. 3
0
def _getdata():
    """Download the Neurosynth database and return it as a Dataset.

    The assembled dataset is also pickled to ``data/dataset.pkl`` so later
    runs can reuse it.
    """
    LOG.warning("Downloading and processing Neurosynth database")

    os.makedirs("data", exist_ok=True)

    # Imported lazily so the dependency is only required when a download
    # actually happens.
    from neurosynth.base.dataset import download

    download(path="data", unpack=True)

    dataset = Dataset("data/database.txt")
    dataset.add_features("data/features.txt")
    dataset.save("data/dataset.pkl")
    return dataset
Example no. 4
0
def neurosynth_download(ns_data_dir=None):
    """Download the Neurosynth database and convert it to a NiMARE dataset.

    The converted dataset is saved to
    ``<ns_data_dir>/neurosynth_dataset.pkl.gz``; if that file already
    exists the download and conversion are skipped.

    Parameters
    ----------
    ns_data_dir : str
        Directory in which to download and store the Neurosynth data.

    Raises
    ------
    ValueError
        If ``ns_data_dir`` is not provided.
    """
    if ns_data_dir is None:
        # ValueError is more specific than the bare Exception the original
        # raised, and is still caught by any `except Exception` caller.
        raise ValueError('A valid directory is required for downloading Neurosynth data!')

    dataset_file = op.join(ns_data_dir, 'neurosynth_dataset.pkl.gz')

    # Avoid re-downloading when a converted dataset is already cached.
    if op.isfile(dataset_file):
        return

    # makedirs(exist_ok=True) creates missing parents and avoids the
    # isdir/mkdir race of the original.
    os.makedirs(ns_data_dir, exist_ok=True)

    download(ns_data_dir, unpack=True)
    ###############################################################################
    # Convert Neurosynth database to NiMARE dataset file
    # --------------------------------------------------
    dset = convert_neurosynth_to_dataset(
        op.join(ns_data_dir, 'database.txt'),
        op.join(ns_data_dir, 'features.txt'))
    dset.save(dataset_file)
Example no. 5
0
def fetch_neurosynth_dataset(data_dir, return_pkl=True):
    """Download the Neurosynth dataset.

    Parameters
    ----------
    data_dir : str
        Directory in which to download the dataset.
    return_pkl : bool
        If true, creates and returns the .pkl file. Otherwise returns
        the dataset and features files.

    Returns
    -------
    tuple or str
        If ``return_pkl`` is false, returns a tuple containing the path to
        the database.txt and the features.txt file. Otherwise returns the
        path to the .pkl file.

    """
    # makedirs(exist_ok=True) creates missing parents and avoids the
    # isdir/mkdir race of the original.
    os.makedirs(data_dir, exist_ok=True)

    dataset_file = os.path.join(data_dir, "database.txt")
    if not os.path.isfile(dataset_file):
        logging.info("Downloading the Neurosynth dataset.")
        download(data_dir, unpack=True)
    feature_file = os.path.join(data_dir, "features.txt")

    if return_pkl:
        # Conversion is expensive, so the pickle is cached on disk and
        # only rebuilt when absent.
        pkl_file = os.path.join(data_dir, "dataset.pkl")
        if not os.path.isfile(pkl_file):
            logging.info(
                "Converting Neurosynth data to a .pkl file. This may take a while."
            )
            dataset = Dataset(dataset_file, feature_file)
            dataset.save(pkl_file)
        return pkl_file

    return (dataset_file, feature_file)
Example no. 6
0
# --------------------------------
import os

from neurosynth.base.dataset import download

import nimare

###############################################################################
# Download Neurosynth
# --------------------------------
# Resolve the output directory and make sure it exists; makedirs with
# exist_ok=True replaces the racy isdir/mkdir pair and also creates any
# missing parent directories.
out_dir = os.path.abspath('../example_data/')
os.makedirs(out_dir, exist_ok=True)

# Download the raw Neurosynth database only if it is not already present.
if not os.path.isfile(os.path.join(out_dir, 'database.txt')):
    download(out_dir, unpack=True)

###############################################################################
# Convert Neurosynth database to NiMARE dataset file
# --------------------------------------------------
dset = nimare.io.convert_neurosynth_to_dataset(
    os.path.join(out_dir, 'database.txt'),
    os.path.join(out_dir, 'features.txt'))
dset.save(os.path.join(out_dir, 'neurosynth_dataset.pkl.gz'))

###############################################################################
# Add article abstracts to dataset
# --------------------------------
# NOTE(review): the e-mail address below is redacted in this example;
# replace it with a real address before running (the abstract download
# presumably requires one — confirm against nimare.extract docs).
dset = nimare.extract.download_abstracts(dset, '*****@*****.**')
dset.save(os.path.join(out_dir, 'neurosynth_nimare_with_abstracts.pkl.gz'))
Example no. 7
0
"""
Download Neurosynth dataset and add abstracts.
"""
import os
from neurosynth.base.dataset import download
import nimare

# Download Neurosynth
if not os.path.isfile('resources/database.txt'):
    download('resources/', unpack=True)

# Convert Neurosynth database files to NiMARE Dataset
dset = nimare.io.convert_neurosynth_to_dataset('resources/database.txt',
                                               'resources/features.txt')

# Add article abstracts to Dataset and save to file
dset = nimare.extract.download_abstracts(dset, '*****@*****.**')
dset.save('resources/neurosynth_with_abstracts.pkl.gz')