Example #1
import os

import nimare
from nimare import io
from nimare.tests.utils import get_test_data_path  # helper location may vary across NiMARE versions


def test_convert_neurosynth_to_dataset_smoke():
    """Smoke test for Neurosynth file conversion."""
    coordinates_file = os.path.join(
        get_test_data_path(),
        "data-neurosynth_version-7_coordinates.tsv.gz",
    )
    metadata_file = os.path.join(
        get_test_data_path(),
        "data-neurosynth_version-7_metadata.tsv.gz",
    )
    features = {
        "features": os.path.join(
            get_test_data_path(),
            "data-neurosynth_version-7_vocab-terms_source-abstract_type-tfidf_features.npz",
        ),
        "vocabulary": os.path.join(
            get_test_data_path(),
            "data-neurosynth_version-7_vocab-terms_vocabulary.txt",
        ),
    }
    dset = io.convert_neurosynth_to_dataset(
        coordinates_file,
        metadata_file,
        annotations_files=features,
    )
    assert isinstance(dset, nimare.dataset.Dataset)
    assert "terms_abstract_tfidf__abilities" in dset.annotations.columns
Example #2
import os
import os.path as op

from neurosynth.base.dataset import download

from nimare.correct import FWECorrector
from nimare.dataset import Dataset
from nimare.io import convert_neurosynth_to_dataset
from nimare.meta.cbma import ALE


def macm_workflow(ns_data_dir, output_dir, prefix, mask_fn):
    """Run a meta-analytic coactivation modeling (MACM) workflow."""
    # download neurosynth dataset if necessary
    dataset_file = op.join(ns_data_dir, 'neurosynth_dataset.pkl.gz')

    if not op.isfile(dataset_file):
        if not op.isdir(ns_data_dir):
            os.mkdir(ns_data_dir)
        download(ns_data_dir, unpack=True)
        # Convert Neurosynth database to NiMARE dataset file
        dset = convert_neurosynth_to_dataset(
            op.join(ns_data_dir, 'database.txt'),
            op.join(ns_data_dir, 'features.txt'))
        dset.save(dataset_file)

    dset = Dataset.load(dataset_file)

    # Split the dataset into studies with/without coordinates in the mask
    mask_ids = dset.get_studies_by_mask(mask_fn)
    maskdset = dset.slice(mask_ids)
    nonmask_ids = sorted(list(set(dset.ids) - set(mask_ids)))
    nonmaskdset = dset.slice(nonmask_ids)

    # Run an ALE meta-analysis on the studies reporting coordinates in the mask
    ale = ALE(kernel__fwhm=15)
    ale.fit(maskdset)

    # Apply permutation-based FWE correction and save the corrected maps
    corr = FWECorrector(method='permutation',
                        n_iters=10,
                        n_cores=-1,
                        voxel_thresh=0.001)
    cres = corr.transform(ale.results)
    cres.save_maps(output_dir=output_dir, prefix=prefix)
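
A minimal usage sketch for the workflow above; the directory paths, prefix, and mask file are hypothetical placeholders, not values from the original source.

# Hypothetical invocation of macm_workflow; all paths are placeholders.
macm_workflow(
    ns_data_dir='/path/to/neurosynth_data',
    output_dir='/path/to/output',
    prefix='macm_example',
    mask_fn='/path/to/roi_mask.nii.gz',
)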
Example #3
import os.path as op

import nimare
from nimare import io
from nimare.tests.utils import get_test_data_path  # helper location may vary across NiMARE versions


def test_convert_neurosynth_to_dataset_smoke():
    """Smoke test for Neurosynth text file conversion."""
    db_file = op.join(get_test_data_path(), 'test_neurosynth_database.txt')
    features_file = op.join(get_test_data_path(),
                            'test_neurosynth_features.txt')
    dset = io.convert_neurosynth_to_dataset(db_file, features_file)
    assert isinstance(dset, nimare.dataset.Dataset)
Example #4
import os
import os.path as op

from neurosynth.base.dataset import download

from nimare.io import convert_neurosynth_to_dataset


def neurosynth_download(ns_data_dir):
    """Download the Neurosynth database and convert it to a NiMARE Dataset file."""
    dataset_file = op.join(ns_data_dir, "neurosynth_dataset.pkl.gz")

    os.makedirs(ns_data_dir, exist_ok=True)

    download(ns_data_dir, unpack=True)
    # Convert Neurosynth database to NiMARE dataset file
    dset = convert_neurosynth_to_dataset(op.join(ns_data_dir, "database.txt"),
                                         op.join(ns_data_dir, "features.txt"))
    dset.save(dataset_file)
Example #5
import os
import os.path as op

from neurosynth.base.dataset import download

from nimare.io import convert_neurosynth_to_dataset


def neurosynth_download(ns_data_dir=None):
    """Download the Neurosynth database and convert it to a NiMARE Dataset file."""
    if ns_data_dir is None:
        raise ValueError('A valid directory is required for downloading Neurosynth data!')

    dataset_file = op.join(ns_data_dir, 'neurosynth_dataset.pkl.gz')

    if not op.isdir(ns_data_dir):
        os.mkdir(ns_data_dir)

    download(ns_data_dir, unpack=True)
    # Convert Neurosynth database to NiMARE dataset file
    dset = convert_neurosynth_to_dataset(
        op.join(ns_data_dir, 'database.txt'),
        op.join(ns_data_dir, 'features.txt'))
    dset.save(dataset_file)
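
A minimal usage sketch for the helper above; the data directory is a hypothetical placeholder.

from nimare.dataset import Dataset

# Download the database, convert it, and load the resulting Dataset.
neurosynth_download(ns_data_dir='/path/to/neurosynth_data')
dset = Dataset.load('/path/to/neurosynth_data/neurosynth_dataset.pkl.gz')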
Example #6

"""
Download and convert the Neurosynth database for analysis with NiMARE.

.. note::
    This will likely change as we work to shift database querying to a remote
    database, rather than handling it locally with NiMARE.
"""
###############################################################################
# Start with the necessary imports
# --------------------------------
import os.path as op
from os import mkdir
from neurosynth.base.dataset import download

from nimare.io import convert_neurosynth_to_dataset
from nimare.dataset import Dataset

###############################################################################
# Download Neurosynth
# --------------------------------
out_dir = op.abspath('../example_data/')
if not op.isdir(out_dir):
    mkdir(out_dir)

if not op.isfile(op.join(out_dir, 'database.txt')):
    download(out_dir, unpack=True)

###############################################################################
# Convert Neurosynth database to NiMARE dataset file
# --------------------------------------------------
dset = convert_neurosynth_to_dataset(op.join(out_dir, 'database.txt'),
                                     op.join(out_dir, 'features.txt'))
gz_file = op.join(out_dir, 'neurosynth_dataset.pkl.gz')
dset.save(gz_file)
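
The saved file can be reloaded later without repeating the download, reusing the Dataset import and gz_file path defined above:

# Reload the converted dataset from disk.
dset = Dataset.load(gz_file)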
Example #7
import os
from pprint import pprint

from nimare.extract import download_abstracts, fetch_neurosynth
from nimare.io import convert_neurosynth_to_dataset

out_dir = os.path.abspath("../example_data/")
os.makedirs(out_dir, exist_ok=True)

###############################################################################
# Download Neurosynth
# -----------------------------------------------------------------------------
files = fetch_neurosynth(
    data_dir=out_dir,
    version="7",
    overwrite=False,
    source="abstract",
    vocab="terms",
)
# Note that the files are saved to a new folder within "out_dir" named "neurosynth".
pprint(files)
neurosynth_db = files[0]
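
# fetch_neurosynth returns a list with one dictionary per downloaded database
# version; each dictionary maps file types ("coordinates", "metadata",
# "features") to local file paths, as used below.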

###############################################################################
# Convert Neurosynth database to NiMARE dataset file
# -----------------------------------------------------------------------------
neurosynth_dset = convert_neurosynth_to_dataset(
    coordinates_file=neurosynth_db["coordinates"],
    metadata_file=neurosynth_db["metadata"],
    annotations_files=neurosynth_db["features"],
)
neurosynth_dset.save(os.path.join(out_dir, "neurosynth_dataset.pkl.gz"))
print(neurosynth_dset)

###############################################################################
# Add article abstracts to dataset
# -----------------------------------------------------------------------------
# This is only possible because Neurosynth uses PMIDs as study IDs.
#
# Make sure you replace the example email address with your own.
neurosynth_dset = download_abstracts(neurosynth_dset, "*****@*****.**")
neurosynth_dset.save(
    os.path.join(out_dir, "neurosynth_dataset_with_abstracts.pkl.gz"))
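
After download_abstracts runs, the abstracts are stored in the Dataset's texts attribute. A minimal sketch, assuming NiMARE stores them in a column named "abstract":

# Inspect a few of the downloaded abstracts.
print(neurosynth_dset.texts["abstract"].head())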