Example #1
def load_atlas_init(source=None, n_components=20):
    if source == 'smith':
        if n_components == 70:
            init = fetch_atlas_smith_2009().rsn70
        elif n_components == 20:
            init = fetch_atlas_smith_2009().rsn20
        else:
            raise NotImplementedError('Unexpected argument')
    else:
        init = None
    return init
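A minimal usage sketch for the helper above (hedged: fetch_atlas_smith_2009 is assumed to be imported from nilearn.datasets in the surrounding script):

from nilearn.datasets import fetch_atlas_smith_2009

# Returns the path to the 20-component Smith RSN maps, or None for other sources
init = load_atlas_init(source='smith', n_components=20)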
Example #2
def load_data():
    with open(expanduser('~/data/HCP_unmasked/data.json'), 'r') as f:
        data = json.load(f)
        for this_data in data:
            this_data['array'] += '.npy'
        mask_img = expanduser('~/data/HCP_mask/mask_img.nii.gz')
    masker = NiftiMasker(mask_img=mask_img, smoothing_fwhm=4, standardize=True)
    masker.fit()
    smith2009 = fetch_atlas_smith_2009()
    init = smith2009.rsn70
    dict_init = masker.transform(init)
    return masker, dict_init, sorted(data, key=lambda t: t['filename'])
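The loader above expects a JSON index mapping each record to an array file; a hypothetical illustration of the structure it assumes (the filenames are invented):

# ~/data/HCP_unmasked/data.json might contain entries such as:
# [
#   {"filename": "sub001_rest1.nii.gz", "array": "sub001_rest1"},
#   {"filename": "sub002_rest1.nii.gz", "array": "sub002_rest1"}
# ]
# load_data() appends '.npy' to each 'array' entry and sorts records by 'filename'.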
Example #3
def load_data():
    with open(expanduser('~/data/HCP_unmasked/data.json'), 'r') as f:
        data = json.load(f)
        for this_data in data:
            this_data['array'] += '.npy'
        mask_img = expanduser('~/data/HCP_mask/mask_img.nii.gz')
    masker = NiftiMasker(mask_img=mask_img, smoothing_fwhm=4,
                         standardize=True)
    masker.fit()
    smith2009 = fetch_atlas_smith_2009()
    init = smith2009.rsn70
    dict_init = masker.transform(init)
    return masker, dict_init, sorted(data, key=lambda t: t['filename'])
Example #4
def run(idx, reduction, alpha, mask, raw, n_components, init, func_filenames):
    output_dir = join(trace_folder, 'experiment_%i' % idx)
    try:
        os.makedirs(output_dir)
    except OSError:
        pass
    dict_fact = SpcaFmri(mask=mask,
                         smoothing_fwhm=3,
                         batch_size=40,
                         shelve=not raw,
                         n_components=n_components,
                         replacement=False,
                         dict_init=fetch_atlas_smith_2009().rsn70 if
                         init else None,
                         reduction=reduction,
                         alpha=alpha,
                         random_state=0,
                         n_epochs=2,
                         l1_ratio=0.5,
                         backend='c',
                         memory=expanduser("~/nilearn_cache"), memory_level=2,
                         verbose=5,
                         n_jobs=1,
                         trace_folder=output_dir
                         )

    print('[Example] Learning maps')
    t0 = time.time()
    dict_fact.fit(func_filenames, raw=raw)
    t1 = time.time() - t0
    print('[Example] Dumping results')
    # Decomposition estimators embed their own masker
    masker = dict_fact.masker_
    components_img = masker.inverse_transform(dict_fact.components_)
    components_img.to_filename(join(output_dir, 'components_final.nii.gz'))
    print('[Example] Run in %.2f s' % t1)
    # Show components from both methods using 4D plotting tools
    import matplotlib.pyplot as plt
    from nilearn.image import index_img
    from nilearn.plotting import plot_prob_atlas, plot_stat_map, show

    print('[Example] Displaying')
    fig, axes = plt.subplots(2, 1)
    plot_prob_atlas(components_img, view_type="filled_contours",
                    axes=axes[0])
    plot_stat_map(index_img(components_img, 0),
                  axes=axes[1],
                  colorbar=False,
                  threshold=0)
    plt.savefig(join(output_dir, 'components.pdf'))
    show()
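A hedged sketch of how run() might be invoked; mask, func_filenames and trace_folder are assumed to be provided by the surrounding script (e.g. through the get_hcp_data helper seen in other examples below):

mask, func_filenames = get_hcp_data(raw=True)  # assumed project helper
run(idx=0, reduction=12, alpha=1e-3, mask=mask, raw=True,
    n_components=70, init=True, func_filenames=func_filenames)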
Example #5
def run(idx, reduction, alpha, mask, raw, n_components, init, func_filenames):
    output_dir = join(trace_folder, 'experiment_%i' % idx)
    try:
        os.makedirs(output_dir)
    except OSError:
        pass
    dict_fact = SpcaFmri(
        mask=mask,
        smoothing_fwhm=3,
        batch_size=40,
        shelve=not raw,
        n_components=n_components,
        replacement=False,
        dict_init=fetch_atlas_smith_2009().rsn70 if init else None,
        reduction=reduction,
        alpha=alpha,
        random_state=0,
        n_epochs=2,
        l1_ratio=0.5,
        backend='c',
        memory=expanduser("~/nilearn_cache"),
        memory_level=2,
        verbose=5,
        n_jobs=1,
        trace_folder=output_dir)

    print('[Example] Learning maps')
    t0 = time.time()
    dict_fact.fit(func_filenames, raw=raw)
    t1 = time.time() - t0
    print('[Example] Dumping results')
    # Decomposition estimators embed their own masker
    masker = dict_fact.masker_
    components_img = masker.inverse_transform(dict_fact.components_)
    components_img.to_filename(join(output_dir, 'components_final.nii.gz'))
    print('[Example] Run in %.2f s' % t1)
    # Show components from both methods using 4D plotting tools
    import matplotlib.pyplot as plt
    from nilearn.image import index_img
    from nilearn.plotting import plot_prob_atlas, plot_stat_map, show

    print('[Example] Displaying')
    fig, axes = plt.subplots(2, 1)
    plot_prob_atlas(components_img, view_type="filled_contours", axes=axes[0])
    plot_stat_map(index_img(components_img, 0),
                  axes=axes[1],
                  colorbar=False,
                  threshold=0)
    plt.savefig(join(output_dir, 'components.pdf'))
    show()
Example #6
def test_fetch_atlas_smith_2009():
    bunch = datasets.fetch_atlas_smith_2009(data_dir=tmpdir, verbose=0)

    keys = ("rsn20", "rsn10", "rsn70", "bm20", "bm10", "bm70")
    filenames = [
        "rsn20.nii.gz",
        "PNAS_Smith09_rsn10.nii.gz",
        "rsn70.nii.gz",
        "bm20.nii.gz",
        "PNAS_Smith09_bm10.nii.gz",
        "bm70.nii.gz",
    ]

    assert_equal(len(url_request.urls), 6)
    for key, fn in zip(keys, filenames):
        assert_equal(bunch[key], os.path.join(tmpdir, 'smith_2009', fn))
Example #7
def get_atlas(name):
    if name == "destrieux_2009":
        atlas = datasets.fetch_atlas_destrieux_2009()
        atlas_filename = atlas['maps']
    elif name == "harvard_oxford":
        atlas = datasets.fetch_atlas_harvard_oxford("cort-maxprob-thr25-2mm")
        atlas_filename = atlas['maps']
    elif name == "aal":
        atlas = datasets.fetch_atlas_aal()
        atlas_filename = atlas['maps']
    elif name == "smith_2009":
        atlas = datasets.fetch_atlas_smith_2009()
        atlas_filename = atlas['rsn70']
    else:
        raise ValueError('Atlas name unknown')
    return atlas_filename
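A quick usage sketch for get_atlas (hedged: nilearn downloads the atlas to its default data directory on first use):

atlas_filename = get_atlas("smith_2009")  # path to the rsn70 4D image
print(atlas_filename)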
Example #8
def test_fetch_atlas_smith_2009():
    bunch = datasets.fetch_atlas_smith_2009(data_dir=tmpdir, verbose=0)

    keys = ("rsn20", "rsn10", "rsn70",
            "bm20", "bm10", "bm70")
    filenames = [
            "rsn20.nii.gz",
            "PNAS_Smith09_rsn10.nii.gz",
            "rsn70.nii.gz",
            "bm20.nii.gz",
            "PNAS_Smith09_bm10.nii.gz",
            "bm70.nii.gz",
    ]

    assert_equal(len(url_request.urls), 6)
    for key, fn in zip(keys, filenames):
        assert_equal(bunch[key], os.path.join(tmpdir, 'smith_2009', fn))
Example #9
def get_established_parcellation(parcellation="Harvard_Oxford",
                                 target_img=None,
                                 parcellation_dir=None):
    if parcellation == "Harvard_Oxford":
        name = "Harvard_Oxford_cort-prob-2mm"
        data = datasets.fetch_atlas_harvard_oxford('cort-prob-2mm',
                                                   data_dir=parcellation_dir)
        parcel = nibabel.load(data['maps'])
        labels = data['labels'][1:]  # first label is background
        atlas_threshold = 25
    elif parcellation == "smith":
        name = "smith_rsn70"
        data = datasets.fetch_atlas_smith_2009(
            data_dir=parcellation_dir)['rsn70']
        parcel = nibabel.load(data)
        labels = range(parcel.shape[-1])
        atlas_threshold = 4
    elif parcellation == "glasser":
        glasser_dir = path.join(parcellation_dir, 'glasser')
        data = image.load_img(
            path.join(glasser_dir, 'HCP-MMP1_on_MNI152_ICBM2009a_nlin.nii.gz'))
        parcel = image.new_img_like(data, (data.get_fdata() + .01).astype(int))
        labels = list(
            np.genfromtxt(path.join(glasser_dir,
                                    'HCP-MMP1_on_MNI152_ICBM2009a_nlin.txt'),
                          dtype=str,
                          usecols=1))
        name = 'glasser'
        atlas_threshold = None
        # split down midline into lateralized ROIs
        data_coords = np.where(parcel.get_data())
        right_coords = *(i[data_coords[0] > parcel.shape[0] // 2]
                         for i in data_coords),  # unpack generator into a tuple
        parcel.get_data()[right_coords] += len(labels)
        labels = labels + [l.replace('L_', 'R_') for l in labels]
    else:
        raise ValueError('Unknown parcellation: %s' % parcellation)
    if target_img:
        parcel = image.resample_to_img(parcel,
                                       target_img,
                                       interpolation='nearest')
    return parcel, labels, name, atlas_threshold
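A hedged usage sketch for the Smith branch (with parcellation_dir left as None, nilearn falls back to its default data directory):

parcel, labels, name, atlas_threshold = get_established_parcellation(parcellation="smith")
print(name, len(labels), atlas_threshold)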
Example #10
def get_init_objective(output_dir):
    mask, func_filenames = get_hcp_data(data_dir=data_dir, raw=True)

    masker = NiftiMasker(mask_img=mask, smoothing_fwhm=None, standardize=False)
    masker.fit()

    rsn70 = fetch_atlas_smith_2009().rsn70
    components = masker.transform(rsn70)
    print(components.shape)
    enet_scale(components.T, inplace=True)
    print(np.sum(np.abs(components), axis=1))
    test_data = func_filenames[(-n_test_records * 2)::2]

    n_samples, n_voxels = np.load(test_data[-1], mmap_mode='r').shape
    X = np.empty((n_test_records * n_samples, n_voxels))

    for i, this_data in enumerate(test_data):
        X[i * n_samples:(i + 1) * n_samples] = np.load(this_data,
                                                       mmap_mode='r')
    exp_var = {}
    for alpha in [1e-2, 1e-3, 1e-4]:
        exp_var[alpha] = objective_function(X, components, alpha)
    with open(join(output_dir, 'init_objective.json'), 'w') as f:
        json.dump(exp_var, f)
Example #11
def get_init_objective(output_dir):
    mask, func_filenames = get_hcp_data(raw=True)

    masker = NiftiMasker(mask_img=mask, smoothing_fwhm=None,
                         standardize=False)
    masker.fit()

    rsn70 = fetch_atlas_smith_2009().rsn70
    components = masker.transform(rsn70)
    print(components.shape)
    enet_scale(components.T, inplace=True)
    print(np.sum(np.abs(components), axis=1))
    test_data = func_filenames[(-n_test_records * 2)::2]

    n_samples, n_voxels = np.load(test_data[-1], mmap_mode='r').shape
    X = np.empty((n_test_records * n_samples, n_voxels))

    for i, this_data in enumerate(test_data):
        X[i * n_samples:(i + 1) * n_samples] = np.load(this_data,
                                                       mmap_mode='r')
    exp_var = {}
    for alpha in [1e-2, 1e-3, 1e-4]:
        exp_var[alpha] = objective_function(X, components, alpha)
    with open(join(output_dir, 'init_objective.json'), 'w') as f:
        json.dump(exp_var, f)
Example #12
# The file contains a 3D volume (tmap_filename, defined earlier in the
# original tutorial); we can easily visualize it as a statistical map:
from nilearn import plotting
plotting.plot_stat_map(tmap_filename)

###############################################################################
# Visualizing works better with a threshold
plotting.plot_stat_map(tmap_filename, threshold=3)

###############################################################################
# Visualizing one volume in a 4D file
# -----------------------------------
#
# We can download resting-state networks from the Smith 2009 study on
# correspondence between rest and task
from nilearn import datasets
rsn = datasets.fetch_atlas_smith_2009()['rsn10']
print(rsn)

###############################################################################
# It is a 4D NIfTI file. We load it into memory to print its shape.
from nilearn import image
print(image.load_img(rsn).shape)

###############################################################################
# We can retrieve the first volume (note that Python indexing starts at 0):
first_rsn = image.index_img(rsn, 0)
print(first_rsn.shape)

###############################################################################
# first_rsn is a 3D image.
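###############################################################################
# We can then visualize it like any other 3D statistical map (a hedged
# continuation of this tutorial excerpt):
plotting.plot_stat_map(first_rsn)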
Example #13
    if isinstance(rs_file, list):
        all_sub_rs_maps = concat_imgs(rs_file)
    else:
        all_sub_rs_maps = nib.load(rs_file)
    cur_shape = all_sub_rs_maps.get_data().shape
    size_in_GB = all_sub_rs_maps.get_data().nbytes / 1e9

    print('%i rs images: %.2f GB' % (cur_shape[-1], size_in_GB))

    ###############################################################################
    # dump network projections
    ###############################################################################

    # retrieve network projections
    from nilearn import datasets as ds
    smith_pkg = ds.fetch_atlas_smith_2009()
    icas_path = smith_pkg['rsn20']

    from nilearn.input_data import NiftiMapsMasker
    nmm = NiftiMapsMasker(
        mask_img=mask_file, maps_img=icas_path, resampling_target='mask',
        standardize=True, detrend=True)
    nmm.fit()
    nmm.maps_img_.to_filename('dbg_ica_maps.nii.gz')

    FS_netproj = nmm.transform(all_sub_rs_maps)
    np.save('%i_nets_timeseries' % sub_id, FS_netproj)

    # compute network sparse inverse covariance
    from sklearn.covariance import GraphLassoCV
    from nilearn.image import index_img
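    # The excerpt stops here; a hedged sketch of the likely continuation,
    # fitting the sparse inverse covariance on the network time series.
    # (GraphLassoCV was renamed GraphicalLassoCV in later scikit-learn.)
    gl = GraphLassoCV()
    gl.fit(FS_netproj)
    np.save('%i_nets_sparse_inv_cov' % sub_id, gl.precision_)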
Example #14
mask, func_filenames = get_hcp_data(data_dir='/storage/data')

func_filenames = func_filenames[:2]

# Apply our decomposition estimator with reduction
n_components = 70
n_jobs = 20
raw = True
init = True

dict_fact = SpcaFmri(mask=mask,
                     smoothing_fwhm=3,
                     shelve=not raw,
                     n_components=n_components,
                     dict_init=fetch_atlas_smith_2009().rsn70 if init else None,
                     reduction=12,
                     alpha=0.001,
                     random_state=0,
                     n_epochs=1,
                     memory=expanduser("~/nilearn_cache"), memory_level=2,
                     verbose=4,
                     n_jobs=1,
                     )

print('[Example] Learning maps')
timings = np.zeros(20)
for n_jobs in range(1, 21):
    with num_threads(n_jobs):
        t0 = time.time()
        dict_fact.fit(func_filenames, raw=raw)
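        # The listing truncates here; presumably the elapsed time is stored
        # per thread count (a hedged guess at the missing loop body):
        timings[n_jobs - 1] = time.time() - t0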
Example #15
which is already defined.

See :ref:`plotting` for more information on how to tune the parameters.
"""
# Load 4D probabilistic atlases
from nilearn import datasets

# Harvard Oxford Atlas
harvard_oxford = datasets.fetch_atlas_harvard_oxford('cort-prob-2mm')
harvard_oxford_sub = datasets.fetch_atlas_harvard_oxford('sub-prob-2mm')

# Multi Subject Dictionary Learning Atlas
msdl = datasets.fetch_atlas_msdl()

# Smith ICA Atlas and Brain Maps 2009
smith = datasets.fetch_atlas_smith_2009()

# ICBM tissue probability
icbm = datasets.fetch_icbm152_2009()

# Visualization
import matplotlib.pyplot as plt
from nilearn import plotting

atlas_types = {'Harvard_Oxford': harvard_oxford.maps,
               'Harvard_Oxford sub': harvard_oxford_sub.maps,
               'MSDL': msdl.maps, 'Smith 2009 10 RSNs': smith.rsn10,
               'Smith2009 20 RSNs': smith.rsn20,
               'Smith2009 70 RSNs': smith.rsn70,
               'Smith2009 10 Brainmap': smith.bm10,
               'Smith2009 20 Brainmap': smith.bm20,
               'Smith2009 70 Brainmap': smith.bm70}
Example #16
which is already defined.

See :ref:`plotting` for more information on how to tune the parameters.
"""
# Load 4D probabilistic atlases
from nilearn import datasets

# Harvard Oxford Atlas
harvard_oxford = datasets.fetch_atlas_harvard_oxford('cort-prob-2mm')
harvard_oxford_sub = datasets.fetch_atlas_harvard_oxford('sub-prob-2mm')

# Multi Subject Dictionary Learning Atlas
msdl = datasets.fetch_atlas_msdl()

# Smith ICA Atlas and Brain Maps 2009
smith = datasets.fetch_atlas_smith_2009()

# ICBM tissue probability
icbm = datasets.fetch_icbm152_2009()

# Visualization
import matplotlib.pyplot as plt
from nilearn import plotting

atlas_types = {
    'Harvard_Oxford': harvard_oxford.maps,
    'Harvard_Oxford sub': harvard_oxford_sub.maps,
    'MSDL': msdl.maps,
    'Smith 2009 10 RSNs': smith.rsn10,
    'Smith2009 20 RSNs': smith.rsn20,
    'Smith2009 70 RSNs': smith.rsn70,
    'Smith2009 10 Brainmap': smith.bm10,
    'Smith2009 20 Brainmap': smith.bm20,
    'Smith2009 70 Brainmap': smith.bm70,
}
Example #17
# The file contains a 3D volume (tmap_filename, defined earlier in the
# original tutorial); we can easily visualize it as a statistical map:
from nilearn import plotting
plotting.plot_stat_map(tmap_filename)

###############################################################################
# Visualizing works better with a threshold
plotting.plot_stat_map(tmap_filename, threshold=3)


###############################################################################
# Visualizing one volume in a 4D file
# -----------------------------------
#
# We can download resting-state networks from the Smith 2009 study on
# correspondence between rest and task
from nilearn import datasets
rsn = datasets.fetch_atlas_smith_2009()['rsn10']
print(rsn)

###############################################################################
# It is a 4D NIfTI file. We load it into memory to print its shape.
from nilearn import image
print(image.load_img(rsn).shape)

###############################################################################
# We can retrieve the first volume (note that Python indexing starts at 0):
first_rsn = image.index_img(rsn, 0)
print(first_rsn.shape)

###############################################################################
# first_rsn is a 3D image.
Example #18
# Imports assumed from the surrounding project (modl) and its dependencies,
# added so the excerpt is closer to runnable:
from modl.datasets import fetch_adhd
from modl.fmri import fMRIDictFact, rfMRIDictionaryScorer
from modl.plotting.fmri import display_maps
from modl.utils.system import get_cache_dirs
from nilearn.datasets import fetch_atlas_smith_2009
from sklearn.model_selection import train_test_split
from joblib import Memory

n_components = 20
batch_size = 50
learning_rate = .92
method = 'masked'
step_size = 0.01
reduction = 12
alpha = 1e-3
n_epochs = 2
verbose = 15
n_jobs = 2
smoothing_fwhm = 6

dict_init = fetch_atlas_smith_2009().rsn20

dataset = fetch_adhd(n_subjects=40)
data = dataset.rest.values
train_data, test_data = train_test_split(data, test_size=1, random_state=0)
train_imgs, train_confounds = zip(*train_data)
test_imgs, test_confounds = zip(*test_data)
mask = dataset.mask
memory = Memory(cachedir=get_cache_dirs()[0], verbose=2)

cb = rfMRIDictionaryScorer(test_imgs, test_confounds=test_confounds)
dict_fact = fMRIDictFact(
    smoothing_fwhm=smoothing_fwhm,
    method=method,
    step_size=step_size,
    mask=mask,
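    # The call is truncated in the listing; a hedged completion using the
    # hyper-parameters defined above (fMRIDictFact is assumed to accept them,
    # as in modl's published examples):
    n_components=n_components,
    learning_rate=learning_rate,
    batch_size=batch_size,
    reduction=reduction,
    alpha=alpha,
    n_epochs=n_epochs,
    n_jobs=n_jobs,
    verbose=verbose,
    memory=memory,
    callback=cb)
dict_fact.fit(train_imgs, confounds=train_confounds)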
Example #19
    def __init__(self, atlas_name=''):
        from nilearn import datasets
        smith = datasets.fetch_atlas_smith_2009()
        self.dataset = getattr(smith, atlas_name)
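A hedged sketch of the enclosing wrapper and its use (only the __init__ above appears in the listing; the class name SmithAtlasLoader is hypothetical):

class SmithAtlasLoader:
    def __init__(self, atlas_name=''):
        from nilearn import datasets
        smith = datasets.fetch_atlas_smith_2009()
        # Store the local path to the requested 4D maps (e.g. 'rsn70')
        self.dataset = getattr(smith, atlas_name)

loader = SmithAtlasLoader(atlas_name='rsn70')
print(loader.dataset)  # path to the rsn70 NIfTI image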