Example no. 1
 def new_SpaceNet(self):
     if self.task == "classification":
         decoder = SpaceNetClassifier(memory="nilearn_cache",
                                      penalty=self.penalty,
                                      screening_percentile=15.,
                                      memory_level=1)
     else:
         decoder = SpaceNetRegressor(memory="nilearn_cache",
                                     penalty=self.penalty,
                                     screening_percentile=15.,
                                     memory_level=1)
     return decoder
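
# A minimal usage sketch (not part of the original snippet): the method assumes
# it lives on a class exposing `task` and `penalty` attributes, and the decoders
# come from nilearn.decoding. The `DecodingPipeline` class name is hypothetical.
from nilearn.decoding import SpaceNetClassifier, SpaceNetRegressor


class DecodingPipeline:
    def __init__(self, task, penalty="graph-net"):
        self.task = task        # "classification" or anything else -> regression
        self.penalty = penalty  # "graph-net" or "tv-l1"

    def new_SpaceNet(self):
        # same body as the method above
        if self.task == "classification":
            decoder = SpaceNetClassifier(memory="nilearn_cache",
                                         penalty=self.penalty,
                                         screening_percentile=15.,
                                         memory_level=1)
        else:
            decoder = SpaceNetRegressor(memory="nilearn_cache",
                                        penalty=self.penalty,
                                        screening_percentile=15.,
                                        memory_level=1)
        return decoder


decoder = DecodingPipeline(task="regression", penalty="tv-l1").new_SpaceNet()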
Example no. 2
y = np.array(inputdata['ppvtraw'])  # 'phonemes' 'ppvtraw' etc

logging.info('Data loaded.')

# Sort data for better visualization
perm = np.argsort(y)
y = y[perm]
X = X[perm]

# 2. CONSTRUCT TRAINING & TEST DATA
rng = check_random_state(42)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, train_size=.8, random_state=rng)
###########################################################################
# Fit the SpaceNet and predict with it
decoder = SpaceNetRegressor(memory="nilearn_cache", penalty=fitpenalty,
                            screening_percentile=screenp, memory_level=2, n_jobs=4)

logging.info('Decoder set up. Starting model fit.')
decoder.fit(X_train, y_train)  # fit
logging.info('Model fit complete. Saving outputs...')

y_pred = decoder.predict(X_test).ravel()  # predict
mae = np.mean(np.abs(y_test - y_pred))
logging.info('Mean absolute error (MAE) of the prediction: %.2f' % mae)
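
# For completeness, squared-error and R^2 metrics can be logged as well
# (a sketch, not part of the original script; scikit-learn ships with nilearn).
from sklearn.metrics import mean_squared_error, r2_score
logging.info('MSE: %.2f, R^2: %.3f'
             % (mean_squared_error(y_test, y_pred), r2_score(y_test, y_pred)))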

# 4. OUTPUTS
# Save csv metadata
inputdata.to_csv(logdir + inputcsv)
# Save decoder object
decoder_dir = './outputs/%s-%sx%s/decoder_sma/' % (timestamp, taskcode, predictor)  # directory
if not os.path.exists(decoder_dir):
    os.makedirs(decoder_dir)

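# One way to persist the outputs (a sketch, not from the original script): dump
# the fitted decoder with joblib and write the weights map to NIfTI via the
# decoder's coef_img_ attribute. The file names below are illustrative.
import joblib
joblib.dump(decoder, os.path.join(decoder_dir, 'decoder.joblib'))
decoder.coef_img_.to_filename(os.path.join(decoder_dir, 'weights.nii.gz'))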

Example no. 3

###########################################################################
# Fit the SpaceNet and predict with it
# -------------------------------------
from nilearn.decoding import SpaceNetRegressor

# To save time (because these are anat images with many voxels), we include
# only the 5 percent of voxels most correlated with the age variable in the
# fit. We also set memory_level=2 so that more of the intermediate
# computations are cached, and you may pass n_jobs=<some_high_value> to the
# SpaceNetRegressor class to take advantage of a multi-core system.
#
# Here we use a graph-net penalty; more structured maps can be obtained with
# the TV-l1 penalty, at the expense of longer runtimes.
decoder = SpaceNetRegressor(memory="nilearn_cache", penalty="graph-net",
                            screening_percentile=5., memory_level=2)
decoder.fit(gm_imgs_train, age_train)  # fit
coef_img = decoder.coef_img_
y_pred = decoder.predict(gm_imgs_test).ravel()  # predict
mae = np.mean(np.abs(age_test - y_pred))
print('Mean absolute error (MAE) on the predicted age: %.2f' % mae)


###########################################################################
# Visualize the resulting maps
from nilearn.plotting import plot_stat_map, show
# weights map
background_img = gm_imgs[0]
plot_stat_map(coef_img, background_img, title="graph-net weights",
              display_mode="z", cut_coords=1)
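
# The weights map can also be written to disk and the figure displayed on
# screen (a sketch, not part of the original example; the filename is
# illustrative).
coef_img.to_filename('graph_net_weights.nii.gz')
show()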
Example no. 4
"""
# author: DOHMATOB Elvis Dopgima,
#         GRAMFORT Alexandre

### Load data #################################################################
from nilearn.datasets import fetch_mixed_gambles
data = fetch_mixed_gambles(n_subjects=16)
zmaps, gain, mask_img = data.zmaps, data.gain, data.mask_img

### Fit TV-L1 #################################################################
# Here we're using the regressor object given that the task is to predict a
# continuous variable, the gain of the gamble.
from nilearn.decoding import SpaceNetRegressor
decoder = SpaceNetRegressor(
    mask=mask_img,
    penalty="tv-l1",
    eps=1e-1,  # prefer large alphas
    memory="cache")
decoder.fit(zmaps, gain)  # fit

# Visualize TV-L1 weights
import matplotlib.pyplot as plt
from nilearn.plotting import plot_stat_map
plot_stat_map(decoder.coef_img_,
              title="tv-l1",
              display_mode="yz",
              cut_coords=[20, -2])

### Fit Graph-Net #############################################################
decoder = SpaceNetRegressor(
    mask=mask_img,
    penalty="graph-net",
    eps=1e-1,  # prefer large alphas
    memory="cache")
decoder.fit(zmaps, gain)  # fit
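
# For symmetry with the TV-L1 plot above, the Graph-Net weights can be
# visualized the same way (a sketch, not part of the original snippet).
plot_stat_map(decoder.coef_img_,
              title="graph-net",
              display_mode="yz",
              cut_coords=[20, -2])
plt.show()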

Example no. 5

##########################################################################
# Load the data from the Jimura mixed-gamble experiment
from nilearn.datasets import fetch_mixed_gambles

data = fetch_mixed_gambles(n_subjects=16)

zmap_filenames = data.zmaps
behavioral_target = data.gain
mask_filename = data.mask_img

##########################################################################
# Fit TV-L1
# Here we're using the regressor object given that the task is to predict a
# continuous variable, the gain of the gamble.
from nilearn.decoding import SpaceNetRegressor

decoder = SpaceNetRegressor(
    mask=mask_filename,
    penalty="tv-l1",
    eps=1e-1,  # prefer large alphas
    memory="nilearn_cache")

decoder.fit(zmap_filenames, behavioral_target)

# Visualize TV-L1 weights
from nilearn.plotting import plot_stat_map, show

plot_stat_map(decoder.coef_img_,
              title="tv-l1",
              display_mode="yz",
              cut_coords=[20, -2])
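
# Optional (a sketch, not part of the original example): quantify the TV-L1
# fit by holding out part of the data with scikit-learn's train_test_split.
import numpy as np
from sklearn.model_selection import train_test_split

y = np.asarray(behavioral_target)
zmaps_train, zmaps_test, y_train, y_test = train_test_split(
    zmap_filenames, y, test_size=0.2, random_state=0)
decoder.fit(zmaps_train, y_train)
y_pred = decoder.predict(zmaps_test).ravel()
print('Mean absolute error on held-out gambles: %.2f'
      % np.mean(np.abs(y_test - y_pred)))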

##########################################################################
# Fit Graph-Net
decoder = SpaceNetRegressor(mask=mask_filename, penalty="graph-net",
                            eps=1e-1,  # prefer large alphas
                            memory="nilearn_cache")
decoder.fit(zmap_filenames, behavioral_target)


Example no. 6

# Build the per-subject list of seed-to-voxel z-map filenames. The loop header
# below is a reconstruction; `goodsubj`, `fc`, `fb` and
# `secondlevel_folder_names` are assumed to be defined earlier in the script.
zmap_filenames = []
for subj in goodsubj:
    zmap_filenames.append('/home/jmuraskin/Projects/CCD/working_v1/seed-to-voxel/%s/%s/%s_%s.nii.gz'
                          % (fc, secondlevel_folder_names[fb], fc, subj))

mask_filename = '/home/jmuraskin/Projects/CCD/working_v1/seg_probabilities/grey_matter_mask-20-percent.nii.gz'

from scipy.stats import zscore
from pandas import read_csv
from nilearn import image

# Load phenotypic data
phenoFile = '/home/jmuraskin/Projects/CCD/Pheno/narsad+vt_new.csv'
pheno = read_csv(phenoFile)
pheno = pheno.set_index('participant')

# Build z-scored confound regressors for the retained subjects
ages = zscore(pheno.loc[goodsubj]['V1_DEM_001'])

mf = zscore(pheno.loc[goodsubj]['V1_DEM_002'])

motionTest = read_csv('/home/jmuraskin/Projects/CCD/CCD-scripts/analysis/CCD_meanFD.csv')
meanFD = zscore(motionTest[(motionTest.FB == 'FEEDBACK')
                           & motionTest.Subject_ID.isin(goodsubj)]['train_meanFD'])


imgs = image.concat_imgs(zmap_filenames)

clean_imgs = image.clean_img(imgs, confounds=[ages, mf, meanFD],
                             detrend=False, standardize=True)


from nilearn.decoding import SpaceNetRegressor

decoder = SpaceNetRegressor(mask=mask_filename, penalty="tv-l1",
                            eps=1e-1,  # prefer large alphas
                            memory="nilearn_cache",n_jobs=30)

decoder.fit(clean_imgs, behavioral_target)
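
# The snippet ends at the fit; here is a sketch (not from the original script)
# of how the learned weights could be inspected afterwards. `behavioral_target`
# is assumed to be defined earlier in the original script.
from nilearn.plotting import plot_stat_map, show
plot_stat_map(decoder.coef_img_, title="tv-l1 weights")
show()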

Example no. 7

#############################################################################
# Although these (FREM) maps have been thresholded for display, they are not
# sparse (i.e. almost all voxels have non-zero coefficients). See also this
# :ref:`other example <sphx_glr_auto_examples_02_decoding_plot_haxby_frem.py>`
# using FREM, and related :ref:`section of user guide <frem>`.
#

#############################################################################
# Example use of TV-L1 SpaceNet
# ---------------------------------------------------------------------------
# :ref:`SpaceNet<space_net>` is another method available in Nilearn to decode
# with spatially sparse models. Depending on the penalty that is used,
# it yields either very structured maps (TV-L1) or unstructured maps
# (graph-net). Because of their heavy computational cost, these methods are
# not demonstrated in this example, but you can try them easily if you have a
# few minutes. Example code is included below.
#

from nilearn.decoding import SpaceNetRegressor

# We use the regressor object since the task is to predict a continuous
# variable (gain of the gamble).

tv_l1 = SpaceNetRegressor(
    mask=mask_filename,
    penalty="tv-l1",
    eps=1e-1,  # prefer large alphas
    memory="nilearn_cache")
# tv_l1.fit(zmap_filenames, behavioral_target)
# plot_stat_map(tv_l1.coef_img_, title="TV-L1", display_mode="yz",
#               cut_coords=[20, -2])
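
# The graph-net variant differs only in the penalty argument; it is kept
# commented out, like the TV-L1 fit above, because it is equally expensive to
# run (a sketch, not part of the original example).
# graph_net = SpaceNetRegressor(
#     mask=mask_filename,
#     penalty="graph-net",
#     eps=1e-1,  # prefer large alphas
#     memory="nilearn_cache")
# graph_net.fit(zmap_filenames, behavioral_target)
# plot_stat_map(graph_net.coef_img_, title="Graph-Net", display_mode="yz",
#               cut_coords=[20, -2])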