# Held-out "random stimuli" runs: label files past the first 12 dataset
# entries, and the 10x10 shape of each stimulus frame.
y_random = dataset.label[12:]
y_shape = (10, 10)

### Preprocess data ###########################################################
from utils import masking, signal, cm


sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data: apply the dataset brain mask to each run and
# standardize/detrend every voxel time series.  The first `offset` scans
# are dropped -- presumably to compensate for the haemodynamic delay
# relative to the stimuli (confirm against the paired y slicing below).
X_train = []
for x_random in X_random:
    x_img = nibabel.load(x_random)
    x = masking.apply_mask(x_img, dataset.mask)
    x = signal.clean(x, standardize=True, detrend=True)
    X_train.append(x[offset:])

# Load target stimuli and reshape each file to (n_frames, 10, 10) in
# Fortran order; drop the LAST `offset` frames so stimuli stay aligned
# with the fMRI data shifted above.
# FIX: np.int was removed in NumPy 1.24 -- use the builtin int instead.
y_train = [
    np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
               (-1,) + y_shape, order='F')[:-offset].astype(float)
    for y in y_random]

# Stack all runs along the time axis.
X_train = np.vstack(X_train)
y_train = np.vstack(y_train)

# Flatten the stimuli
y_train = np.reshape(y_train, (-1, y_shape[0] * y_shape[1]))
# Held-out "random stimuli" runs: functional images and label files past
# the first 12 dataset entries, and the 10x10 stimulus frame shape.
X_random = dataset.func[12:]
y_random = dataset.label[12:]
y_shape = (10, 10)

### Preprocess data ###########################################################
from utils import masking, signal, cm

sys.stderr.write("Preprocessing data...")
t0 = time.time()

# Load and mask fMRI data: apply the dataset brain mask to each run and
# standardize/detrend every voxel time series, dropping the first
# `offset` scans (presumably haemodynamic-delay compensation -- confirm).
X_train = []
for x_random in X_random:
    x_img = nibabel.load(x_random)
    x = masking.apply_mask(x_img, dataset.mask)
    x = signal.clean(x, standardize=True, detrend=True)
    X_train.append(x[offset:])

# Load target stimuli, reshape each file to (n_frames, 10, 10) in Fortran
# order, and drop the LAST `offset` frames to stay aligned with the
# shifted fMRI data.
# FIX: np.int was removed in NumPy 1.24 -- use the builtin int instead.
y_train = [
    np.reshape(np.loadtxt(y, dtype=int, delimiter=','),
               (-1,) + y_shape,
               order='F')[:-offset].astype(float)
    for y in y_random]

# Stack all runs along the time axis.
X_train = np.vstack(X_train)
y_train = np.vstack(y_train)

# NOTE(review): the original "flatten the stimuli" step is missing here
# (truncated during extraction); what follows is a plotting helper.
def plot_ica_map(map_3d, vmax):
    """Plot one axial section of a 3D ICA component map.

    Uses the module-level slice index ``z``, mask image ``mask_img`` and
    colormap ``cmap``.

    Parameters
    ----------
    map_3d : 3D ndarray
        Spatial component map; assumed to match the mask image's shape
        (TODO confirm at call sites).
    vmax : float
        Symmetric color-scale bound; colors span [-vmax, vmax].
    """
    # Hide out-of-brain voxels.
    # FIX: get_data() was removed in nibabel 5.0; get_fdata() is the
    # supported accessor (the bool cast preserves the old behavior).
    map_3d = np.ma.masked_array(
        map_3d, np.logical_not(mask_img.get_fdata().astype(bool)))
    # Extract and display the axial section at slice z, rotated for the
    # conventional radiological orientation.
    section = map_3d[:, :, z]
    pl.figure(figsize=(3.8, 4.5))
    pl.axes([0, 0, 1, 1])
    pl.imshow(np.rot90(section), interpolation='nearest',
              vmax=vmax, vmin=-vmax, cmap=cmap)
    pl.axis('off')

# Mask data: extract each subject's voxel time series with 6 mm smoothing.
X = [masking.apply_mask(x, mask_img, smoothing_fwhm=6.) for x in dataset.func]

# Clean signals: standardize each voxel time series (no detrending).
X = [signal.clean(x, standardize=True, detrend=False) for x in X]

### CanICA ####################################################################

if not exists(join(path, 'canica.nii.gz')):
    try:
        from nilearn.decomposition.canica import CanICA
        t0 = time.time()
        canica = CanICA(n_components=n_components, mask=mask_img,
# --- extraction artifact: example boundary marker ("Beispiel #4", score 0) ---
brain states <http://hal.inria.fr/inria-00589201>`_, Michel et al,
Pattern Recognition 2011.

"""

### Load nyu_rest dataset #####################################################

from os.path import join
import numpy as np
import pylab as pl
from utils import datasets, masking, signal
import nibabel

adhd_mask = join('utils', 'adhd_mask.nii.gz')
dataset = datasets.fetch_adhd(n_subjects=1)
# Masked time series for the first subject: a raw and a 6 mm-smoothed
# variant, both standardized (no detrending).
X = masking.apply_mask(dataset.func[0], adhd_mask)
X = signal.clean(X, standardize=True, detrend=False)
X_smoothed = masking.apply_mask(dataset.func[0], adhd_mask, smoothing_fwhm=6.)
X_smoothed = signal.clean(X_smoothed, standardize=True, detrend=False)
# FIX: np.bool was removed in NumPy 1.24 and get_data() in nibabel 5.0;
# use the builtin bool and get_fdata() instead (same boolean mask).
mask = nibabel.load(adhd_mask).get_fdata().astype(bool)

# Axial slice index used by the plotting helpers below.
z = 42


def plot_labels(labels, seed):
    labels = labels.astype(int)
    n_colors = np.max(labels)
    cut = labels[:, :, z]
    np.random.seed(seed)
    colors = np.random.random(size=(n_colors + 1, 3))
Pattern Recognition 2011.

"""

### Load nyu_rest dataset #####################################################

from os.path import join
import numpy as np
import pylab as pl
from utils import datasets, masking, signal
import nibabel


adhd_mask = join('utils', 'adhd_mask.nii.gz')
dataset = datasets.fetch_adhd(n_subjects=1)
# Masked time series for the first subject: a raw and a 6 mm-smoothed
# variant, both standardized (no detrending).
X = masking.apply_mask(dataset.func[0], adhd_mask)
X = signal.clean(X, standardize=True, detrend=False)
X_smoothed = masking.apply_mask(dataset.func[0], adhd_mask,
        smoothing_fwhm=6.)
X_smoothed = signal.clean(X_smoothed, standardize=True, detrend=False)
# FIX: np.bool was removed in NumPy 1.24 and get_data() in nibabel 5.0;
# use the builtin bool and get_fdata() instead (same boolean mask).
mask = nibabel.load(adhd_mask).get_fdata().astype(bool)

# Axial slice index used by the plotting helpers below.
z = 42


def plot_labels(labels, seed):
    labels = labels.astype(int)
    n_colors = np.max(labels)
    cut = labels[:, :, z]
    np.random.seed(seed)
    colors = np.random.random(size=(n_colors + 1, 3))
# --- extraction artifact: example boundary marker ("Beispiel #6", score 0) ---
    # Normalize the map
    section = map_3d[:, :, z]
    pl.figure(figsize=(3.8, 4.5))
    pl.axes([0, 0, 1, 1])
    pl.imshow(np.rot90(section),
              interpolation='nearest',
              vmax=vmax,
              vmin=-vmax,
              cmap=cmap)
    pl.axis('off')


# Mask data: extract each subject's voxel time series with 6 mm smoothing.
X = [masking.apply_mask(x, mask_img, smoothing_fwhm=6.) for x in dataset.func]

# Clean signals: standardize each voxel time series (no detrending).
X = [signal.clean(x, standardize=True, detrend=False) for x in X]

### CanICA ####################################################################

if not exists(join(path, 'canica.nii.gz')):
    try:
        from nilearn.decomposition.canica import CanICA
        t0 = time.time()
        canica = CanICA(n_components=n_components,
# --- extraction artifact: example boundary marker ("Beispiel #7", score 0) ---
# Mean functional image over time (last axis).
mean_img = fmri_data.mean(axis=-1)

### Restrict to faces and houses ##############################################
# Keep only the volumes whose condition label is 'face' or 'house', and
# subset every per-volume array with the same boolean mask.
condition_mask = np.isin(conditions, ('face', 'house'))
X = fmri_data[..., condition_mask]
y = y[condition_mask]
session = session[condition_mask]
conditions = conditions[condition_mask]

### Masking step ##############################################################
from utils import masking, signal
from nibabel import Nifti1Image

# Wrap the raw 4D array in a NIfTI image, apply the brain mask with 4 mm
# smoothing, then standardize the resulting voxel time series.
nifti_img = Nifti1Image(X, affine)
X = masking.apply_mask(nifti_img, mask, smoothing_fwhm=4)
X = signal.clean(X, standardize=True, detrend=False)

###############################################################################
#                                                                             #
#   F-score                                                                   #
#                                                                             #
###############################################################################

from sklearn.feature_selection import f_classif

# Mass-univariate ANOVA: one F-test per voxel.  p-values are converted to
# -log10(p), NaNs (e.g. constant voxels) zeroed, and scores capped at 10
# for display, before projecting back into the brain volume.
f_values, p_values = f_classif(X, y)
p_values = -np.log10(p_values)
p_values[np.isnan(p_values)] = 0
np.minimum(p_values, 10, out=p_values)
p_unmasked = masking.unmask(p_values, mask)