This is a simple extract from data with no meaning and no filter. """ print __doc__ ### All the imports import numpy as np from scipy import signal from sklearn.svm import SVC from sklearn.feature_selection import SelectKBest, f_classif from sklearn.pipeline import Pipeline from sklearn.cross_validation import LeaveOneLabelOut, cross_val_score from nisl import datasets from matplotlib import pyplot as plt ### Load dataset data = datasets.fetch_haxby_data(data_dir='../nisl_data') y = data.target session = data.session X = data.data mask = data.mask img_shape = X[..., 0].shape mean_img = X.mean(-1) # Process the data in order to have a two-dimensional design matrix X of # shape (nb_samples, nb_features). X = X[mask != 0].T # Convert X to floats: working with integers gives rounding errors X = X.astype(np.float) print "detrending data" # Detrend data on each session independently
"""
The haxby dataset: face vs house in object recognition
=======================================================
"""

### Load Haxby dataset ########################################################
from nisl import datasets
dataset = datasets.fetch_haxby_data()
X = dataset.data          # 4D fMRI volume: (x, y, z, n_scans)
mask = dataset.mask       # 3D brain mask: (x, y, z)
y = dataset.target        # stimulus labels, one per scan
session = dataset.session  # session index, one per scan

### Preprocess data ###########################################################
import numpy as np
from scipy import signal

# Build the mean image because we have no anatomic data
mean_img = X.mean(-1)

# For reference: X.shape is (40, 64, 64, 1452) and mask.shape is (40, 64, 64).
# (The bare `X.shape` / `mask.shape` expression statements from the original
# interactive session were no-ops in a script and have been removed.)

# Process the data in order to have a two-dimensional design matrix X of
# shape (nb_samples, nb_features): keep only in-mask voxels, samples as rows.
X = X[mask != 0].T