Example #1
def test_fetch_adhd():
    local_url = "file://" + datadir

    sub1 = [3902469, 7774305, 3699991]
    sub2 = [2014113, 4275075, 1019436,
            3154996, 3884955,   27034,
            4134561,   27018, 6115230,
              27037, 8409791,   27011]
    sub3 = [3007585, 8697774, 9750701,
              10064,   21019,   10042,
              10128, 2497695, 4164316,
            1552181, 4046678,   23012]
    sub4 = [1679142, 1206380,   23008,
            4016887, 1418396, 2950754,
            3994098, 3520880, 1517058,
            9744150, 1562298, 3205761, 3624598]
    subs = np.asarray(sub1 + sub2 + sub3 + sub4)
    subs = subs.view(dtype=[('Subject', '<i8')])
    file_mock.add_csv('ADHD200_40subs_motion_parameters_and_phenotypics.csv',
                      subs)

    adhd = datasets.fetch_adhd(data_dir=tmpdir, url=local_url,
                               n_subjects=12, verbose=0)
    assert_equal(len(adhd.func), 12)
    assert_equal(len(adhd.confounds), 12)
    assert_equal(len(url_request.urls), 13)  # Subjects + phenotypic
Example #2

# Imports assumed by this excerpt (not shown in the original snippet);
# the community module is provided by the python-louvain package.
import community as louvain
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import pandas as pd
from nilearn import datasets, image, plotting
from nilearn.input_data import NiftiMapsMasker


def run_mini_pipeline():
    atlas = datasets.fetch_atlas_msdl()
    atlas_img = atlas['maps']
    labels = pd.read_csv(atlas['labels'])['name']

    masker = NiftiMapsMasker(maps_img=atlas_img, standardize=True,
                             memory='/tmp/nilearn', verbose=0)

    # number_subjects is assumed to be defined elsewhere in the original
    # module; it is passed positionally as fetch_adhd's n_subjects.
    data = datasets.fetch_adhd(number_subjects)

    figures_folder = '../figures/'
    count = 0
    for func_file, confound_file in zip(data.func, data.confounds):
        
        # fit the data to the atlas mask, regress out confounds
        time_series = masker.fit_transform(func_file, confounds=confound_file)

        correlation = np.corrcoef(time_series.T)

        #plotting starts here
        plt.figure(figsize=(10, 10))
        plt.imshow(correlation, interpolation="nearest")
        x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
        y_ticks = plt.yticks(range(len(labels)), labels)
        corr_file = figures_folder+'subject_number_' + str(count) + '_correlation.pdf'
        plt.savefig(corr_file)

        atlas_region_coords = [plotting.find_xyz_cut_coords(img) for img in image.iter_img(atlas_img)]
        threshold = 0.6
        plotting.plot_connectome(correlation, atlas_region_coords, edge_threshold=threshold)
        connectome_file = figures_folder+'subject_number_' + str(count) + '_connectome.pdf'
        plt.savefig(connectome_file)


        #graph setup

        #binarize correlation matrix
        correlation[correlation<threshold] = 0
        correlation[correlation != 0] = 1

        graph = nx.from_numpy_matrix(correlation)

        partition = louvain.best_partition(graph)

        values = [partition.get(node) for node in graph.nodes()]

        plt.figure()
        nx.draw_spring(graph, cmap=plt.get_cmap('jet'), node_color=values,
                       node_size=30, with_labels=True)
        graph_file = figures_folder+'subject_number_' + str(count) + '_community.pdf'
        plt.savefig(graph_file)

        count += 1

        plt.close('all')
Example #3
def run_canica(params):
    """CanICA

    Perform Canonical Independent Component Analysis.

    Parameters
    ----------

    n_components: (20) number of components

    smoothing_fwhm: (6.) smoothing fwhm

    threshold: (3.) specify threshold

    verbose: (['10', '0', '10']) select verbosity level

    input_folder: (folder) select input folder

    output_folder: (folder) define output folder

    References
    ----------
    * G. Varoquaux et al. "A group model for stable multi-subject ICA on
      fMRI datasets", NeuroImage Vol 51 (2010), p. 288-299

    * G. Varoquaux et al. "ICA-based sparse features recovery from fMRI
      datasets", IEEE ISBI 2010, p. 1177
    """
    dataset = datasets.fetch_adhd()
    func_files = dataset.func
    output_dir = osp.abspath(params.pop('output_folder'))
    prepare_report_directory(output_dir)
    run(func_files, params, output_dir)
    json.dump(params, open(osp.join(output_dir, 'params.json'), 'w'))
    img_src_filenames = [osp.join(output_dir, 'images', fname) for fname in
                         os.listdir(osp.join(output_dir, 'images'))
                         if fname.startswith('IC_')]
    report = generate_report(params, img_src_filenames)
    reportindex = osp.abspath(osp.join(output_dir, 'index.html'))
    report.save_html(reportindex)
    return ('file', 'file://{}'.format(reportindex))
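A hypothetical invocation for reference; the keys mirror the documented
parameters, and the paths are placeholders, not values from the original
project:

params = {
    'n_components': 20,
    'smoothing_fwhm': 6.,
    'threshold': 3.,
    'verbose': '0',
    'input_folder': '/data/adhd',            # placeholder path
    'output_folder': '/tmp/canica_report',   # placeholder path
}
run_canica(params)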
Example #4

        matrix = matrix.copy()  # avoid side effects
        # Set diagonal to zero, for better visualization
        np.fill_diagonal(matrix, 0)
        vmax = np.max(np.abs(matrix))
        title = '{0}, subject {1}'.format(matrix_kind, n_subject)
        plotting.plot_matrix(matrix, vmin=-vmax, vmax=vmax, cmap='RdBu_r',
                             title=title, figure=fig, colorbar=False)


###############################################################################
# Load ADHD dataset and MSDL atlas
# --------------------------------
# We study only 20 subjects from the ADHD dataset, to save computation time.
from nilearn import datasets

adhd_data = datasets.fetch_adhd(n_subjects=20)

###############################################################################
# We use probabilistic regions of interest (ROIs) from the MSDL atlas.
msdl_data = datasets.fetch_atlas_msdl()
msdl_coords = msdl_data.region_coords
n_regions = len(msdl_coords)
print('MSDL has {0} ROIs, part of the following networks:\n{1}.'.format(
    n_regions, msdl_data.networks))

###############################################################################
# Region signals extraction
# -------------------------
# To extract regions time series, we instantiate a
# :class:`nilearn.input_data.NiftiMapsMasker` object and pass it the atlas
# file name, as well as the filtering band-width and detrending options.
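The excerpt stops right before the masker is created; a minimal sketch of the
step it describes, assuming the msdl_data and adhd_data variables defined
above (parameter values are plausible choices, not recovered from the
original example):

from nilearn.input_data import NiftiMapsMasker

masker = NiftiMapsMasker(msdl_data.maps, resampling_target="data", t_r=2.5,
                         detrend=True, low_pass=.1, high_pass=.01,
                         memory='nilearn_cache')
masker.fit()
pooled_subjects = [masker.transform(func, confounds=conf)
                   for func, conf in zip(adhd_data.func, adhd_data.confounds)]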
Example #5
                         labels=labels)
    # Display precision matrix
    plotting.plot_matrix(prec,
                         cmap=plotting.cm.bwr,
                         vmin=-span,
                         vmax=span,
                         title="%s / precision" % title,
                         labels=labels)


##############################################################################
# Fetching datasets
# ------------------
from nilearn import datasets
msdl_atlas_dataset = datasets.fetch_atlas_msdl()
adhd_dataset = datasets.fetch_adhd(n_subjects=n_subjects)

# print basic information on the dataset
print('First subject functional nifti image (4D) is at: %s' %
      adhd_dataset.func[0])  # 4D data

##############################################################################
# Extracting region signals
# --------------------------
from nilearn import image
from nilearn import input_data

# A "memory" to avoid recomputation
from sklearn.externals.joblib import Memory  # in newer versions: from joblib import Memory
mem = Memory('nilearn_cache')
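The excerpt ends here; in the full example the cached Memory object would
typically be handed to a masker, e.g. (a sketch modelled on the similar
snippet later on this page, not the original continuation):

masker = input_data.NiftiMapsMasker(
    msdl_atlas_dataset.maps, resampling_target="maps", detrend=True,
    standardize=True, memory=mem, memory_level=1, verbose=2)
masker.fit()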
Example #6

with the highest values.

"""

##############################################################################
# Retrieve the atlas and the data
# --------------------------------
from nilearn import datasets
atlas = datasets.fetch_atlas_msdl()
# Loading atlas image stored in 'maps'
atlas_filename = atlas['maps']
# Loading atlas data stored in 'labels'
labels = atlas['labels']

# Loading the functional datasets
data = datasets.fetch_adhd(n_subjects=1)

# print basic information on the dataset
print('First subject functional nifti images (4D) are at: %s' %
      data.func[0])  # 4D data

##############################################################################
# Extract time series
# --------------------
from nilearn.input_data import NiftiMapsMasker
masker = NiftiMapsMasker(maps_img=atlas_filename,
                         standardize=True,
                         memory='nilearn_cache',
                         verbose=5)

time_series = masker.fit_transform(data.func[0], confounds=data.confounds)
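The snippet ends after signal extraction; a common continuation (a sketch,
not part of the original excerpt) correlates the extracted signals:

from nilearn.connectome import ConnectivityMeasure

correlation_measure = ConnectivityMeasure(kind='correlation')
correlation_matrix = correlation_measure.fit_transform([time_series])[0]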
Example #7
from nilearn import datasets  # import assumed by this excerpt


def get_adhd_data(data_dir='./datas/brain', n_subjects=1):

    dataset = datasets.fetch_adhd(data_dir=data_dir, n_subjects=n_subjects)
    imgs = dataset.func

    return imgs
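For instance (a hypothetical call):

imgs = get_adhd_data(n_subjects=2)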
Example #8
from nilearn import datasets, plotting, image

data = datasets.fetch_adhd()

for i in range(4):
    mean_func = image.mean_img(data.func[i])
    plotting.plot_epi(mean_func)
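In a plain script the figures are only rendered once show() is called; this
line is an addition, not part of the original snippet:

plotting.show()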

"""
Functional connectivity measures for group analysis of connectomes
===================================================================

This example compares different measures of functional connectivity between
regions of interest: correlation, partial correlation, and a measure called
tangent. The resulting connectivity coefficients are used to classify ADHD
vs control subjects, and the tangent measure outperforms the standard
measures.

"""

# Fetch dataset
from nilearn import datasets
atlas = datasets.fetch_atlas_msdl()
dataset = datasets.fetch_adhd(n_subjects=20)

######################################################################
# Extract regions time series signals
from nilearn import input_data
masker = input_data.NiftiMapsMasker(atlas.maps,
                                    resampling_target="maps",
                                    detrend=True,
                                    low_pass=.1,
                                    high_pass=.01,
                                    t_r=2.5,
                                    standardize=False,
                                    memory='nilearn_cache',
                                    memory_level=1)
subjects = []
sites = []
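The excerpt stops after the masker setup; a sketch of the comparison the
docstring describes, assuming per-subject time series have been collected in
the subjects list and 0/1 diagnosis labels in a hypothetical adhd_labels
list:

from nilearn.connectome import ConnectivityMeasure
from sklearn.model_selection import cross_val_score
from sklearn.svm import LinearSVC

for kind in ('correlation', 'partial correlation', 'tangent'):
    conn = ConnectivityMeasure(kind=kind, vectorize=True)
    X = conn.fit_transform(subjects)          # one flat vector per subject
    scores = cross_val_score(LinearSVC(), X, adhd_labels, cv=5)
    print(kind, scores.mean())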
Example #10
"""

####################################################################
# Load atlases
# -------------
from nilearn import datasets

yeo = datasets.fetch_atlas_yeo_2011()
print('Yeo atlas nifti image (3D) with 17 parcels and liberal mask is located '
      'at: %s' % yeo['thick_17'])

#########################################################################
# Load functional data
# --------------------
data = datasets.fetch_adhd(n_subjects=10)

print('Functional nifti images (4D, e.g., one subject) are located at: %r'
      % data['func'][0])
print('Confound CSV files (of the same subject) are located at: %r'
      % data['confounds'][0])

##########################################################################
# Extract coordinates on Yeo atlas - parcellations
# ------------------------------------------------
from nilearn.input_data import NiftiLabelsMasker
from nilearn.connectome import ConnectivityMeasure

# ConnectivityMeasure from Nilearn uses simple 'correlation' to compute
# connectivity matrices for all subjects in a list
connectome_measure = ConnectivityMeasure(kind='correlation')
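The excerpt imports NiftiLabelsMasker but stops before using it; a sketch of
the likely continuation (parameter choices are assumptions):

masker = NiftiLabelsMasker(labels_img=yeo['thick_17'], standardize=True,
                           memory='nilearn_cache')
time_series = [masker.fit_transform(func, confounds=conf)
               for func, conf in zip(data['func'], data['confounds'])]
correlation_matrices = connectome_measure.fit_transform(time_series)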
Example #11
    # Display precision matrix
    plt.figure()
    plt.imshow(prec,
               interpolation="nearest",
               vmin=-span,
               vmax=span,
               cmap=plt.cm.get_cmap("bwr"))
    plt.colorbar()
    plt.title("%s / precision" % title)


# Fetching datasets ###########################################################
print("-- Fetching datasets ...")
from nilearn import datasets
msdl_atlas_dataset = datasets.fetch_msdl_atlas()
adhd_dataset = datasets.fetch_adhd()

# Extracting region signals ###################################################
import nilearn.image
import nilearn.input_data

from sklearn.externals.joblib import Memory
mem = Memory(".")

# Number of subjects to consider for group-sparse covariance
n_subjects = 10
subjects = []

func_filenames = adhd_dataset.func
confound_filenames = adhd_dataset.confounds
for func_filename, confound_filename in zip(func_filenames,
Example #12
PLOTDIR = "/neurospin/nsap/research/resNet/fbns"
DATAFILE = os.path.join(WORKDIR, "ADHD40.npy")
PREDFILE = os.path.join(WORKDIR, "ADHD40_pred.npy")
MASKFILE = os.path.join(WORKDIR, "ADHD40_mask.nii.gz")
SEGFILE = "./MNI152_T1_1mm_Brain_FAST_seg.nii.gz"
SEED = 1234
BATCH_SIZE = 20
EPOCH = 99
random.seed(SEED)
np.random.seed(SEED)
setup_logging(level="info")
logger = logging.getLogger("pynet")


# Prepare data
adhd_dataset = datasets.fetch_adhd(n_subjects=40, data_dir=DATADIR)
func_filenames = adhd_dataset.func
print("Functional nifti image: {0}...{1} ({2})".format(
    func_filenames[0], func_filenames[1], len(func_filenames)))

# Build an EPI-based mask because we have no anatomical data
if not os.path.isfile(MASKFILE):
    target_img = nibabel.load(func_filenames[0])
    mask = (target_img.get_data()[..., 0] != 0).astype(int)
    mask_img = nibabel.Nifti1Image(mask, target_img.affine)
    nibabel.save(mask_img, MASKFILE)
else:
    mask_img = nibabel.load(MASKFILE)

# Mask and preproc EPI data
masker = MultiNiftiMasker(
Example #13
# We generate a legend using the trick described on
# http://matplotlib.sourceforge.net/users/legend_guide.html#using-proxy-artist
from matplotlib.patches import Rectangle
p_v = Rectangle((0, 0), 1, 1, fc="red")
p_h = Rectangle((0, 0), 1, 1, fc="blue")
p_f = Rectangle((0, 0), 1, 1, fc="limegreen")
plt.legend([p_v, p_h, p_f], ["vt", "house", "face"])

show()

#%%
from nilearn import datasets
#import os
#rest_dataset = datasets.fetch_development_fmri(n_subjects=20)
rest_dataset = datasets.fetch_adhd(n_subjects=1)
func_filenames = rest_dataset.func


# nii_dir = 'C:\\Users\\MIT-DGMIF\\Desktop\\test\\'
# files = [file for file in os.listdir(nii_dir)]

confounds = rest_dataset.confounds
######################################################################
# Import dictionary learning algorithm from decomposition module and call the
# object and fit the model to the functional datasets
from nilearn.decomposition import DictLearning

# Initialize DictLearning object
dict_learn = DictLearning(n_components=8, smoothing_fwhm=4.,
                          memory="nilearn_cache", memory_level=1,
Example #14

# Imports assumed by this excerpt, as in Example #2 above:
import community as louvain
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import pandas as pd
from nilearn import datasets, image, plotting
from nilearn.input_data import NiftiMapsMasker


def run_mini_pipeline():
    atlas = datasets.fetch_atlas_msdl()
    atlas_img = atlas['maps']
    labels = pd.read_csv(atlas['labels'])['name']

    masker = NiftiMapsMasker(maps_img=atlas_img,
                             standardize=True,
                             memory='/tmp/nilearn',
                             verbose=0)

    # number_subjects is assumed to be defined elsewhere in the original
    # module; it is passed positionally as fetch_adhd's n_subjects.
    data = datasets.fetch_adhd(number_subjects)

    figures_folder = '../figures/'
    count = 0
    for func_file, confound_file in zip(data.func, data.confounds):

        # fit the data to the atlas mask, regress out confounds
        time_series = masker.fit_transform(func_file, confounds=confound_file)

        correlation = np.corrcoef(time_series.T)

        #plotting starts here
        plt.figure(figsize=(10, 10))
        plt.imshow(correlation, interpolation="nearest")
        x_ticks = plt.xticks(range(len(labels)), labels, rotation=90)
        y_ticks = plt.yticks(range(len(labels)), labels)
        corr_file = figures_folder + 'subject_number_' + str(
            count) + '_correlation.pdf'
        plt.savefig(corr_file)

        atlas_region_coords = [
            plotting.find_xyz_cut_coords(img)
            for img in image.iter_img(atlas_img)
        ]
        threshold = 0.6
        plotting.plot_connectome(correlation,
                                 atlas_region_coords,
                                 edge_threshold=threshold)
        connectome_file = figures_folder + 'subject_number_' + str(
            count) + '_connectome.pdf'
        plt.savefig(connectome_file)

        #graph setup

        #binarize correlation matrix
        correlation[correlation < threshold] = 0
        correlation[correlation != 0] = 1

        graph = nx.from_numpy_matrix(correlation)

        partition = louvain.best_partition(graph)

        values = [partition.get(node) for node in graph.nodes()]

        plt.figure()
        nx.draw_spring(graph,
                       cmap=plt.get_cmap('jet'),
                       node_color=values,
                       node_size=30,
                       with_labels=True)
        graph_file = figures_folder + 'subject_number_' + str(
            count) + '_community.pdf'
        plt.savefig(graph_file)

        count += 1

        plt.close('all')
Example #15
# Imports assumed by this excerpt (not shown in the original snippet):
import os

import nibabel as nib
import numpy as np
from tqdm import tqdm


def process_data(num=180):
    # fp = "/home/jiaming/Desktop/ADNI_AV45PET/"
    # # fp = "/Users/zhaoxuandong/Public/Dropbox (Partners HealthCare)/MImadrid/"
    # # ep = "/Users/zhaoxuandong/Public/Dropbox (Partners HealthCare)/MImadrid/meta/metaData.xlsx"
    print("loading data...")

    if num == 120:

        #

        meta = np.load("AAL2.npy")
        coo_dict = {}

        # for i in range(1, 49):
        #     coo_dict[i] = np.where(meta_cort == i)

        # for i in range(1, 22):
        #     coo_dict[i + 48] = np.where(meta_sub == i)
        m = np.unique(meta)
        for i in range(1, 121):
            coo_dict[i] = np.where(meta == m[i])
            # print(coo_dict[i])

        vector_dict = []

        for i in tqdm(range(1, num + 1)):
            vector = []
            for folder in ["NC", "EMCI", "LMCI", "AD"]:
                for root, dirs, files in os.walk(folder):
                    for name in files:
                        if name[-1] != 'i':
                            continue
                        img = nib.load(os.path.join(root, name)).get_data()
                        mask = np.load('mask.npy')
                        img = img * mask
                        #img = (img - np.min(img))/(np.max(img) - np.min(img))
                        img = img[coo_dict[i]]
                        vector.append(np.average(img))
            vector_dict.append(np.array(vector))

        matrix_69 = np.array(vector_dict).transpose()

        np.save("mat120", matrix_69)

        print(matrix_69.shape)  # 419 * 120

        label = [0 for i in range(100)] + [1 for i in range(96)] + [
            2 for i in range(131)
        ] + [3 for i in range(92)]

        label = np.array(label)
        np.save("label120", label)

    elif num == 180:

        #

        meta = np.load("mmp.npy")
        coo_dict = {}

        # for i in range(1, 49):
        #     coo_dict[i] = np.where(meta_cort == i)

        # for i in range(1, 22):
        #     coo_dict[i + 48] = np.where(meta_sub == i)

        for i in range(1, 181):
            coo_dict[i] = np.where(meta == i)
            # print(coo_dict[i])

        vector_dict = []

        for i in tqdm(range(1, num + 1)):
            vector = []
            for folder in ["NC", "EMCI", "LMCI", "AD"]:
                for root, dirs, files in os.walk(folder):
                    for name in files:
                        if name[-1] != 'i':
                            continue
                        img = nib.load(os.path.join(root, name)).get_data()
                        mask = np.load('mask.npy')
                        img = img * mask
                        #img = (img - np.min(img))/(np.max(img) - np.min(img))
                        img = img[coo_dict[i]]
                        vector.append(np.average(img))
            vector_dict.append(np.array(vector))

        matrix_69 = np.array(vector_dict).transpose()

        np.save("mat180", matrix_69)

        print(matrix_69.shape)  # 419 * 180

        label = [0 for i in range(100)] + [1 for i in range(96)] + [
            2 for i in range(131)
        ] + [3 for i in range(92)]

        label = np.array(label)
        np.save("label180", label)

    elif num == 69:
        # fp = "/home/jiaming/Desktop/ADNI_AV45PET/"
        meta_cort = nib.load(
            "HarvardOxford-cort-maxprob-thr25-2mm.nii").get_data()  # 48
        meta_sub = nib.load(
            "HarvardOxford-sub-maxprob-thr25-2mm.nii").get_data()  # 21

        coo_dict = {}

        for i in range(1, 49):
            coo_dict[i] = np.where(meta_cort == i)

        for i in range(1, 22):
            coo_dict[i + 48] = np.where(meta_sub == i)

        vector_dict = []

        for i in tqdm(range(1, num + 1)):
            vector = []
            for folder in ["NC", "EMCI", "LMCI", "AD"]:
                for root, dirs, files in os.walk(folder):
                    for name in files:
                        if name[-1] != 'i':
                            continue
                        img = nib.load(os.path.join(root, name)).get_data()
                        mask = np.load('mask.npy')
                        img = img * mask
                        #img = (img - np.min(img))/(np.max(img) - np.min(img))
                        img = img[coo_dict[i]]
                        vector.append(np.average(img))
            vector_dict.append(np.array(vector))

        matrix_69 = np.array(vector_dict).transpose()

        np.save("mat69", matrix_69)

        print(matrix_69.shape)  # 419 * 69

        label = [0 for i in range(100)] + [1 for i in range(96)] + [
            2 for i in range(131)
        ] + [3 for i in range(92)]

        label = np.array(label)
        np.save("label69", label)

    elif num == 264:
        from nilearn import datasets
        adhd = datasets.fetch_adhd(n_subjects=1)
        power = datasets.fetch_coords_power_2011()
        mask = power.rois
        mask = np.array([mask.x, mask.y, mask.z]).transpose()
        mask[:, 0] = mask[:, 0] + 90
        mask[:, 1] = mask[:, 1] + 130
        mask[:, 2] = mask[:, 2] + 70
        mask = mask / 2
        mask = mask.astype(int)
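        # The offsets above are intended to convert MNI millimetre
        # coordinates into voxel indices of the 91x109x91 2-mm grid (hence
        # the division by 2); the exact mapping depends on the image affine,
        # so this is an approximation.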
        print(mask.shape)

        coo_dict = {}

        assert mask.shape[0] == num

        mask_264 = np.zeros([91, 109, 91])

        for i in range(num):
            xl = []
            yl = []
            zl = []
            cx, cy, cz = mask[i]
            #print(cx, cy, cz)
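            # Collect all voxels within sqrt(10.25) ~ 3.2 voxels (about
            # 6.4 mm at 2 mm resolution) of the sphere centre.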
            for x in [cx - 2, cx - 1, cx, cx + 1, cx + 2]:
                for y in [cy - 2, cy - 1, cy, cy + 1, cy + 2]:
                    for z in [cz - 2, cz - 1, cz, cz + 1, cz + 2]:
                        if x >= 0 and y >= 0 and z >= 0 and x < 91 and y < 109 and z < 91:
                            if (x - cx)**2 + (y - cy)**2 + (z - cz)**2 < 10.25:
                                xl.append(x)
                                yl.append(y)
                                zl.append(z)
                                mask_264[x, y, z] = i + 1

            coo_dict[i] = (np.array(xl), np.array(yl), np.array(zl))
        #print(coo_dict)

        print(np.unique(mask_264))

        np.save("mask264", mask_264)

        vector_dict = []

        np.set_printoptions(suppress=True)

        for i in tqdm(range(num)):
            vector = []
            for folder in ["NC", "EMCI", "LMCI", "AD"]:
                for root, dirs, files in os.walk(folder):
                    for name in files:
                        if name[-1] != 'i':
                            continue
                        img = nib.load(os.path.join(root, name)).get_data()
                        mask = np.load('mask.npy')
                        img = img * mask
                        #img = (img - np.min(img))/(np.max(img) - np.min(img))
                        print(folder, img[[26, 57, 69]])

                        img = img[coo_dict[i]]
                        # print(img)
                        vector.append(np.average(img))
            vector_dict.append(np.array(vector))

        matrix_69 = np.array(vector_dict).transpose()

        r, c = matrix_69.nonzero()
        c_unique = np.unique(c)
        matrix_69 = matrix_69[:, c_unique]

        print(matrix_69.shape)  # 419 * 264

        np.save("mat264", matrix_69)

        label = [0 for i in range(100)] + [1 for i in range(96)] + [
            2 for i in range(131)
        ] + [3 for i in range(92)]

        label = np.array(label)
        np.save("label264", label)

    else:
        raise NotImplementedError
Example #16

    * G. Varoquaux et al. "A group model for stable multi-subject ICA on
      fMRI datasets", NeuroImage Vol 51 (2010), p. 288-299

    * G. Varoquaux et al. "ICA-based sparse features recovery from fMRI
      datasets", IEEE ISBI 2010, p. 1177

Pre-prints for both papers are available on hal
(http://hal.archives-ouvertes.fr)
"""
import numpy as np

### Load ADHD rest dataset ####################################################
from nilearn import datasets
# Here we use a limited number of subjects to get faster-running code. For
# better results, simply increase the number.
dataset = datasets.fetch_adhd()
func_files = dataset.func[:5]

### Preprocess ################################################################
from nilearn import io

# This is a multi-subject method, thus we need to use the
# MultiNiftiMasker, rather than the NiftiMasker
# We specify the target_affine to downsample to 3mm isotropic
# resolution
masker = io.MultiNiftiMasker(smoothing_fwhm=6,
                             target_affine=np.diag((3, 3, 3)),
                             memory="nilearn_cache", memory_level=1,
                             verbose=True)
data_masked = masker.fit_transform(func_files)
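The excerpt ends after masking; the ICA the references describe could then be
run on the masked data, e.g. with scikit-learn's FastICA (a sketch, not the
original continuation):

from sklearn.decomposition import FastICA

X = np.vstack(data_masked)                     # stack subjects: (n_timepoints, n_voxels)
ica = FastICA(n_components=20, random_state=42)
components_masked = ica.fit_transform(X.T).T   # spatial ICA over voxels
components_img = masker.inverse_transform(components_masked)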
Example #17
        vmax = np.max(np.abs(matrix))
        title = '{0}, subject {1}'.format(matrix_kind, n_subject)
        plotting.plot_matrix(matrix,
                             vmin=-vmax,
                             vmax=vmax,
                             cmap='RdBu_r',
                             title=title,
                             figure=fig,
                             colorbar=False)


###############################################################################
# load data
adhd_dataset = datasets.fetch_adhd(n_subjects=2,
                                   data_dir=r'F:\sample-data',
                                   url=None,
                                   resume=True,
                                   verbose=1)
print(adhd_dataset['description'])
keys = adhd_dataset.keys()
print(keys)
func_filenames = adhd_dataset.func
print(func_filenames[0])
confounds = adhd_dataset.confounds
###############################################################################

###############################################################################
# extract the coordinates of Power atlas
power = datasets.fetch_coords_power_2011()
print('Power atlas comes with {0}.'.format(power.keys()))
power_coords = np.vstack((power.rois['x'], power.rois['y'], power.rois['z'])).T
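The excerpt ends after assembling the coordinates; a plausible next step (an
assumption, not shown in the original) extracts signals in small spheres
around the Power coordinates:

from nilearn import input_data

spheres_masker = input_data.NiftiSpheresMasker(
    seeds=power_coords, radius=5., standardize=True)
timeseries = spheres_masker.fit_transform(func_filenames[0],
                                          confounds=confounds[0])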
Example #18

    plt.title("%s / covariance" % title)

    # Display precision matrix
    plt.figure()
    plt.imshow(prec, interpolation="nearest",
               vmin=-span, vmax=span,
               cmap=plotting.cm.bwr)
    plt.colorbar()
    plt.title("%s / precision" % title)


# Fetching datasets ###########################################################
print("-- Fetching datasets ...")
from nilearn import datasets
msdl_atlas_dataset = datasets.fetch_msdl_atlas()
adhd_dataset = datasets.fetch_adhd(n_subjects=1)


# Extracting region signals ###################################################
import nilearn.image
import nilearn.input_data

from sklearn.externals.joblib import Memory
mem = Memory('nilearn_cache')

masker = nilearn.input_data.NiftiMapsMasker(
    msdl_atlas_dataset.maps, resampling_target="maps", detrend=True,
    low_pass=None, high_pass=0.01, t_r=2.5, standardize=True,
    memory=mem, memory_level=1, verbose=2)
masker.fit()
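The excerpt stops after fitting the masker; the covariance and precision
matrices displayed by the helper above would typically come from a sparse
inverse covariance estimator, e.g. (a sketch; the class was named
GraphLassoCV in older scikit-learn):

from sklearn.covariance import GraphicalLassoCV

time_series = masker.transform(adhd_dataset.func[0],
                               confounds=adhd_dataset.confounds[0])
estimator = GraphicalLassoCV()
estimator.fit(time_series)
cov, prec = estimator.covariance_, estimator.precision_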
Example #19
# Imports and module state assumed by this excerpt (not shown in the
# original snippet):
import timeit

import keras
import numpy as np
from nilearn import datasets, decomposition, input_data
from nilearn.connectome import ConnectivityMeasure
from sklearn.model_selection import train_test_split

adhd_data = None  # cached across requests


def nilearn(request):
    global adhd_data
    start = timeit.default_timer()
    canica = decomposition.CanICA(n_components=20, mask_strategy="background")

    num = 6
    if adhd_data is None:
        print("None")
        adhd_data = datasets.fetch_adhd(n_subjects=num)

    print("1")
    func = adhd_data["func"]
    print("2")

    canica.fit(func)
    print("3")
    components = canica.components_
    print("4")
    components_img = canica.masker_.inverse_transform(components)
    print("5")
    masker = input_data.NiftiMapsMasker(components_img,
                                        smoothing_fwhm=6,
                                        standardize=False,
                                        detrend=True,
                                        t_r=2.5,
                                        low_pass=0.1,
                                        high_pass=0.01)
    print("6")
    subjects = []
    adhds = []
    sites = []
    labels = []

    for func_file, confound_file, phenotypic in zip(adhd_data.func,
                                                    adhd_data.confounds,
                                                    adhd_data.phenotypic):
        time_series = masker.fit_transform(func_file, confounds=confound_file)
        subjects.append(time_series)
        is_adhd = phenotypic["adhd"]
        if is_adhd == 1:
            adhds.append(time_series)
        sites.append(phenotypic["site"])
        labels.append(phenotypic["adhd"])

    print("7")
    correlation_measure = ConnectivityMeasure(kind="correlation")
    print("8")
    correlation_matrices = correlation_measure.fit_transform(subjects)
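    # NOTE: fit_transform returns one (n_rois, n_rois) matrix per subject;
    # a dense classifier usually expects flat feature vectors, e.g. via
    # ConnectivityMeasure(kind="correlation", vectorize=True).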
    print("9")
    X_train, X_test, y_train, y_test = train_test_split(correlation_matrices,
                                                        labels,
                                                        test_size=0.3)

    print("10")
    classifier = keras.models.Sequential()
    print("11")
    classifier.add(
        keras.layers.Dense(16,
                           activation="relu",
                           kernel_initializer="random_normal"))
    classifier.add(
        keras.layers.Dense(16,
                           activation="relu",
                           kernel_initializer="random_normal"))
    classifier.add(
        keras.layers.Dense(1,
                           activation="sigmoid",
                           kernel_initializer="random_normal"))
    print("12")
    classifier.compile(optimizer=keras.optimizers.Adam(lr=.0001),
                       loss="binary_crossentropy",
                       metrics=["accuracy"])
    print("13")

    classifier.fit(np.array(X_train),
                   np.array(y_train),
                   batch_size=1,
                   epochs=10)
    print("14")
    eval_model = classifier.evaluate(np.array(X_train), np.array(y_train))
    print("15")
    print(eval_model)

    y_pred = classifier.predict(X_test, batch_size=32)
    print("16")
    y_pred = (y_pred > 0.5)
    print("17")
    print(y_pred)
    print("The End")

    stop = timeit.default_timer()
    runtime = stop - start
    print("num: ", num)
    print("Code run time: ", runtime)

    return "All good"