def main():
    # Apply our decomposition estimator with reduction
    n_components = 70
    n_jobs = 1
    raw = True
    init = True
    mask, func_filenames = get_hcp_data(raw=raw)

    # Grid of reduction factors and sparsity levels to benchmark
    reduction_list = [1, 2, 4, 8, 12]
    alpha_list = [1e-2, 1e-3, 1e-4]

    # One decomposition per (reduction, alpha) pair, run in parallel;
    # a hypothetical sketch of the run helper follows below.
    Parallel(n_jobs=n_jobs, verbose=10)(
        delayed(run)(idx, reduction, alpha, mask, raw, n_components, init,
                     func_filenames)
        for idx, (reduction, alpha)
        in enumerate(itertools.product(reduction_list, alpha_list)))
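The run helper called above is not defined in this excerpt. Below is a minimal, hypothetical sketch of what it could look like, assembled only from the SpcaFmri construction shown at the end of this section; the fit call and the way idx would be used are assumptions, not the original helper.

from nilearn.datasets import fetch_atlas_smith_2009
from modl.spca_fmri import SpcaFmri


def run(idx, reduction, alpha, mask, raw, n_components, init,
        func_filenames):
    # Hypothetical sketch: one SpcaFmri estimator per grid point.
    # idx would presumably label the output directory of this run.
    dict_fact = SpcaFmri(mask=mask,
                         shelve=not raw,
                         n_components=n_components,
                         dict_init=fetch_atlas_smith_2009().rsn70
                         if init else None,
                         reduction=reduction,
                         alpha=alpha)
    dict_fact.fit(func_filenames)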
def main(output_dir, n_jobs):
    # One sub-directory per completed decomposition run
    dir_list = [join(output_dir, f) for f in os.listdir(output_dir)
                if os.path.isdir(join(output_dir, f))]
    mask, func_filenames = get_hcp_data(raw=True)

    masker = NiftiMasker(mask_img=mask, smoothing_fwhm=None,
                         standardize=False)
    masker.fit()

    # Assemble the held-out test matrix from the last n_test_records records
    test_data = func_filenames[(-n_test_records * 2)::2]
    n_samples, n_voxels = np.load(test_data[-1], mmap_mode='r').shape
    X = np.empty((n_test_records * n_samples, n_voxels))
    for i, this_data in enumerate(test_data):
        X[i * n_samples:(i + 1) * n_samples] = np.load(this_data,
                                                       mmap_mode='r')

    Parallel(n_jobs=n_jobs, verbose=1, temp_folder='/dev/shm')(
        delayed(analyse_dir)(dir_name, X, masker) for dir_name in dir_list)
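A hypothetical command-line entry point for this analysis could look as follows; the argument names and defaults are assumptions, since the excerpt does not show how main is actually invoked.

if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(
        description='Analyse every decomposition run found in output_dir.')
    parser.add_argument('output_dir',
                        help='Directory holding one sub-directory per run.')
    parser.add_argument('--n-jobs', type=int, default=1,
                        help='Number of parallel workers.')
    args = parser.parse_args()
    main(args.output_dir, args.n_jobs)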
def get_init_objective(output_dir):
    mask, func_filenames = get_hcp_data(data_dir=data_dir, raw=True)

    masker = NiftiMasker(mask_img=mask, smoothing_fwhm=None,
                         standardize=False)
    masker.fit()

    # Use the Smith 2009 70-component RSN atlas, rescaled in place by
    # enet_scale, as the initial dictionary
    rsn70 = fetch_atlas_smith_2009().rsn70
    components = masker.transform(rsn70)
    print(components.shape)
    enet_scale(components.T, inplace=True)
    print(np.sum(np.abs(components), axis=1))

    # Assemble the held-out test matrix from the last n_test_records records
    test_data = func_filenames[(-n_test_records * 2)::2]
    n_samples, n_voxels = np.load(test_data[-1], mmap_mode='r').shape
    X = np.empty((n_test_records * n_samples, n_voxels))
    for i, this_data in enumerate(test_data):
        X[i * n_samples:(i + 1) * n_samples] = np.load(this_data,
                                                       mmap_mode='r')

    # Objective value of the initial dictionary on test data, for each alpha
    exp_var = {}
    for alpha in [1e-2, 1e-3, 1e-4]:
        exp_var[alpha] = objective_function(X, components, alpha)
    with open(join(output_dir, 'init_objective.json'), 'w') as f:
        json.dump(exp_var, f)
# Author: Arthur Mensch
# License: BSD
# Adapted from nilearn example
import time
from os.path import expanduser

import numpy as np
from nilearn.datasets import fetch_atlas_smith_2009

from modl._utils.system.mkl import num_threads
from modl.datasets.hcp import get_hcp_data
from modl.spca_fmri import SpcaFmri

mask, func_filenames = get_hcp_data(data_dir='/storage/data')
func_filenames = func_filenames[:2]

# Apply our decomposition estimator with reduction
n_components = 70
n_jobs = 20
raw = True
init = True

dict_fact = SpcaFmri(mask=mask,
                     smoothing_fwhm=3,
                     shelve=not raw,
                     n_components=n_components,
                     dict_init=fetch_atlas_smith_2009().rsn70
                     if init else None,
                     reduction=12,
                     alpha=0.001,
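                     # Hypothetical continuation (assumption): close the
                     # constructor and fit the estimator. The n_jobs/verbose
                     # keywords and the fit/timing calls follow the nilearn
                     # decomposition-estimator convention and are a sketch,
                     # not necessarily the original script's continuation.
                     n_jobs=n_jobs,
                     verbose=4)

t0 = time.time()
dict_fact.fit(func_filenames)
print('Fitting time: %.2f s' % (time.time() - t0))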