# Imports inferred from usage; the excerpt omits the script header.
# read_prf_stimuli() and make_model() are project-local helpers defined
# elsewhere in the same script.
from multiprocessing import Pool

from nibabel import load
from numpy import array, ndindex

import popeye.og as og
import popeye.utilities as utils


def compute(in_file, n_vols, out_file):

    print('reading stimuli')
    st = read_prf_stimuli(n_vols)

    print('making model')
    model, grids, bounds = make_model(st)

    # load the 4D functional volume
    nii = load(in_file)
    data = nii.get_data()

    # flatten to (n_voxels, n_timepoints) and remember each voxel's
    # 3D coordinates so results can be recast into the volume later
    indices = array(list(ndindex(data.shape[:3])))
    x = data.reshape((-1, data.shape[-1]))

    # keep only voxels whose mean signal clears the intensity threshold
    good_voxels = x.mean(axis=1) > 2500
    x = x[good_voxels, :]
    indices = indices[good_voxels, :]

    # bundle one argument tuple per voxel for the worker processes
    bundle = utils.multiprocess_bundle(og.GaussianFit, model, x, grids,
                                       bounds, indices, auto_fit=True,
                                       verbose=1, Ns=3)

    print('starting computation')
    with Pool(40) as pool:
        output = pool.map(utils.parallel_fit, bundle)

    # write the fitted parameters back out as a NIfTI volume
    nif = utils.recast_estimation_results(output, nii)
    nif.to_filename(out_file)
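# Usage sketch (assumption): how compute() might be driven from the
# command line. The argument wiring and file names are hypothetical;
# the original excerpt shows only the function body.
if __name__ == '__main__':
    import sys

    # e.g. python fit_prf.py bold.nii.gz 240 prf_estimates.nii.gz
    in_file, n_vols, out_file = sys.argv[1], int(sys.argv[2]), sys.argv[3]
    compute(in_file, n_vols, out_file)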
# Shared imports for the two tests below, inferred from usage; the
# excerpt omits the test module's header.
import ctypes

import nibabel
import numpy as np
import numpy.testing as npt
import sharedmem

import popeye.og as og
import popeye.utilities as utils
from popeye.visual_stimulus import VisualStimulus, simulate_bar_stimulus


def test_parallel_fit_manual_grids():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0, 360, 45)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.10
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    voxel_index = (1, 2, 3)
    auto_fit = True
    verbose = 1

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance,
                                screen_width, thetas, num_bar_steps,
                                num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width,
                              scale_factor, tr_length, dtype)

    # initialize the Gaussian model
    model = og.GaussianModel(stimulus, utils.double_gamma_hrf)
    model.hrf_delay = 0

    # choose ground-truth pRF parameters
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = 2.5
    baseline = -0.25

    # create the "data" from the known parameters
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set the search grid using plain slice objects ("manual grids")
    x_grid = slice(-5, 4, 5)
    y_grid = slice(-5, 7, 5)
    s_grid = slice(1 / stimulus.ppd, 5.25, 5)
    b_grid = slice(0.1, 4.0, 5)

    # set the search bounds
    x_bound = (-12.0, 12.0)
    y_bound = (-12.0, 12.0)
    s_bound = (1 / stimulus.ppd, 12.0)
    b_bound = (1e-8, 1e2)
    m_bound = (None, None)

    # pack the grids and bounds for the fitter
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # make 3 identical voxels
    all_data = np.array([data, data, data])
    indices = [(1, 2, 3)] * 3

    # bundle the voxels
    bundle = utils.multiprocess_bundle(og.GaussianFit, model, all_data,
                                       grids, bounds, indices)

    # run the analysis across 3 cores
    with sharedmem.Pool(np=3) as pool:
        output = pool.map(utils.parallel_fit, bundle)

    # assert that each fit recovers the ground-truth parameters
    for fit in output:
        npt.assert_almost_equal(fit.x, x, 2)
        npt.assert_almost_equal(fit.y, y, 2)
        npt.assert_almost_equal(fit.sigma, sigma, 2)
        npt.assert_almost_equal(fit.beta, beta, 2)
        npt.assert_almost_equal(fit.baseline, baseline, 2)
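# Note on the "manual" grids above (an assumption about intent, based on
# numpy/scipy grid-search semantics): in a plain slice the third field
# is a step size, whereas popeye's grid_slice helper (used in the next
# test) is parameterized by a point count. np.mgrid shows the difference:
import numpy as np

coarse = np.mgrid[slice(-5, 4, 5)]   # step of 5 -> just array([-5, 0])
dense = np.mgrid[slice(-5, 4, 5j)]   # complex step -> 5 evenly spaced
                                     # points from -5 to 4, inclusive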
def test_recast_estimation_results():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0, 360, 45)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.10
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    voxel_index = (1, 2, 3)
    auto_fit = True
    verbose = 1

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance,
                                screen_width, thetas, num_bar_steps,
                                num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width,
                              scale_factor, tr_length, dtype)

    # initialize the Gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0

    # choose ground-truth pRF parameters
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = 2.5
    baseline = -0.25

    # create the "data" from the known parameters
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set the search grid via the grid_slice helper
    x_grid = utils.grid_slice(-5, 4, 5)
    y_grid = utils.grid_slice(-5, 7, 5)
    s_grid = utils.grid_slice(1 / stimulus.ppd, 5.25, 5)
    b_grid = utils.grid_slice(0.1, 4.0, 5)

    # set the search bounds
    x_bound = (-12.0, 12.0)
    y_bound = (-12.0, 12.0)
    s_bound = (1 / stimulus.ppd, 12.0)
    b_bound = (1e-8, 1e2)
    m_bound = (None, None)

    # pack the grids and bounds for the fitter
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # create 3 identical voxels of data
    all_data = np.array([data, data, data])
    indices = [(0, 0, 0), (0, 0, 1), (0, 0, 2)]

    # bundle the voxels
    bundle = utils.multiprocess_bundle(og.GaussianFit, model, all_data,
                                       grids, bounds, indices)

    # run the analysis across 3 cores
    with sharedmem.Pool(np=3) as pool:
        output = pool.map(utils.parallel_fit, bundle)

    # create a grid parent matching the voxel indices above
    arr = np.zeros((1, 1, 3))
    grid_parent = nibabel.Nifti1Image(arr, np.eye(4, 4))

    # recast the estimation results into the volume
    nif = utils.recast_estimation_results(output, grid_parent)
    dat = nif.get_data()

    # assert equivalence in Cartesian coordinates
    npt.assert_almost_equal(np.mean(dat[..., 0]), x)
    npt.assert_almost_equal(np.mean(dat[..., 1]), y)
    npt.assert_almost_equal(np.mean(dat[..., 2]), sigma)
    npt.assert_almost_equal(np.mean(dat[..., 3]), beta)
    npt.assert_almost_equal(np.mean(dat[..., 4]), baseline)

    # recast the estimation results - OVERLOADED (polar coordinates)
    nif = utils.recast_estimation_results(output, grid_parent, True)
    dat = nif.get_data()

    # assert equivalence in polar coordinates
    npt.assert_almost_equal(np.mean(dat[..., 0]), np.arctan2(y, x), 2)
    npt.assert_almost_equal(np.mean(dat[..., 1]), np.sqrt(x**2 + y**2), 2)
    npt.assert_almost_equal(np.mean(dat[..., 2]), sigma)
    npt.assert_almost_equal(np.mean(dat[..., 3]), beta)
    npt.assert_almost_equal(np.mean(dat[..., 4]), baseline)
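# Worked check of the polar conversion exercised by the overloaded recast
# above: when the third argument is True, the first two output volumes
# hold polar angle and eccentricity derived from the Cartesian estimates.
# Plain NumPy, using the ground-truth values from the test:
import numpy as np

x, y = -5.24, 2.58
polar_angle = np.arctan2(y, x)        # ~2.68 radians
eccentricity = np.sqrt(x**2 + y**2)   # ~5.84, in the model's spatial units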
############################################################################
#
#   actual fitting
#
############################################################################

# one (x, 0, 0) index per vertex/voxel along the data's second axis
voxel_indices = [(xx, 0, 0) for xx in np.arange(data.shape[1])]
print("starting fitting of {subject}, hemi {hemi}".format(subject=subject,
                                                          hemi=hemi))

bundle = utils.multiprocess_bundle(Fit=css.CompressiveSpatialSummationFit,
                                   model=css_model, data=data.T,
                                   grids=css_grids, bounds=css_bounds,
                                   indices=voxel_indices, auto_fit=True,
                                   verbose=1, Ns=12)

# run the analysis
pool = multiprocessing.Pool(N_PROCS)
output = pool.map(utils.parallel_fit, bundle)

# store the six CSS parameters and the goodness of fit for each voxel
for fit in output:
    estimates[:6, fit.voxel_index[0]] = fit.estimate
    estimates[6, fit.voxel_index[0]] = fit.rsquared

# free up memory by closing the pool and joining it with the main thread
pool.close()
pool.join()
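# Allocation sketch (assumption): `estimates` is created earlier in the
# script, outside this excerpt. Given the writes above, it would need
# seven rows per voxel, six CSS parameters plus one R^2 value, e.g.:
estimates = np.zeros((7, data.shape[1]))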
         zorder=2)  # tail of a preceding plt.plot call, truncated in this excerpt

plt.xticks(fontsize=16)
plt.yticks(fontsize=16)
plt.xlabel('Time', fontsize=18)
plt.ylabel('Amplitude', fontsize=18)
plt.xlim(0, len(fit.data))
plt.legend(loc=0)

## multiprocess 3 voxels
data = [data, data, data]
indices = ([1, 2, 3], [4, 6, 5], [7, 8, 9])
bundle = utils.multiprocess_bundle(og.GaussianFit, model, data, grids,
                                   bounds, indices, auto_fit=True,
                                   verbose=1, Ns=3)

## run
print("popeye will analyze %d voxels across %d cores" % (len(bundle), 3))
with sharedmem.Pool(np=3) as pool:
    t1 = datetime.datetime.now()
    output = pool.map(utils.parallel_fit, bundle)
    t2 = datetime.datetime.now()
    delta = t2 - t1
    print("popeye multiprocessing finished in %s.%s seconds"
          % (delta.seconds, delta.microseconds))
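# For comparison, a serial baseline (a sketch; the excerpt only times the
# multiprocessing path). Each element of the bundle is exactly the
# argument utils.parallel_fit receives from pool.map, so a plain list
# comprehension gives the single-core timing:
bundle = utils.multiprocess_bundle(og.GaussianFit, model, data, grids,
                                   bounds, indices, auto_fit=True,
                                   verbose=1, Ns=3)
t1 = datetime.datetime.now()
serial_output = [utils.parallel_fit(args) for args in bundle]
delta = datetime.datetime.now() - t1
print("popeye serial fitting finished in %s.%s seconds"
      % (delta.seconds, delta.microseconds))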