# Assumed imports for these tests (module paths follow popeye's test suite; the auditory
# and spatiotemporal model modules referenced below as `aud` and `strf`, and the
# receptive-field helpers generate_og_receptive_field / generate_2dcos_receptive_field,
# should be imported from wherever they live in the local popeye installation).
import ctypes
import pickle

import numpy as np
import numpy.testing as npt
import nose.tools as nt
import nibabel
import sharedmem
from scipy.signal import chirp
from scipy.integrate import simps

import popeye.utilities as utils
import popeye.og as og
import popeye.dog as dog
from popeye.visual_stimulus import VisualStimulus, simulate_bar_stimulus
from popeye.auditory_stimulus import AuditoryStimulus


def test_brute_force_search_manual_grids():

    # create a parameter to estimate
    params = (10, 10)

    # we need to define some search grids
    grid_1 = utils.grid_slice(5, 15, 5)
    grid_2 = utils.grid_slice(5, 15, 5)
    grids = (grid_1, grid_2,)
    bounds = ()

    # set the verbose level: 0 is silent, 1 is final estimate, 2 is each iteration
    verbose = 0

    # create a simple function to transform the parameters
    func = lambda freq, offset: np.sin(np.linspace(0, 1, 1000) * 2 * np.pi * freq) + offset

    # create a "response"
    response = func(*params)

    # get the ball-park estimate
    p0 = utils.brute_force_search(response, utils.error_function, func, grids, bounds)

    # assert that the estimate is equal to the parameter
    npt.assert_equal(params, p0[0])

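# The grids above are built with utils.grid_slice; a minimal sketch of what that helper is
# assumed to return is shown here: a slice whose complex step gives np.mgrid (and hence
# scipy.optimize.brute-style grid searches) "number of points" semantics. Illustration
# only, not popeye's actual implementation; grid_slice_sketch is a hypothetical name.
def grid_slice_sketch(lo, hi, Ns):
    # complex step => Ns evenly spaced points from lo to hi, endpoints included
    return slice(lo, hi, complex(0, Ns))

# np.mgrid[grid_slice_sketch(5, 15, 5)] -> array([ 5. ,  7.5, 10. , 12.5, 15. ])
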
def fit_popeye(bars, dat):

    stimulus = generate_stimulus(bars)
    model = generate_model(stimulus)

    # set search grid
    x_grid = grid_slice(-10, 10, 5)
    y_grid = grid_slice(-10, 10, 5)
    s_grid = grid_slice(0.25, 5.25, 5)

    # set search bounds
    x_bound = (-12.0, 12.0)
    y_bound = (-12.0, 12.0)
    s_bound = (0.001, 12.0)
    b_bound = (1e-8, None)
    m_bound = (None, None)

    # pack the grids and bounds for the GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # fit the response
    fit = GaussianFit(model, dat, grids, bounds)

    return fit

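# A usage sketch for fit_popeye, assuming `bars` is whatever stimulus array the
# generate_stimulus()/generate_model() helpers above expect and `dat` is a single voxel's
# BOLD time series. The .npy file names are hypothetical, and the x/y/sigma attributes
# follow the GaussianFit usage in the tests below.
import numpy as np

bars = np.load('bar_apertures.npy')    # hypothetical stimulus aperture movie
dat = np.load('voxel_timeseries.npy')  # hypothetical single-voxel time series

fit = fit_popeye(bars, dat)
print(fit.x, fit.y, fit.sigma)         # pRF center and size, in degrees of visual angle
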
def test_resurrect_model():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0,360,90)
    thetas = np.insert(thetas,0,-1)
    thetas = np.append(thetas,-1)
    num_blank_steps = 20
    num_bar_steps = 20
    ecc = 10
    tr_length = 1.5
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_across = 100
    pixels_down = 100
    dtype = ctypes.c_int16

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.01)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # set cache grids
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.55, 5.25, 5)
    grids = (x_grid, y_grid, s_grid,)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0
    model.mask_size = 5

    # seed rng
    np.random.seed(4932)

    # cache the model
    cache = model.cache_model(grids, ncpus=3)

    # resurrect cached model
    cached_model_path = '/tmp/og_cached_model.pkl'
    model = og.GaussianModel(stimulus, utils.double_gamma_hrf, cached_model_path=cached_model_path)
    model.hrf_delay = 0
    model.mask_size = 5

    # make sure the same
    nt.assert_true(np.sum([c[0] for c in cache] - model.cached_model_timeseries) == 0)
    nt.assert_true(np.sum([c[1] for c in cache] - model.cached_model_parameters) == 0)

def test_cache_model_slice():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0,360,90)
    thetas = np.insert(thetas,0,-1)
    thetas = np.append(thetas,-1)
    num_blank_steps = 20
    num_bar_steps = 20
    ecc = 10
    tr_length = 1.5
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_across = 100
    pixels_down = 100
    dtype = ctypes.c_int16

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.01)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0
    model.mask_size = 5

    # set cache grids
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.55, 5.25, 5)
    grids = (x_grid, y_grid, s_grid,)

    # seed rng
    np.random.seed(4932)

    # cache the pRF model
    cache = model.cache_model(grids, ncpus=3)

    # save it out
    pickle.dump(cache, open('/tmp/og_cached_model.pkl', 'wb'))

    # make sure it's the right size
    cached_model = pickle.load(open('/tmp/og_cached_model.pkl', 'rb'))
    nt.assert_equal(np.sum([c[0] for c in cache]), np.sum([c[0] for c in cached_model]))

def test_grid_slice():

    # test this case
    from_1 = 5
    to_1 = 15
    from_2 = 0
    to_2 = 2
    Ns = 5

    # set a parameter to estimate
    params = (10, 1)

    # see if we properly tile the parameter space for this Ns
    grid_1 = utils.grid_slice(from_1, to_1, Ns)
    grid_2 = utils.grid_slice(from_2, to_2, Ns)
    grids = (grid_1, grid_2)

    # unbounded
    bounds = ()

    # create a simple function to generate a response from the parameter
    func = lambda freq, offset: np.sin(np.linspace(0, 1, 1000) * 2 * np.pi * freq) + offset

    # create a "response"
    response = func(*params)

    # get the ball-park estimate
    p0 = utils.brute_force_search(response, utils.error_function, func, grids, bounds)

    # make sure we fit right
    npt.assert_equal(params, p0[0])

    # make sure we sliced it right
    npt.assert_equal(p0[2][0].min(), from_1)
    npt.assert_equal(p0[2][0].max(), to_1)
    npt.assert_equal(p0[2][1].min(), from_2)
    npt.assert_equal(p0[2][1].max(), to_2)

def test_xval():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.array([-1, 0, 90, 180, 270, -1])
    num_blank_steps = 30
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.10
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    voxel_index = (1,2,3)
    auto_fit = True
    verbose = 1

    # rng
    np.random.seed(2764932)

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0

    # generate a random pRF estimate
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = 2.5
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.5, 3.25, 5)

    # set search bounds
    x_bound = (-12.0,12.0)
    y_bound = (-12.0,12.0)
    s_bound = (0.001,12.0)
    b_bound = (1e-8,None)
    m_bound = (None,None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # pack multiple "runs"
    data = np.vstack((data,data))

    # make it a singular "voxel"
    data = np.reshape(data, (1,data.shape[0],data.shape[1]))

    # set bootstraps and resamples
    bootstraps = 2
    kfolds = 2

    # make fodder
    bundle = utils.xval_bundle(bootstraps, kfolds, og.GaussianFit, model, data, grids, bounds, np.tile((1,2,3),(3,1)))

    # test
    for b in bundle:
        fit = utils.parallel_xval(b)
        npt.assert_almost_equal(fit.rss, 0)
        npt.assert_equal(fit.cod, 100.0)
        npt.assert_(np.all(fit.tst_data == fit.trn_data))
        npt.assert_(np.all(fit.tst_idx != fit.trn_idx))

def test_og_fit():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0, 360, 90)
    thetas = np.insert(thetas, 0, -1)
    thetas = np.append(thetas, -1)
    num_blank_steps = 30
    num_bar_steps = 30
    ecc = 12
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 1.0
    pixels_across = 100
    pixels_down = 100
    dtype = ctypes.c_int16

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0
    model.mask_size = 6

    # generate a random pRF estimate
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = 2.5
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.25, 5.25, 5)

    # set search bounds
    x_bound = (-12.0, 12.0)
    y_bound = (-12.0, 12.0)
    s_bound = (0.001, 12.0)
    b_bound = (1e-8, None)
    m_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # fit the response
    fit = og.GaussianFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [-5., 5., 2.75]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.s0), ballpark)

    # the baseline/beta should be 0/1 when regressed data vs. estimate
    (m, b) = np.polyfit(fit.scaled_ballpark_prediction, data, 1)
    npt.assert_almost_equal(m, 1.0)
    npt.assert_almost_equal(b, 0.0)

    # assert equivalence
    npt.assert_almost_equal(fit.x, x)
    npt.assert_almost_equal(fit.y, y)
    npt.assert_almost_equal(fit.sigma, sigma)
    npt.assert_almost_equal(fit.beta, beta)

    # test receptive field
    rf = generate_og_receptive_field(x, y, sigma, fit.model.stimulus.deg_x, fit.model.stimulus.deg_y)
    rf /= (2 * np.pi * sigma**2) * 1 / np.diff(model.stimulus.deg_x[0, 0:2])**2
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    npt.assert_almost_equal(np.round(fit.model.generate_receptive_field(x, y, sigma).sum()), np.round(fit.receptive_field.sum()))

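# For reference, a sketch of the isotropic 2D Gaussian that generate_og_receptive_field is
# assumed to evaluate over the stimulus grid (deg_x, deg_y) in the receptive-field checks
# above; unnormalized, which is why the test rescales it before comparing sums.
# Illustration only; og_receptive_field_sketch is a hypothetical name.
def og_receptive_field_sketch(x0, y0, sigma, deg_x, deg_y):
    # unit-amplitude Gaussian centered at (x0, y0) with width sigma, in degrees
    return np.exp(-((deg_x - x0)**2 + (deg_y - y0)**2) / (2 * sigma**2))
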
def test_recast_estimation_results():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0,360,45)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.10
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    voxel_index = (1,2,3)
    auto_fit = True
    verbose = 1

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0

    # generate a random pRF estimate
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = 2.5
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-5,4,5)
    y_grid = utils.grid_slice(-5,7,5)
    s_grid = utils.grid_slice(1/stimulus.ppd,5.25,5)
    b_grid = utils.grid_slice(0.1,4.0,5)

    # set search bounds
    x_bound = (-12.0,12.0)
    y_bound = (-12.0,12.0)
    s_bound = (1/stimulus.ppd,12.0)
    b_bound = (1e-8,1e2)
    m_bound = (None,None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # create 3 voxels of data
    all_data = np.array([data,data,data])
    indices = [(0,0,0),(0,0,1),(0,0,2)]

    # bundle the voxels
    bundle = utils.multiprocess_bundle(og.GaussianFit, model, all_data, grids, bounds, indices)

    # run analysis
    with sharedmem.Pool(np=3) as pool:
        output = pool.map(utils.parallel_fit, bundle)

    # create grid parent
    arr = np.zeros((1,1,3))
    grid_parent = nibabel.Nifti1Image(arr, np.eye(4,4))

    # recast the estimation results
    nif = utils.recast_estimation_results(output, grid_parent)
    dat = nif.get_data()

    # assert equivalence
    npt.assert_almost_equal(np.mean(dat[...,0]), x)
    npt.assert_almost_equal(np.mean(dat[...,1]), y)
    npt.assert_almost_equal(np.mean(dat[...,2]), sigma)
    npt.assert_almost_equal(np.mean(dat[...,3]), beta)
    npt.assert_almost_equal(np.mean(dat[...,4]), baseline)

    # recast the estimation results - OVERLOADED
    nif = utils.recast_estimation_results(output, grid_parent, True)
    dat = nif.get_data()

    # assert equivalence
    npt.assert_almost_equal(np.mean(dat[...,0]), np.arctan2(y,x), 2)
    npt.assert_almost_equal(np.mean(dat[...,1]), np.sqrt(x**2+y**2), 2)
    npt.assert_almost_equal(np.mean(dat[...,2]), sigma)
    npt.assert_almost_equal(np.mean(dat[...,3]), beta)
    npt.assert_almost_equal(np.mean(dat[...,4]), baseline)

def test_strf_fit():

    viewing_distance = 38
    screen_width = 25
    thetas = np.tile(np.arange(0,360,90),2)
    thetas = np.insert(thetas,0,-1)
    thetas = np.append(thetas,-1)
    num_blank_steps = 20
    num_bar_steps = 20
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_down = 200
    pixels_across = 200
    dtype = ctypes.c_int16
    Ns = 3
    voxel_index = (1,2,3)
    auto_fit = True
    verbose = 1
    projector_hz = 480
    tau = 0.00875
    mask_size = 5
    hrf = 0.25

    # create the sweeping bar stimulus in memory
    stim = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(stim, viewing_distance, screen_width, scale_factor, tr_length, dtype)
    stimulus.fps = projector_hz
    flicker_vec = np.zeros_like(stim[0,0,:]).astype('uint8')
    flicker_vec[1*20:5*20] = 1
    flicker_vec[5*20:9*20] = 2
    stimulus.flicker_vec = flicker_vec
    stimulus.flicker_hz = [10,20]

    # initialize the gaussian model
    model = strf.SpatioTemporalModel(stimulus, utils.spm_hrf)
    model.tau = tau
    model.hrf_delay = hrf
    model.mask_size = mask_size

    # generate a random pRF estimate
    x = -2.24
    y = 1.58
    sigma = 1.23
    weight = 0.90
    beta = 1.0
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, weight, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-8.0,7.0,5)
    y_grid = utils.grid_slice(-8.0,7.0,5)
    s_grid = utils.grid_slice(0.75,3.0,5)
    w_grid = utils.grid_slice(0.05,0.95,5)

    # set search bounds
    x_bound = (-10,10)
    y_bound = (-10,10)
    s_bound = (1/stimulus.ppd,10)
    w_bound = (1e-8,1.0)
    b_bound = (1e-8,1e5)
    u_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid, w_grid,)
    bounds = (x_bound, y_bound, s_bound, w_bound, b_bound, u_bound)

    # fit the response
    fit = strf.SpatioTemporalFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [-0.5, 3.25, 2.4375, 0.94999999999999996, 1.0292, -0.24999999999999992]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.sigma0, fit.weight0, fit.beta0, fit.baseline0), ballpark, 4)

    # fine fit
    npt.assert_almost_equal(fit.x, x, 2)
    npt.assert_almost_equal(fit.y, y, 2)
    npt.assert_almost_equal(fit.sigma, sigma, 2)
    npt.assert_almost_equal(fit.weight, weight, 2)
    npt.assert_almost_equal(fit.beta, beta, 2)
    npt.assert_almost_equal(fit.baseline, baseline, 2)

    # overloaded
    npt.assert_almost_equal(fit.overloaded_estimate, [2.5272803, 2.7411676, 1.23, 0.9, 1., -0.25], 2)

    m_rf = fit.model.m_rf(fit.model.tau)
    p_rf = fit.model.p_rf(fit.model.tau)
    npt.assert_almost_equal(simps(np.abs(m_rf)), simps(p_rf), 2)

    # responses
    m_resp = fit.model.generate_m_resp(fit.model.tau)
    p_resp = fit.model.generate_p_resp(fit.model.tau)
    npt.assert_(np.max(m_resp,0)[0] < np.max(m_resp,0)[1])
    npt.assert_(np.max(p_resp,0)[0] > np.max(p_resp,0)[1])

    # amps
    npt.assert_(fit.model.m_amp[0] < fit.model.m_amp[1])
    npt.assert_(fit.model.p_amp[0] > fit.model.p_amp[1])

    # receptive field
    rf = generate_og_receptive_field(x, y, sigma, fit.model.stimulus.deg_x, fit.model.stimulus.deg_y)
    rf /= (2 * np.pi * sigma**2) * 1/np.diff(model.stimulus.deg_x[0,0:2])**2
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    npt.assert_almost_equal(np.round(fit.model.generate_receptive_field(x,y,sigma).sum()), np.round(fit.receptive_field.sum()))

def test_auditory_hrf_fit():

    # stimulus features
    duration = 30  # seconds
    Fs = int(44100/2)  # Hz
    lo_freq = 200.0  # Hz
    hi_freq = 10000.0  # Hz
    tr_length = 1.0  # seconds
    clip_number = 0  # TRs
    dtype = ctypes.c_double

    # fit settings
    auto_fit = True
    verbose = 1
    debug = False
    Ns = 10

    # generate auditory stimulus
    time = np.linspace(0, duration, duration*Fs)
    ch = chirp(time, lo_freq, duration, hi_freq, method='logarithmic')
    signal = np.tile(np.concatenate((ch, ch[::-1])), 5)
    blank = np.zeros((30*Fs))
    signal = np.concatenate((blank, signal, blank), -1)

    # instantiate an instance of the Stimulus class
    stimulus = AuditoryStimulus(signal, Fs, tr_length, dtype)  ### stimulus

    # initialize the gaussian model
    model = aud.AuditoryModel(stimulus, utils.spm_hrf)  ### model
    model.hrf_delay = 0

    # invent pRF estimate
    center_freq_hz = 987
    sigma_hz = 123
    center_freq = np.log10(center_freq_hz)
    sigma = np.log10(sigma_hz)
    hrf_delay = 1.25
    beta = 2.4
    baseline = 0.59

    # generate data
    data = model.generate_prediction(center_freq, sigma, hrf_delay, beta, baseline)

    # search grids
    c_grid = utils.grid_slice(np.log10(300), np.log10(1000), Ns)
    s_grid = utils.grid_slice(np.log10(100), np.log10(500), Ns)
    h_grid = utils.grid_slice(-1, 1, Ns)
    grids = (c_grid, s_grid, h_grid,)

    # search bounds
    c_bound = (np.log10(lo_freq), np.log10(hi_freq))
    s_bound = (np.log10(50), np.log10(hi_freq))
    h_bound = (-2, 2)
    b_bound = (1e-8, None)
    m_bound = (None, None)
    bounds = (c_bound, s_bound, h_bound, b_bound, m_bound)

    # fit it
    fit = aud.AuditoryFit(model, data, grids, bounds, Ns=Ns)

    # grid fit
    npt.assert_almost_equal(fit.center_freq0, 3)
    npt.assert_almost_equal(fit.sigma0, 2)
    npt.assert_almost_equal(fit.hrf0, 1.2222222222222223)
    npt.assert_almost_equal(fit.beta0, 2.3404365192849017)
    npt.assert_almost_equal(fit.baseline0, 1.416)

    # final fit
    npt.assert_almost_equal(fit.center_freq, center_freq)
    npt.assert_almost_equal(fit.sigma, sigma)
    npt.assert_almost_equal(fit.beta, beta)
    npt.assert_almost_equal(fit.baseline, baseline)
    npt.assert_almost_equal(fit.center_freq_hz, center_freq_hz)

    # test receptive field
    rf = np.exp(-((10**fit.model.stimulus.freqs - 10**fit.center_freq)**2) / (2*(10**fit.sigma)**2))
    rf /= (10**fit.sigma*np.sqrt(2*np.pi))
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    rf = np.exp(-((fit.model.stimulus.freqs - fit.center_freq)**2) / (2*fit.sigma**2))
    rf /= (fit.sigma*np.sqrt(2*np.pi))
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field_log10.sum()))

def test_dog():

    # stimulus features
    viewing_distance = 31
    screen_width = 41
    thetas = np.arange(0,360,90)
    # thetas = np.insert(thetas,0,-1)
    # thetas = np.append(thetas,-1)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    auto_fit = True
    verbose = 0

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = dog.DifferenceOfGaussiansModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0
    model.mask_size = 20

    # set the pRF params
    x = 2.2
    y = 2.5
    sigma = 0.90
    sigma_ratio = 1.5
    volume_ratio = 0.5
    beta = 0.25
    baseline = -0.10

    # create "data"
    data = model.generate_prediction(x, y, sigma, sigma_ratio, volume_ratio, beta, baseline)

    # set up the grids
    x_grid = utils.grid_slice(-5,5,4)
    y_grid = utils.grid_slice(-5,5,4)
    s_grid = utils.grid_slice(1/stimulus.ppd0*1.10,3.5,4)
    sr_grid = utils.grid_slice(1.0,2.0,4)
    vr_grid = utils.grid_slice(0.10,0.90,4)
    grids = (x_grid, y_grid, s_grid, sr_grid, vr_grid,)

    # set up the bounds
    x_bound = (-ecc,ecc)
    y_bound = (-ecc,ecc)
    s_bound = (1/stimulus.ppd,5)
    sr_bound = (1.0,None)
    vr_bound = (1e-8,1.0)
    bounds = (x_bound, y_bound, s_bound, sr_bound, vr_bound,)

    # fit it
    fit = dog.DifferenceOfGaussiansFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [1.666666666666667, 1.666666666666667, 2.8243187483428391, 1.9999999999999998, 0.10000000000000001, 0.3639449, -0.025000000000000022]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.s0, fit.sr0, fit.vr0, fit.beta0, fit.baseline0), ballpark)

    # fine fit
    npt.assert_almost_equal(fit.x, x, 2)
    npt.assert_almost_equal(fit.y, y, 2)
    npt.assert_almost_equal(fit.sigma, sigma, 2)
    npt.assert_almost_equal(fit.sigma_ratio, sigma_ratio, 1)
    npt.assert_almost_equal(fit.volume_ratio, volume_ratio, 1)

    # test the RF
    rf = fit.model.receptive_field(*fit.estimate[0:-2])
    est = fit.estimate[0:-2].copy()
    rf_new = fit.model.receptive_field(*est)
    value_1 = np.sqrt(simps(simps(rf)))
    value_2 = np.sqrt(simps(simps(rf_new)))
    nt.assert_almost_equal(value_1, value_2)

    # polar coordinates
    npt.assert_almost_equal([fit.theta, fit.rho], [np.arctan2(y,x), np.sqrt(x**2+y**2)], 5)

def test_bounded_amplitude_failure():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0, 360, 90)
    thetas = np.insert(thetas, 0, -1)
    thetas = np.append(thetas, -1)
    num_blank_steps = 30
    num_bar_steps = 30
    ecc = 12
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 1.0
    pixels_across = 100
    pixels_down = 100
    dtype = ctypes.c_int16

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.double_gamma_hrf, utils.percent_change)
    model.hrf_delay = 0
    model.mask_size = 6

    # generate a random pRF estimate
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = -0.25
    baseline = 0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.25, 5.25, 5)

    # set search bounds
    x_bound = (-12.0, 12.0)
    y_bound = (-12.0, 12.0)
    s_bound = (0.001, 12.0)
    b_bound = (1e-8, None)
    m_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # fit the response
    fit = og.GaussianFit(model, data, grids, bounds)
    nt.assert_true(fit.model.bounded_amplitude)
    nt.assert_true(fit.slope > 0)
    nt.assert_true(fit.beta0 > 0)
    nt.assert_true(fit.beta > 0)
    nt.assert_true(fit.beta != beta)

def test_negative_og_fit():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0,360,90)
    thetas = np.insert(thetas,0,-1)
    thetas = np.append(thetas,-1)
    num_blank_steps = 30
    num_bar_steps = 30
    ecc = 12
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 1.0
    pixels_across = 100
    pixels_down = 100
    dtype = ctypes.c_int16

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.double_gamma_hrf, utils.percent_change)
    model.hrf_delay = 0
    model.mask_size = 6

    # generate a random pRF estimate
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = -0.25
    baseline = 0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-10,10,5)
    y_grid = utils.grid_slice(-10,10,5)
    s_grid = utils.grid_slice(0.25,5.25,5)

    # set search bounds
    x_bound = (-12.0,12.0)
    y_bound = (-12.0,12.0)
    s_bound = (0.001,12.0)
    b_bound = (None,None)
    m_bound = (None,None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # fit the response
    fit = og.GaussianFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [-5.0, 5.0, 2.75, -0.27940915461573274, -0.062499999999999993]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.s0, fit.beta0, fit.baseline0), ballpark)

    # assert equivalence
    npt.assert_almost_equal(fit.x, x, 2)
    npt.assert_almost_equal(fit.y, y, 2)
    npt.assert_almost_equal(fit.sigma, sigma, 2)
    npt.assert_almost_equal(fit.beta, beta, 2)
    nt.assert_false(fit.model.bounded_amplitude)
    nt.assert_true(fit.slope < 0)
    nt.assert_true(fit.beta0 < 0)
    nt.assert_true(fit.beta < 0)

    # test receptive field
    rf = generate_og_receptive_field(x, y, sigma, fit.model.stimulus.deg_x, fit.model.stimulus.deg_y)
    rf /= (2 * np.pi * sigma**2) * 1/np.diff(model.stimulus.deg_x[0,0:2])**2
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    npt.assert_almost_equal(np.round(fit.model.generate_receptive_field(x,y,sigma).sum()), np.round(fit.receptive_field.sum()))

# def test_og_nuisance_fit():
#
#     # stimulus features
#     viewing_distance = 38
#     screen_width = 25
#     thetas = np.arange(0,360,90)
#     thetas = np.insert(thetas,0,-1)
#     thetas = np.append(thetas,-1)
#     num_blank_steps = 30
#     num_bar_steps = 30
#     ecc = 12
#     tr_length = 1.0
#     frames_per_tr = 1.0
#     scale_factor = 1.0
#     pixels_across = 200
#     pixels_down = 200
#     dtype = ctypes.c_int16
#
#     # create the sweeping bar stimulus in memory
#     bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance,
#                                 screen_width, thetas, num_bar_steps, num_blank_steps, ecc)
#
#     # create an instance of the Stimulus class
#     stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)
#
#     # initialize the gaussian model
#     model = og.GaussianModel(stimulus, utils.double_gamma_hrf)
#     model.hrf_delay = 0
#
#     # generate a random pRF estimate
#     x = -5.24
#     y = 2.58
#     sigma = 0.98
#     beta = 2.5
#     baseline = 0.0
#
#     # create the "data"
#     data = model.generate_prediction(x, y, sigma, beta, baseline)
#
#     # create nuisance signal
#     step = np.zeros(len(data))
#     step[30:-30] = 1
#
#     # add to data
#     data += step
#
#     # create design matrix
#     nuisance = sm.add_constant(step)
#
#     # recreate model with nuisance
#     model = og.GaussianModel(stimulus, utils.double_gamma_hrf, nuisance)
#     model.hrf_delay = 0
#
#     # set search grid
#     x_grid = (-7,7)
#     y_grid = (-7,7)
#     s_grid = (0.25,3.25)
#
#     # set search bounds
#     x_bound = (-10.0,10.0)
#     y_bound = (-10.0,10.0)
#     s_bound = (0.001,10.0)
#     b_bound = (1e-8,None)
#     m_bound = (None, None)
#
#     # loop over each voxel and set up a GaussianFit object
#     grids = (x_grid, y_grid, s_grid,)
#     bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)
#
#     # fit the response
#     fit = og.GaussianFit(model, data, grids, bounds, Ns=3)
#
#     # assert equivalence
#     nt.assert_almost_equal(fit.x, x, 1)
#     nt.assert_almost_equal(fit.y, y, 1)
#     nt.assert_almost_equal(fit.sigma, sigma, 1)
#     nt.assert_almost_equal(fit.beta, beta, 1)

def test_strf_css_fit():

    viewing_distance = 38
    screen_width = 25
    thetas = np.tile(np.arange(0,360,90),2)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    Ns = 3
    voxel_index = (1,2,3)
    auto_fit = True
    verbose = 1
    projector_hz = 480
    tau = 0.00875
    mask_size = 5
    hrf = 0.25

    # create the sweeping bar stimulus in memory
    stim1 = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.33)

    # create the sweeping bar stimulus in memory
    stim2 = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.0001)

    stim = np.concatenate((stim1,stim2),-1)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(stim, viewing_distance, screen_width, scale_factor, tr_length, dtype)
    stimulus.fps = projector_hz
    flicker_vec = np.zeros_like(stim1[0,0,:]).astype('uint8')
    flicker_vec[1*20:5*20] = 1
    flicker_vec[5*20:9*20] = 2
    flicker_vec = np.tile(flicker_vec,2)
    stimulus.flicker_vec = flicker_vec
    stimulus.flicker_hz = [10,20,10,20]

    # initialize the gaussian model
    model = strf.SpatioTemporalModel(stimulus, utils.spm_hrf)
    model.tau = tau
    model.hrf_delay = hrf
    model.mask_size = mask_size

    # generate a random pRF estimate
    x = -2.24
    y = 1.58
    sigma = 1.23
    n = 0.90
    weight = 0.95
    beta = 0.88
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, n, weight, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-8.0,7.0,4)
    y_grid = utils.grid_slice(-8.0,7.0,4)
    s_grid = utils.grid_slice(0.75,3.0,4)
    n_grid = utils.grid_slice(0.25,0.95,4)
    w_grid = utils.grid_slice(0.25,0.95,4)

    # set search bounds
    x_bound = (-10,10)
    y_bound = (-10,10)
    s_bound = (1/stimulus.ppd,10)
    n_bound = (1e-8,1.0-1e-8)
    w_bound = (1e-8,1.0-1e-8)
    b_bound = (1e-8,1e5)
    u_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid, n_grid, w_grid,)
    bounds = (x_bound, y_bound, s_bound, n_bound, w_bound, b_bound, u_bound)

    # fit the response
    fit = strf.SpatioTemporalFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [-3.0, 2.0, 1.5, 0.95, 0.95, 0.88574075, -0.25]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.sigma0, fit.n0, fit.weight0, fit.beta0, fit.baseline0), ballpark)

    # fine fit
    npt.assert_almost_equal(fit.x, x, 2)
    npt.assert_almost_equal(fit.y, y, 2)
    npt.assert_almost_equal(fit.sigma, sigma, 1)
    npt.assert_almost_equal(fit.n, n, 2)
    npt.assert_almost_equal(fit.weight, weight, 2)
    npt.assert_almost_equal(fit.beta, beta, 2)
    npt.assert_almost_equal(fit.baseline, baseline, 2)

    # overloaded
    npt.assert_almost_equal(fit.overloaded_estimate, [2.5266437, 2.7390143, 1.3014282, 0.9004958, 0.9499708, 0.8801774], 2)

    # rfs
    m_rf = fit.model.m_rf(fit.model.tau)
    p_rf = fit.model.p_rf(fit.model.tau)
    npt.assert_almost_equal(simps(np.abs(m_rf)), simps(p_rf), 5)

    # responses
    m_resp = fit.model.generate_m_resp(fit.model.tau)
    p_resp = fit.model.generate_p_resp(fit.model.tau)
    npt.assert_(np.max(m_resp,0)[0] < np.max(m_resp,0)[1])
    npt.assert_(np.max(p_resp,0)[0] > np.max(p_resp,0)[1])

    # amps
    npt.assert_(fit.model.m_amp[0] < fit.model.m_amp[1])
    npt.assert_(fit.model.p_amp[0] > fit.model.p_amp[1])

    # receptive field
    npt.assert_almost_equal(4.0, fit.receptive_field.sum())

def test_strf_hrf_fit():

    viewing_distance = 38
    screen_width = 25
    thetas = np.tile(np.arange(0, 360, 90), 2)
    thetas = np.insert(thetas, 0, -1)
    thetas = np.append(thetas, -1)
    num_blank_steps = 20
    num_bar_steps = 20
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_down = 200
    pixels_across = 200
    dtype = ctypes.c_int16
    Ns = 3
    voxel_index = (1, 2, 3)
    auto_fit = True
    verbose = 1
    projector_hz = 480
    tau = 0.00875
    mask_size = 5

    # create the sweeping bar stimulus in memory
    stim = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(stim, viewing_distance, screen_width, scale_factor, tr_length, dtype)
    stimulus.fps = projector_hz
    flicker_vec = np.zeros_like(stim[0, 0, :]).astype('uint8')
    flicker_vec[1 * 20:5 * 20] = 1
    flicker_vec[5 * 20:9 * 20] = 2
    stimulus.flicker_vec = flicker_vec
    stimulus.flicker_hz = [10, 20]

    # initialize the gaussian model
    model = strf.SpatioTemporalModel(stimulus, utils.double_gamma_hrf)
    model.tau = tau
    model.mask_size = mask_size

    # generate a random pRF estimate
    x = -2.24
    y = 1.58
    sigma = 1.23
    weight = 0.90
    hrf_delay = -0.13
    beta = 1.0
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, weight, hrf_delay, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-8.0, 7.0, 3)
    y_grid = utils.grid_slice(-8.0, 7.0, 3)
    s_grid = utils.grid_slice(0.75, 3.0, 3)
    w_grid = utils.grid_slice(0.05, 0.95, 3)
    h_grid = utils.grid_slice(-0.25, 0.25, 3)

    # set search bounds
    x_bound = (-10, 10)
    y_bound = (-10, 10)
    s_bound = (1 / stimulus.ppd, 10)
    w_bound = (1e-8, 1.0)
    b_bound = (1e-8, 1e5)
    u_bound = (None, None)
    h_bound = (-2.0, 2.0)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid, w_grid, h_grid,)
    bounds = (x_bound, y_bound, s_bound, w_bound, h_bound, b_bound, u_bound)

    # fit the response
    fit = strf.SpatioTemporalFit(model, data, grids, bounds)

    # coarse fit
    npt.assert_almost_equal((fit.x0, fit.y0, fit.sigma0, fit.weight0, fit.hrf0, fit.beta0, fit.baseline0),
                            [-0.5, -0.5, 3., 0.95, -0.25, 1., 0.02], 2)

    # fine fit
    npt.assert_almost_equal(fit.x, x, 2)
    npt.assert_almost_equal(fit.y, y, 2)
    npt.assert_almost_equal(fit.sigma, sigma, 2)
    npt.assert_almost_equal(fit.weight, weight, 2)
    npt.assert_almost_equal(fit.beta, beta, 2)
    npt.assert_almost_equal(fit.baseline, baseline, 2)

    # overloaded
    npt.assert_almost_equal(fit.overloaded_estimate, [2.53, 2.74, 1.23, 0.9, 5.87, 1., -0.25], 2)

    m_rf = fit.model.m_rf(fit.model.tau)
    p_rf = fit.model.p_rf(fit.model.tau)
    npt.assert_almost_equal(simps(np.abs(m_rf)), simps(p_rf), 5)

    # responses
    m_resp = fit.model.generate_m_resp(fit.model.tau)
    p_resp = fit.model.generate_p_resp(fit.model.tau)
    npt.assert_(np.max(m_resp, 0)[0] < np.max(m_resp, 0)[1])
    npt.assert_(np.max(p_resp, 0)[0] > np.max(p_resp, 0)[1])

    # amps
    npt.assert_(fit.model.m_amp[0] < fit.model.m_amp[1])
    npt.assert_(fit.model.p_amp[0] > fit.model.p_amp[1])

    # receptive field
    npt.assert_almost_equal(4.0, fit.receptive_field.sum())

def test_strf_css_fit():

    viewing_distance = 38
    screen_width = 25
    thetas = np.tile(np.arange(0,360,90),2)
    thetas = np.insert(thetas,0,-1)
    thetas = np.append(thetas,-1)
    num_blank_steps = 20
    num_bar_steps = 20
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    Ns = 3
    voxel_index = (1,2,3)
    auto_fit = True
    verbose = 1
    projector_hz = 480
    tau = 0.00875
    mask_size = 5
    hrf = 0.25

    # create the sweeping bar stimulus in memory
    stim1 = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.33)

    # create the sweeping bar stimulus in memory
    stim2 = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.0001)

    stim = np.concatenate((stim1,stim2),-1)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(stim, viewing_distance, screen_width, scale_factor, tr_length, dtype)
    stimulus.fps = projector_hz
    flicker_vec = np.zeros_like(stim1[0,0,:]).astype('uint8')
    flicker_vec[1*20:5*20] = 1
    flicker_vec[5*20:9*20] = 2
    flicker_vec = np.tile(flicker_vec,2)
    stimulus.flicker_vec = flicker_vec
    stimulus.flicker_hz = [10,20,10,20]

    # initialize the gaussian model
    model = strf.SpatioTemporalModel(stimulus, utils.double_gamma_hrf)
    model.tau = tau
    model.hrf_delay = hrf
    model.mask_size = mask_size

    # generate a random pRF estimate
    x = -2.24
    y = 1.58
    sigma = 1.23
    n = 0.90
    weight = 0.95
    beta = 1.0
    baseline = 0

    # create the "data"
    data = model.generate_prediction(x, y, sigma, n, weight, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-8.0,7.0,3)
    y_grid = utils.grid_slice(-8.0,7.0,3)
    s_grid = utils.grid_slice(0.75,3.0,3)
    n_grid = utils.grid_slice(0.25,0.95,3)
    w_grid = utils.grid_slice(0.25,0.95,3)

    # set search bounds
    x_bound = (-10,10)
    y_bound = (-10,10)
    s_bound = (1/stimulus.ppd,10)
    n_bound = (1e-8,1.0-1e-8)
    w_bound = (1e-8,1.0-1e-8)
    b_bound = (1e-8,1e5)
    u_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid, n_grid, w_grid,)
    bounds = (x_bound, y_bound, s_bound, n_bound, w_bound, b_bound, u_bound)

    # fit the response
    fit = strf.SpatioTemporalFit(model, data, grids, bounds)

    # coarse fit
    npt.assert_almost_equal((fit.x0, fit.y0, fit.sigma0, fit.n0, fit.weight0, fit.beta0, fit.baseline0),
                            [-0.5, -0.5, 1.875, 0.95, 0.95, 1., 0.])

    # fine fit
    npt.assert_almost_equal(fit.x, x, 1)
    npt.assert_almost_equal(fit.y, y, 1)
    npt.assert_almost_equal(fit.sigma, sigma, 1)
    npt.assert_almost_equal(fit.n, n, 1)
    npt.assert_almost_equal(fit.weight, weight, 1)
    npt.assert_almost_equal(fit.beta, beta, 1)
    npt.assert_almost_equal(fit.baseline, baseline, 1)

    # overloaded
    npt.assert_almost_equal(fit.overloaded_estimate, [2.5259863707822303, 2.7330681871539069, 1.3062396482386418, 0.9011492100931614, 0.94990930073215352, 1.0005707740082497], 4)

    # rfs
    m_rf = fit.model.m_rf(fit.model.tau)
    p_rf = fit.model.p_rf(fit.model.tau)
    npt.assert_almost_equal(simps(np.abs(m_rf)), simps(p_rf), 5)

    # responses
    m_resp = fit.model.generate_m_resp(fit.model.tau)
    p_resp = fit.model.generate_p_resp(fit.model.tau)
    npt.assert_(np.max(m_resp,0)[0] < np.max(m_resp,0)[1])
    npt.assert_(np.max(p_resp,0)[0] > np.max(p_resp,0)[1])

    # amps
    npt.assert_(fit.model.m_amp[0] < fit.model.m_amp[1])
    npt.assert_(fit.model.p_amp[0] > fit.model.p_amp[1])

    # receptive field
    npt.assert_almost_equal(4.0, fit.receptive_field.sum())

def test_dog():

    # stimulus features
    viewing_distance = 31
    screen_width = 41
    thetas = np.arange(0, 360, 90)
    # thetas = np.insert(thetas,0,-1)
    # thetas = np.append(thetas,-1)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.50
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    auto_fit = True
    verbose = 0

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = dog.DifferenceOfGaussiansModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0
    model.mask_size = 20

    # set the pRF params
    x = 2.2
    y = 2.5
    sigma = 0.90
    sigma_ratio = 1.5
    volume_ratio = 0.5
    beta = 0.25
    baseline = -0.10

    # create "data"
    data = model.generate_prediction(x, y, sigma, sigma_ratio, volume_ratio, beta, baseline)

    # set up the grids
    x_grid = utils.grid_slice(-5, 5, 4)
    y_grid = utils.grid_slice(-5, 5, 4)
    s_grid = utils.grid_slice(1 / stimulus.ppd0 * 1.10, 3.5, 4)
    sr_grid = utils.grid_slice(1.0, 2.0, 4)
    vr_grid = utils.grid_slice(0.10, 0.90, 4)
    grids = (x_grid, y_grid, s_grid, sr_grid, vr_grid,)

    # set up the bounds
    x_bound = (-ecc, ecc)
    y_bound = (-ecc, ecc)
    s_bound = (1 / stimulus.ppd, 5)
    sr_bound = (1.0, None)
    vr_bound = (1e-8, 1.0)
    bounds = (x_bound, y_bound, s_bound, sr_bound, vr_bound,)

    # fit it
    fit = dog.DifferenceOfGaussiansFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [1.666666666666667, 1.666666666666667, 2.8243187483428391, 1.9999999999999998, 0.10000000000000001]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.s0, fit.sr0, fit.vr0), ballpark)

    # the baseline/beta should be 0/1 when regressed data vs. estimate
    (m, b) = np.polyfit(fit.scaled_ballpark_prediction, data, 1)
    npt.assert_almost_equal(m, 1.0)
    npt.assert_almost_equal(b, 0.0)

    # fine fit
    npt.assert_almost_equal(fit.x, x, 2)
    npt.assert_almost_equal(fit.y, y, 2)
    npt.assert_almost_equal(fit.sigma, sigma, 2)
    npt.assert_almost_equal(fit.sigma_ratio, sigma_ratio, 1)
    npt.assert_almost_equal(fit.volume_ratio, volume_ratio, 1)

    # test the RF
    rf = fit.model.receptive_field(*fit.estimate[0:-2])
    est = fit.estimate[0:-2].copy()
    rf_new = fit.model.receptive_field(*est)
    value_1 = np.sqrt(simps(simps(rf)))
    value_2 = np.sqrt(simps(simps(rf_new)))
    nt.assert_almost_equal(value_1, value_2)

    # polar coordinates
    npt.assert_almost_equal([fit.theta, fit.rho], [np.arctan2(y, x), np.sqrt(x**2 + y**2)], 4)

def test_bootstrap():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.array([-1, 0, 90, 180, 270, -1])
    num_blank_steps = 30
    num_bar_steps = 30
    ecc = 10
    tr_length = 1.0
    frames_per_tr = 1.0
    scale_factor = 0.10
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    voxel_index = (1, 2, 3)
    auto_fit = True
    verbose = 1

    # rng
    np.random.seed(2764932)

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.spm_hrf)
    model.hrf_delay = 0

    # generate a random pRF estimate
    x = -5.24
    y = 2.58
    sigma = 1.24
    beta = 2.5
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.5, 3.25, 5)

    # set search bounds
    x_bound = (-12.0, 12.0)
    y_bound = (-12.0, 12.0)
    s_bound = (0.001, 12.0)
    b_bound = (1e-8, None)
    m_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid,)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # pack multiple "runs"
    data = np.vstack((data, data, data))

    # make it a singular "voxel"
    data = np.reshape(data, (1, data.shape[0], data.shape[1]))

    # set bootstraps and resamples
    bootstraps = 2
    resamples = np.array((2,))

    # make fodder
    bundle = utils.bootstrap_bundle(bootstraps, resamples, og.GaussianFit, model, data, grids, bounds, np.tile((1, 2, 3), (bootstraps, 1)))

    # test
    for b in bundle:
        fit = utils.parallel_bootstrap(b)
        npt.assert_almost_equal(fit.rss, 0)
        npt.assert_equal(fit.n_resamples, resamples[0])
        npt.assert_equal(np.sum(fit.resamples), np.sum(np.arange(resamples[0])))

def test_auditory_fit():

    # stimulus features
    duration = 30  # seconds
    Fs = 44100  # Hz
    lo_freq = 200.0  # Hz
    hi_freq = 10000.0  # Hz
    tr_length = 1.0  # seconds
    clip_number = 0  # TRs
    dtype = ctypes.c_double

    # fit settings
    auto_fit = True
    verbose = 1
    debug = False
    Ns = 20

    # generate auditory stimulus
    time = np.linspace(0, duration, duration * Fs)
    ch = chirp(time, lo_freq, duration, hi_freq, method='logarithmic')
    signal = np.tile(np.concatenate((ch, ch[::-1])), 5)
    blank = np.zeros((30 * Fs))
    signal = np.concatenate((blank, signal, blank), -1)

    # instantiate an instance of the Stimulus class
    stimulus = AuditoryStimulus(signal, Fs, tr_length, dtype)  ### stimulus

    # initialize the gaussian model
    model = aud.AuditoryModel(stimulus, utils.double_gamma_hrf)  ### model
    model.hrf_delay = 0

    # invent pRF estimate
    center_freq = np.log10(987)
    sigma = np.log10(123)
    beta = 1.0
    baseline = 0.0

    # generate data
    data = model.generate_prediction(center_freq, sigma, beta, baseline)

    # search grids
    c_grid = utils.grid_slice(np.log10(300), np.log10(1000), Ns)
    s_grid = utils.grid_slice(np.log10(100), np.log10(500), Ns)
    grids = (c_grid, s_grid,)

    # search bounds
    c_bound = (np.log10(lo_freq), np.log10(hi_freq))
    s_bound = (np.log10(50), np.log10(hi_freq))
    b_bound = (1e-8, None)
    m_bound = (None, None)
    bounds = (c_bound, s_bound, b_bound, m_bound)

    # fit it
    fit = aud.AuditoryFit(model, data, grids, bounds, Ns=Ns)

    # assert equivalence
    npt.assert_almost_equal(fit.center_freq, center_freq)
    npt.assert_almost_equal(fit.sigma, sigma)
    npt.assert_almost_equal(fit.beta, beta)
    npt.assert_almost_equal(fit.baseline, baseline)
    npt.assert_almost_equal(fit.center_freq0, 3)
    npt.assert_almost_equal(fit.sigma0, 2.14715158)
    npt.assert_almost_equal(fit.beta0, beta)
    npt.assert_almost_equal(fit.baseline0, baseline)
    npt.assert_almost_equal(fit.center_freq_hz, 987)

    # test receptive field
    rf = np.exp(-((10**fit.model.stimulus.freqs - 10**fit.center_freq)**2) / (2 * (10**fit.sigma)**2))
    rf /= (10**fit.sigma * np.sqrt(2 * np.pi))
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    rf = np.exp(-((fit.model.stimulus.freqs - fit.center_freq)**2) / (2 * fit.sigma**2))
    rf /= (fit.sigma * np.sqrt(2 * np.pi))
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field_log10.sum()))

def test_resurrect_model():

    # stimulus features
    viewing_distance = 38
    screen_width = 25
    thetas = np.arange(0,360,90)
    thetas = np.insert(thetas,0,-1)
    thetas = np.append(thetas,-1)
    num_blank_steps = 20
    num_bar_steps = 20
    ecc = 10
    tr_length = 1.5
    frames_per_tr = 1.0
    scale_factor = 1.0
    pixels_across = 100
    pixels_down = 100
    dtype = ctypes.c_int16

    # create the sweeping bar stimulus in memory
    bar = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc, clip=0.01)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(bar, viewing_distance, screen_width, scale_factor, tr_length, dtype)

    # set cache grids
    x_grid = utils.grid_slice(-10, 10, 5)
    y_grid = utils.grid_slice(-10, 10, 5)
    s_grid = utils.grid_slice(0.55, 5.25, 5)
    grids = (x_grid, y_grid, s_grid,)

    # set search bounds
    x_bound = (-12.0,12.0)
    y_bound = (-12.0,12.0)
    s_bound = (0.001,12.0)
    b_bound = (1e-8,None)
    m_bound = (None,None)
    bounds = (x_bound, y_bound, s_bound, b_bound, m_bound)

    # initialize the gaussian model
    model = og.GaussianModel(stimulus, utils.double_gamma_hrf)
    model.hrf_delay = 0
    model.mask_size = 5
    cache = model.cache_model(grids, ncpus=3)

    # seed rng
    np.random.seed(4932)

    # pluck an estimate and create timeseries
    x, y, sigma = cache[51][1]
    beta = 1.25
    baseline = 0.25

    # create "data"
    data = cache[51][0]

    # fit it
    fit = og.GaussianFit(model, data, grids, bounds, verbose=0)

    # assert
    npt.assert_equal(fit.estimate, fit.ballpark)

    # create "data"
    data = model.generate_prediction(x, y, sigma, beta, baseline)

    # fit it
    fit = og.GaussianFit(model, data, grids, bounds, verbose=0)

    # assert
    npt.assert_almost_equal(np.sum(fit.scaled_ballpark_prediction - fit.data)**2, 0)

def test_strf_2dcos_fit():

    viewing_distance = 38
    screen_width = 25
    thetas = np.tile(np.arange(0, 360, 90), 2)
    thetas = np.insert(thetas, 0, -1)
    thetas = np.append(thetas, -1)
    num_blank_steps = 0
    num_bar_steps = 30
    ecc = 10
    tr_length = 1
    frames_per_tr = 1
    scale_factor = 1.0
    pixels_down = 100
    pixels_across = 100
    dtype = ctypes.c_int16
    Ns = 5
    voxel_index = (1, 2, 3)
    auto_fit = True
    verbose = 1
    projector_hz = 480
    tau = 0.00875
    mask_size = 5
    hrf = 0.25

    # create the sweeping bar stimulus in memory
    stim = simulate_bar_stimulus(pixels_across, pixels_down, viewing_distance, screen_width, thetas, num_bar_steps, num_blank_steps, ecc)

    # create an instance of the Stimulus class
    stimulus = VisualStimulus(stim, viewing_distance, screen_width, scale_factor, tr_length, dtype)
    stimulus.fps = projector_hz
    flicker_vec = np.zeros_like(stim[0, 0, :]).astype('uint8')
    flicker_vec[1 * 20:5 * 20] = 1
    flicker_vec[5 * 20:9 * 20] = 2
    stimulus.flicker_vec = flicker_vec
    stimulus.flicker_hz = [10, 20]

    # initialize the gaussian model
    model = strf.SpatioTemporalModel(stimulus, utils.spm_hrf)
    model.tau = tau
    model.hrf_delay = hrf
    model.mask_size = mask_size
    model.power = 0.7

    # generate a random pRF estimate
    x = -2.24
    y = 1.58
    sigma = 1.23
    weight = 0.90
    beta = 1.0
    baseline = -0.25

    # create the "data"
    data = model.generate_prediction(x, y, sigma, weight, beta, baseline)

    # set search grid
    x_grid = utils.grid_slice(-8.0, 7.0, 5)
    y_grid = utils.grid_slice(-8.0, 7.0, 5)
    s_grid = utils.grid_slice(0.75, 3.0, 5)
    w_grid = utils.grid_slice(0.05, 0.95, 5)

    # set search bounds
    x_bound = (-10, 10)
    y_bound = (-10, 10)
    s_bound = (1 / stimulus.ppd, 10)
    w_bound = (1e-8, 1.0)
    b_bound = (1e-8, 1e5)
    u_bound = (None, None)

    # loop over each voxel and set up a GaussianFit object
    grids = (x_grid, y_grid, s_grid, w_grid,)
    bounds = (x_bound, y_bound, s_bound, w_bound, b_bound, u_bound)

    # fit the response
    fit = strf.SpatioTemporalFit(model, data, grids, bounds)

    # coarse fit
    ballpark = [-0.5, 3.25, 3.0, 0.72499999999999998, 0.858317, -0.25000000000000011]
    npt.assert_almost_equal((fit.x0, fit.y0, fit.sigma0, fit.weight0, fit.beta0, fit.baseline0), ballpark)

    # fine fit
    npt.assert_almost_equal(fit.x, x)
    npt.assert_almost_equal(fit.y, y)
    npt.assert_almost_equal(fit.sigma, sigma)
    npt.assert_almost_equal(fit.weight, weight)
    npt.assert_almost_equal(fit.beta, beta)
    npt.assert_almost_equal(fit.baseline, baseline)

    # overloaded
    npt.assert_almost_equal(fit.overloaded_estimate, [2.5272803327887128, 2.7411676344185993, 1.2300000000008835, 0.89999999999333258, 1.0000000000005003, -0.25000000000063088])

    m_rf = fit.model.m_rf(fit.model.tau)
    p_rf = fit.model.p_rf(fit.model.tau)
    npt.assert_almost_equal(simps(np.abs(m_rf)), simps(p_rf), 5)

    # responses
    m_resp = fit.model.generate_m_resp(fit.model.tau)
    p_resp = fit.model.generate_p_resp(fit.model.tau)
    npt.assert_(np.max(m_resp, 0)[0] < np.max(m_resp, 0)[1])
    npt.assert_(np.max(p_resp, 0)[0] > np.max(p_resp, 0)[1])

    # amps
    npt.assert_(fit.model.m_amp[0] < fit.model.m_amp[1])
    npt.assert_(fit.model.p_amp[0] > fit.model.p_amp[1])

    # receptive field
    rf = generate_2dcos_receptive_field(x, y, sigma, fit.model.power, fit.model.stimulus.deg_x, fit.model.stimulus.deg_y)
    rf /= (2 * np.pi * sigma**2) * 1 / np.diff(model.stimulus.deg_x[0, 0:2])**2
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    npt.assert_almost_equal(np.round(fit.model.generate_receptive_field(x, y, sigma).sum()), np.round(fit.receptive_field.sum()))

def test_auditory_hrf_fit():

    # stimulus features
    duration = 30  # seconds
    Fs = int(44100 / 2)  # Hz
    lo_freq = 200.0  # Hz
    hi_freq = 10000.0  # Hz
    tr_length = 1.0  # seconds
    clip_number = 0  # TRs
    dtype = ctypes.c_double

    # fit settings
    auto_fit = True
    verbose = 1
    debug = False
    Ns = 10

    # generate auditory stimulus
    time = np.linspace(0, duration, duration * Fs)
    ch = chirp(time, lo_freq, duration, hi_freq, method='logarithmic')
    signal = np.tile(np.concatenate((ch, ch[::-1])), 5)
    blank = np.zeros((30 * Fs))
    signal = np.concatenate((blank, signal, blank), -1)

    # instantiate an instance of the Stimulus class
    stimulus = AuditoryStimulus(signal, Fs, tr_length, dtype)  ### stimulus

    # initialize the gaussian model
    model = aud.AuditoryModel(stimulus, utils.spm_hrf)  ### model
    model.hrf_delay = 0

    # invent pRF estimate
    center_freq_hz = 987
    sigma_hz = 123
    center_freq = np.log10(center_freq_hz)
    sigma = np.log10(sigma_hz)
    hrf_delay = 1.25
    beta = 2.4
    baseline = 0.59

    # generate data
    data = model.generate_prediction(center_freq, sigma, hrf_delay, beta, baseline)

    # search grids
    c_grid = utils.grid_slice(np.log10(300), np.log10(1000), Ns)
    s_grid = utils.grid_slice(np.log10(100), np.log10(500), Ns)
    h_grid = utils.grid_slice(-1, 1, Ns)
    grids = (c_grid, s_grid, h_grid,)

    # search bounds
    c_bound = (np.log10(lo_freq), np.log10(hi_freq))
    s_bound = (np.log10(50), np.log10(hi_freq))
    h_bound = (-2, 2)
    b_bound = (1e-8, None)
    m_bound = (None, None)
    bounds = (c_bound, s_bound, h_bound, b_bound, m_bound)

    # fit it
    fit = aud.AuditoryFit(model, data, grids, bounds, Ns=Ns)

    # grid fit
    npt.assert_almost_equal(fit.center_freq0, 3)
    npt.assert_almost_equal(fit.hrf0, 1.2222222222222223)

    # test the sigma parameter against best possibility
    grid_sigmas = np.arange(s_grid.start, s_grid.stop, s_grid.step)
    best_sigma = grid_sigmas[np.argmin(np.abs(grid_sigmas - sigma))]
    npt.assert_array_less(np.abs(fit.sigma0 - sigma), s_grid.step)

    # the baseline/beta should be 0/1 when regressed data vs. estimate
    (m, b) = np.polyfit(fit.scaled_ballpark_prediction, data, 1)
    npt.assert_almost_equal(m, 1.0)
    npt.assert_almost_equal(b, 0.0)

    # final fit
    npt.assert_almost_equal(fit.center_freq, center_freq)
    npt.assert_almost_equal(fit.sigma, sigma)
    npt.assert_almost_equal(fit.beta, beta)
    npt.assert_almost_equal(fit.baseline, baseline)
    npt.assert_almost_equal(fit.center_freq_hz, center_freq_hz)

    # test receptive field
    rf = np.exp(-((10**fit.model.stimulus.freqs - 10**fit.center_freq)**2) / (2 * (10**fit.sigma)**2))
    rf /= (10**fit.sigma * np.sqrt(2 * np.pi))
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field.sum()))

    # test model == fit RF
    rf = np.exp(-((fit.model.stimulus.freqs - fit.center_freq)**2) / (2 * fit.sigma**2))
    rf /= (fit.sigma * np.sqrt(2 * np.pi))
    npt.assert_almost_equal(np.round(rf.sum()), np.round(fit.receptive_field_log10.sum()))