def run(hps=None):
    defaults = PaperDefaults()

    # David's globals
    size = 51
    csize = 9
    npoints = 37
    scale = 1.
    _DEFAULT_BWC_CSV_CTS = sp.array([0.0, .06, .12, .25, .50]) * 100
    csvfiles = sp.array([
        [os.path.join(defaults._DATADIR, 'BWC2009_%i_%i.csv' % (i, j))
         for i in _DEFAULT_BWC_CSV_CTS]
        for j in _DEFAULT_BWC_CSV_CTS]).T

    # experiment parameters
    im = sp.array([
        stim.get_center_surround(size=size, csize=csize, cval=.25, sval=sp.nan),
        stim.get_center_surround(size=size, csize=csize, cval=.75, sval=sp.nan)])

    # populations for vertical (masking) and horizontal (driving) stimuli
    #####################################################################
    xv = model_utils.get_population(im[0],
        kind='circular', npoints=npoints, scale=scale)
    xh = model_utils.get_population(im[1],
        kind='circular', npoints=npoints, scale=scale)

    # superimposed populations
    ##########################
    v_contrasts = [0.0, .06, .12, .25, .50]
    h_contrasts = [0.0, .06, .12, .25, .50]
    nv, nh = len(v_contrasts), len(h_contrasts)
    x = sp.array([[h * xh + v * xv for h in h_contrasts] for v in v_contrasts])
    x.shape = (nv * nh,) + x.shape[2:]

    # busse and wade data
    t_paper = sp.zeros((nv, nh, 13))
    y_paper = sp.zeros((nv, nh, 13))
    for idx in range(nv):
        for jdx in range(nh):
            t_paper[idx, jdx], y_paper[idx, jdx] = \
                sp.genfromtxt(csvfiles[idx, jdx], delimiter=',').T
    res_y_paper = sp.zeros((y_paper.shape[0], y_paper.shape[1], npoints))
    for r in range(y_paper.shape[0]):
        for c in range(y_paper.shape[1]):
            res_y_paper[r, c, :] = sp.signal.resample(y_paper[r, c, :], npoints)
    gt = [t_paper, res_y_paper]

    extra_vars = {}
    extra_vars['scale'] = scale
    extra_vars['npoints'] = npoints
    extra_vars['size'] = size
    extra_vars['csize'] = csize
    extra_vars['nv'] = nv
    extra_vars['nh'] = nh
    extra_vars['figure_name'] = 'bw'
    extra_vars['return_var'] = 'O'

    optimize_model(x, gt, extra_vars, defaults)
def run():
    defaults = PaperDefaults()

    # David's globals
    size = 51
    csize = 9
    npoints = 32
    scale = 2.0
    cval = 0.5
    csvfiles = [[
        defaults._DATADIR + '/TB2015_%i_%s.csv' % (i, s)
        for i in range(-90, 90, 30)]
        for s in ('PS', 'PO')]

    # experiment parameters
    ppop = {
        'kind': 'circular',
        'npoints': npoints,
        'scale': scale,
        'fdomain': (0, 1),
    }
    vals_ang = sp.array([-90., -60., -30., 0., 30., 60.])
    vals = (vals_ang + 90.) / 180.
    imc1 = stim.get_center_surround(size=size, csize=csize, cval=cval, sval=sp.nan)
    x1 = model_utils.get_population(imc1, **ppop)
    x = sp.zeros((2, len(vals), npoints, size, size))
    for vdx, v in enumerate(vals):
        imc2 = stim.get_center_surround(size=size, csize=csize, cval=v, sval=sp.nan)
        ims = stim.get_center_surround(size=size, csize=csize, cval=sp.nan, sval=v)
        x2 = model_utils.get_population(imc2, **ppop)
        xs = model_utils.get_population(ims, **ppop)
        x[0, vdx] = (x1 + x2) / 2.
        x[1, vdx] = (x1 + x2) / 2. + xs
    x.shape = (2 * len(vals), npoints, size, size)

    # trott and born 2015 data
    gt = get_gt(npoints, csvfiles)

    extra_vars = {}
    extra_vars['scale'] = scale
    extra_vars['npoints'] = npoints
    extra_vars['cval'] = cval
    extra_vars['size'] = size
    extra_vars['csize'] = csize
    extra_vars['vals'] = vals
    extra_vars['figure_name'] = 'tbp'
    extra_vars['return_var'] = 'O'

    optimize_model(x, gt, extra_vars, defaults)
def run():
    defaults = PaperDefaults()

    # David's globals
    _DEFAULT_TILTEFFECT_DEGPERPIX = .25  # <OToole77>
    _DEFAULT_TILTEFFECT_SIZE = 51  # 101
    _DEFAULT_TILTEFFECT_CSIZE = iround(2. / _DEFAULT_TILTEFFECT_DEGPERPIX)
    _DEFAULT_TILTEFFECT_SSIZE = iround(8. / _DEFAULT_TILTEFFECT_DEGPERPIX)
    _DEFAULT_TILTEFFECT_CVAL = .5
    _DEFAULT_TILTEFFECT_SVALS = np.linspace(0.0, 0.5, 10)
    _DEFAULT_TILTEFFECT_SCALES = {'ow77': 0.40, 'ms79': 0.60}  # 0.45
    _DEFAULT_TILTEFFECT_NPOINTS = 25  # 100
    _DEFAULT_TILTEFFECT_DECODER_TYPE = 'circular_vote'
    _DEFAULT_TILTEFFECT_CSV = {
        'ow77': os.path.join(defaults._DATADIR, 'OW_fig4_Black.csv'),
        'ms79': os.path.join(defaults._DATADIR, 'MS1979.csv'),
    }

    # experiment parameters
    cpt = (_DEFAULT_TILTEFFECT_SIZE // 2, _DEFAULT_TILTEFFECT_SIZE // 2)
    spt = (_DEFAULT_TILTEFFECT_SIZE // 2,
           _DEFAULT_TILTEFFECT_SIZE // 2 + _DEFAULT_TILTEFFECT_CSIZE)
    dt_in = _DEFAULT_TILTEFFECT_CVAL - _DEFAULT_TILTEFFECT_SVALS

    # simulate populations
    im = sp.array([[
        stim.get_center_nfsurrounds(size=_DEFAULT_TILTEFFECT_SIZE,
                                    csize=_DEFAULT_TILTEFFECT_CSIZE,
                                    nsize=_DEFAULT_TILTEFFECT_CSIZE,
                                    fsize=_DEFAULT_TILTEFFECT_SSIZE,
                                    cval=_DEFAULT_TILTEFFECT_CVAL,
                                    nval=_DEFAULT_TILTEFFECT_CVAL,
                                    fval=sval,
                                    bgval=sp.nan)]
        for sval in _DEFAULT_TILTEFFECT_SVALS])

    # get shifts for model for both papers, and from digitized data
    sortidx = sp.argsort(dt_in)  # re-order in increasing angular differences

    # O'Toole and Wenderoth (1977) digitized data
    _, ds_ow77_paper_y = sp.genfromtxt(
        _DEFAULT_TILTEFFECT_CSV['ow77'], delimiter=',').T

    extra_vars = {}
    extra_vars['scale'] = _DEFAULT_TILTEFFECT_SCALES['ow77']
    extra_vars['decoder'] = _DEFAULT_TILTEFFECT_DECODER_TYPE
    extra_vars['npoints'] = _DEFAULT_TILTEFFECT_NPOINTS
    extra_vars['cval'] = _DEFAULT_TILTEFFECT_CVAL
    extra_vars['sortidx'] = sortidx
    extra_vars['cpt'] = cpt
    extra_vars['spt'] = spt
    # The original relied on `sval` leaking out of the list comprehension
    # above (Python 2 behavior); use the last surround value explicitly.
    extra_vars['sval'] = _DEFAULT_TILTEFFECT_SVALS[-1]
    extra_vars['kind'] = 'circular'
    extra_vars['figure_name'] = 'f3a'
    extra_vars['return_var'] = 'O'

    optimize_model(im, ds_ow77_paper_y, extra_vars, defaults)
def run():
    defaults = PaperDefaults()

    # David's globals
    size = 51
    csize = 5
    npoints = 64
    scale = 2.0
    neuron_theta = 0.50
    cval = 0.5
    # list(...) keeps this working under Python 3, where range() is not a list
    csvfiles = [
        defaults._DATADIR + '/TB2015_Fig1B_%s.csv' % (s,)
        for s in list(range(-90, 90, 30)) + ['CO']]

    # experiment parameters
    cvals = (sp.arange(-90, 90, 30) + 90.) / 180.
    svals = sp.linspace(0.0, 1.0, 6).tolist() + [sp.nan]
    neuron_thetas = sp.linspace(0.0, 1.0, npoints)
    neuron_idx = sp.argmin(sp.absolute(neuron_thetas - neuron_theta))
    stims = [
        stim.get_center_surround(size=size, csize=csize, cval=cv, sval=sv)
        for cv in cvals for sv in svals]
    x = sp.array([
        model_utils.get_population(im, npoints=npoints, kind='circular',
                                   scale=scale, fdomain=(0, 1))
        for im in stims])  # x: (len(cvals) * len(svals), npoints, size, size)

    # trott and born 2015 data
    gt = get_gt(csvfiles)

    extra_vars = {}
    extra_vars['scale'] = scale
    extra_vars['npoints'] = npoints
    extra_vars['cval'] = cval
    extra_vars['cvals'] = cvals
    extra_vars['svals'] = svals
    extra_vars['size'] = size
    extra_vars['csize'] = csize
    extra_vars['neuron_idx'] = neuron_idx
    extra_vars['figure_name'] = 'tbtcso'
    extra_vars['return_var'] = 'O'

    optimize_model(x, gt, extra_vars, defaults)
def run():
    defaults = PaperDefaults()

    # David's globals
    size = 51
    mpp = 0.76  # 0.76 # 1.11
    scale = 0.23  # 0.23 # 0.22
    csv_file_x = os.path.join(defaults._DATADIR, 'WL1987_corrected_X.csv')
    csv_file_y = os.path.join(defaults._DATADIR, 'WL1987_corrected_Y.csv')

    # experiment parameters
    dd = (-150., 150.)  # disparity range, in seconds of arc
    sec2u = lambda s: (s - dd[0]) / (dd[1] - dd[0])  # arcsec -> [0, 1] feature units
    u2sec = lambda u: u * (dd[1] - dd[0]) + dd[0]    # [0, 1] feature units -> arcsec
    min2pix = lambda m: iround(m / float(mpp))       # arcmin -> pixels
    npoints = 50
    ndists = 10
    dists = sp.linspace(0.0, 12., ndists)
    lh, lw = 1, 4.
    ph, pw = 2., 2.
    center_disp = 0.0
    flanker_disp = -33.3
    mp0 = size // 2

    # Need to scale up the ecrfs
    defaults._DEFAULT_PARAMETERS['srf'] = defaults._DEFAULT_PARAMETERS['srf'] * 2 - 1
    defaults._DEFAULT_PARAMETERS['ssn'] = defaults._DEFAULT_PARAMETERS['ssn'] * 2 - 1
    defaults._DEFAULT_PARAMETERS['ssf'] = defaults._DEFAULT_PARAMETERS['ssf'] * 2 - 1

    # simulate populations
    im = get_wl87_stim(size=size, dists=min2pix(dists),
                       cval=sec2u(center_disp), sval=sec2u(flanker_disp),
                       ch=min2pix(lh), cw=min2pix(lw),
                       sh=min2pix(ph), sw=min2pix(pw))

    # Get ground truth data
    paper_data_x = sp.genfromtxt(csv_file_x, delimiter=',')
    paper_data_y = sp.genfromtxt(csv_file_y, delimiter=',') * -1
    paper_fit_y = sfit(sp.linspace(dists.min(), dists.max(), 100),
                       paper_data_x, sp.nanmean(paper_data_y, axis=0),
                       k=2, t=[5.])
    paper_fit_y = paper_fit_y[np.round(
        np.linspace(0, paper_fit_y.shape[0] - 1, ndists)).astype(int)]

    extra_vars = {}
    extra_vars['scale'] = scale
    extra_vars['kind'] = 'gaussian'
    extra_vars['decoder'] = 'circular_vote'
    extra_vars['npoints'] = npoints
    extra_vars['cval'] = sec2u(center_disp)
    extra_vars['sval'] = sec2u(flanker_disp)
    extra_vars['figure_name'] = 'f5'
    extra_vars['u2sec'] = u2sec
    extra_vars['min2pix'] = min2pix
    extra_vars['dists'] = dists
    extra_vars['flanker_disp'] = flanker_disp
    extra_vars['mp0'] = mp0
    extra_vars['lh'] = lh
    extra_vars['pw'] = pw
    extra_vars['size'] = size
    extra_vars['gt_x'] = paper_data_x
    extra_vars['return_var'] = 'O'

    optimize_model(im, paper_fit_y, extra_vars, defaults)
def run():
    defaults = PaperDefaults()

    # David's globals
    _DEFAULT_KW97_TILTEFFECT_DEGPERPIX = .45  # <OToole77>
    _DEFAULT_TILTEFFECT_SIZE = 101  # 101
    _DEFAULT_KW97_TILTEFFECT_CSIZE = iround(3.6 / _DEFAULT_KW97_TILTEFFECT_DEGPERPIX)
    _DEFAULT_KW97_TILTEFFECT_NSIZE = iround(5.4 / _DEFAULT_KW97_TILTEFFECT_DEGPERPIX)
    _DEFAULT_KW97_TILTEFFECT_FSIZE = iround(10.7 / _DEFAULT_KW97_TILTEFFECT_DEGPERPIX)
    _DEFAULT_TILTEFFECT_CVAL = .5
    _DEFAULT_TILTEFFECT_SVALS = np.linspace(0.0, 0.5, 10)
    _DEFAULT_KW97_TILTEFFECT_SCALE = 1.25
    _DEFAULT_TILTEFFECT_NPOINTS = 25  # 100
    _DEFAULT_TILTEFFECT_CIRCULAR = True
    _DEFAULT_TILTEFFECT_DECODER_TYPE = 'circular_vote'
    csvfiles = [
        os.path.join(defaults._DATADIR, 'KW97_GH.csv'),
        os.path.join(defaults._DATADIR, 'KW97_JHK.csv'),
        os.path.join(defaults._DATADIR, 'KW97_LL.csv'),
        os.path.join(defaults._DATADIR, 'KW97_SJL.csv'),
    ]

    # experiment parameters
    cpt = (_DEFAULT_TILTEFFECT_SIZE // 2, _DEFAULT_TILTEFFECT_SIZE // 2)
    spt = (_DEFAULT_TILTEFFECT_SIZE // 2,
           _DEFAULT_TILTEFFECT_SIZE // 2 + _DEFAULT_KW97_TILTEFFECT_CSIZE)
    dt_in = _DEFAULT_TILTEFFECT_CVAL - _DEFAULT_TILTEFFECT_SVALS

    # simulate populations
    im = sp.array([[
        stim.get_center_nfsurrounds(size=_DEFAULT_TILTEFFECT_SIZE,
                                    csize=_DEFAULT_KW97_TILTEFFECT_CSIZE,
                                    nsize=_DEFAULT_KW97_TILTEFFECT_NSIZE,
                                    fsize=_DEFAULT_KW97_TILTEFFECT_FSIZE,
                                    cval=_DEFAULT_TILTEFFECT_CVAL,
                                    nval=sp.nan,
                                    fval=sval,
                                    bgval=sp.nan)]
        for sval in _DEFAULT_TILTEFFECT_SVALS])

    # get shifts for model for both papers, and from digitized data
    sortidx = sp.argsort(dt_in)  # re-order in increasing angular differences

    # KW97 digitized data, one csv per subject
    n_subjects = len(csvfiles)
    ds_kw97_paper_x = sp.zeros((n_subjects, 9))
    ds_kw97_paper_y = sp.zeros((n_subjects, 9))
    for sidx, csv in enumerate(csvfiles):
        ds_kw97_paper_x[sidx], ds_kw97_paper_y[sidx] = \
            sp.genfromtxt(csv, delimiter=',').T
    ds_kw97_paper_x = (ds_kw97_paper_x + 360.) % 360. - 45.
    ds_kw97_paper_y = 45. - ds_kw97_paper_y
    for sidx in range(n_subjects):
        ds_kw97_paper_x[sidx] = ds_kw97_paper_x[sidx][sp.argsort(
            ds_kw97_paper_x[sidx])]

    extra_vars = {}
    extra_vars['scale'] = _DEFAULT_KW97_TILTEFFECT_SCALE
    extra_vars['decoder'] = _DEFAULT_TILTEFFECT_DECODER_TYPE
    extra_vars['npoints'] = _DEFAULT_TILTEFFECT_NPOINTS
    extra_vars['cval'] = _DEFAULT_TILTEFFECT_CVAL
    extra_vars['sortidx'] = sortidx
    extra_vars['cpt'] = cpt
    extra_vars['spt'] = spt
    # As in f3a, `sval` previously leaked out of the list comprehension
    # above (Python 2 behavior); use the last surround value explicitly.
    extra_vars['sval'] = _DEFAULT_TILTEFFECT_SVALS[-1]
    extra_vars['kind'] = 'circular'
    extra_vars['figure_name'] = 'f3b'
    extra_vars['return_var'] = 'O'

    # resample the subject-averaged data to the number of surround values
    adjusted_gt = signal.resample(np.mean(ds_kw97_paper_y, axis=0), 10)
    optimize_model(im, adjusted_gt, extra_vars, defaults)
def run(initialize_model=False):
    defaults = PaperDefaults()

    # David's globals
    _DEFAULT_KW2015_SO_PARAMETERS = {
        'filters': {
            'name': 'gabors',
            'aspect_ratio': .6,
            'sizes': sp.array([9]),
            'spatial_frequencies': sp.array([[9.0]]),
            'orientations': sp.arange(2) * sp.pi / 2,
            'phases': sp.array([0]),
            'with_center_surround': False,
            'padding': 'reflect',
            'corr': False,
            'ndp': False
        },
        'model': {
            'channels_so': ('R+G-', 'B+Y-', 'R+C-', 'Wh+Bl-',
                            'G+R-', 'Y+B-', 'C+R-', 'Bl+Wh-'),
            'normalize': False
        },
        'dnp_so': None,
        'selected_channels': [0, 1, 3, 4, 5, 7],
        'norm_channels': [0, 1, 3, 4, 5, 7]
    }
    size = 51
    csize = 9
    n_train = 32
    n_t_hues = 16
    n_s_hues = 16
    csvfiles = [
        defaults._DATADIR + '/KW2015_%i.csv' % (i,)
        for i in range(0, 360, 45)]

    # Load data from experiments
    kw2015_fig2_x = sp.zeros((len(csvfiles), 16))
    kw2015_fig2_y = sp.zeros((len(csvfiles), 16))
    for idx, csv in enumerate(csvfiles):
        kw2015_fig2_x[idx], kw2015_fig2_y[idx] = \
            sp.genfromtxt(csv, delimiter=',')[1:].T

    # experiment stimuli
    extra_vars = {}
    extra_vars['_DEFAULT_KW2015_SO_PARAMETERS'] = _DEFAULT_KW2015_SO_PARAMETERS
    extra_vars['_DEFAULT_FLOATX_NP'] = defaults._DEFAULT_FLOATX_NP
    extra_vars['size'] = size
    extra_vars['csize'] = csize
    extra_vars['n_train'] = n_train
    extra_vars['n_t_hues'] = n_t_hues
    extra_vars['n_s_hues'] = n_s_hues
    extra_vars['figure_name'] = 'f4'
    extra_vars['gt_x'] = kw2015_fig2_x
    extra_vars['f4_stimuli_file'] = defaults.f4_stimuli_file
    extra_vars['return_var'] = 'I'
    extra_vars['precalculated_x'] = True
    extra_vars['aux_y'] = []
    extra_vars['percent_reg_train'] = 80.

    if initialize_model:
        create_stims(extra_vars)
    stim_files = np.load(extra_vars['f4_stimuli_file'])
    extra_vars['stims_all_lms'] = stim_files['stims_all_lms']

    # Run model
    # cx.run(so_all, from_gpu=False)
    # sx_all[:] = cx.Y.get()[:, :, size//2, size//2]
    adj_gt = np.mean(kw2015_fig2_y, axis=0)
    im = stim_files['so_ind'].reshape(
        n_t_hues * n_s_hues,
        len(_DEFAULT_KW2015_SO_PARAMETERS['norm_channels']),
        size, size)
    extra_vars['aux_data'] = stim_files['so_all'].transpose(0, 2, 3, 1)
    extra_vars['cs_hue_diff'] = stim_files['cs_hue_diff']

    optimize_model(im, adj_gt, extra_vars, defaults)
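

# A minimal sketch of a command-line entry point, assuming this run()
# lives in its own script; the '--initialize' flag name and the use of
# argparse are illustrative assumptions, not part of the original code.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(
        description='Fit the model to the KW2015 color-induction data.')
    parser.add_argument(
        '--initialize', action='store_true',
        help='regenerate the stimulus file before fitting')
    args = parser.parse_args()
    run(initialize_model=args.initialize)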