def writeWithHarvester():
    """Driver: for each regime, load simulated spike-train samples, run the
    initializer, and record estimates through a DataHarvester.

    The Nelder-Mead / BFGS / Fortet stages are stubbed out: the estimator
    calls are commented and a random sleep stands in for their wall time, so
    all three methods are logged with the initializer's estimate.
    NOTE(review): no D.closeFile() at the end, unlike the other harvester
    drivers in this file — confirm whether the harvester needs closing here.
    """
    from BinnedSpikeTrain import BinnedSpikeTrain
    from InitBox import initialize5
    from Simulator import Path, OUSinusoidalParams
    import time
    N_phi = 20; print 'N_phi = ', N_phi
    # Mid-points of N_phi equal-width phase bins on (0, 1):
    phis = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)
#    D = DataHarvester('test2')
    D = DataHarvester('test2', 'test3')
    base_name = 'sinusoidal_spike_train_T='
    # One (simulation length T_sim, pruning threshold T_thresh) per regime:
    for regime_name, T_sim, T_thresh in zip(['superT', 'subT', 'crit', 'superSin'],
                                            [5000 , 20000, 5000, 5000],
                                            [4., 32, 16., 16.]):
        regime_label = base_name + str(T_sim)+ '_' + regime_name
        # Only sample 3 is processed here:
        for sample_id in xrange(3,4):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phis)
            # True (alpha, beta, gamma) from the simulated path's parameters:
            ps = binnedTrain._Path._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
            # --- Initializer (the only estimator actually run) ---
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start)
            abg_est = abg_init
            # --- Nelder-Mead (stubbed: logs the init estimate, random delay) ---
            start = time.clock()
#            abg_est = NMEstimator(S, binnedTrain, abg_init)
            time.sleep(rand())
            finish = time.clock()
            D.addEstimate(sample_id, 'Nelder-Mead', abg_est, finish-start)
            # --- BFGS (stubbed) ---
            start = time.clock()
#            abg_est = BFGSEstimator(S, binnedTrain, abg_init)
            time.sleep(rand())
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS', abg_est, finish-start)
            # --- Fortet (stubbed) ---
            start = time.clock()
#            abg_est = FortetEstimator(binnedTrain, abg_init)
            time.sleep(rand())
            finish = time.clock()
            D.addEstimate(sample_id, 'Fortet', abg_est, finish-start)
def GradedDriver(): from scipy.optimize import fmin_bfgs N_phi = 10; print 'N_phi = ', N_phi phi_norms = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi) print 'GradedEstimator' for file_name in ['sinusoidal_spike_train_T=20000_subT_3.path', 'sinusoidal_spike_train_T=20000_subT_8.path', 'sinusoidal_spike_train_T=20000_subT_13.path']: print file_name binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms) binnedTrain.pruneBins(None, N_thresh = 32, T_thresh = 32.) abg_est = abs( initialize5(binnedTrain)) print 'abg_init = ',abg_est theta = binnedTrain.theta for T_thresh, N_thresh, max_iters in zip([32/8., 32/4., 32/2., 32.], [128, 128, 64, 32], [50,50,100,None]): binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms) binnedTrain.pruneBins(None, N_thresh, T_thresh) print 'N_bins = ', len(binnedTrain.bins.keys()) Tf = binnedTrain.getTf() print 'Tf = ', Tf dx = .02; dt = FPMultiPhiSolver.calculate_dt(dx, 4., 10.) phis = binnedTrain.bins.keys(); S = FPMultiPhiSolver(theta, phis, dx, dt, Tf, X_MIN = -.5) from scipy.optimize import fmin def func(abg): 'Solve it:' Fs = S.solve(abg, visualize=False)[:,:,-1] Ss = S.transformSurvivorData(binnedTrain) Ls = Fs - Ss 'Return' G = .5*sum(Ls*Ls)*S._dt return G abg_est = fmin(func, abg_est, ftol = 1e-2*T_thresh, maxiter=max_iters) print 'current_estimate = ', abg_est print 'final estimate = ', abg_est
def NelderMeadSubTEstimator():
    """Batch driver: Nelder-Mead estimation over 16 sub-threshold samples
    (sample ids 1..16), recording initializer and NM estimates plus wall
    times through a DataHarvester, and printing the total batch time.
    """
    N_phi = 20; print 'N_phi = ', N_phi
    # Mid-points of N_phi equal-width phase bins on (0, 1):
    phi_norms = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)
    batch_start = time.clock()
    base_name = 'sinusoidal_spike_train_T='
    D = DataHarvester('SubT_NMx16_refined_sim_dt')
    # Single regime: sub-threshold, T_sim = 20000, prune at T_thresh = 32:
    for regime_name, T_sim, T_thresh in zip(['subT'], [20000], [32.]):
        regime_label = base_name + str(T_sim)+ '_' + regime_name
        for sample_id in xrange(1,17):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            # NOTE(review): other drivers in this file read
            # binnedTrain._Path._params; this one reads ._Train._params —
            # confirm which attribute the current BinnedSpikeTrain exposes.
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
            # Solver grid sized from dx; phis restricted to surviving bins:
            dx = .04; dt = FPMultiPhiSolver.calculate_dt(dx, 4., 2.)
            phis = binnedTrain.bins.keys(); theta = binnedTrain.theta
            S = FPMultiPhiSolver(theta, phis, dx, dt, Tf, X_MIN = -.5)
            # --- Initializer ---
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start)
            # Nelder-Mead is seeded with the component-wise absolute value:
            abg_init = abs(abg_init)
            # --- Nelder-Mead ---
            start = time.clock()
            abg_est = NMEstimator(S, binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Nelder-Mead', abg_est, finish-start)
    D.closeFile()
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
def BFGSGradedEstimator(): from scipy.optimize import fmin_bfgs print 'GradedEstimator' N_phi = 10; print 'N_phi = ', N_phi phi_norms = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi) phi_omit = None for file_name in ['sinusoidal_spike_train_T=20000_subT_4.path', 'sinusoidal_spike_train_T=20000_subT_7.path', 'sinusoidal_spike_train_T=20000_subT_13.path']: print file_name binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms) binnedTrain.pruneBins(phi_omit, N_thresh = 32, T_thresh = 32.) abg_est = abs( initialize5(binnedTrain)) print 'abg_init = ',abg_est for T_thresh, N_thresh, max_iters in zip([32/8., 32/4., 32/2., 32.], [128, 128, 64, 32], [32,24,16,8]): binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phis) binnedTrain.pruneBins(phi_omit, N_thresh, T_thresh) Tf = binnedTrain.getTf() print 'Tf = ', Tf print 'N_bins = ', len(binnedTrain.bins.keys()) solver_phis = binnedTrain.bins.keys(); theta = binnedTrain.theta x_min = -.5; S = FPMultiPhiSolver(theta, solver_phis, .1, .1, Tf, X_MIN = x_min) lE = Estimator(S, binnedTrain, verbose = True) # abg_est = fmin_bfgs(lE.func, abg_init, lE.dfunc, gtol = 1e-6*binnedTrain.getTf(), maxiter= 128, full_output = 0) abg_est, fopt, gopt, Bopt, func_calls, grad_calls, warnflag = fmin_bfgs(lE.func, abg_est, lE.dfunc, gtol = 1e-08*binnedTrain.getTf(), maxiter=max_iters, full_output = 1) print 'estimate gradient =', gopt print 'current_estimate = ', abg_est print 'final estimate = ', abg_est
def BatchGradedNMEstimator():
    """Batch driver: graded Nelder-Mead plus Fortet estimation over
    sub-threshold samples 4..16, recording estimates and wall times through
    a DataHarvester and printing the total batch time.
    """
    N_phi = 20; print 'N_phi = ', N_phi
    # Mid-points of N_phi equal-width phase bins on (0, 1):
    phi_norms = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)
    batch_start = time.clock()
    base_name = 'sinusoidal_spike_train_T='
    D = DataHarvester('GradedNMx16', 'GradedNM_SubTx16')
    N_thresh = 32
    # Single regime: sub-threshold, T_sim = 20000, prune at T_thresh = 32:
    for regime_name, T_sim, T_thresh in zip(['subT'], [20000], [32.]):
        regime_label = base_name + str(T_sim)+ '_' + regime_name
        # Samples 4..16 (1..3 presumably processed in an earlier run —
        # TODO confirm):
        for sample_id in xrange(4,17):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            # True (alpha, beta, gamma) from the simulated path's parameters:
            ps = binnedTrain._Path._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = N_thresh, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
            # --- Initializer ---
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start)
            # --- Graded Nelder-Mead (re-bins internally from file_name) ---
            start = time.clock()
            abg_est = GradedNMEstimator(file_name, phi_norms, abg_init, T_thresh, N_thresh)
            finish = time.clock()
            D.addEstimate(sample_id, 'Graded_Nelder-Mead', abg_est, finish-start)
            # --- Fortet (uses the already-pruned binnedTrain) ---
            start = time.clock()
            abg_est = FortetEstimator(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Fortet', abg_est, finish-start)
    D.closeFile()
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
def writeManual():
    """Prototype for writing estimate records to an HDF5 file by hand with
    PyTables (one table per regime under /Estimates), instead of going
    through DataHarvester.

    Only the initializer's estimate is appended as a table row; the
    Nelder-Mead and Fortet stages are stubbed (estimator call commented,
    random sleep standing in for wall time) and merely printed.
    """
    from BinnedSpikeTrain import BinnedSpikeTrain
    from InitBox import initialize5
    import time
    N_phi = 20; print 'N_phi = ', N_phi
    # Mid-points of N_phi equal-width phase bins on (0, 1):
    phis = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)
    h5file = openFile("manual_write.h5", mode = "w", title = "Manually Write Estimate Data")
    grp = h5file.createGroup("/", 'Estimates', "Estimates INformation")
    base_name = 'sinusoidal_spike_train_T='
    # One (simulation length, pruning threshold) per regime; each regime
    # gets its own table of Estimate rows:
    for regime_name, T_sim, T_thresh in zip(['superT', 'subT', 'crit', 'superSin'],
                                            [5000 , 20000, 5000, 5000],
                                            [4., 32, 16., 16.]):
        estTable = h5file.createTable(grp, regime_name, Estimate , "Regime Estimate")
#        sampleTbl = h5file.createTable(grp, regime_name, Estimate , "Regime Estimate")
        regime_label = base_name + str(T_sim)+ '_' + regime_name
        # Samples 1 and 2 only:
        for sample_id in xrange(1,3):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phis)
            # True (alpha, beta, gamma) from the simulated path's parameters:
            ps = binnedTrain._Path._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            if 1 == sample_id:
                print 'abg_true = ', abg_true
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            print 'N_bins = ', len(binnedTrain.bins.keys())
            Tf = binnedTrain.getTf()
            print 'Tf = ', Tf
            #Estimate:
            estimate = estTable.row
            # --- Initializer: the one estimate actually written as a row ---
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            estimate['method'] = 'Initializer'
            estimate['sample_id'] = sample_id
            estimate['alpha'] = abg_init[0]
            estimate['beta'] = abg_init[1]
            estimate['gamma'] = abg_init[2]
            estimate['walltime'] = finish - start
            estimate.append()
            abg_est = abg_init
            # --- Nelder-Mead (stubbed; printed only, not written) ---
            start = time.clock()
#            abg_est = NMEstimator(S, binnedTrain, abg_init)
            time.sleep(rand())
            print 'Est. time = ', time.clock() - start
            print 'abg_est = ', abg_est
            # --- Fortet (stubbed; printed only, not written) ---
            start = time.clock()
#            abg_est = FortetEstimator(binnedTrain, abg_init)
            time.sleep(2*rand())
            print 'Est. time = ', time.clock() - start
            print 'abg_est = ', abg_est
        # Push buffered rows to disk once per regime table:
        estTable.flush()
        print '#'*44
    print h5file
    h5file.close()
def AdjointEstimator(): N_phi = 20; print 'N_phi = ', N_phi phis = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi) file_name = 'sinusoidal_spike_train_N=1000_crit_1' print file_name binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phis) phi_omit = None binnedTrain.pruneBins(phi_omit, N_thresh = 100, T_thresh = 10.0) print 'N_bins = ', len(binnedTrain.bins.keys()) Tf = binnedTrain.getTf() print 'Tf = ', Tf phis = binnedTrain.bins.keys(); theta = binnedTrain.theta ps = binnedTrain._Train._params abg_true = array((ps._alpha, ps._beta, ps._gamma)) print 'abg_true = ', abg_true abg = abg_true xmin = FPMultiPhiSolver.calculate_xmin(Tf, abg) dx = FPMultiPhiSolver.calculate_dx(abg, xmin) dt = FPMultiPhiSolver.calculate_dt(dx, abg, xmin, factor = 8.) print 'xmin, dx, dt = ', xmin, dx, dt S = FPMultiPhiSolver(theta, phis, dx, dt, Tf, xmin) abg_init = initialize5(binnedTrain) print 'abg_init = ', abg_init # start = time.clock() # abg_est = TNCEstimator(S, binnedTrain, abg_init) # print 'Est. time = ', time.clock() - start # print 'abg_est = ', abg_est # start = time.clock() # abg_est = NMEstimator(S, binnedTrain, abg_init) # print 'Est. time = ', time.clock() - start # print 'abg_est = ', abg_est # # start = time.clock() # abg_est = COBYLAEstimator(S, binnedTrain, abg_init) # print 'Est. time = ', time.clock() - start # print 'abg_est = ', abg_est # start = time.clock() # abg_est = CGEstimator(S, binnedTrain, abg_init) # print 'Est. time = ', time.clock() - start # print 'abg_est = ', abg_est start = time.clock() abg_est = BFGSEstimator(S, binnedTrain, abg_init) print 'Est. time = ', time.clock() - start print 'abg_est = ', abg_est
def BFGSItersComparison():
    """Batch driver comparing BFGS quality at increasing iteration budgets.

    For samples 1..3 of the 'crit' and 'superSin' regimes, runs BFGS three
    times with max_iters = 8, each restart warm-started from the previous
    result, so the logged 'BFGS_8' / 'BFGS_16' / 'BFGS_24' rows correspond
    to 8, 16 and 24 cumulative iterations.  Results go to a DataHarvester.
    """
    N_phi = 20; print 'N_phi = ', N_phi
    # Mid-points of N_phi equal-width phase bins on (0, 1):
    phi_norms = linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)
    batch_start = time.clock()
    base_name = 'sinusoidal_spike_train_T='
    D = DataHarvester('BFGS_Iters')
    for regime_name, T_sim, T_thresh in zip(['crit', 'superSin'], [5000, 5000], [16., 16.]):
        regime_label = base_name + str(T_sim)+ '_' + regime_name
        for sample_id in xrange(1,4):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            # NOTE(review): reads ._Train._params while some drivers here use
            # ._Path._params — confirm the current attribute name.
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
            # Solver grid; phis restricted to surviving bins:
            dx = .025; dt = FPMultiPhiSolver.calculate_dt(dx, 5., 2.)
            phis = binnedTrain.bins.keys(); theta = binnedTrain.theta
            S = FPMultiPhiSolver(theta, phis, dx, dt, Tf, X_MIN = -2.0)
            # --- Initializer ---
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start)
            # --- BFGS, first 8 iterations ---
            start = time.clock()
            abg_est = BFGSEstimator(S, binnedTrain, abg_init, max_iters = 8)
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS_8', abg_est, finish-start)
            # --- BFGS, iterations 9-16 (warm-started) ---
            start = time.clock()
            abg_est = BFGSEstimator(S, binnedTrain, abg_est,max_iters = 8)
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS_16', abg_est, finish-start)
            # --- BFGS, iterations 17-24 (warm-started) ---
            start = time.clock()
            abg_est = BFGSEstimator(S, binnedTrain, abg_est,max_iters = 8)
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS_24', abg_est, finish-start)
    D.closeFile()
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'