Ejemplo n.º 1
0
def MLEEstimate(N_spikes = 100, N_trains=1 ):
    N_phi_init = 8;
    
    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_N=%d_'%N_spikes

#    old_table_name = 'FinalEstimate_4x100_N=%d'%(N_spikes)
    new_table_file_name = 'MLEEstimate_4x100_N=%d'%(N_spikes)
    dHarvester = DataHarvester(new_table_file_name,                           
                               overwrite=False)
    print 'loading ', new_table_file_name
    
    for regime_name, T_thresh, N_phi in zip(['subT','superT', 'crit', 'superSin'],
                                             [128., 64., 64., 64.],
                                             4*[32]):
        regime_label = base_name + regime_name
            
        for sample_id in xrange(1,N_trains +1):
            file_name = regime_label + '_' + str(sample_id)
            print file_name
            
            #RELOAD ALL DATA:               
            mleBinnedTrain = MLEBinnedSpikeTrain.initFromFile(file_name,
                                                              N_phi)
            ps = mleBinnedTrain ._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            dHarvester.setRegime(regime_name, abg_true, Tsim=-1.0)
            
            abg_init = dHarvester.getEstimates(sample_id, 
                                              regime_name,
                                              'Initializer')
            #MLE F-P:
            dx = .05; dt = .05; 
            phis = mleBinnedTrain.phi_ms;
            S = FPMultiPhiSolver(mleBinnedTrain.theta, phis,
                                 dx, dt,
                                 mleBinnedTrain.getTf(), X_min = -1.0)            
            
            start = time.clock()
            abg_neldermead = MLEEstimator(S, mleBinnedTrain, abg_init)
            finish = time.clock()
            
            print abg_neldermead, finish-start
            dHarvester.addEstimate(sample_id, 'MLE_nm32',
                                   abg_neldermead, finish-start, 0)
                    
    dHarvester.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 2
0
def CvsPyEstimate():
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_N=1000_'

    D = DataHarvester('CvsPY_2x4')
    for regime_name, T_thresh in zip(['subT', 'superSin'],
                                                       [32, 16.]):
        regime_label = base_name + regime_name
            
        for sample_id in xrange(1,4):
            file_name = regime_label + '_' + str(sample_id)
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, Tsim=-1.0)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
             
            start = time.clock()
            abg_init = initialize_right_2std(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start) 
             
            dx = .025; dt = FPMultiPhiSolver.calculate_dt(dx, abg_true, -1.0)
            
            phis = binnedTrain.bins.keys();
            theta = binnedTrain.theta
            
            S = FPMultiPhiSolver(theta, phis,
                                 dx, dt,
                                 Tf, X_min = -1.0)

            start = time.clock()
            abg_est = cNMEstimator(S, binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FP-C', abg_est, finish-start)
               
            start = time.clock()
            abg_est = NMEstimator(S, binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FP-PY', abg_est, finish-start) 
            
        
    D.closeFile()
Ejemplo n.º 3
0
def ThetaEstimate(N_spikes = 1000, N_trains=100, N_phi=20, 
                  thetas = [1, 5, 10, 20]):
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()
    base_name = 'sinusoidal_spike_train_N=%d_critical_theta='%N_spikes

    T_thresh = 64.
    
    D = DataHarvester('ThetaEstimate_%dx%d_N=%d'%(len(thetas),N_trains,N_spikes))
    for sample_id in xrange(1,N_trains +1):
        for theta in thetas:    
            regime_name = 'theta%d'%theta
            regime_label = base_name + '%d'%theta            
            file_name = regime_label + '_%d'%sample_id
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, Tsim=-1.0)
            
            binnedTrain.pruneBins(None, N_thresh = 5, T_thresh=T_thresh)
            D.addSample(sample_id, binnedTrain.getTf(), binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
                        
            abg_init = initialize_right_2std(binnedTrain)
            abg_init[1] = amax([.1, abg_init[1]])
            abg_init[2] = amax([.0, abg_init[2]])
            D.addEstimate(sample_id, 'Initializer', abg_init,.0, warnflag = 0) 
                       
            #RELOAD ALL DATA:               
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            binnedTrain.pruneBins(None, N_thresh = 1, T_thresh=T_thresh)
            
            #Weighted Fortet:            
            start = time.clock()
            abg_est, warnflag = FortetEstimatorSup(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Fortet', abg_est, finish-start, warnflag)
            
            #Weighted F-P:
            dx = .025; dt = FPMultiPhiSolver.calculate_dt(dx, abg_true, -1.0) 
            phis = binnedTrain.bins.keys();
            S = FPMultiPhiSolver(binnedTrain.theta, phis,
                                 dx, dt,
                                 binnedTrain.getTf(), X_min = -1.0)            
            
            start = time.clock()
            abg_est, warnflag = FPSupEstimator(S, binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FP', abg_est, finish-start, warnflag)
                    
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 4
0
def FP_L2_vs_Sup(N_spikes = 1000, N_trains=20):
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_N=%d_'%N_spikes

    D = DataHarvester('FPvsWFP_4x%d_N=%d'%(N_trains,N_spikes))
    for regime_name, T_thresh in zip(['subT','superT', 'crit', 'superSin'],
                                     4*[64.]):
        regime_label = base_name + regime_name
            
        for sample_id in xrange(1,N_trains +1):
            file_name = regime_label + '_' + str(sample_id)
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, Tsim=-1.0)
            
            #### N_thresh = 10
            binnedTrain.pruneBins(None, N_thresh = 10, T_thresh=T_thresh)
            D.addSample(sample_id, binnedTrain.getTf(), binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
                        
            abg_init = initialize_right_2std(binnedTrain)
            abg_init[1] = amax([.1, abg_init[1]])
            abg_init[2] = amax([.0, abg_init[2]])
            D.addEstimate(sample_id, 'init_N10', abg_init,.0) 


            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
                       
            dx = .025; dt = FPMultiPhiSolver.calculate_dt(dx, abg_true, -1.0)            
            theta = binnedTrain.theta            
            binnedTrain.pruneBins(None, N_thresh = 1, T_thresh=T_thresh)

            phis = binnedTrain.bins.keys();

            S = FPMultiPhiSolver(theta, phis,
                                 dx, dt,
                                 binnedTrain.getTf(), X_min = -1.0)            
            
            start = time.clock()
            abg_est = FPL2Estimator(S,binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FP_L2', abg_est, finish-start)
            
            start = time.clock()
            abg_est = FPSupEstimator(S,binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FP_Sup', abg_est, finish-start)
                    
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 5
0
def Fortet_SupVsL2(N_spikes = 1000, N_trains = 16):
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_N=%d_'%N_spikes

    D = DataHarvester('Fortet_SupVsL2_4x%d'%N_trains)
#    for regime_name, T_thresh in zip(['subT', 'crit', 'superSin', 'superT'],
#                                     [64., 64, 32., 32.]):
    for regime_name, T_thresh in zip(['crit', 'superSin', 'superT'],
                                     [64, 32., 32.]):
        regime_label = base_name + regime_name
        
            
        for sample_id in xrange(1,N_trains+1):
            file_name = regime_label + '_' + str(sample_id)
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, Tsim=-1.0)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 10, T_thresh=T_thresh)
            D.addSample(sample_id, binnedTrain.getTf(), binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
                
            abg_init = initialize_right_2std(binnedTrain)
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            
            start = time.clock()
            abg_est = FortetEstimatorL2(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FortetL2', abg_est, finish-start)
            print abg_est, ' | %.2f'%(finish-start)
            
            start = time.clock()
            abg_est = FortetEstimatorSup(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'FortetSup', abg_est, finish-start)
            print abg_est, ' | %.2f'%(finish-start)
        
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 6
0
def FortetVsWeightedFortet():
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_N=1000_'

    D = DataHarvester('FvsWF_4x16')
    for regime_name, T_thresh in zip(['superT', 'subT', 'crit', 'superSin'],
                                                       [6., 64, 32., 32.]):
        regime_label = base_name + regime_name
            
        for sample_id in xrange(1,17):
            file_name = regime_label + '_' + str(sample_id)
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, Tsim=-1.0)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 10, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
             
            start = time.clock()
            abg_init = initialize_right_2std(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start) 
            
            start = time.clock()
            abg_est = FortetEstimator(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Fortet10', abg_est, finish-start)
            
            start = time.clock()
            abg_est = WeightedFortetEstimator(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'WeghtedFortet', abg_est, finish-start)
            
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            start = time.clock()
            abg_est = FortetEstimator(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Fortet64', abg_est, finish-start)
            
        
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 7
0
def BatchGradedNMEstimator():
    """Run the graded Nelder-Mead estimator (and plain Fortet, for
    comparison) on the sub-threshold regime, samples 4..16, logging
    estimates and timings through a DataHarvester."""
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_T='

    # NOTE(review): every other harvester in this file is constructed with a
    # single table name; the two-argument form here is presumably
    # (new_table, old_table) -- confirm against DataHarvester's signature.
    D = DataHarvester('GradedNMx16', 'GradedNM_SubTx16')
    N_thresh = 32
    # Single-regime batch: only 'subT' with T_sim=20000, T_thresh=32.
    for regime_name, T_sim, T_thresh in zip(['subT'],
                                                       [20000],
                                                       [32.]):
        regime_label = base_name + str(T_sim)+ '_' + regime_name
            
        # Samples start at 4 (not 1) -- presumably 1..3 were processed in an
        # earlier run; confirm before relying on full coverage.
        for sample_id in xrange(4,17):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            # NOTE(review): reads _Path._params while the sibling functions use
            # _Train._params -- presumably because these are '.path' files;
            # verify against BinnedSpikeTrain.initFromFile.
            ps = binnedTrain._Path._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = N_thresh, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
        
            # Time and record the 5-point initializer:
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start) 
                    
            # Graded Nelder-Mead, started from the initializer:
            start = time.clock()
            abg_est = GradedNMEstimator(file_name, phi_norms, abg_init, T_thresh, N_thresh)
            finish = time.clock()
            D.addEstimate(sample_id, 'Graded_Nelder-Mead', abg_est, finish-start) 
            
            # Plain Fortet on the same pruned train, same start point:
            start = time.clock()
            abg_est = FortetEstimator(binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Fortet', abg_est, finish-start)
        
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 8
0
def NelderMeadSubTEstimator():
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_T='

    D = DataHarvester('SubT_NMx16_refined_sim_dt')
    for regime_name, T_sim, T_thresh in zip(['subT'],
                                           [20000],
                                           [32.]):

        regime_label = base_name + str(T_sim)+ '_' + regime_name
            
        for sample_id in xrange(1,17):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
             
            dx = .04; dt = FPMultiPhiSolver.calculate_dt(dx, 4., 2.)
        
            phis = binnedTrain.bins.keys();
            theta = binnedTrain.theta
            
            S = FPMultiPhiSolver(theta, phis,
                                 dx, dt,
                                 Tf, X_MIN = -.5)
        
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start) 

            abg_init = abs(abg_init)            
            start = time.clock()
            abg_est = NMEstimator(S, binnedTrain, abg_init)
            finish = time.clock()
            D.addEstimate(sample_id, 'Nelder-Mead', abg_est, finish-start) 
        
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 9
0
def BFGSItersComparison():
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_T='

    D = DataHarvester('BFGS_Iters')
    for regime_name, T_sim, T_thresh in zip(['crit', 'superSin'],
                                                       [5000, 5000],
                                                       [16., 16.]):

        regime_label = base_name + str(T_sim)+ '_' + regime_name
            
        for sample_id in xrange(1,4):
            file_name = regime_label + '_' + str(sample_id) + '.path'
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, T_sim)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
             
            dx = .025; dt = FPMultiPhiSolver.calculate_dt(dx, 5., 2.)
        
            phis = binnedTrain.bins.keys();
            theta = binnedTrain.theta
            
            S = FPMultiPhiSolver(theta, phis,
                                 dx, dt,
                                 Tf, X_MIN = -2.0)
        
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Initializer', abg_init, finish-start) 
                    
            start = time.clock()
            abg_est = BFGSEstimator(S, binnedTrain, abg_init, max_iters = 8)
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS_8', abg_est, finish-start) 
            
            start = time.clock()
            abg_est = BFGSEstimator(S, binnedTrain, abg_est,max_iters = 8)
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS_16', abg_est, finish-start)
            
            start = time.clock()
            abg_est = BFGSEstimator(S, binnedTrain, abg_est,max_iters = 8)
            finish = time.clock()
            D.addEstimate(sample_id, 'BFGS_24', abg_est, finish-start)
            
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 3600.0, ' hrs'
Ejemplo n.º 10
0
def BatchInit5_vs_InitRightstd():
    N_phi = 20;
    print 'N_phi = ', N_phi
    
    phi_norms =  linspace(1/(2.*N_phi), 1. - 1/ (2.*N_phi), N_phi)

    batch_start = time.clock()    
    base_name = 'sinusoidal_spike_train_N=1000_'

    D = DataHarvester('InitComparison4x100_N100')
    for regime_name, T_thresh in zip(['superT', 'subT', 'crit', 'superSin'],
                                                       [4., 32, 16., 16.]):
        regime_label = base_name + regime_name
            
        for sample_id in xrange(1,17):
            file_name = regime_label + '_' + str(sample_id)
            print file_name
            
            binnedTrain = BinnedSpikeTrain.initFromFile(file_name, phi_norms)
            ps = binnedTrain._Train._params
            abg_true = array((ps._alpha, ps._beta, ps._gamma))
            D.setRegime(regime_name,abg_true, -.1)
            
            phi_omit = None
            binnedTrain.pruneBins(phi_omit, N_thresh = 64, T_thresh=T_thresh)
            Tf = binnedTrain.getTf()
            D.addSample(sample_id, Tf, binnedTrain.getBinCount(), binnedTrain.getSpikeCount())
        
            start = time.clock()
            abg_init = initialize5(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Init5pts', abg_init, finish-start) 
            
            start = time.clock()
            abg_init = initialize_right_2std(binnedTrain)
            finish = time.clock()
            D.addEstimate(sample_id, 'Init_right_2std', abg_init, finish-start) 
                                
    D.closeFile() 
   
    print 'batch time = ', (time.clock() - batch_start) / 60.0, ' mins'