Example #1
0
###################### END CUSTOMIZATION #######################################

# Build the fitness function that scores each simulation by comparing the
# concentration traces of the listed molecular species against experiment.
fitness = nrd_fitness.specie_concentration_fitness(species_list=mol)

############ Test fitness function
#sim = aju.xml.NeurordSimulation('/tmp', model=model, params=params)
#cp /tmp/???/model.h5 modelname.split('.')[0]+'.h5'
#sim2=aju.xml.NeurordResult('Model_syngap_ras.h5')
#print(fitness(sim2, exp))
################

# Fit object for a NeuroRD (reaction-diffusion) optimization: tmpdir holds
# simulation output, exp is the experimental data, model_set names the model.
# None fills the slot that neuron-type fits pass `ntype` in — not used here.
fit = aju.optimize.Fit(tmpdir, exp, model_set, None, fitness, params,
                       _make_simulation=aju.xml.NeurordSimulation.make,
                       _result_constructor=aju.xml.NeurordResult)
fit.load()  # reload any previously completed simulations from tmpdir
fit.do_fit(iterations, popsize=popsize,sigma=1.0,seed=seed)
# Keep optimizing in rounds until the convergence criteria in `converge` are met.
mean_dict,std_dict,CV=converge.iterate_fit(fit,test_size,popsize)

########################################### Done with fitting

#to look at centroid [0] or stdev [6] of cloud of good results:
# optimizer.result()[0] is the optimizer centroid and [6] the per-parameter
# stdev; unscale() maps both back to natural parameter units.
for i,p in enumerate(fit.params.unscale(fit.optimizer.result()[0])):
    print(fit.param_names()[i],'=',p, '+/-', fit.params.unscale(fit.optimizer.result()[6])[i])

#to look at fit history
aju.drawing.plot_history(fit,fit.measurement)

# Save good-result parameters: skip the first `startgood` evaluations and keep
# fits below `threshold` (0 / large value = keep everything) — per the
# comments on the other examples in this file; confirm against save_params.
startgood=0
threshold=20
save_params.save_params(fit, startgood, threshold)
Example #2
0
            # set-up and do the optimization
            fit = fc.fit_commands(dirname,
                                  exp_to_fit,
                                  modeltype,
                                  ntype,
                                  fitness,
                                  params1,
                                  generations,
                                  popsiz,
                                  seed,
                                  test_size,
                                  map_func=None)
            if test_size > 0:
                # Iterate the optimization until the fitness stdev and slope
                # meet the convergence criteria or the evaluation budget ends.
                mean_dict, std_dict, CV = converge.iterate_fit(
                    fit,
                    test_size,
                    popsiz,
                    std_crit=0.01,
                    slope_crit=1e-3,
                    max_evals=100000)

            ###########look at results
            #from ajustador import drawing
            #drawing.plot_history(fit, fit.measurement)

            startgood = 0  #set to 0 to print all
            threshold = 10  #set to large number to print all
            # BUG FIX: the fit object created above is named `fit`, not `fit1`;
            # `fit1` is undefined here and raised NameError.
            save_params.save_params(fit, startgood, threshold)
            #save_params.persist(fit,'.')
Example #3
0
                        modeltype, ntype,
                        fitness, params,
                        _make_simulation=aju.optimize.MooseSimulation.make,
                        _result_constructor=aju.optimize.MooseSimulationResult)

fit.load()  # reload any previously completed simulations from tmpdir

fit.do_fit(generations, popsize=popsiz, seed=seed)

startgood=1000  #set to 0 to print all
threshold=0.8  #set to large number to print all
s_crt = 2E-3    # slope convergence criterion passed to iterate_fit
max_eval = 5000  # evaluation budget per convergence round

# Interactive refinement loop: run a convergence round, save good parameters,
# optionally plot the history, then let the user tighten the criteria and
# continue or quit.
while(True):
    mean_dict,std_dict,CV=converge.iterate_fit(fit,test_size,popsiz, slope_crit=s_crt, max_evals=max_eval)
    save_params.save_params(fit, startgood, threshold)
    char = input("plot_history opt (Y/N):")
    if char.upper() == 'Y':
        drawing.plot_history(fit, fit.measurement)
    char = input("Continue opt (Y/N):")
    if char.upper() == 'N':
        break
    else:
        s_crt = np.float32(input("slope_criteria: old criterion is {}?".format(s_crt)))
        # BUG FIX: np.long was deprecated in NumPy 1.20 and removed in 1.24;
        # it was only an alias for the builtin int, so parse with int().
        max_eval = int(input("Maximum evaluations must be > {}?".format(len(fit))))
        continue
#Save parameters of good results from end of optimization, and all fitness values
#startgood=1000  #set to 0 to print all
#threshold=0.8  #set to large number to print all
#save_params.save_params(fit, startgood, threshold)
Example #4
0
# Per-optimization working directory named from data set, seed, and
# population size; chdir so all fit output lands there.
dirname = 'cond_' + dataname + str(seed) + str(popsiz)
# Idiom fix: `dirname not in ...` instead of `not dirname in ...`.
if dirname not in os.listdir(rootdir):
    os.mkdir(rootdir + dirname)
os.chdir(rootdir + dirname)

######## set up parameters and fitness to be used for all opts  ############
params1, fitness = pfc.params_fitness(morph_file, ntype, modeltype)

# set-up and do the optimization
fit1 = fit_commands.fit_commands(dirname, exp_to_fit, modeltype, ntype,
                                 fitness, params1, generations, popsiz, seed,
                                 test_size)
if test_size > 0:
    # Iterate until the fitness stdev meets std_crit or max_evals is reached.
    mean_dict, std_dict, CV = converge.iterate_fit(fit1,
                                                   test_size,
                                                   popsiz,
                                                   std_crit=0.02,
                                                   max_evals=12000)

###########look at results
drawing.plot_history(fit1, fit1.measurement)

#Save parameters of good results toward the end, and all fitness values
startgood = 0  #set to 0 to print/save all
threshold = 10  #set to high value to print/save all

save_params.save_params(fit1, startgood, threshold)
#save_params.persist(fit1,'.')
'''
Repeat arky120, arky140, proto079,proto122 optimizations with channel kinetics allowed to vary 
but with limitations to axonal conductance parameters described in Lindroos
Example #5
0
                                         spike_range_y_histogram=1)

########### Neuron and fit specific commands ############
# Fit object for one neuron: tmpdir holds simulation output, exp_to_fit the
# experimental traces selected above; candidate models are evaluated via the
# MOOSE simulation/result classes.
fit1 = aju.optimize.Fit(tmpdir,
                        exp_to_fit,
                        modeltype,
                        ntype,
                        fitness,
                        params1,
                        _make_simulation=aju.optimize.MooseSimulation.make,
                        _result_constructor=aju.optimize.MooseSimulationResult)

fit1.load()  # reload any previously completed simulations from tmpdir

fit1.do_fit(generations, popsize=popsiz, seed=seed)
# Keep optimizing in rounds until the convergence criteria in `converge` are met.
mean_dict1, std_dict1, CV1 = converge.iterate_fit(fit1, test_size, popsiz)

#look at results
drawing.plot_history(fit1, fit1.measurement)

#Save parameters of good results toward the end, and all fitness values
startgood = 1500  #set to 0 to print all
threshold = 0.4  #set to large number to print all

save_params.save_params(fit1, startgood, threshold)
#save_params.persist(fit1,'.')

################## Next neuron #############
# Select traces 0, 1, 4 of the 2-second recordings for cell proto122 as the
# next data set to fit.
dataname = 'proto122'
exp_to_fit = gpe.data[dataname + '-2s'][[0, 1, 4]]
Example #6
0
                                         spike_ahp=1,
                                         ahp_curve=4,
                                         charging_curve=1,
                                         spike_range_y_histogram=1)

########### Neuron and fit specific commands ############
# Fit object for one neuron: tmpdir holds simulation output, exp_to_fit the
# experimental traces; candidate models are evaluated via the MOOSE
# simulation/result classes.
fit1 = aju.optimize.Fit(tmpdir,
                        exp_to_fit,
                        modeltype,
                        ntype,
                        fitness,
                        params1,
                        _make_simulation=aju.optimize.MooseSimulation.make,
                        _result_constructor=aju.optimize.MooseSimulationResult)

fit1.load()  # reload any previously completed simulations from tmpdir

fit1.do_fit(generations, popsize=popsiz, seed=seed)
# Keep optimizing in rounds until the convergence criteria in `converge` are met.
mean_dict, std_dict, CV = converge.iterate_fit(fit1, test_size, popsiz)

#look at results
drawing.plot_history(fit1, fit1.measurement)

#Save parameters of good results from end of optimization, and all fitness values
startgood = 1000  #set to 0 to print all
threshold = 0.8  #set to large number to print all
save_params.save_params(fit1, startgood, threshold)

#to save the fit object
#save_params.persist(fit1,'.')
Example #7
0
                                         spike_range_y_histogram=1)

########### Neuron and fit specific commands ############
# Fit object for one neuron: tmpdir holds simulation output, exp_to_fit the
# experimental traces; candidate models are evaluated via the MOOSE
# simulation/result classes.
fit1 = aju.optimize.Fit(tmpdir,
                        exp_to_fit,
                        modeltype,
                        ntype,
                        fitness,
                        params1,
                        _make_simulation=aju.optimize.MooseSimulation.make,
                        _result_constructor=aju.optimize.MooseSimulationResult)

fit1.load()  # reload any previously completed simulations from tmpdir

fit1.do_fit(generations, popsize=popsiz, seed=seed)
# Keep optimizing in rounds until the convergence criteria in `converge` are met.
mean_dict1, std_dict1, CV1 = converge.iterate_fit(fit1, test_size, popsiz)

#look at results
drawing.plot_history(fit1, fit1.measurement)

#Save parameters of good results toward the end, and all fitness values
startgood = 1500  #set to 0 to print all
threshold = 0.40  #median
save_params.save_params(fit1, startgood, threshold)
#save_params.persist(fit1,'.')

################## Next neuron #############
# Select traces 0, 2, 4 of the 2-second recordings for cell proto154 as the
# next data set, and build its working-directory name from the seed.
dataname = 'proto154'
exp_to_fit = gpe.data[dataname + '-2s'][[0, 2, 4]]

dirname = dataname + 'F_' + str(seed)
Example #8
0
#sim = aju.xml.NeurordSimulation('/tmp', model=model, params=params)
#sim2=aju.xml.NeurordResult('Model_syngap_ras.h5')
#print(fitness(sim2, exp))
################

# Fit object for a NeuroRD (reaction-diffusion) optimization: tmpdir holds
# simulation output, exp is the experimental data, model_set names the model.
# None fills the slot that neuron-type fits pass `ntype` in — not used here.
fit = aju.optimize.Fit(tmpdir,
                       exp,
                       model_set,
                       None,
                       fitness,
                       params,
                       _make_simulation=aju.xml.NeurordSimulation.make,
                       _result_constructor=aju.xml.NeurordResult)
fit.load()  # reload any previously completed simulations from tmpdir
fit.do_fit(iterations, popsize=popsize, sigma=0.3)
# Keep optimizing in rounds until the convergence criteria in `converge` are met.
mean_dict, std_dict, CV = converge.iterate_fit(fit, test_size, popsize)

########################################### Done with fitting

#to look at fit history
aju.drawing.plot_history(fit, fit.measurement)

#print centroid [0] and stdev [6] of cloud of good results:
# optimizer.result()[0] is the optimizer centroid and [6] the per-parameter
# stdev; unscale() maps both back to natural parameter units.
for i, p in enumerate(fit.params.unscale(fit.optimizer.result()[0])):
    print(fit.param_names()[i], '=', p, '+/-',
          fit.params.unscale(fit.optimizer.result()[6])[i])

# startgood=0, threshold=1: save all evaluations with fitness below 1.
save_params.save_params(fit, 0, 1)

########################################## Next model
model_set = 'Model-CKnew-ss'  #uses 6 parameter optimization