Example #1
def run_ga(model_params, npoints, nparams):
    # electro_tests is assumed to be defined at module scope.
    import pickle
    import numpy as np
    from neuronunit.optimization import exhaustive_search
    from neuronunit.optimization.exhaustive_search import reduce_params
    from bluepyopt.deapext.optimisations import DEAPOptimisation

    # Make the initial samples uniform on grid points: create a lattice with
    # the exhaustive-search module (3 is the minimum number of points needed
    # to define this hypercube). The keys name the hypercube's dimensions.
    grid_points = exhaustive_search.create_grid(npoints=1, nparams=nparams)
    td = list(grid_points[0].keys())
    subset = reduce_params(model_params, nparams)
    DO = DEAPOptimisation(error_criterion=electro_tests[0][0],
                          selection='selNSGA',
                          provided_dict=subset,
                          elite_size=3)
    MU = int(np.floor(npoints / 2.0))
    max_ngen = int(np.floor(nparams / 2.0))
    # Keep the GA's evaluation budget below that of the exhaustive grid.
    assert (MU * max_ngen) < (npoints * nparams)
    ga_out = DO.run(offspring_size=MU,
                    max_ngen=max_ngen,
                    cp_frequency=1,
                    cp_filename='regular.p')
    # ga_out unpacks as: pop, hof_py, pf, log, history, td_py, gen_vs_hof
    with open('all_ga_cell.p', 'wb') as f:
        pickle.dump(ga_out, f)
    return ga_out
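The assertion above caps the GA's evaluation budget below the figure the grid search would spend. A minimal sketch of the same arithmetic, in plain Python with illustrative numbers (no neuronunit dependencies):

import numpy as np

npoints, nparams = 10, 4                 # illustrative values
MU = int(np.floor(npoints / 2.0))        # 5 offspring per generation
max_ngen = int(np.floor(nparams / 2.0))  # 2 generations
ga_evals = MU * max_ngen                 # 10 GA model evaluations
grid_evals = npoints * nparams           # 40, the figure the assert compares against
assert ga_evals < grid_evals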
Example #2
    def test_set_model_attrs(self):
        from neuronunit.optimization.model_parameters import model_params
        from bluepyopt.deapext.optimisations import DEAPOptimisation
        DO = DEAPOptimisation()
        # Exercise setnparams across parameter counts (i) and key choices (j).
        # Note that indexing with [j] hands setnparams a single key name.
        for i in range(1, 10):
            for j in range(1, 10):
                provided_keys = list(model_params.keys())[j]
                DO.setnparams(nparams=i, provided_keys=provided_keys)
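A detail worth flagging: indexing a key list with `[j]` yields one key string, not a list of keys. If a list of the first few names were intended, slicing is the usual spelling. A quick illustration with a hypothetical dict standing in for the real `model_params`:

model_params = {'a': 1, 'b': 2, 'c': 3}  # stand-in for the real dict
keys = list(model_params.keys())
print(keys[1])   # 'b'        -- a single string
print(keys[:2])  # ['a', 'b'] -- a list of parameter names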
Example #3
    def test_opt_set_GA_params(self):
        '''
        Check whether making the population bigger, or letting the GA run
        for more generations, actually improves fitness.

        Only test for better fitness every third iteration, because GAs are
        stochastic: occasionally fitness will fail to increase purely by
        chance, even though the GA has more genes to sample from.

        A fairer test of the GA would check whether the mean of mean fitness
        is improving, or use a sliding window.
        '''
        import numpy as np
        from neuronunit.optimization.model_parameters import model_params
        from bluepyopt.deapext.optimisations import DEAPOptimisation
        DO = DEAPOptimisation()
        nparams = 5
        cnt = 0
        provided_keys = list(model_params.keys())[nparams]
        DO.setnparams(nparams=nparams, provided_keys=provided_keys)
        list_check_increase = []
        for NGEN in range(1, 30):
            for MU in range(5, 40):
                pop, hof, log, history, td, gen_vs_hof = DO.run(
                    offspring_size=MU,
                    max_ngen=NGEN,
                    cp_frequency=4,
                    cp_filename='checkpointedGA.p')
                avgf = np.mean([h.fitness for h in hof])
                list_check_increase.append(avgf)

                # Initialise the baselines on the first pass; on every later
                # third pass, require improvement and refresh the baselines.
                if cnt == 0:
                    old = avgf
                    old_avg = np.mean(list_check_increase)
                elif cnt % 3 == 0:
                    self.assertGreater(np.mean(list_check_increase), old_avg)
                    self.assertGreater(avgf, old)
                    old = avgf
                    old_avg = np.mean(list_check_increase)

                cnt += 1
                print(old, cnt)
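The docstring above suggests a fairer alternative: test whether a sliding-window mean of fitness improves, rather than comparing single noisy samples. A minimal sketch of that idea, using a toy fitness history in place of the `list_check_increase` collected above (window size is an illustrative choice):

import numpy as np

def window_means(fitness_history, window=5):
    # Mean fitness over each consecutive window of runs.
    return [np.mean(fitness_history[i:i + window])
            for i in range(len(fitness_history) - window + 1)]

# Toy history: noisy but improving, as a healthy GA should produce.
history = [0.1, 0.3, 0.2, 0.4, 0.5, 0.4, 0.6, 0.7, 0.6, 0.8]
means = window_means(history)
# Later windows should, on average, beat earlier ones.
assert np.mean(means[len(means) // 2:]) > np.mean(means[:len(means) // 2])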
Example #4
def run_ga(model_params, npoints, test, provided_keys=None, nr=None):
    # https://stackoverflow.com/questions/744373/circular-or-cyclic-imports-in-python
    # These imports need local scope to avoid circular-import problems.
    # Try to fix the local imports later.
    import numpy as np
    from bluepyopt.deapext.optimisations import DEAPOptimisation

    # Restrict the search space to the provided parameter keys.
    ss = {}
    for k in provided_keys:
        ss[k] = model_params[k]
    MU = int(np.floor(npoints))
    max_ngen = int(np.floor(npoints))
    DO = DEAPOptimisation(offspring_size=MU,
                          error_criterion=test,
                          selection='selNSGA',
                          provided_dict=ss,
                          elite_size=2)
    ga_out = DO.run(offspring_size=MU, max_ngen=max_ngen)
    #with open('all_ga_cell.p', 'wb') as f:
    #    pickle.dump(ga_out, f)
    return ga_out
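As a side note, the key-filtering loop in the body is equivalent to a single dict comprehension, which is the more idiomatic spelling:

ss = {k: model_params[k] for k in provided_keys}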
Example #5
# TODO move to unit testing

import copy
import timeit
from bluepyopt.deapext.optimisations import DEAPOptimisation

# electro_tests, pipe, pipe_results, model_params and MU are assumed to be
# defined above.
start_time = timeit.default_timer()
sel = ['selNSGA2', 'selIBEA']

# Cross every (test, observation) pair with every selection operator.
flat_iter = [(cnt, s, test, observation)
             for cnt, (test, observation) in enumerate(electro_tests)
             for s in sel]
print(flat_iter)

for (cnt, s, test, observation) in flat_iter:
    dic_key = str(list(pipe[cnt].values())[0])
    init_time = timeit.default_timer()
    DO = DEAPOptimisation(error_criterion=test,
                          selection=s,
                          provided_dict=model_params,
                          elite_size=3)
    package = DO.run(offspring_size=MU,
                     max_ngen=6,
                     cp_frequency=1,
                     cp_filename=dic_key + '.p')
    pop, hof, pf, log, history, td_py, gen_vs_hof = package
    finished_time = timeit.default_timer()
    # Store results per selection operator so the two runs on the same test
    # do not overwrite one another.
    pipe_results.setdefault(dic_key, {})
    pipe_results[dic_key][s] = {}
    pipe_results[dic_key][s]['duration'] = finished_time - init_time
    pipe_results[dic_key][s]['pop'] = copy.copy(pop)
    pipe_results[dic_key][s]['hof'] = copy.copy(hof[::-1])
    pipe_results[dic_key][s]['pf'] = copy.copy(pf[::-1])
    pipe_results[dic_key][s]['log'] = copy.copy(log)
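The `flat_iter` comprehension above is a plain cross product of tests and selection operators; the same pairing can be written with `itertools.product`. A self-contained sketch with hypothetical stand-ins for `electro_tests` and `sel`:

import itertools

electro_tests = [('testA', 'obsA'), ('testB', 'obsB')]  # stand-in data
sel = ['selNSGA2', 'selIBEA']

flat_iter = [(cnt, s, test, observation)
             for (cnt, (test, observation)), s
             in itertools.product(enumerate(electro_tests), sel)]
print(flat_iter)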
Example #6
import os
import pickle
from bluepyopt.deapext.optimisations import DEAPOptimisation

# electro_path and model_params are assumed to be defined above.
assert os.path.isfile(electro_path)
with open(electro_path, 'rb') as f:
    electro_tests = pickle.load(f)

MU = 6
NGEN = 6
CXPB = 0.9
USE_CACHED_GA = False
USE_CACHED_GS = False

npoints = 2
nparams = 10

test = electro_tests[0][0]

DO = DEAPOptimisation(error_criterion=test,
                      selection='selIBEA',
                      nparams=nparams,
                      provided_dict=model_params)
# Alternative backend, kept from the original for reference:
#DO = DEAPOptimisation(error_criterion=test, selection='selIBEA', backend='glif')
pop, hof_py, log, history, td_py, gen_vs_hof = DO.run(
    offspring_size=MU,
    max_ngen=NGEN,
    cp_frequency=0,
    cp_filename='ga_dumpnifext_50.p')
Example #7
# If any time is left over, may as well compute a more accurate grid, to better quantify GA performance in the future.

import pickle

from neuronunit.optimization import get_neab
fi_basket = {'nlex_id': 'NLXCELL:100201'}
neuron = {'nlex_id': 'nifext_50'}

# Build the error criterion for the chosen cell type.
error_criterion, inh_observations = get_neab.get_neuron_criteria(fi_basket)
print(error_criterion)

from bluepyopt.deapext.optimisations import DEAPOptimisation

# nparams, provided_keys, MU and NGEN are assumed to be defined earlier.
DO = DEAPOptimisation(error_criterion=error_criterion)
DO.setnparams(nparams=nparams, provided_keys=provided_keys)
pop, hof, log, history, td, gen_vs_hof = DO.run(offspring_size=MU,
                                                max_ngen=NGEN,
                                                cp_frequency=4,
                                                cp_filename='checkpointedGA.p')
with open('ga_dump.p', 'wb') as f:
    pickle.dump([pop, log, history, hof, td], f)

# Layer V pyramidal cell
Example #8
def pipe_differs(pipe_old, pipe_new):
    # Reconstructed header: only the tail of this helper survived in the
    # original snippet. It reports whether any value changed between runs.
    changed = False
    for key, value in pipe_old.items():
        if value != pipe_new[key]:
            changed = True
        print(value, pipe_new[key])

    return changed


import copy
import timeit
from bluepyopt.deapext.optimisations import DEAPOptimisation

# electro_tests, pipe, model_params, MU and NGEN are assumed to be defined above.
start_time = timeit.default_timer()
# code you want to evaluate

pipe_results = {}
for cnt, (test, observation) in enumerate(electro_tests):
    dic_key = str(list(pipe[cnt].values())[0])
    init_time = timeit.default_timer()
    DO = DEAPOptimisation(error_criterion=test,
                          selection='selIBEA',
                          provided_dict=model_params)
    package = DO.run(offspring_size=MU,
                     max_ngen=NGEN,
                     cp_frequency=4,
                     cp_filename=dic_key + '.p')
    pop, hof, log, history, td_py, gen_vs_hof = package
    finished_time = timeit.default_timer()
    pipe_results[dic_key] = {}
    pipe_results[dic_key]['duration'] = finished_time - init_time
    pipe_results[dic_key]['pop'] = copy.copy(pop)
    pipe_results[dic_key]['hof'] = copy.copy(hof)
    pipe_results[dic_key]['log'] = copy.copy(log)
    pipe_results[dic_key]['history'] = copy.copy(history)
    pipe_results[dic_key]['td_py'] = copy.copy(td_py)
Example #9
from bluepyopt.deapext.optimisations import DEAPOptimisation

CXPB = 0.9

USE_CACHED_GA = False
provided_keys = list(model_params.keys())

npoints = 2
nparams = 10

electro_tests = pre_process(electro_tests)

# model_params, pipe, pre_process, MU and NGEN are assumed to be defined above.
pipe_results = {}
for cnt, (test, observation) in enumerate(electro_tests):
    dic_key = str(list(pipe[cnt].values())[0])
    DO = DEAPOptimisation(error_criterion=test,
                          selection='selIBEA',
                          provided_dict=model_params)
    pop, hof_py, log, history, td_py, gen_vs_hof = DO.run(
        offspring_size=MU,
        max_ngen=NGEN,
        cp_frequency=1,
        cp_filename=dic_key + '.p')
    #with open(dic_key + '.p', 'rb') as f:
    #    check_point = pickle.load(f)

    pipe_results[dic_key] = {}
    pipe_results[dic_key]['pop'] = pop            # check_point['population']
    pipe_results[dic_key]['hof_py'] = hof_py
    pipe_results[dic_key]['log'] = log            # check_point['logbook']
    pipe_results[dic_key]['history'] = history    # check_point['history']
    pipe_results[dic_key]['td_py'] = td_py
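The commented-out lines above hint at resuming from the checkpoint pickle instead of rerunning. Based only on the key names that appear in the trailing comments ('population', 'logbook', 'history'), a restore might look like the sketch below; treat the key names and file name as assumptions about the checkpoint format, not a documented API:

import pickle

dic_key = 'some_test'  # hypothetical: one of the keys written by the loop above
with open(dic_key + '.p', 'rb') as f:
    check_point = pickle.load(f)

pop = check_point['population']   # assumed key, from the comments above
log = check_point['logbook']      # assumed key
history = check_point['history']  # assumed key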