def test_set_model_attrs(self):
    from neuronunit.optimization.model_parameters import model_params
    from bluepyopt.deapext.optimisations import DEAPOptimisation
    DO = DEAPOptimisation()
    # Check that setnparams accepts every combination of parameter count
    # and starting key without raising.
    for i in range(1, 10):
        for j in range(1, 10):
            provided_keys = list(model_params.keys())[j]
            DO.setnparams(nparams=i, provided_keys=provided_keys)
def test_opt_set_GA_params(self):
    '''
    Test whether making the population size bigger, or increasing the number
    of generations the GA runs for, actually increases fitness.

    Note: only test for better fitness every 3rd count, because GAs are
    stochastic, and by chance fitness may occasionally fail to increase even
    though the GA has more genes to sample from.

    A fairer test of the GA would check whether the mean of mean fitness is
    improving, or use a sliding window (see the sketch after this method).
    '''
    import numpy as np
    from neuronunit.optimization.model_parameters import model_params
    from bluepyopt.deapext.optimisations import DEAPOptimisation
    DO = DEAPOptimisation()
    nparams = 5
    cnt = 0
    provided_keys = list(model_params.keys())[nparams]
    DO.setnparams(nparams=nparams, provided_keys=provided_keys)
    list_check_increase = []
    for NGEN in range(1, 30):
        for MU in range(5, 40):
            pop, hof, log, history, td, gen_vs_hof = DO.run(
                offspring_size=MU,
                max_ngen=NGEN,
                cp_frequency=4,
                cp_filename='checkpointedGA.p')
            # Assumes each hall-of-fame member's fitness reduces to a scalar.
            avgf = np.mean([h.fitness for h in hof])
            list_check_increase.append(avgf)

            if cnt == 0:
                # Seed the baseline; there is nothing to compare against yet.
                old = avgf
            elif cnt % 3 == 0:
                # Compare against the baseline recorded three counts ago,
                # then advance it.
                self.assertGreater(avgf, old)
                old = avgf

            cnt += 1
            print(old, cnt)
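# A minimal sketch of the sliding-window check suggested in the docstring of
# test_opt_set_GA_params: compare the mean of the most recent window of
# per-run fitness values (e.g. list_check_increase) against the window before
# it. `fitness_means` and `window` are illustrative names, not part of the
# neuronunit or bluepyopt APIs.
import numpy as np

def sliding_window_improving(fitness_means, window=3):
    # Need two full windows of history before a comparison is meaningful.
    if len(fitness_means) < 2 * window:
        return False
    recent = np.mean(fitness_means[-window:])
    previous = np.mean(fitness_means[-2 * window:-window])
    # Improving if the latest window's mean beats the preceding window's mean.
    return recent > previous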
# In[ ]:

from neuronunit.optimization import get_neab

# Fast-spiking basket cell (inhibitory interneuron); 'nifext_50' is kept as
# an alternative target cell.
fi_basket = {'nlex_id': 'NLXCELL:100201'}
neuron = {'nlex_id': 'nifext_50'}

error_criterion, inh_observations = get_neab.get_neuron_criteria(fi_basket)
print(error_criterion)

from bluepyopt.deapext.optimisations import DEAPOptimisation
from neuronunit.optimization.model_parameters import model_params
import pickle

# Illustrative settings; the original cell left these names undefined.
nparams = 5
provided_keys = list(model_params.keys())[nparams]
MU = 6    # offspring size
NGEN = 5  # number of generations

DO = DEAPOptimisation(error_criterion=error_criterion)
DO.setnparams(nparams=nparams, provided_keys=provided_keys)
pop, hof, log, history, td, gen_vs_hof = DO.run(offspring_size=MU,
                                                max_ngen=NGEN,
                                                cp_frequency=4,
                                                cp_filename='checkpointedGA.p')
with open('ga_dump.p', 'wb') as f:
    pickle.dump([pop, log, history, hof, td], f)
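# In[ ]:

# A minimal sketch of reloading the results for later inspection; the filename
# and the five-element list order mirror the pickle.dump call in the previous
# cell.
import pickle

with open('ga_dump.p', 'rb') as f:
    pop, log, history, hof, td = pickle.load(f)
print(len(hof))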

# In[ ]:

# Layer V pyramidal cell
