    def test_set_model_attrs(self):
        from neuronunit.optimization.model_parameters import model_params
        from bluepyopt.deapext.optimisations import DEAPOptimisation
        DO = DEAPOptimisation()
        for i in range(1, 10):
            for j in range(1, 10):
                # Provide the first j free parameter names as the keys to set.
                provided_keys = list(model_params.keys())[0:j]
                DO.setnparams(nparams=i, provided_keys=provided_keys)
    def test_grid_dimensions(self):
        from neuronunit.optimization import exhaustive_search
        from neuronunit.optimization.optimization_management import map_wrapper
        import dask.bag as db
        for i in range(1, 10):
            for j in range(1, 10):
                grid_points = exhaustive_search.create_grid(npoints=i,
                                                            nparams=j)
                # Only the first two grid points are mapped, so both code
                # paths should return one data transport container per
                # sampled point.
                sample = grid_points[0:2]
                b0 = db.from_sequence(sample, npartitions=8)
                dtcpop = list(
                    db.map(exhaustive_search.update_dtc_grid, b0).compute())
                self.assertNotEqual(dtcpop, None)
                self.assertEqual(len(sample), len(dtcpop))
                dtcpop_compare = map_wrapper(exhaustive_search.update_dtc_grid,
                                             sample)
                self.assertNotEqual(dtcpop_compare, None)
                self.assertEqual(len(dtcpop_compare), len(dtcpop))
                # The map_wrapper and dask.bag results should agree
                # attribute by attribute.
                for index, dtc in enumerate(dtcpop):
                    for k, v in dtcpop_compare[index].attrs.items():
                        self.assertEqual(dtc.attrs[k], v)
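For intuition about how these grid sweeps scale, the sketch below builds a Cartesian-product grid by hand. It assumes create_grid samples npoints values per free parameter, so the full grid has npoints ** nparams entries; the helper and parameter names here are illustrative, not part of neuronunit.

# Minimal sketch of a Cartesian-product parameter grid (illustrative only;
# assumes each of nparams parameters is sampled at npoints values).
import numpy as np
from itertools import product

def sketch_grid(param_ranges, npoints):
    # param_ranges: dict mapping parameter name -> (lower, upper) bound.
    samples = {k: np.linspace(lo, hi, npoints)
               for k, (lo, hi) in param_ranges.items()}
    keys = list(samples.keys())
    return [dict(zip(keys, point))
            for point in product(*(samples[k] for k in keys))]

grid = sketch_grid({'a': (0.0, 1.0), 'b': (0.0, 2.0), 'c': (-1.0, 1.0)},
                   npoints=2)
assert len(grid) == 2 ** 3   # npoints ** nparams grid points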
    def test_opt_set_GA_params(self):
        '''
        Test whether making the population size bigger, or increasing the
        number of generations the GA runs for, actually increases the fitness.

        Note: only test for better fitness every third count, because GAs are
        stochastic, and by chance fitness may occasionally fail to increase
        even though the GA has more genes to sample from.

        A fairer test of the GA would probably check whether the mean of mean
        fitness is improving, or use a sliding window.
        '''
        import numpy as np
        from neuronunit.optimization.model_parameters import model_params
        from bluepyopt.deapext.optimisations import DEAPOptimisation
        DO = DEAPOptimisation()
        nparams = 5
        cnt = 0
        # Restrict the optimisation to the first nparams free parameters.
        provided_keys = list(model_params.keys())[0:nparams]
        DO.setnparams(nparams=nparams, provided_keys=provided_keys)
        list_check_increase = []
        for NGEN in range(1, 30):
            for MU in range(5, 40):
                pop, hof, log, history, td, gen_vs_hof = DO.run(
                    offspring_size=MU,
                    max_ngen=NGEN,
                    cp_frequency=4,
                    cp_filename='checkpointedGA.p')
                avgf = np.mean([h.fitness for h in hof])
                list_check_increase.append(avgf)

                if cnt == 0:
                    # Initialise the baselines on the first iteration.
                    old = avgf
                    old_avg = np.mean(list_check_increase)
                elif cnt % 3 == 0:
                    # Only compare every third count; a GA is stochastic, so
                    # fitness is not guaranteed to improve on every single run.
                    self.assertGreater(np.mean(list_check_increase), old_avg)
                    self.assertGreater(avgf, old)
                    old = avgf
                    old_avg = np.mean(list_check_increase)

                cnt += 1
                print(old, cnt)
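The docstring above notes that a fairer check would track whether the mean of mean fitness improves, for example over a sliding window. A minimal sketch of that idea follows; window_improving is a hypothetical helper, not part of neuronunit or bluepyopt.

# Sliding-window check of mean fitness (hypothetical helper, illustrating
# the idea described in the docstring above).
import numpy as np

def window_improving(fitness_history, window=3):
    # Too little data to form two full windows: make no claim yet.
    if len(fitness_history) < 2 * window:
        return True
    recent = np.mean(fitness_history[-window:])
    previous = np.mean(fitness_history[-2 * window:-window])
    return recent > previous

In the loop above this could replace the every-third-count comparison: append avgf to list_check_increase as before and assert that window_improving(list_check_increase) holds.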
Example #7
def grid_points():
    npoints = 2
    nparams = 10
    from neuronunit.optimization import exhaustive_search
    import dask.bag as db
    # Build the full parameter grid, then materialise data transport
    # containers for the first two grid points via a dask bag.
    grid_points = exhaustive_search.create_grid(npoints=npoints,
                                                nparams=nparams)
    b0 = db.from_sequence(grid_points[0:2], npartitions=8)
    dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
    assert dtcpop is not None
    return dtcpop
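A short usage sketch for grid_points(); it assumes each returned data transport container exposes an attrs dictionary, as the tests earlier on this page do when they read dtc.attrs.

# Usage sketch: inspect the attribute dictionaries of the returned DTCs
# (assumes each DTC exposes `attrs`, as the tests above assume).
dtcpop = grid_points()
for dtc in dtcpop:
    print(dtc.attrs)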
Example #9
    def test_map_wrapper(self):
        from neuronunit.optimization import exhaustive_search
        from neuronunit.optimization.optimization_management import map_wrapper
        import dask.bag as db
        npoints = 2
        nparams = 3
        grid_points = exhaustive_search.create_grid(npoints=npoints,
                                                    nparams=nparams)
        sample = grid_points[0:2]
        b0 = db.from_sequence(sample, npartitions=8)
        dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
        self.assertIsNotNone(dtcpop)
        dtcpop_compare = map_wrapper(exhaustive_search.update_dtc_grid, sample)
        # map_wrapper and the dask.bag pipeline should produce DTCs that
        # agree attribute by attribute.
        for index, dtc in enumerate(dtcpop):
            for k, v in dtcpop_compare[index].attrs.items():
                self.assertEqual(dtc.attrs[k], v)
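The test above treats map_wrapper and dask.bag as interchangeable ways to apply update_dtc_grid over the sampled grid points. A minimal serial stand-in with the same call signature is sketched below; this is an assumption about the interface only, since the real neuronunit map_wrapper may distribute the work.

# Serial stand-in illustrating the contract the test relies on: apply
# `func` to every item and return the results as a list. The actual
# map_wrapper in neuronunit may parallelise this instead.
def map_wrapper_sketch(func, items):
    return [func(item) for item in items]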
Example #10
def grid_points():
    npoints = 2
    nparams = 10
    import os
    import pickle
    import dask.bag as db
    from neuronunit.optimization import exhaustive_search
    # The pickled test suites must already exist on disk.
    electro_path = 'pipe_tests.p'
    assert os.path.isfile(electro_path)
    with open(electro_path, 'rb') as f:
        electro_tests = pickle.load(f)
    grid_points = exhaustive_search.create_grid(npoints=npoints,
                                                nparams=nparams)
    b0 = db.from_sequence(grid_points[0:2], npartitions=8)
    dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
    assert dtcpop is not None
    return dtcpop
Example #11
def reduce_params(model_params, nparams):
    key_list = list(model_params.keys())
    reduced_key_list = key_list[0:nparams]
    subset = {k: model_params[k] for k in reduced_key_list}
    return subset
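A usage sketch for reduce_params with a toy dictionary standing in for model_params; the keys and ranges here are made up for illustration.

# Toy example: keep only the first two parameters of a made-up dictionary.
toy_params = {'a': (0.0, 1.0), 'b': (0.0, 2.0), 'c': (-1.0, 1.0)}
assert reduce_params(toy_params, 2) == {'a': (0.0, 1.0), 'b': (0.0, 2.0)}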
Example #12
    with open('grid_cell_results.p', 'wb') as f:
        pickle.dump(results, f)
    with open('iterator_state.p', 'wb') as f:
        pickle.dump([sub_pop, test, observation, cnt], f)
    cnt += 1
    print('done cell: ', cnt)
print('done all')

from neuronunit.optimization import get_neab
from neuronunit.optimization.model_parameters import model_params
from bluepyopt.deapext.optimisations import DEAPOptimisation
from neuronunit.optimization.optimization_management import write_opt_to_nml
from neuronunit.optimization import optimization_management
from neuronunit.optimization import optimization_management as om

key_list = list(model_params.keys())
reduced_key_list = key_list[0:nparams]
subset = {k: smaller[k] for k in reduced_key_list}
DO = DEAPOptimisation(error_criterion=test,
                      selection=str('selIBEA'),
                      provided_dict=model_params,
                      elite_size=3)
package = DO.run(offspring_size=MU,
                 max_ngen=6,
                 cp_frequency=1,
                 cp_filename=str(dic_key) + '.p')
pop, hof_py, pf, log, history, td_py, gen_vs_hof = package

with open('all_ga_cell.p', 'wb') as f:
    pickle.dump(package, f)
    pickle.dump(pipe, handle)
contents = pickle.load(open('ne_neuron_criteria.p', 'rb'))
pvis_criterion, inh_criterion = contents
electro_tests = []
contents[0][0].observation
for p in pipe:
    p_tests, p_observations = get_neab.get_neuron_criteria(p)
    electro_tests.append((p_tests, p_observations))
with open('pipe_tests.p', 'wb') as f:
    pickle.dump(electro_tests, f)
'''
MU = 6; NGEN = 6; CXPB = 0.9
USE_CACHED_GA = False
print(get_neab)
from neuronunit.optimization.model_parameters import model_params
provided_keys = list(model_params.keys())
USE_CACHED_GS = False
from bluepyopt.deapext.optimisations import DEAPOptimisation
npoints = 2
nparams = 10

from dask import distributed
#c = distributed.Client()
#NCORES = len(c.ncores().values())-2
for test, observation in electro_tests:
    DO = DEAPOptimisation(error_criterion=test, selection='selIBEA')
    DO.setnparams(nparams=nparams, provided_keys=provided_keys)
    pop, hof_py, log, history, td_py, gen_vs_hof = DO.run(offspring_size=MU,
                                                          max_ngen=NGEN,
                                                          cp_frequency=0,
                                                          cp_filename='ga_dumpnifext_50.p')

#with open('ga_dump_NLXCELL:100201.p','wb') as f:
#   pickle.dump([pop, log, history, hof_py, td_py],f)