Example 1
    def test_grid_dimensions(self):
        from neuronunit.optimization.model_parameters import model_params
        provided_keys = list(model_params.keys())
        USE_CACHED_GS = False
        from neuronunit.optimization import exhaustive_search
        from neuronunit.optimization.optimization_management import map_wrapper
        import dask.bag as db
        npoints = 2
        nparams = 3
        for i in range(1,10):
            for j in range(1,10):
                grid_points = exhaustive_search.create_grid(npoints=i, nparams=j)
                b0 = db.from_sequence(grid_points[0:2], npartitions=8)
                dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
                self.assertEqual(i * j, len(dtcpop))
                self.assertNotEqual(dtcpop, None)
                dtcpop_compare = map_wrapper(exhaustive_search.update_dtc_grid, grid_points[0:2])
                self.assertNotEqual(dtcpop_compare, None)
                self.assertEqual(len(dtcpop_compare), len(dtcpop))
                for i, j in enumerate(dtcpop):
                    for k, v in dtcpop_compare[i].attrs.items():
                        print(k, v, i, j)
                        self.assertEqual(j.attrs[k], v)

        return True
Example 2
    def test_grid_dimensions(self):
        from neuronunit.optimization.model_parameters import model_params
        provided_keys = list(model_params.keys())
        USE_CACHED_GS = False
        from neuronunit.optimization import exhaustive_search
        from neuronunit.optimization.optimization_management import map_wrapper
        import dask.bag as db
        npoints = 2
        nparams = 3
        for i in range(1, 10):
            for j in range(1, 10):
                grid_points = exhaustive_search.create_grid(npoints=i,
                                                            nparams=j)
                b0 = db.from_sequence(grid_points[0:2], npartitions=8)
                dtcpop = list(
                    db.map(exhaustive_search.update_dtc_grid, b0).compute())
                self.assertEqual(i * j, len(dtcpop))
                self.assertNotEqual(dtcpop, None)
                dtcpop_compare = map_wrapper(exhaustive_search.update_dtc_grid,
                                             grid_points[0:2])
                self.assertNotEqual(dtcpop_compare, None)
                self.assertEqual(len(dtcpop_compare), len(dtcpop))
                for i, j in enumerate(dtcpop):
                    for k, v in dtcpop_compare[i].attrs.items():
                        print(k, v, i, j)
                        self.assertEqual(j.attrs[k], v)

        return True
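
Both tests above fan exhaustive_search.update_dtc_grid out over grid points with dask.bag and then cross-check the result against the serial map_wrapper. Below is a minimal standalone sketch of just the dask.bag pattern, with a hypothetical square function standing in for update_dtc_grid (no neuronunit dependency):

import dask.bag as db

def square(x):
    # Stand-in for exhaustive_search.update_dtc_grid.
    return x * x

bag = db.from_sequence([1, 2, 3, 4], npartitions=2)
results = list(db.map(square, bag).compute())
assert results == [1, 4, 9, 16]
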
Example 3
def run_ga(model_params, nparams):
    grid_points = exhaustive_search.create_grid(npoints=1, nparams=nparams)
    td = list(grid_points[0].keys())
    subset = reduce_params(model_params, nparams)
    DO = DEAPOptimisation(error_criterion=electro_tests[0][0],
                          selection=str('selNSGA'),
                          provided_dict=subset,
                          elite_size=3)
    # NOTE: npoints is not defined in this snippet; it is presumably a
    # module-level value in the source file.
    MU = int(np.floor(npoints / 2.0))
    max_ngen = int(np.floor(nparams / 2.0))
    # Make the initial samples uniform on the grid points:
    # 3 is the minimum number of points needed to define this hypercube.
    # Create a lattice using the exhaustive search grid.
    # TODO: make a movie.
    assert (MU * max_ngen) < (npoints * nparams)
    ga_out = DO.run(offspring_size=MU,
                    max_ngen=6,
                    cp_frequency=1,
                    cp_filename=str('regular.p'))
    # pop, hof_py, pf, log, history, td_py, gen_vs_hof = ga_out
    with open('all_ga_cell.p', 'wb') as f:
        pickle.dump(ga_out, f)
    return ga_out
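
A quick numeric check of the budget constraint asserted in run_ga above, using hypothetical values for npoints and nparams (npoints is not defined inside the snippet itself):

import numpy as np

npoints, nparams = 10, 5                 # hypothetical values for illustration
MU = int(np.floor(npoints / 2.0))        # 5 offspring per generation
max_ngen = int(np.floor(nparams / 2.0))  # 2 generations
# The GA is expected to evaluate fewer models than the corresponding grid:
assert (MU * max_ngen) < (npoints * nparams)  # 10 < 50
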
Example 4
def grid_points():
    npoints = 2
    nparams = 10
    from neuronunit.optimization.model_parameters import model_params
    provided_keys = list(model_params.keys())
    USE_CACHED_GS = False
    from neuronunit.optimization import exhaustive_search
    grid_points = exhaustive_search.create_grid(npoints=npoints, nparams=nparams)
    import dask.bag as db
    b0 = db.from_sequence(grid_points[0:2], npartitions=8)
    dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
    assert dtcpop is not None
    return dtcpop
Example 5
def grid_points():
    npoints = 2
    nparams = 10
    from neuronunit.optimization.model_parameters import model_params
    provided_keys = list(model_params.keys())
    USE_CACHED_GS = False
    from neuronunit.optimization import exhaustive_search
    grid_points = exhaustive_search.create_grid(npoints=npoints,
                                                nparams=nparams)
    import dask.bag as db
    b0 = db.from_sequence(grid_points[0:2], npartitions=8)
    dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
    assert dtcpop is not None
    return dtcpop
Example 6
    def test_map_wrapper(self):
        npoints = 2
        nparams = 3
        from neuronunit.optimization.model_parameters import model_params
        provided_keys = list(model_params.keys())
        USE_CACHED_GS = False
        from neuronunit.optimization import exhaustive_search
        from neuronunit.optimization.optimization_management import map_wrapper
        import dask.bag as db
        grid_points = exhaustive_search.create_grid(npoints=npoints, nparams=nparams)
        b0 = db.from_sequence(grid_points[0:2], npartitions=8)
        dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
        assert dtcpop is not None
        dtcpop_compare = map_wrapper(exhaustive_search.update_dtc_grid, grid_points[0:2])
        # The serial map_wrapper result should match the dask.bag result element-wise.
        for i, j in enumerate(dtcpop):
            for k, v in dtcpop_compare[i].attrs.items():
                print(k, v, i, j)
                self.assertEqual(j.attrs[k], v)
        return True
Example 7
def grid_points():
    npoints = 2
    nparams = 10
    from neuronunit.optimization.model_parameters import model_params
    provided_keys = list(model_params.keys())
    USE_CACHED_GS = False
    electro_path = 'pipe_tests.p'
    import os
    import pickle
    assert os.path.isfile(electro_path)
    with open(electro_path, 'rb') as f:
        electro_tests = pickle.load(f)
    from neuronunit.optimization import exhaustive_search
    grid_points = exhaustive_search.create_grid(npoints=npoints,
                                                nparams=nparams)
    import dask.bag as db
    b0 = db.from_sequence(grid_points[0:2], npartitions=8)
    dtcpop = list(db.map(exhaustive_search.update_dtc_grid, b0).compute())
    assert dtcpop is not None
    return dtcpop
Example 8
    def grid_sample_init(self, nparams):
        from neuronunit.optimization import exhaustive_search as es
        npoints = self.offspring_size**(1.0 / len(list(self.params)))
        npoints = np.ceil(npoints)
        nparams = len(self.params)
        provided_keys = list(self.params.keys())
        dic_grid, _ = es.create_grid(npoints=npoints,
                                     provided_keys=self.params)
        delta = int(np.abs(len(dic_grid) - (npoints**len(list(self.params)))))
        pop = []

        for dg in dic_grid:
            temp = list(dg.values())
            pop.append(temp)

        for d in range(0, delta):
            impute = []
            for i in range(0, len(pop[0])):
                impute.append(np.mean([p[i] for p in pop]))
            pop.append(impute)
        print(len(pop), npoints**len(list(self.params)))
        assert len(pop) == int(npoints**len(list(self.params)))
        return pop
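
The per-dimension resolution in grid_sample_init inverts the relation that offspring_size is roughly npoints ** nparams; a small sketch of that arithmetic with hypothetical numbers:

import numpy as np

offspring_size, n_free_params = 10, 3    # hypothetical values for illustration
npoints = int(np.ceil(offspring_size ** (1.0 / n_free_params)))  # ceil(2.154...) = 3
assert npoints == 3
# The resulting lattice then holds npoints ** n_free_params candidate points:
assert npoints ** n_free_params == 27
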
Example 9
def build_chunk_grid(npoints, nparams):
    grid_points = exhaustive_search.create_grid(npoints=npoints,
                                                nparams=nparams)
    tds = [list(g.keys()) for g in grid_points]
    td = tds[0]

    pops = []
    for g in grid_points:
        pre_pop = list(g.values())
        pops.extend(pre_pop)
        pop = WSListIndividual(pops)

    # Divide the population into chunks that reflect the number of CPUs.
    # NOTE: npartitions is not defined in this snippet; it is presumably set
    # at module level in the source file.
    if len(pops) % npartitions != 1:
        pops_ = chunks(pops, npartitions)
    else:
        pops_ = chunks(pops, npartitions - 2)
    try:
        assert pops_[0] != pops_[1]
    except:
        import pdb
        pdb.set_trace()
    return pops_, td
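
The modulo branch in build_chunk_grid appears to avoid a degenerate final chunk of length one; a standalone sketch with a local chunking helper (hypothetical, but mirroring the chunks helper shown in the next example):

def chunks(seq, n):
    # Split seq into consecutive slices of length n (the last may be shorter).
    return [seq[i:i + n] for i in range(0, len(seq), n)]

npartitions = 8
pops = list(range(17))                 # 17 % 8 == 1, so chunks(pops, 8) would end with [16]
pops_ = chunks(pops, npartitions - 2)  # chunk size 6 instead
assert all(len(c) > 1 for c in pops_)  # chunk lengths are 6, 6, 5
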
Example 10
ca1_pyr = {'nlex_id': '830368389'}

pipe = [fi_basket, pvis_cortex, olf_mitral, ca1_pyr, purkinje]


class WSListIndividual(list):
    """Individual consisting of list with weighted sum field"""
    def __init__(self, *args, **kwargs):
        """Constructor"""
        self.rheobase = None
        super(WSListIndividual, self).__init__(*args, **kwargs)


nparams = 2

grid_points = exhaustive_search.create_grid(npoints=5, nparams=nparams)
tds = [list(g.keys()) for g in grid_points]
td = tds[0]

pops = []
for g in grid_points:
    pre_pop = list(g.values())
    pop = [WSListIndividual(pre_pop)]
    pops.extend(pop)


def chunks(l, n):
    # For item i in a range that is a length of l,
    ch = []
    for i in range(0, len(l), n):
        # Create an index range for l of n items:
        ch.append(l[i:i + n])
    return ch
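
A minimal check of the chunks helper, with illustrative values only:

example = list(range(7))
assert chunks(example, 3) == [[0, 1, 2], [3, 4, 5], [6]]
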
Example 11
def make_report(grid_results, ga_out, nparams, pop=None):
    from neuronunit.optimization.exhaustive_search import create_grid
    grid_points = create_grid(npoints=2, nparams=nparams)
    td = list(grid_points[0][0].keys())

    reports = {}
    reports[nparams] = {}

    mini = min_max(grid_results)[0][1]
    maxi = min_max(grid_results)[1][1]
    if pop is not None:
        miniga = min_max(pop)[0][1]
    else:
        miniga = min_max(ga_out)[0][1]

    reports[nparams]['miniga'] = miniga
    reports[nparams]['minigrid'] = mini
    quantize_distance = list(np.linspace(mini, maxi, 21))
    success = bool(miniga < quantize_distance[2])
    better = bool(miniga < quantize_distance[0])

    print('Report: ')
    print('did it work? {0}; was it better? {1}'.format(success, better))


    reports[nparams]['success'] = success
    reports[nparams]['better'] = better
    dtc_ga = min_max(ga_out)[0][0]
    attrs_grid = min_max(grid_results)[0][0]
    attrs_ga = min_max(ga_out)[0][0]
    reports[nparams]['attrs_ga'] = attrs_ga
    reports[nparams]['attrs_grid'] = attrs_grid



    reports[nparams]['p_dist'] = param_distance(attrs_ga, attrs_grid, td)
    # NOTE: flagged as a mistake in the source; dtc_grid is taken from ga_out
    # here, although it presumably should come from grid_results.
    dtc_grid = dtc_ga = min_max(ga_out)[0][2]
    dom_grid, dom_ga = error_domination(dtc_ga,dtc_grid)
    reports[nparams]['vind_domination'] = False
    # Was there vindicating domination in the grid search but not the GA?
    if dom_grid and not dom_ga:
        reports[nparams]['vind_domination'] = True
    elif not dom_grid and not dom_ga:
        reports[nparams]['vind_domination'] = True
    # Was there incriminating domination in the GA but not the grid,
    # or in both the GA and the grid?
    elif dom_grid and dom_ga:
        reports[nparams]['inc_domination'] = False
    elif not dom_grid and dom_ga:
        reports[nparams]['inc_domination'] = False


    #reports[nparams]['success'] = bool(miniga < quantize_distance[2])
    dtc_ga = min_max(ga_out)[0][0]
    attrs_grid = min_max(grid_results)[0][0]
    attrs_ga = min_max(ga_out)[0][0]

    grid_points = create_grid(npoints=1, nparams=nparams)  # td = list(grid_points[0].keys())
    td = list(grid_points[0][0].keys())

    reports[nparams]['p_dist'] = param_distance(attrs_ga, attrs_grid, td)
    # NOTE: as above, dtc_grid is again taken from ga_out rather than grid_results.
    dtc_grid = dtc_ga = min_max(ga_out)[0][2]
    dom_grid, dom_ga = error_domination(dtc_ga,dtc_grid)
    reports[nparams]['vind_domination'] = False
    # Was there vindicating domination in the grid search but not the GA?
    if dom_grid and not dom_ga:
        reports[nparams]['vind_domination'] = True
    elif not dom_grid and not dom_ga:
        reports[nparams]['vind_domination'] = True
    # Was there incriminating domination in the GA but not the grid,
    # or in both the GA and the grid?
    elif dom_grid and dom_ga:
        reports[nparams]['inc_domination'] = False
    elif not dom_grid and dom_ga:
        reports[nparams]['inc_domination'] = False


    with open('reports.p', 'wb') as f:
        pickle.dump(reports, f)
    return reports
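
The success and better flags above quantise the grid's error range into 20 equal steps and ask whether the GA minimum falls within the lowest 10% of that range, or below the grid minimum itself; a small numeric illustration with hypothetical error values:

import numpy as np

mini, maxi, miniga = 1.0, 21.0, 2.5                    # hypothetical error values
quantize_distance = list(np.linspace(mini, maxi, 21))  # 1.0, 2.0, ..., 21.0
success = bool(miniga < quantize_distance[2])          # 2.5 < 3.0 -> True
better = bool(miniga < quantize_distance[0])           # 2.5 < 1.0 -> False
assert success and not better
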
Example 12
    pdb.set_trace()
    ga_out = run_ga(model_params, nparams)
    miniga = min_max(ga_out[0])[0][1]
    mini = min_max(grid_results)[0][1]
    maxi = min_max(grid_results)[1][1]
    quantize_distance = list(np.linspace(mini, maxi, 21))
    worked = bool(miniga < quantize_distance[2])
    print('Report: ')
    print('did it work? {0}'.format(worked))
    reports[nparams] = {}
    reports[nparams]['success'] = bool(miniga < quantize_distance[2])
    dtc_ga = min_max(ga_out[0])[0][0]
    attrs_grid = min_max(grid_results)[0][0]
    attrs_ga = min_max(ga_out[0])[0][0]

    grid_points = exhaustive_search.create_grid(
        npoints=1, nparams=nparams)  #td = list(grid_points[0].keys())
    td = list(grid_points[0].keys())

    reports[nparams]['p_dist'] = param_distance(attrs_ga, attrs_grid, td)
    dtc_grid = dtc_ga = min_max(ga_out[0])[0][2]
    dom_grid, dom_ga = error_domination(dtc_ga, dtc_grid)

    # Was there vindicating domination in the grid search but not the GA?
    if dom_grid and not dom_ga:
        reports[nparams]['vind_domination'] = True
    elif not dom_grid and not dom_ga:
        reports[nparams]['vind_domination'] = True
    # Was there incriminating domination in the GA but not the grid,
    # or in both the GA and the grid?
    elif dom_grid and dom_ga:
        reports[nparams]['inc_domination'] = False
    elif not dom_grid and dom_ga: