Example 1
def fitness_from_points(points):
    '''Takes some points, builds a map and returns the fitness value
    '''
    mymap, _ = create_map(mp.feature_resolution, mp.domain)
    mymap, _ = niche_compete(points, mymap, mp.domain)
    fitness = np.nansum(mymap.fitness.flatten())
    return (fitness)
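A minimal usage sketch (not from the original source) of how fitness_from_points might be called, assuming the [genome, fitness, features] point layout that niche_compete expects in these examples; toy_fitness and toy_features are hypothetical stand-ins for mp.fitness_fun and mp.feature_fun.

import numpy as np

# Hypothetical stand-ins for the configured mp.fitness_fun / mp.feature_fun
def toy_fitness(x):
    return float(-np.sum((np.asarray(x) - 0.5) ** 2))

def toy_features(x):
    return [float(np.mean(x)), float(np.std(x))]

# Points follow the [genome, fitness, features] layout used throughout these examples
genomes = [np.random.uniform(0, 1, 10) for _ in range(25)]
points = [[g, toy_fitness(g), toy_features(g)] for g in genomes]

# total = fitness_from_points(points)  # requires mp.feature_resolution and mp.domain to be configured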
Example 2
def dotrue(file, n):
    # Rebuild the map, insert the first n points from file and return the total fitness
    points = getdata(file)[:n]
    mp.map = create_map(mp.feature_resolution, mp.domain)
    for point in points:
        mp.map.updatemap([point])
    total_fitness = np.nansum(mp.map.fitness.flatten())
    print(total_fitness)
    return total_fitness
Example 3
def pred_true_map(pred_map):
    # Re-evaluate every genome in the prediction map with the true fitness function
    truemap, _ = create_map(mp.feature_resolution, mp.domain)
    rows = np.arange(len(pred_map[0]))
    cols = np.arange(len(pred_map))
    for i in it.product(rows, cols):
        j = tuple(int(a) for a in i)
        truemap.fitness[j] = mp.fitness_fun(pred_map[j])
        truemap.genomes[j] = pred_map[j]
    return truemap
Example 4
def evalgenomes(genomes):
    # Score each genome with the true fitness function, filling consecutive cells of a 25x25 map
    truemap, _ = create_map(mp.feature_resolution, mp.domain)
    rows = np.arange(25)
    cols = np.arange(25)
    indexes = list(it.product(rows, cols))
    for count, g in enumerate(genomes):
        j = tuple(int(a) for a in indexes[count])
        truemap.fitness[j] = mp.fitness_fun(g)
        truemap.genomes[j] = g
    return truemap
Example 5
def pred_true_map(pred_map):
    '''Takes a prediction map as input and outputs a map containing those points and
    their true fitness values.
    '''
    truemap, _ = create_map(mp.feature_resolution, mp.domain)
    rows = np.arange(len(pred_map[0]))
    cols = np.arange(len(pred_map))
    for i in it.product(rows, cols):
        j = tuple(int(a) for a in i)
        truemap.fitness[j] = mp.fitness_fun(list(pred_map[j]))
        truemap.genomes[j] = pred_map[j]
    return truemap
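Since the loop above indexes pred_map with 2-D tuples and reads one genome per cell, the assumed input is a feature_resolution-shaped grid of genomes. A hedged, illustrative sketch of that input; the 25x25 shape and random genomes are placeholders only.

import numpy as np

# Assumed input: a feature_resolution-shaped grid holding one genome per niche
rows, cols = 25, 25
grid = np.empty((rows, cols), dtype=object)
for r in range(rows):
    for c in range(cols):
        grid[r, c] = np.random.uniform(0, 1, 10)

# truemap = pred_true_map(grid)                # re-scores every genome with mp.fitness_fun
# print(np.nansum(truemap.fitness.flatten()))  # total true fitness of the prediction map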
Example 6
def create_prediction_map(
    model,
    observed,
    UCB=False,
    means=True,
    bop=False,
    *args,
):
    '''create_prediction_map creates a prediction map by using the map made 
    from all the current best points and then using map-elites to illuminate it
    using the current acquisition function.

    Example: prediction_map = create_prediction_map( gp , points)

    Input:
        model    - [ GPy model ]                - the current posterior GP
        observed - [ n*[ [ x ],[ y ],[ f ] ] ]  - all evaluated points
        *args    - extra args

    Output:
    prediction_map  -   [ Map Class ]   - The best map with current data.

    Code Author: Paul Kent 
    Warwick University
    email: [email protected]
    Oct 2020; Last revision: 14-Oct-2020 
    '''
    #Make acquisition
    acq_fun = build_pytorch_acq_fun(model, UCBflag=UCB, meansflag=means)
    #seed map with precise evaluations
    prediction_map = create_map(mp.feature_resolution, mp.domain)
    prediction_map.initialisemap(observed)
    if bop:
        prediction_map.initialisemeans(observed)
        prediction_map.estimatemeans()
        #prediction_map.generate_niche_models(model)
        prediction_map.calcstdmeans(observed,
                                    model.mean_module.constant.item())
        acq_fun = build_bop_pred_acq_fun(model,
                                         prediction_map,
                                         UCBflag=UCB,
                                         meansflag=means)
    prediction_map = map_elites(mp.domain,
                                init_map=prediction_map,
                                feat_fun=mp.feature_fun,
                                fit_fun=acq_fun,
                                plot=False,
                                me_params=mp.PM_params)
    return (prediction_map)
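A hedged call sketch following the docstring's own example line; gp is assumed to be an already trained surrogate (e.g. the model returned by buildpymodel2 in the later examples) and observed the list of evaluated [x, fitness, features] triples, so the calls are shown commented out rather than as a runnable recipe.

# gp, training_time, hypers = buildpymodel2(observed, n_initial_samples)  # assumed helper, see Example 11
# prediction_map = create_prediction_map(gp, observed, UCB=False, means=True)
# print(np.nansum(prediction_map.fitness.flatten()))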
Example 7
def create_prediction_map(
    model,
    observed,
    UCB=False,
    means=True,
    *args,
):
    '''create_prediction_map creates a prediction map by using the map made 
    from all the current best points and then using map-elites to illuminate it
    using the current acquisition function.

    Example: prediction_map = create_prediction_map( gp , points)

    Input:
        model    - [ GPy model ]                - the current posterior GP
        observed - [ n*[ [ x ],[ y ],[ f ] ] ]  - all evaluated points
        *args    - extra args

    Output:
    prediction_map  -   [ Map Class ]   - The best map with current data.

    Code Author: Paul Kent 
    Warwick University
    email: [email protected]
    Oct 2020; Last revision: 14-Oct-2020 
    '''
    fdims = len(mp.feature_resolution)
    mins = mp.domain.feat_mins
    maxs = mp.domain.feat_maxs
    xdims = len(mp.example)
    #Make acquisition
    acq_fun = build_pytorch_acq_fun(model, UCBflag=UCB, meansflag=means)
    #seed map with precise evaluations
    prediction_map, _ = create_map(mp.feature_resolution, mp.domain)
    prediction_map, _ = niche_compete(points=observed,
                                      map=prediction_map,
                                      domain=mp.domain)
    prediction_map = map_elites(mp.domain,
                                init_map=prediction_map,
                                feat_fun=mp.feature_fun,
                                fit_fun=acq_fun,
                                plot=False,
                                me_params=mp.PM_params)
    return (prediction_map)
Example 8
def evaluate_points(points):
    truemap, _ = create_map(mp.feature_resolution, mp.domain)
    truemap, _ = niche_compete(points, truemap, mp.domain)
    fitness = np.nansum(truemap.fitness.flatten())
    print(fitness)
    return truemap, fitness
Example 9
#     sobolmap,fitness = getpredmap(initial_samples)
#     sobol_fit_list.append(fitness)
    
     
#     pickle.dump(sobol_fit_list, filehandler)


####
### Random Samples

def random_point(n, dims):
    '''Draw n uniform random points in [0, 1]^dims.'''
    x = []
    for i in range(n):
        x.append(np.random.uniform(0, 1, dims))
    return x

random_fit_list = []

for i in range(1):
    ss.initseed(i)
    x = random_point(10, 10)
    rand_points = [[xi, mp.fitness_fun(xi), mp.feature_fun(xi)] for xi in x]
    for point in rand_points:
        point[1] = np.nansum(point[1])
    filehandler = open('Results/Init-Test/Random/fitness.csv', 'wb')
    mp.map, edges = create_map(mp.feature_resolution, mp.domain)
    initialisemap(rand_points)
    randommap, fitness = getpredmap(rand_points)
    random_fit_list.append(fitness)

    pickle.dump(random_fit_list, filehandler)  # was sobol_fit_list, which is only defined in the commented-out block above
Example 10
def map_elites(domain,
               init_map=None,
               fit_fun=mp.domain.fit_fun,
               feat_fun=mp.domain.feat_fun,
               experiment=False,
               number_evals=mp.n_gens,
               plot=False,
               verbose=False):

    num_cores = multiprocessing.cpu_count()
    pool = multiprocessing.Pool(num_cores)

    def sample(n,
               sample_map=None,
               init=False,
               fitness_fun=mp.domain.fit_fun,
               feature_fun=mp.domain.feat_fun,
               plot=True,
               sample_pool=pool):
        '''Sample performs the calls to the objective and feature functions and
        returns them in the correct format for Map-Elites 
        '''
        if sample_map is None:
            sample_map = mp.map
        population = np.array(
            create_children(batchsize=10**4,
                            map=sample_map,
                            initialise=init,
                            plot=plot))
        fitness = np.array(parallel_eval(fitness_fun, population, sample_pool))
        #print(fitness)
        behaviour = []
        # for pop in population:
        #     behaviour.append(eval( 'feature_fun' )( pop ))
        behaviour = np.array(
            parallel_eval(feature_fun, population, sample_pool))
        # behaviour = np.array( behaviour )
        sampled_points = [[population[i], fitness[i], behaviour[i]]
                          for i in range(n)]
        return (sampled_points)


#######################################################################
################# Initialising ########################################
#######################################################################

    if init_map is None:
        experiment = True
        if mp.map is None:
            mp.map, edges = create_map(mp.feature_resolution, mp.example)

    mymap = init_map

    init_n = mp.n_children  # init_n : initial population size

    sampled_points = sample(n=init_n,
                            sample_map=mymap,
                            init=True,
                            fitness_fun=fit_fun,
                            plot=plot,
                            sample_pool=pool)

    mymap, improvement_percentage = niche_compete(sampled_points, mymap)

    #######################################################################
    ################### Map-Elites #######################################
    #######################################################################
    generation = 1
    terminate = False
    popsize = mp.n_children

    while not terminate:
        if experiment:
            mymap = None
        new_samples = sample(n=popsize,
                             sample_map=mymap,
                             fitness_fun=fit_fun,
                             plot=plot)

        sampled_points.extend(new_samples)
        mymap, improvement_percentage = niche_compete(points=new_samples,
                                                      map=mymap)
        if verbose:
            print('generation ', generation, ' accepted ',
                  improvement_percentage, ' % of points')
        generation += 1
        if generation > number_evals:
            terminate = True

    sampled = [s[0] for s in sampled_points]
    # x1 = [x[0] for x in sampled]
    # x2 = [x[1] for x in sampled]
    # plt.scatter(x1,x2)
    # plt.show()

    if experiment:
        return ()

    return (mymap)
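A hedged call sketch for map_elites; the keyword set differs between the versions shown here (Examples 6 and 7 also pass me_params=mp.PM_params), so the arguments below are illustrative rather than definitive.

# init_map, _ = create_map(mp.feature_resolution, mp.domain)
# final_map = map_elites(mp.domain,
#                        init_map=init_map,
#                        fit_fun=mp.fitness_fun,
#                        feat_fun=mp.feature_fun,
#                        plot=False,
#                        verbose=True)
# print(np.nansum(final_map.fitness.flatten()))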
Example 11
def bop_elites(max_evals=me,
               unknown_features=False,
               preload=None,
               n_initial_samples=20):
    '''bop_elites (THIS IS V0.9 RUNNING ON MULTIDIMENSIONAL DETERMINISTIC FEATURES )

    bop-elites performs Bayesian Optimisation of Phenotypic Elites by means of the 
    ensemble expected improvement algorithm. On domains where the features are 
    deterministic and calculable this simplifies to the classic expected improvement
    algorithm.

    Example: output_predictions = bop_elites( max_evals = 500 )  

    Inputs (arguments): 
        max_evals           - [ Integer ] - Evaluation budget 
        unknown_features    - [ Boolean ] - Flag to activate Ensemble EI
        preload             - [ Array   ] - Preloaded evaluations
        n_initial_samples   - [ Integer ] - Number of samples to initialise

    Inputs (from config file): 
        mp.domain           - [ Domain Class ]   - Domain specific information
        mp.map              - [ Map Class ]      - Global map (for map-elites)
        mp                  - [ Script ]         - Contains many variables

    Outputs
        map                 - [ Map Class ]      - Final optimised map
        objvallist          - [ List ]           - Objective values of points in map

    Code Author: Paul Kent 
    Warwick University
    email: [email protected]
    Oct 2020; Last revision: 20-01-2021
    '''

    ss.myseed = seed
    ss.initseed(ss.myseed)
    print('starting experiment with seed: ', seed, ' niche_size: ', niche_size)
    ## Initialise main map
    mp.feature_resolution = [niche_size, niche_size]
    mp.map = create_map(mp.feature_resolution, mp.domain, GMLM=GMLM)

    ## Set up for Data-Collection
    DC_training_times = []
    DC_niches_filled = []

    ## Create a Readme file and the directory for storage
    mydir, logger = readme(domain_flag, n_initial_samples, max_evals, seed,
                           mp.penalty, info, optn)
    printinfo('intro',
              max_evals,
              n_initial_samples,
              domain_flag,
              seed,
              logger=logger)  ## pretty print to terminal

    ## Set the Fitness and feature functions from config file
    ofun = mp.fitness_fun
    ffun = mp.feature_fun

    #### Initial sampling
    if preload is None:
        observations = initial_sampling(n_initial_samples)
        printinfo(type='sampling', logger=logger)
    else:
        directory = preload  #Must be the parent directory
        pointfile = open(directory + 'points.b', 'rb')
        data = pickle.load(pointfile)
        observations = data
        nanfile = open(directory + 'nanpoints.b', 'rb')
        nandata = pickle.load(nanfile)
        mp.map.nanmap = nandata
        print(str(len(observations)) + ' samples loaded')
        n_initial_samples = len(observations)

    ## Set counters
    n_samples = n_initial_samples
    itercount = 1
    mp.map.initialisemap(observations)  # Fit initial samples into map
    mp.map.initialisemeans(observations)
    mp.map.estimatemeans()
    mp.nancounter = 0
    mp.observations = observations

    #mp.map.updatemeans(observations[-1])

    DC_niches_filled.append(count_niches_filled())  #Keep track of niches

    ## Build GP for fitness
    printinfo('train_gp', n_samples, logger=logger)
    retrain = True  #Initial value to train new GP hyperparameters
    omodel, training_time, hypers = buildpymodel2(observations,
                                                  n_initial_samples)

    tic = time.time()
    #mp.map.generate_niche_models(omodel)
    print('models generated in ', time.time() - tic)
    DC_training_times.append(training_time)  #Keep track of training times

    mp.map.calcstdmeans(observations,
                        omodel.mean_module.constant.item())  # Update means

    if unknown_features:
        #TODO - reimplement unknown features
        # # obs_f  = np.reshape( [ feat for feat in mp.map. flatten() ],[ -1 , xdims ] )
        # # fmodel = buildmodel(obs_x,obs_fy)
        pass

    # Calculate initial solution value
    objvallist = []
    objval = calc_objval()
    objvallist.append(objval)
    printinfo('initscore', objval, logger=logger)
    printinfo('line', logger=logger)

    ## Main Acquisition Loop
    for i in range(max_evals - n_initial_samples):

        printinfo('acquisition', itercount, n_samples, logger=logger)

        ## Perform BOP-Elites
        nextpoint, _, candidatelist = DoContinuousBOPELITES(
            omodel, optn, acq='BOP')  #This performs the actual BOP-Elites
        triallist = np.copy(candidatelist)
        valuefound = False
        while len(triallist) > 0:
            nextpoint = triallist[-1]
            #print(nextpoint)
            newobs = new_sample(ofun, ffun, nextpoint)
            if newobs[-1] == False:
                triallist = []
                observations.append(newobs[:-1])
                valuefound = True
            else:
                #obslist.append(newobs[:-1])
                print(mp.nancounter)
                mp.saved_points = np.delete(mp.saved_points, -1)

            triallist = triallist[:-1]

        if not valuefound:
            print(observations[0][0])
            print(candidatelist[-1])
            mp.map.store_nan(candidatelist[-1])
            savenans(mp.map.nanmap, mydir)
            newobs = expand_nan(ofun, ffun, candidatelist[-1])
            observations.append(newobs[:-1])

        printinfo('nans', mp.nancounter, logger=logger)
        printinfo('new point', newobs[0], observations, logger=logger)

        ## Update GP
        printinfo('train_gp', itercount, logger=logger)

        fantasies = []  # ensure defined even when no valid new observation was found this iteration
        if valuefound:
            omodel, training_time, hypers = buildpymodel2(
                observations, n_initial_samples)

            fantasies = mp.map.fantasize_nans()

            # else:
            #     observations_plus = observations
            #, training_time , hypers = buildpymodel2( observations_plus ,
            #                                      n_initial_samples)#,
            #*hypers ,
            #retrain = retrain)
            retrain = not retrain  # Toggle flag, i.e. don't retrain hyperparameters every iteration

            ## Update the main map of solutions.
            mp.map.updatemap(observations)
            mp.map.updatepointmean([observations[-1]])
            mp.map.updatemeans(observations[-1], omodel, mp.feature_resolution)
            #mp.map.generate_niche_models(omodel)

        if len(fantasies) > 0:
            #obsplus = np.vstack([observations , fantasies])
            #print(obsplus[:-3])
            observations_plus = np.vstack([observations, np.array(fantasies)])
            omodel = update_with_nans(observations_plus, omodel)
            #omodel , training_time , hypers = buildpymodel2( obsplus, n_initial_samples)
        #mp.map.generate_niche_models(omodel)

        mp.map.calcstdmeans(observations,
                            omodel.mean_module.constant.item())  # Update means

        #Data_Collection
        nniches = count_niches_filled()
        DC_niches_filled.append(nniches)
        DC_training_times.append(training_time)
        objval = calc_objval()
        objvallist.append(objval)
        printinfo('current_value', objval, logger=logger)
        printinfo('line', logger=logger)
        savepoints(observations, mydir)  #Save current observations

        itercount += 1
        n_samples += 1

    printinfo('final_value', str(np.nansum(mp.map.fitness)), logger=logger)

    print(f'\n Illuminating Final Prediction Map')

    pred_map, pred_map_value = getpredmap(omodel, observations, logger)

    save_data(DC_training_times, DC_niches_filled, objvallist, pred_map_value,
              mp.map.fitness, pred_map.genomes, seed, observations, mydir)

    return (mp.map, objvallist)
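A hedged call sketch matching the docstring above; bop_elites draws the rest of its configuration from the mp config module, so only the documented keyword arguments are shown, and the function returns the tuple (mp.map, objvallist).

# final_map, objvals = bop_elites(max_evals=500,
#                                 unknown_features=False,
#                                 preload=None,
#                                 n_initial_samples=20)
# print(np.nansum(final_map.fitness.flatten()))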
Example 12
from sail_lib import *
from create_prediction_map import create_prediction_map
from mapelites.mapelites import map_elites
from mapelites.createmap import create_map


seed = mp.seed
np.random.seed(seed)

# Initialisation parameters
max_evals = 30
preload = None
n_initial_samples = 20

initial_samples = initial_sampling(n_initial_samples)
mp.map, edges = create_map(mp.feature_resolution, mp.example)
initialisemap(initial_samples)

##########

mp.n_children = 10
solution_value = 0

def calculate_final_score(mymap):
    '''Takes the predictive map and calculates the predicted score by evaluating
    each stored genome on the real fitness function.
    '''

    xdims = len(mp.domain.valid_ranges)
    genomes = mymap.genomes[~np.isnan(mymap.fitness)].flatten()
    truevals = [mp.fitness_fun(x) for x in np.reshape(genomes, [-1, xdims])]
Example 13
def bop_elites(max_evals=500,
               unknown_features=False,
               preload=None,
               n_initial_samples=40):
    '''bop_elites (THIS IS V0.9 RUNNING ON MULTIDIMENSIONAL DETERMINISTIC FEATURES )

    bop-elites performs Bayesian Optimisation of Phenotypic Elites by means of the 
    ensemble expected improvement algorithm. On domains where the features are 
    deterministic and calculable this simplifies to the classic expected improvement
    algorithm.

    Example: output_predictions = bop_elites( max_evals = 500 )  

    Inputs (arguments): 
        max_evals           - [ Integer ] - Evaluation budget 
        unknown_features    - [ Boolean ] - Flag to activate Ensemble EI
        preload             - [ Array   ] - Preloaded evaluations
        n_initial_samples   - [ Integer ] - Number of samples to initialise

    Inputs (from config file): 
        mp.domain           - [ Domain Class ]   - Domain specific information
        mp.map              - [ Map Class ]      - Global map (for map-elites)
        mp                  - [ Script ]         - Contains many variables

    Outputs
        map                 - [ Map Class ]      - Final optimised map
        objvallist          - [ List ]           - Objective values of points in map

    Code Author: Paul Kent 
    Warwick University
    email: [email protected]
    Oct 2020; Last revision: 20-01-2021
    '''

    ## Initialise main map
    mp.map = create_map(mp.feature_resolution, mp.domain)

    ## Set up for Data-Collection
    DC_training_times = []
    DC_niches_filled = []

    ## Create a Readme file and the directory for storage
    mydir, logger = readme(domain_flag, n_initial_samples, max_evals, seed,
                           mp.penalty, info)
    printinfo('intro',
              max_evals,
              n_initial_samples,
              domain_flag,
              seed,
              logger=logger)  ## pretty print to terminal

    ## Set the Fitness and feature functions from config file
    ofun = mp.fitness_fun
    ffun = mp.feature_fun

    ## Set counters
    n_samples = n_initial_samples
    itercount = 1

    ### Initial sampling
    if preload is None:
        observations = initial_sampling(n_initial_samples)
        printinfo(type='sampling', logger=logger)
    else:
        directory = preload
        filehandler = open(directory, 'rb')
        data = pickle.load(filehandler)
        observations = data
        print(str(len(observations)) + ' samples loaded')
        n_initial_samples = len(observations)

    # mp.map.initialisemap(observations ) # Fit initial samples into map
    # mp.map.initialisemeans(observations)
    # mp.map.estimatemeans()
    # mp.map.updatemeans()

    # DC_niches_filled.append(count_niches_filled() ) #Keep track of niches

    # ## Build GP for fitness
    # printinfo('train_gp' , n_samples, logger = logger)
    # retrain = True #Initial value to train new GP hyperparameters
    # omodel , training_time , hypers = buildpymodel2( observations , n_initial_samples )

    # tic = time.time()
    # mp.map.generate_niche_models(omodel)
    # print( 'models generated in ', time.time()-tic)
    # DC_training_times.append( training_time ) #Keep track of training times

    # if unknown_features:
    #     #TODO - reimplement unknown features
    #     # # obs_f  = np.reshape( [ feat for feat in mp.map. flatten() ],[ -1 , xdims ] )
    #     # # fmodel = buildmodel(obs_x,obs_fy)
    #     pass

    # # Calculate initial solution value
    # objvallist = [] ; objval = calc_objval() ; objvallist.append( objval )
    # printinfo( 'initscore' , objval , logger = logger) ; printinfo( 'line', logger = logger )

    # ## Main Acquisition Loop
    # for i in range(max_evals-n_initial_samples):

    #     printinfo( 'acquisition' , itercount , n_samples , logger = logger )

    #     ## Perform BOP-Elites
    #     nextpoint, _ = DoContinuousBOPELITES(omodel, 20)  #This performs the actual BOP-Elites
    nextpoint = [
        0.35589097848392837, 0.12011312106715394, 0.07221528184130153,
        0.6026451701847786, 0.7502119783418484, 9.356622763723033e-07,
        0.41690933761766574, 0.18620351698501855, 0.947183335541234,
        0.6021678002258849
    ]
    observations.append(new_sample(ofun, ffun, nextpoint))
    printinfo('new point', nextpoint, observations, logger=logger)

    #     ## Update GP
    #     printinfo('train_gp' , itercount, logger = logger)
    #     omodel , training_time , hypers = buildpymodel2( observations ,
    #                                                     n_initial_samples)#,
    #                                                     #*hypers ,
    #                                                     #retrain = retrain)
    #     retrain = not retrain #Toggle flag ie- Don't retrain Hyper-parameters every iteration

    #     ## Update the main map of solutions.
    #     mp.map.updatemap( observations )
    #     mp.map.updatepointmean([observations[-1]])
    #     mp.map.estimatemeans()
    #     mp.map.updatemeans(  )
    #     mp.map.generate_niche_models(omodel)

    #     #Data_Collection
    #     DC_niches_filled.append( count_niches_filled( ) )
    #     DC_training_times.append( training_time )
    #     objval = calc_objval()
    #     objvallist.append( objval )
    #     printinfo('current_value' , objval , logger = logger) ; printinfo( 'line', logger = logger)

    #     savepoints(observations , mydir ) #Save current observations

    #     itercount += 1 ; n_samples += 1

    # printinfo('final_value' , str( np.nansum( mp.map.fitness ) ), logger = logger )

    # print(f'\n Illuminating Final Prediction Map')

    # pred_map, pred_map_value = getpredmap(omodel, observations)

    # save_data(  DC_training_times,
    #             DC_niches_filled,
    #             objvallist ,
    #             pred_map_value ,
    #             mp.map.fitness ,
    #             pred_map.genomes ,
    #             seed,
    #             observations,
    #             mydir )

    return (mp.map)