Code example #1
File: surrogatemodel.py  Project: FriedSock/MLO-1
class ProperSurrogateModel(SurrogateModel):

    def __init__(self, fitness, configuration, controller):
        super(ProperSurrogateModel, self).__init__(fitness, configuration,
                                                   controller)

        if configuration.classifier == 'SupportVectorMachine':
            self.classifier = SupportVectorMachineClassifier()
        else:
            logging.error('Classifier type {} not found'.format(
                configuration.classifier))

        if configuration.regressor == 'GaussianProcess':
            self.regressor = GaussianProcessRegressor(controller)
        else:
            logging.error('Regressor type {} not found'.format(
                configuration.regressor))

    def train(self, pop):
        dimensions = self.fitness.dimensions
        return self.classifier.train(pop) and self.regressor.train(
            pop, self.configuration, dimensions)

    def add_training_instance(self, part, code, fitness):
        self.classifier.add_training_instance(part, code)
        self.regressor.add_training_instance(part, fitness)
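
A minimal usage sketch for code example #1 follows; the import path, the SimpleNamespace-based configuration, and the fitness/controller stand-ins are assumptions for illustration, not part of the project.

# Hypothetical usage of ProperSurrogateModel as defined in code example #1.
# The module path and all stand-in objects below are assumptions.
from types import SimpleNamespace

from surrogatemodel import ProperSurrogateModel  # assumed import path

configuration = SimpleNamespace(classifier='SupportVectorMachine',
                                regressor='GaussianProcess')
controller = None                          # placeholder; the project passes a real controller
fitness = SimpleNamespace(dimensions=2)    # train() only reads fitness.dimensions here

model = ProperSurrogateModel(fitness, configuration, controller)
model.add_training_instance([0.1, 0.2], code=0, fitness=[1.5])
trained = model.train(pop=[[0.1, 0.2]])    # truthy only if both sub-models train successfully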
Code example #2
File: surrogatemodel.py  Project: FriedSock/MLO-1
    def __init__(self, fitness, configuration, controller):
        super(ProperSurrogateModel, self).__init__(fitness, configuration,
                                                   controller)

        if configuration.classifier == 'SupportVectorMachine':
            self.classifier = SupportVectorMachineClassifier()
        else:
            logging.error('Classifier type {} not found'.format(
                configuration.classifier))

        if configuration.regressor == 'GaussianProcess':
            self.regressor = GaussianProcessRegressor(controller)
        else:
            logging.error('Regressor type {} not found'.format(
                configuration.regressor))
Code example #3
 def __init__(self, configuration, controller, fitness):
     super(ProperSurrogateModel, self).__init__(configuration,
                                                controller,
                                                fitness)
                                                
     if configuration.classifier == 'SupportVectorMachine':
         self.classifier = SupportVectorMachineClassifier()
     else:
         logging.error('Classifier type ' + str(configuration.classifier) + ' not found')
     self.regressor = self.regressor_constructor()
     
     try:
         if self.configuration.sample_on == "ei":
             self.max_uncertainty = self.max_ei
         elif self.configuration.sample_on == "s":
             self.max_uncertainty = self.max_s2
     except:
         # sample_on missing from the configuration
         if getattr(self, "max_uncertainty", None):
             pass
         else:
             logging.debug("Sampling scheme wasn't specified, using Expected Improvement")
             self.max_uncertainty = self.max_ei
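
For the constructor variant in code examples #3 and #4, the following is a minimal sketch of the configuration attributes it reads; SimpleNamespace and the values shown are stand-ins, not the project's real configuration class.

# Illustrative configuration for the constructor above (stand-in only).
from types import SimpleNamespace

configuration = SimpleNamespace(
    classifier='SupportVectorMachine',   # any other value only logs an error
    regressor='GaussianProcess',         # consumed by regressor_constructor() in code example #4
    sample_on='ei',                      # 'ei' selects max_ei, 's' selects max_s2; if absent, max_ei is the default
)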
Code example #4
class ProperSurrogateModel(SurrogateModel):

    def __init__(self, configuration, controller, fitness):
        super(ProperSurrogateModel, self).__init__(configuration,
                                                   controller,
                                                   fitness)
                                                   
        if configuration.classifier == 'SupportVectorMachine':
            self.classifier = SupportVectorMachineClassifier()
        else:
            logging.error('Classifier type ' + str(configuration.classifier) + ' not found')
        self.regressor = self.regressor_constructor()
        
        try:
            if self.configuration.sample_on == "ei":
                self.max_uncertainty = self.max_ei
            elif self.configuration.sample_on == "s":
                self.max_uncertainty = self.max_s2
        except:
            # sample_on missing from the configuration
            if getattr(self, "max_uncertainty", None):
                pass
            else:
                logging.debug("Sampling scheme wasn't specified, using Expected Improvement")
                self.max_uncertainty = self.max_ei
        
    def get_regressor(self):
        return self.regressor
                                  
    def get_classifier(self):
        return self.classifier
        
    def get_copy(self):
        model_copy = ProperSurrogateModel(self.configuration, self.controller, self.fitness)
        model_copy.set_state_dictionary(self.get_state_dictionary())
        return model_copy
            
    def predict(self, particles):
        try:
            # use the fitness transformation function for the regressor, if one is defined
            trans_particles = [self.fitness.transformation_function(part) for part in particles]
        except:
            trans_particles = particles
        MU, S2, EI, P = self.regressor.predict(trans_particles)
        return self.classifier.predict(particles), MU, S2, EI, P

    def train(self, hypercube=None):
        self.was_trained = True
        if self.classifier.train() and self.regressor.train():
            logging.info("Trained Surrogate Model")
            return True
        else:
            logging.info("Couldn't Train Surrogate Model")
            return False
            
    def regressor_constructor(self):
        controller = self.controller
        configuration = self.configuration
        if self.configuration.regressor == 'GaussianProcess':
            return GaussianProcessRegressor(controller, configuration)
        elif self.configuration.regressor == 'GaussianProcess2':
            return GaussianProcessRegressor2(controller, configuration)          
        elif self.configuration.regressor == 'GaussianProcess3':
            return GaussianProcessRegressor3(controller, configuration)        
        elif self.configuration.regressor == 'KMeansGaussianProcessRegressor':
            return KMeansGaussianProcessRegressor(controller, configuration)        
        elif self.configuration.regressor == 'DPGMMGaussianProcessRegressor':
            return DPGMMGaussianProcessRegressor(controller, configuration)
        elif self.configuration.regressor == 'R':
            return GaussianProcessRegressorRpy(controller, configuration)
        else:
            raise Exception('Regressor type ' + str(configuration.regressor) + '  not found')
        
    def add_training_instance(self, part, code, fitness, addReturn):
        self.classifier.add_training_instance(part, code)
        if addReturn[0] == 0: ## only update regressor if the fitness function produced a result
            try:
                trans_part = self.fitness.transformation_function(part)
                #logging.debug("Using tranformation function for the regressor")
            except:
                trans_part = part
            self.regressor.add_training_instance(trans_part, fitness)
        
    def contains_training_instance(self, part):
        try:
            trans_part = self.fitness.transformation_function(part)
            #logging.debug("Using tranformation function for the regressor")
        except:
            trans_part = part
        return self.regressor.contains_training_instance(trans_part) or self.classifier.contains_training_instance(part)  

    def get_training_instance(self, part):
        code = self.classifier.get_training_instance(part) 
        fitness = None
        if self.regressor.contains_training_instance(part):
            fitness = self.regressor.get_training_instance(part)            
        return code, fitness
        
    def model_failed(self, part):
        return False
        
    def max_ei(self, designSpace, hypercube=None, npts=10):
        D = len(designSpace)
        n_bins = npts*ones(D)
        
        grid = False
        if grid:
            if hypercube:
                result = mgrid[[slice(h_min, h_max, npts*1.0j) for h_max, h_min , n in zip(hypercube[0],hypercube[1], n_bins)]]
                z = result.reshape(D,-1).T
            else:
                bounds = [(d["min"],d["max"]) for d in designSpace]
                result = mgrid[[slice(row[0], row[1], npts*1.0j) for row, n in zip(bounds, n_bins)]]
                z = result.reshape(D,-1).T
                '''
                x,y,v = mgrid[designSpace[0]["min"]:designSpace[0]["max"]:(int(designSpace[0]["max"]-designSpace[0]["min"])+1)*1.0j,designSpace[1]["min"]:designSpace[1]["max"]:(int(designSpace[1]["max"]-designSpace[1]["min"])+1)*1.0j , designSpace[2]["min"]:designSpace[2]["max"]:(int(designSpace[2]["max"]-designSpace[2]["min"])+1)*1.0j]
                x=reshape(x,-1)
                y=reshape(y,-1)
                v=reshape(v,-1)
                z = array([[a,b,c] for (a,b,c) in zip(x,y,v)])
                '''
            try:             
                zClass, MU, S2, EI, P = self.predict(z)
                filteredEI=[]
                filteredZ=[]
                for i,ei in enumerate(EI):
                    if zClass[i]==0:
                        filteredEI.append(ei)
                        filteredZ.append(z[i])
                EI = array(filteredEI) 
                return filteredZ[argmax(EI)]
            except Exception as e:
                logging.error("Finding max EI failed: " + str(e))
                return None
        else: ## more memory efficient yet slower
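
As a standalone illustration of the grid construction used in max_ei above, here is a small NumPy sketch; the two-dimensional design space and the stand-in EI values are assumptions, and the model's predict() call is mocked out.

# Sketch of the mgrid-based candidate grid from max_ei (illustrative only).
from numpy import mgrid, argmax

designSpace = [{"min": 0.0, "max": 1.0}, {"min": -1.0, "max": 1.0}]  # assumed 2-D design space
npts = 10
D = len(designSpace)

bounds = [(d["min"], d["max"]) for d in designSpace]
result = mgrid[[slice(lo, hi, npts * 1.0j) for lo, hi in bounds]]  # npts points per dimension
z = result.reshape(D, -1).T         # (npts**D, D) array of candidate design points

EI = -((z - 0.5) ** 2).sum(axis=1)  # stand-in for the regressor's expected improvement
print(z[argmax(EI)])                # candidate with the largest EI, as max_ei returns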