def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model, economy_save, keepIfPresent=True, kernelStr=None):
    """Train a new SAM model or reload a previously saved one.

    If no '<fname>.pickle' exists (or economy_save is on), the data held on
    self is stored into self.SAMObject and the model is (re)trained; an
    existing economy-size model is reloaded instead when keepIfPresent is
    True and its parameters are valid. Otherwise the full pickled model is
    loaded from fname.

    Args:
        modelNumInducing: Integer number of inducing points.
        modelNumIterations: Integer number of training iterations.
        modelInitIterations: Integer number of initialisation iterations.
        fname: Base filename (without the '.pickle' suffix) for the model.
        save_model: Boolean; persist the trained model when True.
        economy_save: Boolean; use economy (pruned, data-less) storage.
        keepIfPresent: Boolean; reuse an existing economy model if it loads.
        kernelStr: Optional kernel description string recorded on the model.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    def _learn_and_save():
        # Optimise the model and optionally persist it. Shared by both
        # retrain paths below (the original duplicated this code verbatim).
        self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
        if save_model:
            print("Saving SAMObject: " + fname)
            SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)

    if not os.path.isfile(fname + '.pickle') or economy_save:
        if not os.path.isfile(fname + '.pickle'):
            print("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) + " iterations...")

        # Fix: the original probed the attribute via `ttest = self.Quser == None`
        # inside a bare `except:`; hasattr() states the intent directly and
        # does not swallow unrelated exceptions.
        if not hasattr(self, 'Quser'):
            self.Quser = None

        # Latent dimensionality: from the data, else user-supplied, else 2.
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        if Q > 100:
            # one could parse and execute the string kernelStr for kernel instead of line below
            kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            self.SAMObject.kernelString = kernelStr
        else:
            kernel = None
            self.SAMObject.kernelString = ''

        # Simulate the function of storing a collection of events
        self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel, num_inducing=self.model_num_inducing)

        # If data are associated with labels (e.g. face identities), associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and os.path.isfile(fname + '.pickle') and keepIfPresent:
            try:
                print("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                print("Loading " + fname + " failed.\nParameters not valid. Training new model")
                _learn_and_save()
        elif not os.path.isfile(fname + '.pickle') or not keepIfPresent:
            # Simulate the function of learning from stored memories, e.g. while sleeping (consolidation).
            _learn_and_save()
    else:
        # Fix: log-message typo "SAMOBject" -> "SAMObject".
        print("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model): self.model_num_inducing = modelNumInducing self.model_num_iterations = modelNumIterations self.model_init_iterations = modelInitIterations if not os.path.isfile(fname + '.pickle'): print "Training..." if self.X is not None: Q = self.X.shape[1] else: Q=2 if Q > 100: kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q) else: kernel = None # Simulate the function of storing a collection of events self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel, num_inducing=self.model_num_inducing) # If data are associated with labels (e.g. face identities), associate them with the event collection if self.data_labels is not None: self.SAMObject.add_labels(self.data_labels) # Simulate the function of learning from stored memories, e.g. while sleeping (consolidation). self.SAMObject.learn(optimizer='scg',max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True) print "Saving SAMObject" if save_model: SAMCore.save_pruned_model(self.SAMObject, fname) else: print "Loading SAMOBject" self.SAMObject = SAMCore.load_pruned_model(fname)
# NOTE(review): fragment of a larger model-loading loop -- `i`, `cur`, `mm`,
# `Q`, `modelList`, `model_num_inducing`, `economy_save`, `splitPath` and
# `interactionConfPath` are defined by surrounding code not visible here.
fname = modelList[i]
if Q > 100:
    #one could parse and execute the string kernelStr for kernel instead of line below
    kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
else:
    kernel = None
# Simulate the function of storing a collection of events
cur.SAMObject.store(observed=cur.Y, inputs=cur.X, Q=Q, kernel=kernel, num_inducing=model_num_inducing)
# Load pruned parameters from disk into the freshly stored model
# (presumably re-hydrating an economy-size save; verify against SAMCore).
SAMCore.load_pruned_model(fname, economy_save, cur.SAMObject.model)
mm.append(cur)
#open ports
yarp.Network.init()
# Config section name: last path component of the model path, lower-cased.
sect = splitPath[0].split('/')[-1].lower()
parser2 = SafeConfigParser()
parser2.read(interactionConfPath)
# (name, value) pairs read from that section of the interaction config.
portNameList = parser2.items(sect)
print portNameList
portsList = []
for j in range(len(portNameList)):
    # An 'rpcbase' entry gets a YARP RPC server port.
    if (portNameList[j][0] == 'rpcbase'):
        portsList.append(yarp.RpcServer())
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model, economy_save, keepIfPresent=True, kernelStr=None):
    """
    Method to train, store and load the learned model.

    This method tries reloading the model in fname. If unsuccessful, or if the
    loaded model has mismatching parameters, it trains a new model from scratch.

    Args:
        modelNumInducing : Integer number of inducing parameters.
        modelNumIterations : Integer number of training iterations.
        modelInitIterations : Integer number of initialisation iterations.
        fname : Filename to save model to.
        save_model : Boolean to turn saving of the model on or off.
        economy_save : Boolean to turn economy saving on or off. Economy save saves
                       smaller models by not storing data inside the model but keeping
                       it stored in the data path.
        keepIfPresent : Boolean to enable or disable loading of a model when one is available.
        kernelStr : Kernel string with the requested kernel. If `None` the default kernel is used.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations
    if not os.path.isfile(fname + '.pickle') or economy_save:
        if not os.path.isfile(fname + '.pickle'):
            logging.info("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) + " iterations...")
        # Ensure self.Quser exists: the bare attribute access raises
        # AttributeError when it was never set, and the handler creates it.
        # NOTE(review): the bare `except:` also hides unrelated errors.
        try:
            self.Quser is None
        except:
            self.Quser = None
        # Latent dimensionality: from the data, else user-supplied, else 2.
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2
        kernel = None
        if Q > 100:
            # Build the kernel-construction statement, either from the caller's
            # kernelStr or from the default RBF+Bias+White composite.
            if kernelStr is not None:
                stringKernel = 'kernel = ' + kernelStr
            else:
                stringKernel = 'kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)'
            # NOTE(review): `exec` of a config-supplied string; only safe if
            # kernelStr is trusted. Rebinding the local `kernel` through exec
            # relies on CPython 2 function-local exec semantics -- verify.
            exec stringKernel
            logging.info('stringKernel: ' + str(stringKernel))
            self.SAMObject.kernelString = kernelStr
        else:
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        if self.model_mode != 'temporal':
            self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel, num_inducing=self.model_num_inducing)
        elif self.model_mode == 'temporal':
            # Temporal mode bypasses the store/learn pipeline and fits a
            # sparse GP regression on the concatenated [X | L] inputs.
            self.SAMObject.model = GPy.models.SparseGPRegression(numpy.hstack((self.X, self.L)), self.Y, num_inducing=self.model_num_inducing)
        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)
        if economy_save and os.path.isfile(fname + '.pickle') and keepIfPresent:
            try:
                logging.info("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Stored parameters do not match the freshly built model:
                # fall through to training a new one.
                logging.error("Loading " + fname + " failed. Parameters not valid. Training new model")
                if self.model_mode != 'temporal':
                    self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
                elif self.model_mode == 'temporal':
                    self.SAMObject.model.optimize('bfgs', max_iters=self.model_num_iterations, messages=True)
                    # Tag the wrapper and clear its metadata fields before the
                    # pruned save below.
                    self.SAMObject.type = 'mrd'
                    self.SAMObject.Q = None
                    self.SAMObject.N = None
                    self.SAMObject.namesList = None
                    self.SAMObject.kernelString = None
                if save_model:
                    logging.info("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not os.path.isfile(fname + '.pickle') or not keepIfPresent:
            # Simulate the function of learning from stored memories, e.g. while sleeping (consolidation).
            if self.model_mode != 'temporal':
                self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
            elif self.model_mode == 'temporal':
                self.SAMObject.model.optimize('bfgs', max_iters=self.model_num_iterations, messages=True)
                self.SAMObject.type = 'mrd'
                # NOTE(review): name mangling makes this assign
                # _<ThisClass>__num_views, not SAMCore's private attribute --
                # confirm this resets what was intended.
                self.SAMObject.__num_views = None
                self.SAMObject.Q = None
                self.SAMObject.N = None
                self.SAMObject.namesList = None
                self.SAMObject.kernelString = None
            if save_model:
                logging.info("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        logging.info("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model, economy_save, keepIfPresent=True, kernelStr=None):
    """Train the SAM model, reload an economy save, or load the full pickle.

    When '<fname>.pickle' is absent (or economy_save is set) the data on self
    is stored into self.SAMObject and a model is trained, unless a valid
    economy-size model can be reloaded (keepIfPresent). Otherwise the full
    pickled model is loaded.

    Args:
        modelNumInducing: Integer number of inducing points.
        modelNumIterations: Integer number of training iterations.
        modelInitIterations: Integer number of initialisation iterations.
        fname: Base filename (without the '.pickle' suffix) for the model.
        save_model: Boolean; persist the trained model when True.
        economy_save: Boolean; use economy (pruned, data-less) storage.
        keepIfPresent: Boolean; reuse an existing economy model if it loads.
        kernelStr: Optional kernel description string recorded on the model.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    def _optimise_and_persist():
        # Shared retrain path: optimise, then optionally save the pruned
        # model (previously duplicated in two branches).
        self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
        if save_model:
            print("Saving SAMObject: " + fname)
            SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)

    if not os.path.isfile(fname + '.pickle') or economy_save:
        if not os.path.isfile(fname + '.pickle'):
            print("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) + " iterations...")

        # Fix: replaces `ttest = self.Quser == None` under a bare `except:`;
        # the intent was only "create the attribute if missing".
        if not hasattr(self, 'Quser'):
            self.Quser = None

        # Latent dimensionality: data-derived, else user-supplied, else 2.
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        if Q > 100:
            # one could parse and execute the string kernelStr for kernel instead of line below
            kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            self.SAMObject.kernelString = kernelStr
        else:
            kernel = None
            self.SAMObject.kernelString = ''

        # Simulate the function of storing a collection of events
        self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel, num_inducing=self.model_num_inducing)

        # If data are associated with labels (e.g. face identities), associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and os.path.isfile(fname + '.pickle') and keepIfPresent:
            try:
                print("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                print("Loading " + fname + " failed.\nParameters not valid. Training new model")
                _optimise_and_persist()
        elif not os.path.isfile(fname + '.pickle') or not keepIfPresent:
            # Simulate the function of learning from stored memories, e.g. while sleeping (consolidation).
            _optimise_and_persist()
    else:
        # Fix: log-message typo "SAMOBject" -> "SAMObject".
        print("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model, economy_save, keepIfPresent=True, kernelStr=None):
    """Train, store and load the learned model.

    Tries reloading the model in fname; if that fails or the loaded model has
    mismatching parameters, a new model is trained from scratch.

    Args:
        modelNumInducing : Integer number of inducing parameters.
        modelNumIterations : Integer number of training iterations.
        modelInitIterations : Integer number of initialisation iterations.
        fname : Filename to save model to.
        save_model : Boolean to turn saving of the model on or off.
        economy_save : Boolean to turn economy saving on or off (smaller saves
                       that keep the data in the data path, not in the model).
        keepIfPresent : Boolean to enable or disable loading an available model.
        kernelStr : Kernel string with the requested kernel; `None` uses the default.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations
    if not os.path.isfile(fname + '.pickle') or economy_save:
        if not os.path.isfile(fname + '.pickle'):
            logging.info("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) + " iterations...")
        # Ensure self.Quser exists: the attribute access raises AttributeError
        # when it was never set, and the handler creates it.
        # NOTE(review): the bare `except:` also swallows unrelated errors.
        try:
            self.Quser is None
        except:
            self.Quser = None
        # Latent dimensionality: from the data, else user-supplied, else 2.
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2
        kernel = None
        if Q > 100:
            # Kernel-construction statement from kernelStr, or the default
            # RBF+Bias+White composite.
            if kernelStr is not None:
                stringKernel = 'kernel = ' + kernelStr
            else:
                stringKernel = 'kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)'
            # NOTE(review): `exec` of a config-supplied string; rebinding the
            # local `kernel` relies on Py2 function-local exec -- verify.
            exec stringKernel
            logging.info('stringKernel: ' + str(stringKernel))
            self.SAMObject.kernelString = kernelStr
        else:
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        if self.model_mode != 'temporal':
            self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel, num_inducing=self.model_num_inducing)
        elif self.model_mode == 'temporal':
            # Temporal mode: sparse GP regression on the concatenated [X | L].
            self.SAMObject.model = GPy.models.SparseGPRegression(numpy.hstack((self.X, self.L)), self.Y, num_inducing=self.model_num_inducing)
        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)
        if economy_save and os.path.isfile(fname + '.pickle') and keepIfPresent:
            try:
                logging.info("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Parameter mismatch with the freshly built model: retrain.
                logging.error("Loading " + fname + " failed. Parameters not valid. Training new model")
                if self.model_mode != 'temporal':
                    self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
                elif self.model_mode == 'temporal':
                    self.SAMObject.model.optimize('bfgs', max_iters=self.model_num_iterations, messages=True)
                    # Tag the wrapper and clear metadata before the pruned save.
                    self.SAMObject.type = 'mrd'
                    self.SAMObject.Q = None
                    self.SAMObject.N = None
                    self.SAMObject.namesList = None
                    self.SAMObject.kernelString = None
                if save_model:
                    logging.info("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not os.path.isfile(fname + '.pickle') or not keepIfPresent:
            # Simulate the function of learning from stored memories, e.g. while sleeping (consolidation).
            if self.model_mode != 'temporal':
                self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
            elif self.model_mode == 'temporal':
                self.SAMObject.model.optimize('bfgs', max_iters=self.model_num_iterations, messages=True)
                self.SAMObject.type = 'mrd'
                # NOTE(review): name mangling turns this into
                # _<ThisClass>__num_views on SAMObject -- confirm intent.
                self.SAMObject.__num_views = None
                self.SAMObject.Q = None
                self.SAMObject.N = None
                self.SAMObject.namesList = None
                self.SAMObject.kernelString = None
            if save_model:
                logging.info("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        logging.info("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
cur.Y = None cur.Y = {'Y':Yn_cur} cur.Ytestn = {'Ytest':Ytestn_cur} cur.Ltest = {'Ltest':Ltest_cur} fname = modelList[i] if Q > 100: #one could parse and execute the string kernelStr for kernel instead of line below kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q) else: kernel = None # Simulate the function of storing a collection of events cur.SAMObject.store(observed=cur.Y, inputs=cur.X, Q=Q, kernel=kernel, num_inducing=model_num_inducing) SAMCore.load_pruned_model(fname, economy_save, cur.SAMObject.model) mm.append(cur) print len(mm) #open ports yarp.Network.init() sect = splitPath[0].split('/')[-1].lower() print sect parser2 = SafeConfigParser() parser2.read(interactionConfPath) portNameList = parser2.items(sect) print portNameList portsList = [] for j in range(len(portNameList)):
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model, economy_save, keepIfPresent=True, kernelStr=None):
    """Train, store and load the learned model.

    Attempts to reload the model stored in fname; on failure, or when the
    loaded model's parameters do not match, a new model is trained.

    Args:
        modelNumInducing : Integer number of inducing parameters.
        modelNumIterations : Integer number of training iterations.
        modelInitIterations : Integer number of initialisation iterations.
        fname : Filename to save model to.
        save_model : Boolean to turn saving of the model on or off.
        economy_save : Boolean to turn economy saving on or off (smaller saves
                       that keep the data in the data path, not in the model).
        keepIfPresent : Boolean to enable or disable loading an available model.
        kernelStr : Kernel string with the requested kernel; `None` uses the default.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations
    if not os.path.isfile(fname + '.pickle') or economy_save:
        if not os.path.isfile(fname + '.pickle'):
            logging.info("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) + " iterations...")
        # Create self.Quser when missing: the attribute access raises
        # AttributeError if it was never set.
        # NOTE(review): bare `except:` also hides unrelated errors.
        try:
            self.Quser is None
        except:
            self.Quser = None
        # Latent dimensionality: from the data, else user-supplied, else 2.
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2
        kernel = None
        if Q > 100:
            # Kernel-construction statement from kernelStr, or the default
            # RBF+Bias+White composite.
            if kernelStr is not None:
                stringKernel = 'kernel = ' + kernelStr
            else:
                stringKernel = 'kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)'
            # NOTE(review): `exec` of a config-supplied string; rebinding the
            # local `kernel` relies on Py2 function-local exec -- verify.
            exec stringKernel
            logging.info('stringKernel: ' + str(stringKernel))
            self.SAMObject.kernelString = kernelStr
        else:
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        if self.model_mode != 'temporal':
            self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel, num_inducing=self.model_num_inducing)
        elif self.model_mode == 'temporal':
            # Temporal mode: sparse GP regression on the concatenated [X | L].
            self.SAMObject.model = GPy.models.SparseGPRegression(numpy.hstack((self.X, self.L)), self.Y, num_inducing=self.model_num_inducing)
        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)
        if economy_save and os.path.isfile(fname + '.pickle') and keepIfPresent:
            try:
                logging.info("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Parameter mismatch with the freshly built model: retrain.
                logging.error("Loading " + fname + " failed. Parameters not valid. Training new model")
                if self.model_mode != 'temporal':
                    self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
                elif self.model_mode == 'temporal':
                    self.SAMObject.model.optimize('bfgs', max_iters=self.model_num_iterations, messages=True)
                    # Tag the wrapper and clear metadata before the pruned save.
                    self.SAMObject.type = 'mrd'
                    self.SAMObject.Q = None
                    self.SAMObject.N = None
                    self.SAMObject.namesList = None
                    self.SAMObject.kernelString = None
                if save_model:
                    logging.info("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not os.path.isfile(fname + '.pickle') or not keepIfPresent:
            # Simulate the function of learning from stored memories, e.g. while sleeping (consolidation).
            if self.model_mode != 'temporal':
                self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations, init_iters=self.model_init_iterations, verbose=True)
            elif self.model_mode == 'temporal':
                self.SAMObject.model.optimize('bfgs', max_iters=self.model_num_iterations, messages=True)
                self.SAMObject.type = 'mrd'
                # NOTE(review): name mangling turns this into
                # _<ThisClass>__num_views on SAMObject -- confirm intent.
                self.SAMObject.__num_views = None
                self.SAMObject.Q = None
                self.SAMObject.N = None
                self.SAMObject.namesList = None
                self.SAMObject.kernelString = None
            if save_model:
                logging.info("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        logging.info("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)