def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model):
    """Train the SAM model on self.X/self.Y, or reload a previously saved one.

    Args:
        modelNumInducing: Number of inducing points for the sparse model.
        modelNumIterations: Number of training iterations.
        modelInitIterations: Number of initialisation iterations.
        fname: Base filename (without '.pickle') of the saved model.
        save_model: If True, save the trained model to disk.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    if not os.path.isfile(fname + '.pickle'):
        print("Training...")
        # Latent dimensionality: taken from the data when available, else 2.
        Q = self.X.shape[1] if self.X is not None else 2
        if Q > 100:
            # High-dimensional data: explicit RBF + Bias + White kernel.
            kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
        else:
            kernel = None  # fall back to the model's default kernel
        # Simulate the function of storing a collection of events
        self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel,
                             num_inducing=self.model_num_inducing)
        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)
        # Simulate the function of learning from stored memories,
        # e.g. while sleeping (consolidation).
        self.SAMObject.learn(optimizer='scg', max_iters=self.model_num_iterations,
                             init_iters=self.model_init_iterations, verbose=True)
        if save_model:
            # BUGFIX: announce saving only when actually saving (the message
            # used to print even with save_model=False), consistent with the
            # other training() variants in this codebase.
            print("Saving SAMObject")
            SAMCore.save_pruned_model(self.SAMObject, fname)
    else:
        # BUGFIX: message typo 'SAMOBject' corrected.
        print("Loading SAMObject")
        self.SAMObject = SAMCore.load_pruned_model(fname)
def __init__(self):
    """Initialisation for SAMDriver."""
    # Model container for learned memories.
    self.SAMObject = SAMCore.LFM()

    # Data, label, normalisation and result fields — all unset until data
    # is loaded / a model is trained.
    for attr in ('Y', 'L', 'X', 'Ytest', 'Ltest', 'Ytestn', 'Ltestn',
                 'Ymean', 'Ystd', 'Yn', 'Ln',
                 'segTrainConf', 'segTrainPerc', 'segTestConf', 'segTestPerc',
                 'data_labels', 'Quser', 'model_type', 'modelLabel',
                 'textLabels', 'participantList', 'varianceThreshold', 'fname',
                 'rawTextData', 'rawData',
                 'Yall', 'Lall', 'YtestAll', 'LtestAll',
                 'classifiers', 'classif_thresh'):
        setattr(self, attr, None)

    # Configuration flags and containers with concrete defaults.
    self.paramsDict = dict()
    self.verbose = False
    self.model_mode = False
    self.listOfModels = []
    self.optimiseRecall = True
    self.modelLoaded = False
    self.parallelOperation = False
    self.calibrated = False
    self.additionalParametersList = []

    # Training hyper-parameters, filled in by training().
    self.model_num_inducing = 0
    self.model_num_iterations = 0
    self.model_init_iterations = 0
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model,
             economy_save, keepIfPresent=True, kernelStr=None):
    """Train the SAM model, or reload a previously saved (economy) model.

    Args:
        modelNumInducing: Number of inducing points for the sparse model.
        modelNumIterations: Number of training iterations.
        modelInitIterations: Number of initialisation iterations.
        fname: Base filename (without '.pickle') of the saved model.
        save_model: If True, save the trained model to disk.
        economy_save: If True, use the economy (data-less) save/load format.
        keepIfPresent: If True, prefer reloading an existing economy model.
        kernelStr: Optional kernel description string stored on the model.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    model_present = os.path.isfile(fname + '.pickle')
    if not model_present or economy_save:
        if not model_present:
            print("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) +
                  " iterations...")
        # Ensure the optional Quser attribute exists (older drivers may not
        # define it).  BUGFIX: replaces a bare 'except:' around an unused
        # 'ttest = self.Quser == None' comparison.
        if not hasattr(self, 'Quser'):
            self.Quser = None
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        if Q > 100:
            # High-dimensional data: explicit RBF + Bias + White kernel.
            # (One could parse and execute kernelStr for the kernel instead.)
            kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            self.SAMObject.kernelString = kernelStr
        else:
            kernel = None  # fall back to the model's default kernel
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel,
                             num_inducing=self.model_num_inducing)
        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and model_present and keepIfPresent:
            try:
                print("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Saved parameters do not match the freshly stored model.
                print("Loading " + fname + " failed.\nParameters not valid. Training new model")
                self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations,
                                     init_iters=self.model_init_iterations, verbose=True)
                if save_model:
                    print("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not model_present or not keepIfPresent:
            # Simulate the function of learning from stored memories,
            # e.g. while sleeping (consolidation).
            self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations,
                                 init_iters=self.model_init_iterations, verbose=True)
            if save_model:
                print("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        # BUGFIX: message typo 'SAMOBject' corrected.
        print("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
def __init__(self, isYarpRunning=isYarpRunningGlobal, imgH=200, imgW=200, imgHNew=200,
             imgWNew=200, inputImagePort="/visionDriver/image:o"):
    """Initialise the vision driver and, when available, the Yarp network.

    Args:
        isYarpRunning: Whether to connect to Yarp; forced to False when the
            yarp module was not importable (isYarpRunningGlobal is False).
        imgH, imgW: Height/width of the incoming image.
        imgHNew, imgWNew: Height/width that images are resized to.
        inputImagePort: Yarp port name to read images from.
    """
    # Yarp may be requested but unavailable on this system.
    if not isYarpRunningGlobal and isYarpRunning:
        isYarpRunning = False
        print('Warning! yarp was not found in the system.')
    self.isYarpRunning = isYarpRunning
    self.inputImagePort = inputImagePort

    self.SAMObject = SAMCore.LFM()
    self.imgHeight = imgH
    self.imgWidth = imgW
    self.imgHeightNew = imgHNew
    self.imgWidthNew = imgWNew
    self.image_suffix = ".ppm"

    # Data containers, unset until data is loaded.
    for attr in ('Y', 'L', 'X', 'Ytest', 'Ltest', 'Ytestn', 'Ltestn',
                 'Ymean', 'Ystd', 'Yn', 'Ln', 'data_labels', 'participant_index'):
        setattr(self, attr, None)

    # Training hyper-parameters, filled in by training().
    self.model_num_inducing = 0
    self.model_num_iterations = 0
    self.model_init_iterations = 0

    # IDIOM FIX: was 'if (self.isYarpRunning == True):'.
    if self.isYarpRunning:
        print('Initialising Yarp...')
        yarp.Network.init()
        self.createPorts()
        self.openPorts()
        self.createImageArrays()
def __init__(self):
    """Initialise the driver with an empty model and unset data fields."""
    # Model container for learned memories.
    self.SAMObject = SAMCore.LFM()

    # Data, label and normalisation fields — unset until data is loaded.
    for attr in ('Y', 'L', 'X', 'Ytest', 'Ltest', 'Ytestn', 'Ltestn',
                 'Ymean', 'Ystd', 'Yn', 'Ln',
                 'segTrainConf', 'segTrainPerc', 'segTestConf', 'segTestPerc',
                 'data_labels', 'Quser', 'model_type', 'modelLabel',
                 'participantList', 'fname',
                 'Yall', 'Lall', 'YtestAll', 'LtestAll',
                 'classifiers', 'classif_thresh'):
        setattr(self, attr, None)

    # Configuration flags and containers with concrete defaults.
    self.paramsDict = dict()
    self.verbose = False
    self.model_mode = False
    self.listOfModels = []
    self.additionalParametersList = []

    # Training hyper-parameters, filled in by training().
    self.model_num_inducing = 0
    self.model_num_iterations = 0
    self.model_init_iterations = 0
def __init__(self, isYarpRunning=False):
    """Initialise the driver; disable Yarp when it is not installed."""
    # Honour the Yarp request only when the module import succeeded globally.
    if not isYarpRunningGlobal and isYarpRunning:
        isYarpRunning = False
        print('Warning! yarp was not found in the system.')
    self.isYarpRunning = isYarpRunning

    # Model container for learned memories.
    self.SAMObject = SAMCore.LFM()

    # Data containers, unset until data is loaded.
    for attr in ('Y', 'L', 'X', 'Ytest', 'Ltest', 'Ytestn', 'Ltestn',
                 'Ymean', 'Ystd', 'Yn', 'Ln', 'data_labels'):
        setattr(self, attr, None)

    # Training hyper-parameters, filled in by training().
    self.model_num_inducing = 0
    self.model_num_iterations = 0
    self.model_init_iterations = 0
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model,
             economy_save, keepIfPresent=True, kernelStr=None):
    """Method to train, store and load the learned model.

    This method tries reloading the model in fname. If unsuccessful, or if
    the loaded model has mismatching parameters, trains a new model from
    scratch.

    Args:
        modelNumInducing : Integer number of inducing parameters.
        modelNumIterations : Integer number of training iterations.
        modelInitIterations : Integer number of initialisation iterations.
        fname : Filename to save model to.
        save_model : Boolean to turn saving of the model on or off.
        economy_save : Boolean to turn economy saving on or off. Economy save
                       saves smaller models by not storing data inside the
                       model but keeping it stored in the data path.
        keepIfPresent : Boolean to enable or disable loading of a model when
                        one is available.
        kernelStr : Kernel string with the requested kernel. If `None` the
                    default kernel is used.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    model_present = os.path.isfile(fname + '.pickle')
    if not model_present or economy_save:
        if not model_present:
            logging.info("Training for " + str(modelInitIterations) + "|" +
                         str(modelNumIterations) + " iterations...")
        # Ensure the optional Quser attribute exists.  BUGFIX: replaces a
        # no-op 'self.Quser is None' expression under a bare 'except:'.
        if not hasattr(self, 'Quser'):
            self.Quser = None
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        kernel = None
        if Q > 100:
            if kernelStr is not None:
                stringKernel = 'kernel = ' + kernelStr
                # SECURITY NOTE(review): kernelStr is evaluated as Python
                # code — only pass trusted strings here.
                # BUGFIX: uses eval (not the Py2-only 'exec' statement) so the
                # local binding of 'kernel' also works under Python 3.
                kernel = eval(kernelStr)
            else:
                stringKernel = ('kernel = GPy.kern.RBF(Q, ARD=False) + '
                                'GPy.kern.Bias(Q) + GPy.kern.White(Q)')
                kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            logging.info('stringKernel: ' + str(stringKernel))
            self.SAMObject.kernelString = kernelStr
        else:
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        if self.model_mode != 'temporal':
            self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel,
                                 num_inducing=self.model_num_inducing)
        elif self.model_mode == 'temporal':
            # Temporal mode: plain sparse GP regression on [X, L] -> Y.
            self.SAMObject.model = GPy.models.SparseGPRegression(
                numpy.hstack((self.X, self.L)), self.Y,
                num_inducing=self.model_num_inducing)

        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and model_present and keepIfPresent:
            try:
                logging.info("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Saved parameters do not match the freshly stored model.
                logging.error("Loading " + fname +
                              " failed. Parameters not valid. Training new model")
                if self.model_mode != 'temporal':
                    self.SAMObject.learn(optimizer='bfgs',
                                         max_iters=self.model_num_iterations,
                                         init_iters=self.model_init_iterations,
                                         verbose=True)
                elif self.model_mode == 'temporal':
                    self.SAMObject.model.optimize('bfgs',
                                                  max_iters=self.model_num_iterations,
                                                  messages=True)
                    # Tag as mrd-style and blank fields economy save ignores.
                    self.SAMObject.type = 'mrd'
                    self.SAMObject.Q = None
                    self.SAMObject.N = None
                    self.SAMObject.namesList = None
                    self.SAMObject.kernelString = None
                if save_model:
                    logging.info("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not model_present or not keepIfPresent:
            # Simulate the function of learning from stored memories,
            # e.g. while sleeping (consolidation).
            if self.model_mode != 'temporal':
                self.SAMObject.learn(optimizer='bfgs',
                                     max_iters=self.model_num_iterations,
                                     init_iters=self.model_init_iterations,
                                     verbose=True)
            elif self.model_mode == 'temporal':
                self.SAMObject.model.optimize('bfgs',
                                              max_iters=self.model_num_iterations,
                                              messages=True)
                self.SAMObject.type = 'mrd'
                # BUGFIX: 'self.SAMObject.__num_views = None' written inside a
                # class body is name-mangled to '_<ClassName>__num_views', so
                # the intended attribute was never set; setattr with the
                # literal name avoids the mangling.
                setattr(self.SAMObject, '__num_views', None)
                self.SAMObject.Q = None
                self.SAMObject.N = None
                self.SAMObject.namesList = None
                self.SAMObject.kernelString = None
            if save_model:
                logging.info("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        logging.info("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
mm[0].paramsDict['calibrated'] = mm[0].calibrated if numParts == 1: if mm[0].X is None: mm[0].paramsDict['X'] = mm[0].X else: mm[0].paramsDict['X'] = mm[0].X.shape if mm[0].model_mode != 'temporal': mm[0].paramsDict['Y'] = mm[k].Y['Y'].shape mm[0].paramsDict['useMaxDistance'] = mm[0].useMaxDistance elif numParts > 1: # fname = mm[0].listOfModels[k-1] if mm[k].X is None: mm[k].paramsDict['X'] = mm[k].X else: mm[k].paramsDict['X'] = mm[k].X.shape mm[k].paramsDict['Y'] = mm[k].Y['Y'].shape # else: # pass # fname = fnameProto # save model with custom .pickle dictionary by iterating through all nested models logging.info('-------------------') logging.info('Saving: ' + mm[k].fname) mm[k].saveParameters() logging.info('Keys:') logging.info(mm[k].paramsDict.keys()) SAMCore.save_pruned_model(mm[k].SAMObject, mm[k].fname, mm[0].economy_save, extraDict=mm[k].paramsDict)
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model,
             economy_save, keepIfPresent=True, kernelStr=None):
    """Train the SAM model, or reload a previously saved (economy) model.

    Args:
        modelNumInducing: Number of inducing points for the sparse model.
        modelNumIterations: Number of training iterations.
        modelInitIterations: Number of initialisation iterations.
        fname: Base filename (without '.pickle') of the saved model.
        save_model: If True, save the trained model to disk.
        economy_save: If True, use the economy (data-less) save/load format.
        keepIfPresent: If True, prefer reloading an existing economy model.
        kernelStr: Optional kernel description string stored on the model.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    model_present = os.path.isfile(fname + '.pickle')
    if not model_present or economy_save:
        if not model_present:
            print("Training for " + str(modelInitIterations) + "|" + str(modelNumIterations) +
                  " iterations...")
        # Ensure the optional Quser attribute exists.  BUGFIX: replaces a
        # bare 'except:' around an unused 'ttest = self.Quser == None'.
        if not hasattr(self, 'Quser'):
            self.Quser = None
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        if Q > 100:
            # High-dimensional data: explicit RBF + Bias + White kernel.
            # (One could parse and execute kernelStr for the kernel instead.)
            kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            self.SAMObject.kernelString = kernelStr
        else:
            kernel = None  # fall back to the model's default kernel
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel,
                             num_inducing=self.model_num_inducing)
        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and model_present and keepIfPresent:
            try:
                print("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Saved parameters do not match the freshly stored model.
                print("Loading " + fname + " failed.\nParameters not valid. Training new model")
                self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations,
                                     init_iters=self.model_init_iterations, verbose=True)
                if save_model:
                    print("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not model_present or not keepIfPresent:
            # Simulate the function of learning from stored memories,
            # e.g. while sleeping (consolidation).
            self.SAMObject.learn(optimizer='bfgs', max_iters=self.model_num_iterations,
                                 init_iters=self.model_init_iterations, verbose=True)
            if save_model:
                print("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        # BUGFIX: message typo 'SAMOBject' corrected.
        print("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
else: result = False print str(i).rjust(off3) + '/' + str(allCount) + ' Truth: ' + currLabel.ljust(off1) + ' Model: ' + lab.ljust(off1) + ' with ' + str(confidence)[:6].ljust(off2) + ' confidence: ' + str(result) sstest.append(result) correctVal = sum(sstest) percCorect = correctVal*100/allCount print str(percCorect) + " percent correct for testing data" #save model with custom .pickle dictionary by iterating through all nested models fname_cur = fname print 'Saving: ' + fname_cur extraParams = dict() extraParams['YALL'] = Yall extraParams['LALL'] = Lall extraParams['YTEST'] = YtestAll extraParams['LTEST'] = LtestAll extraParams['objCombs'] = mySAMpy.objCombs extraParams['ignoreLabels'] = ignoreLabels extraParams['ignoreParts'] = ignoreParts extraParams['contactThreshold'] = mySAMpy.contactThreshold extraParams['angleThreshold'] = mySAMpy.angleThreshold extraParams['distanceThreshold'] = mySAMpy.distanceThreshold extraParams['Quser'] = Quser extraParams['percentTestData'] = ratioData extraParams['model_num_iterations'] = model_num_iterations extraParams['model_init_iterations'] = model_init_iterations extraParams['model_type'] = model_type extraParams['textLabels'] = mySAMpy.labelName SAMCore.save_pruned_model(mySAMpy.SAMObject, fname_cur, economy_save, extraDict=extraParams)
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model,
             economy_save, keepIfPresent=True, kernelStr=None):
    """Train, store and load the learned model (temporal-mode aware).

    Tries reloading the model in fname; if unsuccessful or the loaded model
    has mismatching parameters, trains a new model from scratch.

    Args:
        modelNumInducing: Number of inducing points for the sparse model.
        modelNumIterations: Number of training iterations.
        modelInitIterations: Number of initialisation iterations.
        fname: Base filename (without '.pickle') of the saved model.
        save_model: If True, save the trained model to disk.
        economy_save: If True, use the economy (data-less) save/load format.
        keepIfPresent: If True, prefer reloading an existing economy model.
        kernelStr: Optional kernel expression evaluated to build the kernel.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    model_present = os.path.isfile(fname + '.pickle')
    if not model_present or economy_save:
        if not model_present:
            logging.info("Training for " + str(modelInitIterations) + "|" +
                         str(modelNumIterations) + " iterations...")
        # Ensure the optional Quser attribute exists.  BUGFIX: replaces a
        # no-op 'self.Quser is None' expression under a bare 'except:'.
        if not hasattr(self, 'Quser'):
            self.Quser = None
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        kernel = None
        if Q > 100:
            if kernelStr is not None:
                stringKernel = 'kernel = ' + kernelStr
                # SECURITY NOTE(review): kernelStr is evaluated as Python
                # code — only pass trusted strings here.
                # BUGFIX: uses eval (not the Py2-only 'exec' statement) so the
                # local binding of 'kernel' also works under Python 3.
                kernel = eval(kernelStr)
            else:
                stringKernel = ('kernel = GPy.kern.RBF(Q, ARD=False) + '
                                'GPy.kern.Bias(Q) + GPy.kern.White(Q)')
                kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            logging.info('stringKernel: ' + str(stringKernel))
            self.SAMObject.kernelString = kernelStr
        else:
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        if self.model_mode != 'temporal':
            self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel,
                                 num_inducing=self.model_num_inducing)
        elif self.model_mode == 'temporal':
            # Temporal mode: plain sparse GP regression on [X, L] -> Y.
            self.SAMObject.model = GPy.models.SparseGPRegression(
                numpy.hstack((self.X, self.L)), self.Y,
                num_inducing=self.model_num_inducing)

        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and model_present and keepIfPresent:
            try:
                logging.info("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Saved parameters do not match the freshly stored model.
                logging.error("Loading " + fname +
                              " failed. Parameters not valid. Training new model")
                if self.model_mode != 'temporal':
                    self.SAMObject.learn(optimizer='bfgs',
                                         max_iters=self.model_num_iterations,
                                         init_iters=self.model_init_iterations,
                                         verbose=True)
                elif self.model_mode == 'temporal':
                    self.SAMObject.model.optimize('bfgs',
                                                  max_iters=self.model_num_iterations,
                                                  messages=True)
                    # Tag as mrd-style and blank fields economy save ignores.
                    self.SAMObject.type = 'mrd'
                    self.SAMObject.Q = None
                    self.SAMObject.N = None
                    self.SAMObject.namesList = None
                    self.SAMObject.kernelString = None
                if save_model:
                    logging.info("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not model_present or not keepIfPresent:
            # Simulate the function of learning from stored memories,
            # e.g. while sleeping (consolidation).
            if self.model_mode != 'temporal':
                self.SAMObject.learn(optimizer='bfgs',
                                     max_iters=self.model_num_iterations,
                                     init_iters=self.model_init_iterations,
                                     verbose=True)
            elif self.model_mode == 'temporal':
                self.SAMObject.model.optimize('bfgs',
                                              max_iters=self.model_num_iterations,
                                              messages=True)
                self.SAMObject.type = 'mrd'
                # BUGFIX: 'self.SAMObject.__num_views = None' written inside a
                # class body is name-mangled to '_<ClassName>__num_views', so
                # the intended attribute was never set; setattr with the
                # literal name avoids the mangling.
                setattr(self.SAMObject, '__num_views', None)
                self.SAMObject.Q = None
                self.SAMObject.N = None
                self.SAMObject.namesList = None
                self.SAMObject.kernelString = None
            if save_model:
                logging.info("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        logging.info("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
cur.Y = None cur.Y = {'Y':Yn_cur} cur.Ytestn = {'Ytest':Ytestn_cur} cur.Ltest = {'Ltest':Ltest_cur} fname = modelList[i] if Q > 100: #one could parse and execute the string kernelStr for kernel instead of line below kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q) else: kernel = None # Simulate the function of storing a collection of events cur.SAMObject.store(observed=cur.Y, inputs=cur.X, Q=Q, kernel=kernel, num_inducing=model_num_inducing) SAMCore.load_pruned_model(fname, economy_save, cur.SAMObject.model) mm.append(cur) print len(mm) #open ports yarp.Network.init() sect = splitPath[0].split('/')[-1].lower() print sect parser2 = SafeConfigParser() parser2.read(interactionConfPath) portNameList = parser2.items(sect) print portNameList portsList = [] for j in range(len(portNameList)):
def training(self, modelNumInducing, modelNumIterations, modelInitIterations, fname, save_model,
             economy_save, keepIfPresent=True, kernelStr=None):
    """Train, store and load the learned model (temporal-mode aware).

    Tries reloading the model in fname; if unsuccessful or the loaded model
    has mismatching parameters, trains a new model from scratch.

    Args:
        modelNumInducing: Number of inducing points for the sparse model.
        modelNumIterations: Number of training iterations.
        modelInitIterations: Number of initialisation iterations.
        fname: Base filename (without '.pickle') of the saved model.
        save_model: If True, save the trained model to disk.
        economy_save: If True, use the economy (data-less) save/load format.
        keepIfPresent: If True, prefer reloading an existing economy model.
        kernelStr: Optional kernel expression evaluated to build the kernel.

    Returns:
        None
    """
    self.model_num_inducing = modelNumInducing
    self.model_num_iterations = modelNumIterations
    self.model_init_iterations = modelInitIterations

    model_present = os.path.isfile(fname + '.pickle')
    if not model_present or economy_save:
        if not model_present:
            logging.info("Training for " + str(modelInitIterations) + "|" +
                         str(modelNumIterations) + " iterations...")
        # Ensure the optional Quser attribute exists.  BUGFIX: replaces a
        # no-op 'self.Quser is None' expression under a bare 'except:'.
        if not hasattr(self, 'Quser'):
            self.Quser = None
        if self.X is not None:
            Q = self.X.shape[1]
        elif self.Quser is not None:
            Q = self.Quser
        else:
            Q = 2

        kernel = None
        if Q > 100:
            if kernelStr is not None:
                stringKernel = 'kernel = ' + kernelStr
                # SECURITY NOTE(review): kernelStr is evaluated as Python
                # code — only pass trusted strings here.
                # BUGFIX: uses eval (not the Py2-only 'exec' statement) so the
                # local binding of 'kernel' also works under Python 3.
                kernel = eval(kernelStr)
            else:
                stringKernel = ('kernel = GPy.kern.RBF(Q, ARD=False) + '
                                'GPy.kern.Bias(Q) + GPy.kern.White(Q)')
                kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q)
            logging.info('stringKernel: ' + str(stringKernel))
            self.SAMObject.kernelString = kernelStr
        else:
            self.SAMObject.kernelString = ''
        # Simulate the function of storing a collection of events
        if self.model_mode != 'temporal':
            self.SAMObject.store(observed=self.Y, inputs=self.X, Q=Q, kernel=kernel,
                                 num_inducing=self.model_num_inducing)
        elif self.model_mode == 'temporal':
            # Temporal mode: plain sparse GP regression on [X, L] -> Y.
            self.SAMObject.model = GPy.models.SparseGPRegression(
                numpy.hstack((self.X, self.L)), self.Y,
                num_inducing=self.model_num_inducing)

        # If data are associated with labels (e.g. face identities),
        # associate them with the event collection
        if self.data_labels is not None:
            self.SAMObject.add_labels(self.data_labels)

        if economy_save and model_present and keepIfPresent:
            try:
                logging.info("Try loading economy size SAMObject: " + fname)
                # Load the model from the economy storage
                SAMCore.load_pruned_model(fname, economy_save, self.SAMObject.model)
            except ValueError:
                # Saved parameters do not match the freshly stored model.
                logging.error("Loading " + fname +
                              " failed. Parameters not valid. Training new model")
                if self.model_mode != 'temporal':
                    self.SAMObject.learn(optimizer='bfgs',
                                         max_iters=self.model_num_iterations,
                                         init_iters=self.model_init_iterations,
                                         verbose=True)
                elif self.model_mode == 'temporal':
                    self.SAMObject.model.optimize('bfgs',
                                                  max_iters=self.model_num_iterations,
                                                  messages=True)
                    # Tag as mrd-style and blank fields economy save ignores.
                    self.SAMObject.type = 'mrd'
                    self.SAMObject.Q = None
                    self.SAMObject.N = None
                    self.SAMObject.namesList = None
                    self.SAMObject.kernelString = None
                if save_model:
                    logging.info("Saving SAMObject: " + fname)
                    SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
        elif not model_present or not keepIfPresent:
            # Simulate the function of learning from stored memories,
            # e.g. while sleeping (consolidation).
            if self.model_mode != 'temporal':
                self.SAMObject.learn(optimizer='bfgs',
                                     max_iters=self.model_num_iterations,
                                     init_iters=self.model_init_iterations,
                                     verbose=True)
            elif self.model_mode == 'temporal':
                self.SAMObject.model.optimize('bfgs',
                                              max_iters=self.model_num_iterations,
                                              messages=True)
                self.SAMObject.type = 'mrd'
                # BUGFIX: 'self.SAMObject.__num_views = None' written inside a
                # class body is name-mangled to '_<ClassName>__num_views', so
                # the intended attribute was never set; setattr with the
                # literal name avoids the mangling.
                setattr(self.SAMObject, '__num_views', None)
                self.SAMObject.Q = None
                self.SAMObject.N = None
                self.SAMObject.namesList = None
                self.SAMObject.kernelString = None
            if save_model:
                logging.info("Saving SAMObject: " + fname)
                SAMCore.save_pruned_model(self.SAMObject, fname, economy_save)
    else:
        logging.info("Loading SAMObject: " + fname)
        self.SAMObject = SAMCore.load_pruned_model(fname)
for j in range(len(participantList)): confusionMatrix[i,j] = np.count_nonzero(j == predictions[i])# / float(Ntest) print 'Accuracy for participant',participantList[i], np.count_nonzero(i == predictions[i]) / float(Ntest) print print confusionMatrix # print 'Total correct:', np.count_nonzero(predictions[i,:]==participantList[i]) plot_confusion_matrix(confusion_matrix(np.vstack(participantList[i] for i in Ltest),predictions.flatten()), participantList) plt.figure() mm[0].SAMObject.model.plot_latent(labels=L,which_indices=SAMCore.most_significant_input_dimensions(mm[0].SAMObject.model,None)) raw_input('Press enter when done') # print 'Total correct:', np.count_nonzero(predictions[i,:]==participantList[i]) # print 'Average accuracy of:', np.count_nonzero(int(participantList) == predictions) / float(len(Ytestn)) if visualise_output: plot_confusion_matrix(confusion_matrix(np.vstack(participantList[i] for i in Ltest),predictions.flatten()), participantList) plt.figure() mm[0].SAMObject.model.plot_latent(labels=L,which_indices=SAMCore.most_significant_input_dimensions(mm[0].SAMObject.model,None)) ax = mm[0].SAMObject.model.plot_latent(labels=L) y = mm[0].SAMObject.model.Y[0,:] data_show = GPy.plotting.matplot_dep.visualize.vector_show(y) lvm_dimselect = GPy.plotting.matplot_dep.visualize.lvm(mm[0].SAMObject.model.X.mean[0,:].copy(), mm[0].SAMObject.model, data_show, ax) raw_input('Press enter to finish')
fname = modelList[i] if Q > 100: #one could parse and execute the string kernelStr for kernel instead of line below kernel = GPy.kern.RBF(Q, ARD=False) + GPy.kern.Bias(Q) + GPy.kern.White(Q) else: kernel = None # Simulate the function of storing a collection of events cur.SAMObject.store(observed=cur.Y, inputs=cur.X, Q=Q, kernel=kernel, num_inducing=model_num_inducing) SAMCore.load_pruned_model(fname, economy_save, cur.SAMObject.model) mm.append(cur) #open ports yarp.Network.init() sect = splitPath[0].split('/')[-1].lower() parser2 = SafeConfigParser() parser2.read(interactionConfPath) portNameList = parser2.items(sect) print portNameList portsList = [] for j in range(len(portNameList)): if (portNameList[j][0] == 'rpcbase'): portsList.append(yarp.RpcServer())
print 'Accuracy for participant', participantList[i], np.count_nonzero( i == predictions[i]) / float(Ntest) print print confusionMatrix # print 'Total correct:', np.count_nonzero(predictions[i,:]==participantList[i]) plot_confusion_matrix( confusion_matrix(np.vstack(participantList[i] for i in Ltest), predictions.flatten()), participantList) plt.figure() mm[0].SAMObject.model.plot_latent( labels=L, which_indices=SAMCore.most_significant_input_dimensions( mm[0].SAMObject.model, None)) raw_input('Press enter when done') # print 'Total correct:', np.count_nonzero(predictions[i,:]==participantList[i]) # print 'Average accuracy of:', np.count_nonzero(int(participantList) == predictions) / float(len(Ytestn)) if visualise_output: plot_confusion_matrix( confusion_matrix(np.vstack(participantList[i] for i in Ltest), predictions.flatten()), participantList) plt.figure() mm[0].SAMObject.model.plot_latent( labels=L, which_indices=SAMCore.most_significant_input_dimensions( mm[0].SAMObject.model, None))