Example #1
0
 def createModel(self, svdlearner):
     """Build the primal LinearModel from the learner's dual coefficients.

     The hyperplane is a linear combination of the feature vectors of the
     basis examples; when a bias column was appended to the data matrix,
     its weight is split off into the model's intercept.

     Parameters
     ----------
     svdlearner : object
         Trained learner exposing the dual coefficient matrix ``A``.

     Returns
     -------
     LinearModel
         Primal prediction function.
     """
     A = self.reducedSetTransformation(svdlearner.A)
     fs = self.X
     # Restrict to the basis vectors when a reduced-set approximation is used.
     if self.basis_vectors is not None:
         fs = self.X[self.basis_vectors]
     bias = self.bias
     X = getPrimalDataMatrix(fs, bias)
     # The hyperplane is a linear combination of the feature vectors of the
     # basis examples.
     W = X.T * A
     if bias != 0:
         # The last row of W is the weight of the constant bias column that
         # getPrimalDataMatrix appended; undo its sqrt(bias) scaling.
         W_bias = W[-1] * math.sqrt(bias)
         W_features = W[:-1]
         return model.LinearModel(W_features, W_bias)
     return model.LinearModel(W, 0.)
Example #2
0
 def getModel(self):
     """Return the trained predictor; valid only after training has run.

     Returns
     -------
     model : LinearModel
         prediction function
     """
     coefficients = self.A
     intercept = self.b
     return model.LinearModel(coefficients, intercept)
Example #3
0
 def getModel(self):
     """Return the trained predictor; valid only after training has run.

     Returns
     -------
     model : LinearModel
         prediction function whose ``model.W`` contains at most
         "subsetsize" number of non-zero coefficients
     """
     coefficients = self.A
     intercept = self.b
     return model.LinearModel(coefficients, intercept)
Example #4
0
 def callback(self, learner):
     """Per-iteration hook: evaluate the learner's current model on validation data.

     Builds a linear model from the learner's current coefficients and bias,
     predicts the validation set, and — when query ids are supplied —
     averages the performance measure over the individual queries.
     """
     m = model.LinearModel(learner.A, learner.b)
     P = m.predict(self.X_valid)
     if self.qids_valid:
         perfs = []
         for query in self.qids_valid:
             try:
                 perf = self.measure(self.Y_valid[query], P[query])
                 perfs.append(perf)
             except UndefinedPerformance:
                 # Queries on which the measure is undefined (e.g. all
                 # labels identical) are simply left out of the average.
                 pass
         perf = np.mean(perfs)
Example #5
0
 def createModel(self, svdlearner):
     """Build the primal LinearModel from the learner's dual coefficients.

     The hyperplane is a linear combination of the feature vectors of the
     basis examples; when a bias column was appended to the data matrix,
     its weight is split off into the model's intercept.

     Parameters
     ----------
     svdlearner : object
         Trained learner exposing the dual coefficient matrix ``A``.

     Returns
     -------
     LinearModel
         Primal prediction function.
     """
     A = self.reducedSetTransformation(svdlearner.A)
     fs = self.X
     # Restrict to the basis vectors when a reduced-set approximation is used.
     if self.bvectors is not None:
         fs = self.X[self.bvectors]
     bias = self.bias
     X = getPrimalDataMatrix(fs, bias)
     # The hyperplane is a linear combination of the feature vectors of the
     # basis examples.
     W = X.T * A
     if bias != 0:
         # The last row of W is the weight of the constant bias column that
         # getPrimalDataMatrix appended; undo its sqrt(bias) scaling.
         W_bias = W[-1] * math.sqrt(bias)
         W_features = W[:-1]
         return model.LinearModel(W_features, W_bias)
     return model.LinearModel(W, 0.)
Example #6
0
 def testModel(self):
     """Smoke-test LinearModel prediction across label shapes and kernels."""
     train_labels = np.random.random((10))
     train_features = np.random.random((10, 100))
     kwargs = {}
     kwargs["train_labels"] = train_labels
     kwargs["train_features"] = train_features
     kwargs["regparam"] = 1
     # Default (linear) kernel: training must succeed and yield a model.
     learner = RLS.createLearner(**kwargs)
     learner.train()
     model = learner.getModel()
     # Ten data points, single label.
     model = mod.LinearModel(np.random.random((100)))
     self.all_pred_cases(model)
     # Ten data points, two labels.
     model = mod.LinearModel(np.random.random((100, 2)))
     self.all_pred_cases(model)
     # Gaussian kernel, single label.
     kwargs["kernel"] = "GaussianKernel"
     train_labels = np.random.random((10))
     kwargs["train_labels"] = train_labels
     learner = RLS.createLearner(**kwargs)
     learner.train()
     model = learner.getModel()
     self.all_pred_cases(model)
     # Gaussian kernel, two labels.
     train_labels = np.random.random((10, 2))
     kwargs["train_labels"] = train_labels
     learner = RLS.createLearner(**kwargs)
     learner.train()
     model = learner.getModel()
     self.all_pred_cases(model)
Example #7
0
 def callback(self, learner):
     """Early-stopping hook: track validation performance of each iterate.

     Converts the learner's current dual coefficients to primal form,
     evaluates them on the validation set, and remembers the best
     coefficients seen so far.  Raises Finished once no improvement has
     been observed for ``maxiter`` consecutive calls, restoring the best
     coefficients into the learner first.
     """
     A = learner.A
     b = learner.bias
     A = learner.X_csr * A
     if b == 0:
         b = np.mat(np.zeros((1, 1)))
     else:
         # The last row of the primal vector is the weight of the bias
         # column; rescale it into the intercept and drop it from A.
         b = sqrt(b) * A[-1]
         A = A[:-1]
     m = model.LinearModel(A, b)
     P = m.predict(self.X_valid)
     perf = self.measure(self.Y_valid, P)
     # An error measure improves when it decreases, any other measure when
     # it increases.
     if self.bestperf is None or (self.measure.iserror == (perf < self.bestperf)):
         self.bestperf = perf
         self.bestA = learner.A
         self.last_update = 0
     else:
         self.iter += 1
         self.last_update += 1
     if self.last_update == self.maxiter:
         learner.A = np.mat(self.bestA)
         raise Finished("Done")
Example #8
0
 def createModel(self, svdlearner):
     """Wrap the reduced-set-transformed coefficients in a bias-free LinearModel."""
     coefficients = self.reducedSetTransformation(svdlearner.A)
     return model.LinearModel(coefficients, 0.)
 def getModel(self):
     """Return a LinearModel built from the learned coefficients and bias."""
     weights, intercept = self.A, self.b
     return model.LinearModel(weights, intercept)