def train_test(ds_path):
    """Run 2-fold SVM cross-validation on the dataset at *ds_path*.

    Loads the data, attaches a gaussian kernel (gamma=0.25), runs
    cross-validation with C=128, and pickles the result object to
    ``ds_path + '.pkl'``.
    """
    data = SparseDataSet(ds_path)
    # Hyperparameters: kernel width, SVM regularization, CV fold count.
    gamma, c, num_folds = 0.25, 128, 2
    # NOTE(review): a disabled statistical feature-selection step
    # (rank_feat + keepFeatures on the top 2662 features) used to live
    # here as commented-out code; removed as dead code.
    data.attachKernel('gaussian', gamma=gamma)
    classifier = SVM(C=c)
    results = classifier.cv(data, numFolds=num_folds)
    # 'with' guarantees the pickle file is closed even if dump() raises.
    with open(ds_path + '.pkl', 'wb') as out:
        pickle.dump(results, out)
    print(ds_path)
def train_test(ds_path):
    """Run 2-fold SVM cross-validation on the dataset at *ds_path*.

    Loads the data, attaches a gaussian kernel (gamma=0.25), runs
    cross-validation with C=128, and pickles the result object to
    ``ds_path + '.pkl'``.

    NOTE(review): the file defines ``train_test`` twice with only
    whitespace differences; the later definition wins at import time.
    Consider deleting one copy.
    """
    data = SparseDataSet(ds_path)
    # Hyperparameters: kernel width, SVM regularization, CV fold count.
    gamma, c, num_folds = 0.25, 128, 2
    # NOTE(review): a disabled statistical feature-selection step
    # (rank_feat + keepFeatures on the top 2662 features) used to live
    # here as commented-out code; removed as dead code.
    data.attachKernel('gaussian', gamma=gamma)
    classifier = SVM(C=c)
    results = classifier.cv(data, numFolds=num_folds)
    # 'with' guarantees the pickle file is closed even if dump() raises.
    with open(ds_path + '.pkl', 'wb') as out:
        pickle.dump(results, out)
    print(ds_path)
def train(self, datalist, labelslist):
    """Fit the wrapped SVM instance on *datalist* with labels *labelslist*."""
    # Assemble the labeled sparse dataset for training.
    dataset = SparseDataSet(datalist, L=labelslist)
    # Fixed regularization strength used by this trainer.
    self.svminstance.C = 20
    # NOTE(review): 'degree' is passed to a 'gaussian' kernel here —
    # confirm the library ignores it, or whether 'gamma' was intended.
    dataset.attachKernel('gaussian', degree=5)
    self.svminstance.train(dataset)
def train(self, datalist, labelslist):
    """Train the underlying SVM on the given data and label lists."""
    # Wrap the raw inputs in the library's sparse dataset container.
    training_set = SparseDataSet(datalist, L=labelslist)
    # Hard-coded soft-margin penalty for this trainer.
    self.svminstance.C = 20
    # NOTE(review): 'degree' alongside a 'gaussian' kernel looks odd —
    # verify against the kernel API whether 'gamma' was meant instead.
    training_set.attachKernel('gaussian', degree=5)
    self.svminstance.train(training_set)