def learn(X, Y):
    pca = None
    dictionary = None
    model = None

    # Data augmentation
    if DO_DATA_AUGMENTATION:
        print("Augmenting data")
        X, Y = transform_T(X, Y)
        print("Number of samples augmented to {}".format(X.shape[0]))

    # Dictionary learning
    if DO_DICTIONARY_LEARNING:
        dictionary = Dictionary(n_atoms=128, atom_width=16)
        if dictionary.weights_available:
            print("Loading dictionary")
            dictionary.load()
        else:
            print("Learning dictionary")
            tic = time.time()
            dictionary.fit(X)
            dictionary.save()
            print("Dictionary learned in {0:.1f}s".format(time.time() - tic))
        print("Getting dictionary representation")
        X = dictionary.get_representation(X)

    # PCA
    if DO_PCA:
        tic = time.time()
        print("Applying PCA")
        n_components = 100
        pca = PCA(n_components=n_components)
        X = pca.fit(X, scale=False)
        print("Variance explained: {:.2f}".format(np.sum(pca.e_values_ratio_)))
        print("PCA applied in {0:.1f}s".format(time.time() - tic))

    # Training
    print("Starting training")
    tic = time.time()
    model = KernelSVM(C=1, kernel='rbf')
    model.train(X, Y)
    print("Model trained in {0:.1f}s".format(time.time() - tic))

    return pca, dictionary, model
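
# A minimal usage sketch (not from the original module): applying the tuple
# returned by learn() to new data, mirroring the transformation order used in
# training. The method names dictionary.get_representation, pca.transform and
# model.predict are assumptions about the project's own classes.
def predict_with_pipeline(pca, dictionary, model, X_new):
    if dictionary is not None:
        # Re-encode the raw samples with the learned dictionary atoms
        X_new = dictionary.get_representation(X_new)
    if pca is not None:
        # Project onto the principal components fitted during training
        X_new = pca.transform(X_new)
    return model.predict(X_new)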
from src.ovr import OVR

# Face image shape and number of principal components to keep
SHAPE = (46, 56)
M = 121
standard = False

# 80/20 train/test split
data = fetch_data(ratio=0.8)

X_train, y_train = data['train']
D, N = X_train.shape

# Project the training faces onto the first M principal components
pca = PCA(n_comps=M, standard=standard)
W_train = pca.fit(X_train)

X_test, y_test = data['test']
I, K = X_test.shape
W_test = pca.transform(X_test)

# One-vs-rest SVM on the PCA representation
params = {'C': 1, 'gamma': 2e-4, 'kernel': 'linear'}
ovr = OVR(**params)
ovr.fit(W_train, y_train)
y_hat = ovr.predict(W_test).ravel()

done = {'success': False, 'failure': False}
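
# Evaluation sketch (an addition, assuming y_test is array-like and aligned
# with y_hat): overall and per-class accuracy computed with plain NumPy.
import numpy as np

y_test = np.asarray(y_test).ravel()
accuracy = np.mean(y_hat == y_test)
print("Test accuracy: {0:.3f}".format(accuracy))

# Per-identity accuracy helps spot which classes the linear OVR SVM confuses
for label in np.unique(y_test):
    mask = y_test == label
    print("Class {}: {:.2f}".format(label, np.mean(y_hat[mask] == y_test[mask])))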