def cca_projection(self, X, Y, k=2):
    '''
    Return U_k^T, \Sigma_{XX}^{-1/2}
    '''
    # Scale an identity matrix by this term and add it to each computed
    # covariance matrix to prevent the matrix from being singular.
    reg = 1e-5

    Y = create_one_hot_label(Y, self.NUM_CLASSES)
    X, Y = subtract_mean_from_data(X, Y)

    cov_XX = compute_covariance_matrix(X, X)
    cov_XX = cov_XX + reg * np.identity(len(cov_XX))
    cov_XY = compute_covariance_matrix(X, Y)
    cov_YY = compute_covariance_matrix(Y, Y)
    cov_YY = cov_YY + reg * np.identity(len(cov_YY))

    left = sqrtm(inv(cov_XX))
    middle = cov_XY
    right = sqrtm(inv(cov_YY))

    m = left.dot(middle.dot(right))
    U, D, V = svd(m)
    return (U.T)[0:k], left
def cca_projection(self, X, Y, k=2):
    '''
    Return U_k^T, \Sigma_{XX}^{-1/2}
    '''
    # Scale an identity matrix by this term and add it to each computed
    # covariance matrix to prevent the matrix from being singular.
    reg = 1e-5

    y_one_hot = create_one_hot_label(Y, self.NUM_CLASSES)

    # perform mean subtraction
    X_new, Y_new = subtract_mean_from_data(X, y_one_hot)
    m = X_new.shape[1]
    n = Y_new.shape[1]

    XX = compute_covariance_matrix(X_new, X_new)
    YY = compute_covariance_matrix(Y_new, Y_new)

    # regularize each covariance matrix, scaled by its trace
    XX += np.trace(XX) * reg * np.eye(m)
    YY += np.trace(YY) * reg * np.eye(n)

    # compute the inverse square roots
    XX_inverse = inv(sqrtm(XX)).T
    YY_inverse = inv(sqrtm(YY)).T

    # cross-correlation between the whitened X and Y
    correlation_XY = np.dot(
        np.dot(X_new, XX_inverse).T,
        np.dot(Y_new, YY_inverse))

    # finally, the SVD decomposition
    U, sigma, V = svd(correlation_XY)

    # keep only the first k columns
    U = U[:, :k]
    return U.T, XX_inverse
def cca_projection(self, X, Y, k=2):
    '''
    Return U_k^T, \Sigma_{XX}^{-1/2}
    '''
    Y = create_one_hot_label(Y, self.NUM_CLASSES)
    X, Y = subtract_mean_from_data(X, Y)

    C_XY = compute_covariance_matrix(X, Y)
    C_XX = compute_covariance_matrix(X, X)
    C_YY = compute_covariance_matrix(Y, Y)

    dim_x = C_XX.shape[0]
    dim_y = C_YY.shape[0]

    # regularize the covariances before inverting to keep them non-singular
    A = inv(sqrtm(C_XX + 1e-5 * np.eye(dim_x)))
    B = inv(sqrtm(C_YY + 1e-5 * np.eye(dim_y)))

    C = np.matmul(A, np.matmul(C_XY, B))
    u, s, d = svd(C)
    return u[:, 0:k].T, A
def cca_projection(self, X, Y, k=2):
    '''
    Return U_k^T, \Sigma_{XX}^{-1/2}
    '''
    # regularization added to the covariance diagonal to keep it invertible
    reg = 1e-5

    Y = np.array(Y)
    one_hot_y = create_one_hot_label(Y, self.NUM_CLASSES)

    # subtract_mean_from_data returns a pair; only its first element is used below
    X_bar = subtract_mean_from_data(X, Y)
    Y_bar = subtract_mean_from_data(one_hot_y, X)

    cov_XX = compute_covariance_matrix(X_bar[0], X_bar[0])
    cov_XX += reg * np.identity(len(cov_XX))
    cov_YY = compute_covariance_matrix(Y_bar[0], Y_bar[0])
    cov_XY = compute_covariance_matrix(X_bar[0], Y_bar[0])

    cov_XX_inv = inv(sqrtm(cov_XX))
    cov_YY_inv = inv(sqrtm(cov_YY))

    ccm = cov_XX_inv.dot(cov_XY).dot(cov_YY_inv)
    U, s, V = svd(ccm)
    return U.T[:k], cov_XX_inv
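# A minimal, self-contained NumPy/SciPy sketch (not part of the original code) of
# the linear algebra the cca_projection variants above implement: whiten the two
# covariance blocks, take the SVD of the whitened cross-covariance, and use the
# top-k left singular vectors together with Sigma_XX^{-1/2} to embed X. The toy
# data, shapes, and variable names below are illustrative assumptions only.
import numpy as np
from numpy.linalg import inv, svd
from scipy.linalg import sqrtm

rng = np.random.default_rng(0)
X_toy = rng.standard_normal((200, 5))                 # toy features
Y_toy = np.eye(3)[rng.integers(0, 3, size=200)]       # toy one-hot labels
Xc = X_toy - X_toy.mean(axis=0)
Yc = Y_toy - Y_toy.mean(axis=0)

reg = 1e-5
S_xx = Xc.T @ Xc / len(Xc) + reg * np.eye(5)          # regularized Sigma_XX
S_yy = Yc.T @ Yc / len(Yc) + reg * np.eye(3)          # regularized Sigma_YY
S_xy = Xc.T @ Yc / len(Xc)                            # Sigma_XY

white_x = inv(sqrtm(S_xx))                            # Sigma_XX^{-1/2}
white_y = inv(sqrtm(S_yy))                            # Sigma_YY^{-1/2}
U, s, Vt = svd(white_x @ S_xy @ white_y)

k = 2
X_embedded = Xc @ white_x @ U[:, :k]                  # CCA projection of X
print(X_embedded.shape)                               # (200, 2)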
def pca_projection(self, X, Y):
    '''
    Return U_2^T
    '''
    X, Y = subtract_mean_from_data(X, Y)
    cov_X = compute_covariance_matrix(X, X)
    U, D, V = svd(cov_X)
    want = (U.T)[0:2]
    return want
def pca_projection(self, X, Y):
    '''
    Return U_2^T
    '''
    X, Y = subtract_mean_from_data(X, Y)
    C_XX = compute_covariance_matrix(X, X)
    u, s, d = svd(C_XX)
    return u[:, 0:2].T
def pca_projection(self, X, Y):
    '''
    Return U_2^T
    '''
    Y = create_one_hot_label(Y, self.NUM_CLASSES)
    X, Y = subtract_mean_from_data(X, Y)
    C_XX = compute_covariance_matrix(X, X)
    u, s, d = svd(C_XX)
    return u[:, 0:2].T
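# A minimal, self-contained NumPy sketch (not part of the original code) of the
# computation the pca_projection variants above perform: center the data, take
# the SVD of its sample covariance, and keep the top two principal directions
# U_2^T. The toy data and names below are illustrative assumptions only.
import numpy as np
from numpy.linalg import svd

rng = np.random.default_rng(0)
X_toy = rng.standard_normal((200, 5))    # toy features
Xc = X_toy - X_toy.mean(axis=0)          # mean subtraction

cov = Xc.T @ Xc / len(Xc)                # sample covariance
U, s, Vt = svd(cov)

proj = U[:, 0:2].T                       # U_2^T, the 2-D PCA projection matrix
X_2d = Xc @ proj.T                       # data projected into two dimensions
print(proj.shape, X_2d.shape)            # (2, 5) (200, 2)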
def train_model(self, X, Y):
    '''
    FILL IN CODE TO TRAIN MODEL
    MAKE SURE TO ADD HYPERPARAMETER TO MODEL
    '''
    X = np.array(X)
    Y = np.array(Y)
    self.mu = []
    for i in range(self.NUM_CLASSES):
        # select the samples belonging to class i
        index = Y == i
        n = sum(index)
        Xi = X[index, :]

        # class mean
        mean = np.mean(Xi, axis=0)
        self.mu.append(mean)

        # accumulate the class-weighted (pooled) covariance
        Xi_minus_mean = Xi - mean
        if i == 0:
            self.cov = n * compute_covariance_matrix(Xi_minus_mean, Xi_minus_mean)
        else:
            self.cov += n * compute_covariance_matrix(Xi_minus_mean, Xi_minus_mean)

    self.cov = self.cov / X.shape[0]
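# A hedged sketch (not part of the original code) of how the quantities stored by
# train_model above, the class means in self.mu and the pooled covariance in
# self.cov, could be used at prediction time. The function name lda_predict and
# its signature are assumptions, not part of the original class; with a shared
# covariance, classification reduces to picking the class whose mean is closest
# in Mahalanobis distance.
import numpy as np
from numpy.linalg import inv

def lda_predict(x, mu_list, cov, reg=1e-5):
    # invert the (regularized) shared covariance once
    prec = inv(cov + reg * np.eye(len(cov)))
    # score each class by the negative Mahalanobis distance to its mean
    scores = [-(x - mu).T @ prec @ (x - mu) for mu in mu_list]
    return int(np.argmax(scores))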
def pca_projection(self, X, Y):
    '''
    Return U_2^T
    '''
    # perform mean subtraction (the labels are not needed for PCA)
    X_new, Y_new = subtract_mean_from_data(X, Y)

    # compute the covariance matrix of the features with themselves
    cov_matrix = compute_covariance_matrix(X_new, X_new)

    # SVD decomposition
    U, sigma, V_transpose = LA.svd(cov_matrix, full_matrices=False)

    # keep only the top two principal directions
    return U[:, 0:2].T
def train_model(self, X, Y):
    '''
    Estimate a per-class mean and a per-class (regularized) covariance.
    Assumes the rows of X are grouped by class label in Y.
    '''
    self.muj = []
    self.cov_XX = []
    j = 0
    k = 0
    val = Y[0]
    for i in range(self.NUM_CLASSES):
        # advance j past all samples of the current class
        while j < len(Y) and Y[j] == val:
            j = j + 1

        # samples belonging to the current class
        X0 = X[k:j]
        mu0 = np.mean(X0, axis=0)
        self.muj.append(mu0)

        # regularized class covariance
        X_bar_0 = subtract_mean_from_data(X0, Y)
        cov_XX_0 = compute_covariance_matrix(X_bar_0[0], X_bar_0[0]) \
            + np.identity(len(np.array(X0).T)) * self.reg_cov
        self.cov_XX.append(cov_XX_0)

        k = j
        if j < len(Y):
            val = Y[j]
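# A hedged sketch (not part of the original code) of how the per-class means in
# self.muj and the per-class regularized covariances in self.cov_XX stored by
# train_model above could be used for prediction. The name qda_predict and its
# signature are assumptions; with one covariance per class, the Gaussian
# log-likelihood keeps its log-determinant term, unlike the shared-covariance case.
import numpy as np
from numpy.linalg import inv, slogdet

def qda_predict(x, mu_list, cov_list):
    scores = []
    for mu, cov in zip(mu_list, cov_list):
        prec = inv(cov)
        sign, logdet = slogdet(cov)
        # Gaussian log-likelihood up to an additive constant
        scores.append(-0.5 * logdet - 0.5 * (x - mu).T @ prec @ (x - mu))
    return int(np.argmax(scores))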
def train_model(self, X, Y):
    '''
    Estimate a per-class mean and a single shared (regularized) covariance.
    Assumes the rows of X are grouped by class label in Y.
    '''
    # shared covariance of the mean-subtracted data, regularized to stay invertible
    X_bar = subtract_mean_from_data(X, Y)
    cov_XX = compute_covariance_matrix(X_bar[0], X_bar[0])
    cov_XX += np.identity(len(np.array(X).T)) * self.reg_cov
    self.cov_XX = cov_XX

    # per-class means
    self.muj = []
    j = 0
    k = 0
    val = Y[0]
    for i in range(self.NUM_CLASSES):
        # advance j past all samples of the current class
        while j < len(Y) and Y[j] == val:
            j = j + 1

        # samples belonging to the current class
        X0 = X[k:j]
        mu0 = np.mean(X0, axis=0)
        self.muj.append(mu0)

        k = j
        if j < len(Y):
            val = Y[j]
def pca_projection(self, X, Y):
    '''
    Return U_2^T
    '''
    X_bar = subtract_mean_from_data(X, Y)
    cov_XX = compute_covariance_matrix(X_bar[0], X_bar[0])
    U, s, V = svd(cov_XX)
    return U.T[:2]