def test_single_autoencoder():
    """Train one AutoEncoder on MNIST, then interactively display original vs.
    reconstructed digits until the user answers 'n'/'N'.

    Side effects: opens matplotlib windows and blocks on console input.
    """
    Xtrain, Ytrain, Xtest, Ytest = get_MNIST()
    # AutoEncoder works on float32 tensors.
    Xtrain = Xtrain.astype(np.float32)
    Xtest = Xtest.astype(np.float32)
    _, D = Xtrain.shape
    autoencoder = AutoEncoder(D, 300, 0)
    init_op = tf.global_variables_initializer()
    with tf.Session() as session:
        session.run(init_op)
        autoencoder.set_session(session)
        autoencoder.fit(Xtrain, show_fig=True)
        done = False
        while not done:
            # Pick a random test digit and reconstruct it.
            i = np.random.choice(len(Xtest))
            x = Xtest[i]
            y = autoencoder.predict([x])
            plt.subplot(1, 2, 1)
            plt.imshow(x.reshape(28, 28), cmap='gray')
            plt.title('Original')
            plt.subplot(1, 2, 2)
            plt.imshow(y.reshape(28, 28), cmap='gray')
            plt.title('Reconstructed')
            plt.show()
            ans = input("Generate another?")
            # BUG FIX: ('n' or 'N') evaluates to just 'n', so typing 'N'
            # could never terminate the loop. Test membership in a tuple.
            if ans and ans[0] in ('n', 'N'):
                done = True
def test_pretraining_dnn():
    """Fit a DNN on MNIST with greedy layer-wise (autoencoder) pretraining.

    Side effects: downloads/loads MNIST via get_MNIST() and runs a TF session.
    """
    Xtrain, Ytrain, Xtest, Ytest = get_MNIST()
    # Alternative without pretraining:
    #   dnn = DNN([1000, 750, 500])
    #   dnn.fit(Xtrain, Ytrain, Xtest, Ytest, epochs=3)
    Xtrain = Xtrain.astype(np.float32)
    Xtest = Xtest.astype(np.float32)
    _, n_inputs = Xtrain.shape
    n_classes = len(set(Ytrain))
    net = DNN(n_inputs, [1000, 750, 500], n_classes)
    init = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init)
        net.set_session(sess)
        net.fit(Xtrain, Ytrain, Xtest, Ytest, pretrain=True, epochs=10)
def score(self, X, y):
    """Return classification accuracy: the fraction of predictions equal to y."""
    hits = np.equal(self.predict(X), y)
    return hits.mean()

if __name__ == '__main__':
    # NOTE(review): constructor here takes only the hidden-layer size; the
    # commented-out synthetic-data demo used MultilayerPerceptron(D, M, K) —
    # confirm against the class definition elsewhere in the file.
    n_hidden = 100
    X, Y = get_MNIST(2000)
    half = len(Y) // 2
    Xtrain, Ytrain = X[:half], Y[:half]
    Xtest, Ytest = X[half:], Y[half:]
    NN = MultilayerPerceptron(n_hidden)
    NN.fit(Xtrain, Ytrain)
def __init__(self, L1=0):
    """Binary logistic regression trained by gradient ascent.

    L1: L1-regularization strength (0 disables the penalty).
    """
    self.L1 = L1

def __str__(self):
    return "LogisticRegression(L1=%f)" % self.L1

def fit(self, X, Y, learning_rate=0.1, iters=100):
    """Fit weights by full-batch gradient ascent on the log-likelihood.

    X: (N, D) feature matrix; Y: (N,) binary labels in {0, 1}.
    Stores the learned (D+1,) weight vector (bias first) in self.w.
    """
    N, D = X.shape
    # Prepend a constant bias column.
    X0 = np.ones((N, D + 1))
    X0[:, 1:] = X
    w = np.random.randn(D + 1) / np.sqrt(D + 1)
    for t in range(iters):
        Yhat = sigmoid(X0.dot(w))
        delta = Y - Yhat
        # BUG FIX: was `np.sign(*w)`, which unpacks the weight vector into
        # np.sign's argument list (TypeError for D > 0). Also, the L1 penalty
        # was ADDED: when ascending the likelihood gradient the penalty term
        # must be SUBTRACTED to shrink the weights.
        w += learning_rate * (X0.T.dot(delta) - self.L1 * np.sign(w))
    self.w = w

if __name__ == "__main__":
    X, Y = get_MNIST()
    # Keep only digits 0 and 1 for a binary problem.
    idx = np.logical_or(Y == 0, Y == 1)
    X, Y = X[idx], Y[idx]
    # BUG FIX: len(Y) / 2 is a float in Python 3 and cannot be used to slice.
    Ntrain = len(Y) // 2
    Xtrain, Ytrain = X[:Ntrain], Y[:Ntrain]
    Xtest, Ytest = X[Ntrain:], Y[Ntrain:]
    # BUG FIX: the constructor's keyword is L1, not L2 (was a TypeError).
    model = LogisticRegression(L1=0.1)
    test_models_classification(Xtrain, Ytrain, Xtest, Ytest, [model])
# NOTE(review): the statements below are the tail of a fit() method whose
# `def` line lies above this chunk — left structurally as found.
num_r, num_c = X.shape
labels = set(Y)
for c in labels:
    # Per-class Gaussian: empirical mean and (smoothed) full covariance.
    Xc = X[Y == c]
    mu, cov = Xc.mean(axis=0), np.cov(Xc.T) + np.eye(num_c) * smoothing
    self.d_gaussians[c] = {"mu": mu, "cov": cov}
    self.d_priors[c] = len(Y[Y == c]) / float(len(Y))

def predict(self, X):
    """Return the MAP class for each row of X under the per-class Gaussians."""
    num_r, num_c = X.shape
    P = np.zeros((num_r, len(self.d_gaussians)))
    # BUG FIX: dict.iteritems() was removed in Python 3; use items().
    for c, d in self.d_gaussians.items():
        mean, cov = d["mu"], d["cov"]
        # Log joint: class-conditional log-density plus log prior.
        P[:, c] = mvn.logpdf(X, mean=mean, cov=cov) + np.log(self.d_priors[c])
    return np.argmax(P, axis=1)

def score(self, X, Y):
    """Return classification accuracy on (X, Y)."""
    P = self.predict(X)
    return np.mean(P == Y)

if __name__ == '__main__':
    X, Y = get_MNIST(10000)
    # BUG FIX: len(Y) / 2 is a float in Python 3 and cannot be used to slice.
    Ntrain = len(Y) // 2
    Xtrain, Ytrain = X[:Ntrain], Y[:Ntrain]
    Xtest, Ytest = X[Ntrain:], Y[Ntrain:]
    models = [MaximumLikelyhoodClassifier(), NaiveBayes(), BayesClassifier()]
    test_models_classification(Xtrain, Ytrain, Xtest, Ytest, models)