# Build NN-transformed MNIST train/test sets from a cached random
# transformation matrix v, then estimate a regularization strength lambda.
seed = 42
sparse = False
Nclass = 10
k = 10000

nn_cache = "../Data/hw2-data/MNIST_nn_cache.npz"

# The cached transformation matrix must already exist on disk; without it the
# transformed features cannot be reproduced.
# BUG FIX: the original constructed RuntimeError(...) without raising it, so
# execution fell through and later died on an undefined v.
if not os.path.exists(nn_cache):
    raise RuntimeError("Transformation matrix v doesn't exist!")

print("Reading from cache...")
res = np.load(nn_cache)
v = res["v"]

# Load in MNIST training data then transform it through the random NN layer.
print("Loading MNIST Training data...")
X_train, y_train = mu.load_mnist(dataset='training')
X_train = ru.naive_nn_layer(X_train, k=k, v=v)

# One-hot encode the labels: column j is 1 where y_train == j.
y_train_true = np.asarray(y_train[:, None] == np.arange(max(y_train)+1),
                          dtype=int).squeeze()

print("Estimated best lambda: %.3lf" % val.estimate_lambda(X_train, scale=1.0e-2))

# Load in MNIST testing data then transform it the same way.
print("Loading MNIST Testing data...")
X_test, y_test = mu.load_mnist(dataset='testing')
X_test = ru.naive_nn_layer(X_test, k=k, v=v)
y_test_true = np.asarray(y_test[:, None] == np.arange(max(y_test)+1),
                         dtype=int).squeeze()

#######################################################
#
# Run minibatch with bestfit?
#
#######################################################
import matplotlib.pyplot as plt
import matplotlib as mpl

# Configure matplotlib for publication-quality figures.
mpl.rcParams['figure.figsize'] = (9, 8)
mpl.rcParams['font.size'] = 20.0
mpl.rc('font', **{'family': 'serif', 'serif': ['Computer Modern']})
mpl.rc('text', usetex=True)

# Behavior switches for this run.
show_plots = True
save_plots = True
use_one_digit = False

# Pull in the MNIST training set.
print("Loading MNIST Training data...")
X_train, y_train = mu.load_mnist(dataset='training')

# Optionally restrict the training set to the digit 5 only.
if use_one_digit:
    mask = y_train.squeeze() == 5
    X_train = X_train[mask]
    print(X_train.shape)
    y_train = y_train[mask]

# Solve for all principal components but do the calculations using only 50;
# l can be reset later if need be since every component is retained.
PCA = pca.PCA(l=50, center=True)

# Fit the PCA model to the training data.
print("Fitting PCA model...")
# Smoke-test the MNIST data-importing utilities.
import sys

sys.path.append("..")

import DML.data_processing.mnist_utils as mu

# Load the training set and sanity-check the resulting array shapes.
images, labels = mu.load_mnist(dataset='training')
print(images.shape, labels.shape)
# Smoke-test the MNIST data-importing utilities.
import sys

sys.path.append("..")

import DML.data_processing.mnist_utils as mu

# Load the training set and sanity-check the resulting array shapes.
images, labels = mu.load_mnist(dataset='training')
print(images.shape,labels.shape)
# Run configuration / hyperparameters.
nout = 30000
nclass = 10
nodes = 500
seed = 42
max_iters = 50

# Seed the RNG so the run is reproducible.
np.random.seed(seed=seed)

# Behavior switches for this run.
show_plots = True
save_plots = True

# Load MNIST and one-hot encode the labels for multiclass classification:
# column j of y_*_true is 1 where the label equals j.
print("Loading MNIST data...")
X_train, y_train = mu.load_mnist(dataset='training')
y_train_true = np.asarray(y_train[:, None] == np.arange(max(y_train) + 1),
                          dtype=int).squeeze()
X_test, y_test = mu.load_mnist(dataset='testing')
y_test_true = np.asarray(y_test[:, None] == np.arange(max(y_test) + 1),
                         dtype=int).squeeze()

# Reduce dimensionality before training.  All principal components are
# solved for, so l can be reset later if need be.
# NOTE(review): k is never defined in this chunk -- presumably it is set
# earlier in the full file; confirm before running this standalone.
print("Performing PCA with k = %d components..." % k)
PCA = pca.PCA(l=k, center=False)
PCA.fit(X_train)
X_train = PCA.transform(X_train)
X_test = PCA.transform(X_test)

print("Training neural network...")
import matplotlib.pyplot as plt
import matplotlib as mpl

# Configure matplotlib for publication-quality figures.
mpl.rcParams["figure.figsize"] = (9, 8)
mpl.rcParams["font.size"] = 20.0
mpl.rc("font", **{"family": "serif", "serif": ["Computer Modern"]})
mpl.rc("text", usetex=True)

# Behavior switches for this run.
show_plots = True
save_plots = True

# Load MNIST train/test splits and one-hot encode the labels:
# column j of y_*_true is 1 where the label equals j.
print("Loading MNIST data...")
X_train, y_train = mu.load_mnist(dataset="training")
y_train_true = np.asarray(y_train[:, None] == np.arange(max(y_train) + 1),
                          dtype=int).squeeze()

X_test, y_test = mu.load_mnist(dataset="testing")
y_test_true = np.asarray(y_test[:, None] == np.arange(max(y_test) + 1),
                         dtype=int).squeeze()

# Solve for all principal components but do the calculations using only 50;
# l can be reset later if need be since every component is retained.
PCA = pca.PCA(l=50, center=True)

# Fit PCA on the training split, then project both splits into PCA space.
print("Training the model...")
PCA.fit(X_train)
X_train = PCA.transform(X_train)
X_test = PCA.transform(X_test)