def load_or_dataset():
    """Load the OR dataset, caching the converted arrays in ``loaded_datasets``.

    Returns:
        (OX, TOX, OZ, TOZ) — training inputs, test inputs, training targets,
        test targets, each as a ``gp`` garray.
    """
    # BUG FIX: the original tested the key "or_O", which is never written
    # below, so the cache never hit and the .npz files were reloaded and
    # re-converted on every call. Test a key that is actually stored.
    if "or_OX" not in loaded_datasets:
        trn_data = np.load(os.path.join(get_base_dir(), "datasets", "ordata.npz"))
        tst_data = np.load(os.path.join(get_base_dir(), "datasets", "ordata_test.npz"))
        loaded_datasets["or_OX"] = gp.as_garray(trn_data["O"])
        loaded_datasets["or_OZ"] = gp.as_garray(trn_data["OZ"])
        loaded_datasets["or_TOX"] = gp.as_garray(tst_data["O"])
        loaded_datasets["or_TOZ"] = gp.as_garray(tst_data["OZ"])
    return (loaded_datasets["or_OX"], loaded_datasets["or_TOX"],
            loaded_datasets["or_OZ"], loaded_datasets["or_TOZ"])
def load_mnist(with_verification_set):
    """Load MNIST from ``datasets/mnist.pkl.gz``, caching in ``loaded_datasets``.

    Args:
        with_verification_set: if true, keep the validation split separate;
            otherwise fold it into the training set.

    Returns:
        ``(X, VX, TX, Z, VZ, TZ)`` when ``with_verification_set`` is true,
        else ``(X, TX, Z, TZ)``. Inputs are ``gp`` garrays; targets keep
        their pickled type.
    """
    def _read_pickle():
        # Unpickling the full dataset is expensive; only do it on a cache
        # miss. (The original unpickled on every call and only cached the
        # garray conversion.)
        with gzip.open(os.path.join(get_base_dir(), "datasets", "mnist.pkl.gz"), "rb") as f:
            return cPickle.load(f)

    if with_verification_set:
        if "mnistv_X" not in loaded_datasets:
            (X, Z), (VX, VZ), (TX, TZ) = _read_pickle()
            loaded_datasets["mnistv_X"] = gp.as_garray(X)
            loaded_datasets["mnistv_Z"] = Z
            loaded_datasets["mnistv_VX"] = gp.as_garray(VX)
            loaded_datasets["mnistv_VZ"] = VZ
            loaded_datasets["mnistv_TX"] = gp.as_garray(TX)
            loaded_datasets["mnistv_TZ"] = TZ
        return (
            loaded_datasets["mnistv_X"],
            loaded_datasets["mnistv_VX"],
            loaded_datasets["mnistv_TX"],
            loaded_datasets["mnistv_Z"],
            loaded_datasets["mnistv_VZ"],
            loaded_datasets["mnistv_TZ"],
        )
    else:
        if "mnist_X" not in loaded_datasets:
            (X, Z), (VX, VZ), (TX, TZ) = _read_pickle()
            # No separate verification set requested: merge train + validation.
            loaded_datasets["mnist_X"] = gp.as_garray(np.concatenate((X, VX), axis=0))
            loaded_datasets["mnist_Z"] = np.concatenate((Z, VZ), axis=0)
            loaded_datasets["mnist_TX"] = gp.as_garray(TX)
            loaded_datasets["mnist_TZ"] = TZ
        return (
            loaded_datasets["mnist_X"],
            loaded_datasets["mnist_TX"],
            loaded_datasets["mnist_Z"],
            loaded_datasets["mnist_TZ"],
        )
# Train/evaluate setup for a dropout MLP on MNIST (Python 2 — uses cPickle).
import gzip
import time
import cPickle  # BUG FIX: used below via cPickle.load but was never imported

import numpy as np
import theano.tensor as T

import climin.stops
import climin.initialize

from brummlearn.mlp import Mlp, DropoutMlp
from brummlearn.data import one_hot
from ml.common.util import get_base_dir

# Where the trained model parameters will be saved.
savepath = "../mnist_dropout_model.npz"
datafile = get_base_dir() + "/datasets/mnist.pkl.gz"

# Load data: the pickle holds (train, validation, test) input/target pairs.
with gzip.open(datafile, 'rb') as f:
    train_set, val_set, test_set = cPickle.load(f)
X, Z = train_set
VX, VZ = val_set
TX, TZ = test_set

# Convert integer class labels to one-hot vectors over the 10 digit classes.
Z = one_hot(Z, 10)
VZ = one_hot(VZ, 10)
TZ = one_hot(TZ, 10)

image_dims = 28, 28
def load_ruslan_mnist():
    """Load the MNIST .mat file and return (train, test) as gp garrays."""
    mat_path = os.path.join(get_base_dir(), "datasets", "mnist.mat")
    mat = scipy.io.loadmat(mat_path)
    train = gp.as_garray(mat["fbatchdata"])
    test = gp.as_garray(mat["test_fbatchdata"])
    return (train, test)