import numpy as np

import singularity as S
from singularity.components import layers, optimizers, regularizers, models
from singularity.utils import datasets

batch_size = 128
nb_epoch = 20

# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = datasets.load_mnist()

# flatten the 28x28 images to 784-dimensional vectors and scale to [0, 1]
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype(S.floatX())
X_test = X_test.astype(S.floatX())
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices (one-hot encoding)
Y_train = S.categorical(y_train, 10)
Y_test = S.categorical(y_test, 10)

model = models.DeepNetwork()
model.add(layers.InputLayer((None, 28 * 28)))
model.add(layers.DenseLayer(512))
model.add(layers.ActivationLayer(S.relu))
model.add(layers.DenseLayer(484, activation=S.relu))
model.add(layers.DropoutLayer(0.2))
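
# --- sketch: finishing and training the classifier ---
# The script above stops after the hidden layers. The lines below are a
# minimal, hedged sketch of how the model might be completed and trained,
# assuming singularity's DeepNetwork mirrors a Keras-style add/compile/
# fit/evaluate API; S.softmax, the compile signature, and
# optimizers.RMSprop are assumptions, not confirmed parts of the library.
model.add(layers.DenseLayer(10))
model.add(layers.ActivationLayer(S.softmax))  # assumed: S.softmax exists alongside S.relu
model.compile(loss='categorical_crossentropy',  # assumed Keras-like signature
              optimizer=optimizers.RMSprop())
model.fit(X_train, Y_train,
          batch_size=batch_size, nb_epoch=nb_epoch,
          validation_data=(X_test, Y_test))
score = model.evaluate(X_test, Y_test)  # assumed to return the test loss
print('Test score:', score)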
import numpy as np

import singularity as S
from singularity.components.models import *
from singularity.utils import datasets
from singularity.utils import hdf5

batch_size = 16
original_dim = 784
latent_dim = 2
intermediate_dim = 128
epsilon_std = 0.01
nb_epoch = 1

# train the VAE on MNIST digits
(x_train, y_train), (x_test, y_test) = datasets.load_mnist()

# scale pixels to [0, 1] and flatten each image to a 784-dimensional vector
x_train = x_train.astype(S.floatX()) / 255.
x_test = x_test.astype(S.floatX()) / 255.
x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:])))
x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:])))

###################
## SAVE HDF5 TEST #
###################

# Save the decoded output into an HDF5 file:
# hdf5.save("test2.hdf", _x_decoded_mean, "root")

# Load the saved data back
_x_decoded_mean = hdf5.load("test2.hdf")
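
# --- sketch: the reparameterization trick behind latent_dim / epsilon_std ---
# latent_dim and epsilon_std above parameterize the VAE's sampling layer,
# which never appears in this fragment. The pure-numpy sketch below (no
# singularity API assumed) illustrates the standard reparameterization:
# given an encoder's predicted mean and log-variance, a latent sample is
#   z = mu + exp(log_var / 2) * eps,  with eps ~ N(0, epsilon_std^2).
# The function and the zero-valued encoder outputs are hypothetical,
# for illustration only.
def sample_z(z_mean, z_log_var, epsilon_std=epsilon_std):
    # draw noise, then shift/scale it by the predicted distribution
    eps = np.random.normal(0.0, epsilon_std, size=z_mean.shape)
    return z_mean + np.exp(z_log_var / 2.0) * eps

# toy usage: one batch of hypothetical encoder outputs
z = sample_z(np.zeros((batch_size, latent_dim)),
             np.zeros((batch_size, latent_dim)))  # shape: (16, 2)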