Example #1
import numpy as np
from theano import tensor

from dagbldr.datasets import fetch_binarized_mnist, minibatch_iterator
from dagbldr.utils import get_params
from dagbldr.utils import TrainingLoop
from dagbldr.utils import create_checkpoint_dict

from dagbldr.nodes import softplus
from dagbldr.nodes import sigmoid
from dagbldr.nodes import linear
from dagbldr.nodes import gaussian_log_sample
from dagbldr.nodes import gaussian_log_kl
from dagbldr.nodes import binary_crossentropy

from dagbldr.optimizers import adam

mnist = fetch_binarized_mnist()

X = mnist["data"].astype("float32")
X_sym = tensor.fmatrix()

# random state so script is deterministic
random_state = np.random.RandomState(1999)

minibatch_size = 100
n_code = 100
n_hid = 200
width = 28
height = 28
n_input = width * height

# encoder path, a.k.a. q(z|x)
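The snippet above stops at the encoder. As a rough illustration of what the imported gaussian_log_sample and gaussian_log_kl nodes compute, here is a minimal plain-Theano sketch of the reparameterized sample and the KL term, assuming a log-sigma parameterization; this is not the dagbldr API itself, just the underlying math.

import numpy as np
import theano
import theano.tensor as T
from theano.tensor.shared_randomstreams import RandomStreams

srng = RandomStreams(seed=1999)
code_mu = T.fmatrix("code_mu")
code_log_sigma = T.fmatrix("code_log_sigma")

# reparameterization trick: z = mu + exp(log_sigma) * eps, with eps ~ N(0, 1)
eps = srng.normal(code_mu.shape)
samp = code_mu + T.exp(code_log_sigma) * eps

# KL(N(mu, sigma^2) || N(0, 1)) summed over code dimensions
kl = -0.5 * T.sum(1 + 2 * code_log_sigma - code_mu ** 2
                  - T.exp(2 * code_log_sigma), axis=1)

sample_and_kl = theano.function([code_mu, code_log_sigma], [samp, kl])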
Example #2
from collections import OrderedDict
import numpy as np
import theano

from dagbldr.datasets import fetch_binarized_mnist, minibatch_iterator
from dagbldr.optimizers import adam
from dagbldr.utils import add_datasets_to_graph, get_params_and_grads
from dagbldr.utils import convert_to_one_hot, create_or_continue_from_checkpoint_dict
from dagbldr.utils import TrainingLoop
from dagbldr.nodes import softplus_layer, linear_layer, sigmoid_layer
from dagbldr.nodes import gaussian_log_sample_layer, gaussian_log_kl
from dagbldr.nodes import binary_crossentropy, softmax_layer
from dagbldr.nodes import categorical_crossentropy

mnist = fetch_binarized_mnist()
train_indices = mnist["train_indices"]
train_end = len(train_indices)
valid_indices = mnist["valid_indices"]
X = mnist["data"]
y = mnist["target"]
n_targets = 10
y = convert_to_one_hot(y, n_targets)

# graph holds information necessary to build layers from parents
graph = OrderedDict()
X_sym, y_sym = add_datasets_to_graph([X, y], ["X", "y"], graph)
# random state so script is deterministic
random_state = np.random.RandomState(1999)

minibatch_size = 100
n_code = 100
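For reference, convert_to_one_hot maps integer class labels to one-hot rows before they are added to the graph. A minimal numpy sketch of that idea (a hypothetical stand-in named one_hot, not dagbldr's implementation):

import numpy as np

def one_hot(y, n_classes):
    # map integer labels to rows of an identity matrix, e.g. 3 -> [0, 0, 0, 1, 0, ...]
    y = np.asarray(y, dtype="int32").ravel()
    return np.eye(n_classes, dtype="float32")[y]

# e.g. one_hot([3, 1, 9], 10) returns a (3, 10) float32 array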