Example #1
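# NOTE: these scraped snippets assume module-level imports (numpy as np,
# theano, collections.OrderedDict) and dataset arrays X / y, plus the
# library's layer, cost, optimizer, and training helpers already in scope.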
def test_vae():
    minibatch_size = 100
    random_state = np.random.RandomState(1999)
    graph = OrderedDict()

    X_sym, y_sym = add_datasets_to_graph([X, y], ["X", "y"], graph)

    l1_enc = relu_layer([X_sym, y_sym], graph, 'l1_enc', proj_dim=20,
                        random_state=random_state)
    mu = linear_layer([l1_enc], graph, 'mu', proj_dim=10,
                      random_state=random_state)
    log_sigma = linear_layer([l1_enc], graph, 'log_sigma', proj_dim=10,
                             random_state=random_state)
    samp = gaussian_log_sample_layer([mu], [log_sigma], graph,
                                     'gaussian_log_sample',
                                     random_state=random_state)
    l1_dec = relu_layer([samp], graph, 'l1_dec', proj_dim=20,
                        random_state=random_state)
    out = sigmoid_layer([l1_dec], graph, 'out', proj_dim=X.shape[1],
                        random_state=random_state)

    kl = gaussian_log_kl([mu], [log_sigma], graph, 'gaussian_kl').mean()
    cost = binary_crossentropy(out, X_sym).mean() + kl
    params, grads = get_params_and_grads(graph, cost)
    learning_rate = 0.001
    opt = sgd(params)
    updates = opt.updates(params, grads, learning_rate)

    train_function = theano.function([X_sym, y_sym], [cost], updates=updates,
                                     mode="FAST_COMPILE")

    iterate_function(train_function, [X, y], minibatch_size,
                     list_of_output_names=["cost"], n_epochs=1)
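For reference, `gaussian_log_sample_layer` appears to implement the usual VAE reparameterization trick with a log-standard-deviation parameterization. A minimal NumPy sketch of that idea (an assumption about the layer's semantics, not this library's actual code):

import numpy as np

def reparameterized_sample(mu, log_sigma, random_state):
    # z = mu + sigma * eps with eps ~ N(0, I); exponentiating log_sigma
    # keeps the standard deviation positive. Some implementations treat
    # the second argument as log-variance and use np.exp(0.5 * log_sigma).
    eps = random_state.standard_normal(mu.shape)
    return mu + np.exp(log_sigma) * eps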
Example #2
def test_vae():
    minibatch_size = 10
    random_state = np.random.RandomState(1999)
    graph = OrderedDict()

    X_sym = add_datasets_to_graph([X], ["X"], graph)

    l1_enc = softplus_layer([X_sym], graph, 'l1_enc', proj_dim=100,
                            random_state=random_state)
    mu = linear_layer([l1_enc], graph, 'mu', proj_dim=50,
                      random_state=random_state)
    log_sigma = linear_layer([l1_enc], graph, 'log_sigma', proj_dim=50,
                             random_state=random_state)
    samp = gaussian_log_sample_layer([mu], [log_sigma], graph,
                                     'gaussian_log_sample',
                                     random_state=random_state)
    l1_dec = softplus_layer([samp], graph, 'l1_dec', proj_dim=100,
                            random_state=random_state)
    out = sigmoid_layer([l1_dec], graph, 'out', proj_dim=X.shape[1],
                        random_state=random_state)

    kl = gaussian_log_kl([mu], [log_sigma], graph, 'gaussian_kl').mean()
    cost = binary_crossentropy(out, X_sym).mean() + kl
    params, grads = get_params_and_grads(graph, cost)
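    # the zero learning rate below makes the SGD updates no-ops, so this
    # presumably just smoke-tests that the graph compiles and the loop runs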
    learning_rate = 0.00000
    opt = sgd(params, learning_rate)
    updates = opt.updates(params, grads)

    fit_function = theano.function([X_sym], [cost], updates=updates,
                                   mode="FAST_COMPILE")

    cost_function = theano.function([X_sym], [cost],
                                    mode="FAST_COMPILE")

    checkpoint_dict = {}
    train_indices = np.arange(len(X))
    valid_indices = np.arange(len(X))
    early_stopping_trainer(fit_function, cost_function,
                           train_indices, valid_indices,
                           checkpoint_dict, [X],
                           minibatch_size,
                           list_of_train_output_names=["cost"],
                           valid_output_name="valid_cost",
                           n_epochs=1)
Example #3
minibatch_size = 100
n_code = 400
n_enc_layer = [600, 600]
n_dec_layer = [600, 600]
width = 48
height = 48
n_input = width * height

# encode path aka q
l1_enc = softplus_layer([X_sym], graph, 'l1_enc', n_enc_layer[0], random_state)
l2_enc = softplus_layer([l1_enc], graph, 'l2_enc', n_enc_layer[1],
                        random_state)
code_mu = linear_layer([l2_enc], graph, 'code_mu', n_code, random_state)
code_log_sigma = linear_layer([l2_enc], graph, 'code_log_sigma', n_code,
                              random_state)
kl = gaussian_log_kl([code_mu], [code_log_sigma], graph, 'kl').mean()
samp = gaussian_log_sample_layer([code_mu], [code_log_sigma], graph, 'samp',
                                 random_state)

# decode path aka p
l1_dec = softplus_layer([samp], graph, 'l1_dec', n_dec_layer[0], random_state)
l2_dec = softplus_layer([l1_dec], graph, 'l2_dec', n_dec_layer[1], random_state)
out = linear_layer([l2_dec], graph, 'out', n_input, random_state)

nll = squared_error(out, X_sym).mean()
# log p(x) = -nll so swap sign
# want to minimize cost in optimization so multiply by -1
cost = -1 * (-nll - kl)
params, grads = get_params_and_grads(graph, cost)

learning_rate = 0.0003
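The `kl` term above is presumably the closed-form KL divergence between the diagonal Gaussian posterior N(mu, sigma^2) and the standard normal prior N(0, I), which in the log-sigma parameterization is -0.5 * sum(1 + 2*log_sigma - mu^2 - exp(2*log_sigma)). A NumPy sketch of that formula (an assumption about what `gaussian_log_kl` computes, not the library's code):

import numpy as np

def diag_gaussian_kl(mu, log_sigma):
    # KL(N(mu, sigma^2) || N(0, I)) with sigma = exp(log_sigma),
    # summed over the code dimension
    return -0.5 * np.sum(1. + 2. * log_sigma - mu ** 2
                         - np.exp(2. * log_sigma), axis=-1)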
Example #4
def test_gaussian_log_kl():
    graph = OrderedDict()
    X_sym = add_datasets_to_graph([X], ["X"], graph)
    kl = gaussian_log_kl([X_sym, X_sym], [X_sym, X_sym], graph,
                         'gaussian_log_kl')
    theano.function([X_sym], [kl], mode="FAST_COMPILE")
Example #5
n_code = 100
n_hid = 200
width = 28
height = 28
n_input = width * height

# encode path aka q
l1_enc = softplus([X_sym], [X.shape[1]], proj_dim=n_hid, name='l1_enc',
                  random_state=random_state)
l2_enc = softplus([l1_enc], [n_hid], proj_dim=n_hid, name='l2_enc',
                  random_state=random_state)
code_mu = linear([l2_enc], [n_hid], proj_dim=n_code, name='code_mu',
                 random_state=random_state)
code_log_sigma = linear([l2_enc], [n_hid], proj_dim=n_code,
                        name='code_log_sigma', random_state=random_state)
kl = gaussian_log_kl([code_mu], [code_log_sigma]).mean()
sample_state = np.random.RandomState(2177)
samp = gaussian_log_sample([code_mu], [code_log_sigma], name='samp',
                           random_state=sample_state)

# decode path aka p
l1_dec = softplus([samp], [n_code], proj_dim=n_hid, name='l1_dec',
                  random_state=random_state)
l2_dec = softplus([l1_dec], [n_hid], proj_dim=n_hid, name='l2_dec',
                  random_state=random_state)
out = sigmoid([l2_dec], [n_hid], proj_dim=X.shape[1], name='out',
              random_state=random_state)

nll = binary_crossentropy(out, X_sym).mean()
# See https://arxiv.org/pdf/1406.5298v2.pdf, eq 5
# log p(x | z) = -nll so swap sign
# want to minimize cost in optimization so multiply by -1
cost = -1 * (-nll - kl)
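The sign gymnastics in these comments just restate the negative of the standard variational lower bound, log p(x) >= E_q[log p(x | z)] - KL(q(z | x) || p(z)), so minimizing cost = nll + kl is the same as maximizing the bound.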
Example #6
l1_enc = softplus([X_sym], [X.shape[1]],
                  proj_dim=n_hid,
                  name='l1_enc',
                  random_state=random_state)
l2_enc = softplus([l1_enc], [n_hid],
                  proj_dim=n_hid,
                  name='l2_enc',
                  random_state=random_state)
code_mu = linear([l2_enc], [n_hid],
                 proj_dim=n_code,
                 name='code_mu',
                 random_state=random_state)
code_log_sigma = linear([l2_enc], [n_hid],
                        proj_dim=n_code,
                        name='code_log_sigma',
                        random_state=random_state)
kl = gaussian_log_kl([code_mu], [code_log_sigma]).mean()
sample_state = np.random.RandomState(2177)
samp = gaussian_log_sample([code_mu], [code_log_sigma],
                           name='samp',
                           random_state=sample_state)

# decode path aka p
l1_dec = softplus([samp], [n_code],
                  proj_dim=n_hid,
                  name='l1_dec',
                  random_state=random_state)
l2_dec = softplus([l1_dec], [n_hid],
                  proj_dim=n_hid,
                  name='l2_dec',
                  random_state=random_state)
out = sigmoid([l2_dec], [n_hid],
              proj_dim=X.shape[1],
              name='out',
              random_state=random_state)
Example #7
def test_gaussian_log_kl():
    kl = gaussian_log_kl([X_sym, X_sym], [X_sym, X_sym])
    theano.function([X_sym], [kl], mode="FAST_COMPILE")
Example #8
def test_vae():
    minibatch_size = 10
    random_state = np.random.RandomState(1999)
    graph = OrderedDict()

    X_sym = add_datasets_to_graph([X], ["X"], graph)

    l1_enc = softplus_layer([X_sym],
                            graph,
                            'l1_enc',
                            proj_dim=100,
                            random_state=random_state)
    mu = linear_layer([l1_enc],
                      graph,
                      'mu',
                      proj_dim=50,
                      random_state=random_state)
    log_sigma = linear_layer([l1_enc],
                             graph,
                             'log_sigma',
                             proj_dim=50,
                             random_state=random_state)
    samp = gaussian_log_sample_layer([mu], [log_sigma],
                                     graph,
                                     'gaussian_log_sample',
                                     random_state=random_state)
    l1_dec = softplus_layer([samp],
                            graph,
                            'l1_dec',
                            proj_dim=100,
                            random_state=random_state)
    out = sigmoid_layer([l1_dec],
                        graph,
                        'out',
                        proj_dim=X.shape[1],
                        random_state=random_state)

    kl = gaussian_log_kl([mu], [log_sigma], graph, 'gaussian_kl').mean()
    cost = binary_crossentropy(out, X_sym).mean() + kl
    params, grads = get_params_and_grads(graph, cost)
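    # as in Example #2, the zero learning rate means the updates change
    # nothing; presumably a pure smoke test of the early stopping loop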
    learning_rate = 0.00000
    opt = sgd(params)
    updates = opt.updates(params, grads, learning_rate)

    fit_function = theano.function([X_sym], [cost],
                                   updates=updates,
                                   mode="FAST_COMPILE")

    cost_function = theano.function([X_sym], [cost], mode="FAST_COMPILE")

    checkpoint_dict = {}
    train_indices = np.arange(len(X))
    valid_indices = np.arange(len(X))
    early_stopping_trainer(fit_function,
                           cost_function,
                           checkpoint_dict, [X],
                           minibatch_size,
                           train_indices,
                           valid_indices,
                           fit_function_output_names=["cost"],
                           cost_function_output_name="valid_cost",
                           n_epochs=1)