Example #1
def test_vae():
    minibatch_size = 100
    random_state = np.random.RandomState(1999)
    graph = OrderedDict()

    X_sym, y_sym = add_datasets_to_graph([X, y], ["X", "y"], graph)

    l1_enc = relu_layer([X_sym, y_sym], graph, 'l1_enc', proj_dim=20,
                        random_state=random_state)
    mu = linear_layer([l1_enc], graph, 'mu', proj_dim=10,
                      random_state=random_state)
    log_sigma = linear_layer([l1_enc], graph, 'log_sigma', proj_dim=10,
                             random_state=random_state)
    samp = gaussian_log_sample_layer([mu], [log_sigma], graph,
                                     'gaussian_log_sample',
                                     random_state=random_state)
    l1_dec = relu_layer([samp], graph, 'l1_dec', proj_dim=20,
                        random_state=random_state)
    out = sigmoid_layer([l1_dec], graph, 'out', proj_dim=X.shape[1],
                        random_state=random_state)

    kl = gaussian_log_kl([mu], [log_sigma], graph, 'gaussian_kl').mean()
    cost = binary_crossentropy(out, X_sym).mean() + kl
    params, grads = get_params_and_grads(graph, cost)
    learning_rate = 0.001
    opt = sgd(params)
    updates = opt.updates(params, grads, learning_rate)

    train_function = theano.function([X_sym, y_sym], [cost], updates=updates,
                                     mode="FAST_COMPILE")

    iterate_function(train_function, [X, y], minibatch_size,
                     list_of_output_names=["cost"], n_epochs=1)
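
The gaussian_log_sample_layer used above presumably draws the latent code with the reparameterization trick, so that gradients can flow through mu and log_sigma. A minimal NumPy sketch of that idea, assuming log_sigma holds the log standard deviation (if the library stores the log variance instead, the scale would be exp(0.5 * log_sigma)):

import numpy as np

def reparameterized_sample(mu, log_sigma, random_state):
    # z = mu + sigma * eps with eps ~ N(0, 1); the randomness lives in eps,
    # so mu and log_sigma stay differentiable.
    eps = random_state.standard_normal(mu.shape)
    return mu + np.exp(log_sigma) * eps

rng = np.random.RandomState(1999)
z = reparameterized_sample(np.zeros((100, 10)), np.zeros((100, 10)), rng)
print(z.shape)  # (100, 10)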
Example #2
def test_vae():
    minibatch_size = 10
    random_state = np.random.RandomState(1999)
    graph = OrderedDict()

    X_sym = add_datasets_to_graph([X], ["X"], graph)

    l1_enc = softplus_layer([X_sym], graph, 'l1_enc', proj_dim=100,
                            random_state=random_state)
    mu = linear_layer([l1_enc], graph, 'mu', proj_dim=50,
                      random_state=random_state)
    log_sigma = linear_layer([l1_enc], graph, 'log_sigma', proj_dim=50,
                             random_state=random_state)
    samp = gaussian_log_sample_layer([mu], [log_sigma], graph,
                                     'gaussian_log_sample',
                                     random_state=random_state)
    l1_dec = softplus_layer([samp], graph, 'l1_dec', proj_dim=100,
                            random_state=random_state)
    out = sigmoid_layer([l1_dec], graph, 'out', proj_dim=X.shape[1],
                        random_state=random_state)

    kl = gaussian_log_kl([mu], [log_sigma], graph, 'gaussian_kl').mean()
    cost = binary_crossentropy(out, X_sym).mean() + kl
    params, grads = get_params_and_grads(graph, cost)
    learning_rate = 0.00000
    opt = sgd(params, learning_rate)
    updates = opt.updates(params, grads)

    fit_function = theano.function([X_sym], [cost], updates=updates,
                                   mode="FAST_COMPILE")

    cost_function = theano.function([X_sym], [cost],
                                    mode="FAST_COMPILE")

    checkpoint_dict = {}
    train_indices = np.arange(len(X))
    valid_indices = np.arange(len(X))
    early_stopping_trainer(fit_function, cost_function,
                           train_indices, valid_indices,
                           checkpoint_dict, [X],
                           minibatch_size,
                           list_of_train_output_names=["cost"],
                           valid_output_name="valid_cost",
                           n_epochs=1)
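
early_stopping_trainer is exercised here as a smoke test (the train and validation indices are the same array and n_epochs=1). In general, an early stopping trainer follows a loop like the generic sketch below; this is an illustration of the idea, not the library's implementation, and fit_fn / cost_fn stand in for the compiled Theano functions:

import numpy as np

def early_stopping_loop(fit_fn, cost_fn, X, train_indices, valid_indices,
                        minibatch_size, n_epochs, patience=5):
    # Keep the best validation cost seen so far and stop once it has not
    # improved for `patience` consecutive epochs.
    best_valid = np.inf
    bad_epochs = 0
    for epoch in range(n_epochs):
        for start in range(0, len(train_indices), minibatch_size):
            fit_fn(X[train_indices[start:start + minibatch_size]])
        valid_costs = [cost_fn(X[valid_indices[s:s + minibatch_size]])[0]
                       for s in range(0, len(valid_indices), minibatch_size)]
        valid_cost = np.mean(valid_costs)
        if valid_cost < best_valid:
            best_valid, bad_epochs = valid_cost, 0
        else:
            bad_epochs += 1
            if bad_epochs >= patience:
                break
    return best_valid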
Example #3
def test_binary_crossentropy():
    graph = OrderedDict()
    X_sym = add_datasets_to_graph([X], ["X"], graph)
    cost = binary_crossentropy(.99 * X_sym, X_sym)
    theano.function([X_sym], cost, mode="FAST_COMPILE")
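
This test only checks that the expression compiles; the .99 factor keeps the predicted values strictly below 1 so log(1 - p) stays finite. Elementwise, binary cross-entropy is the Bernoulli negative log likelihood; a small NumPy sketch of the reference formula (not the library's code):

import numpy as np

def binary_crossentropy_np(predictions, targets, eps=1e-8):
    # -(t * log(p) + (1 - t) * log(1 - p)), clipped for numerical stability
    p = np.clip(predictions, eps, 1 - eps)
    return -(targets * np.log(p) + (1 - targets) * np.log(1 - p))

targets = np.array([[0., 1., 1., 0.]])
predictions = np.array([[0.1, 0.9, 0.8, 0.3]])
print(binary_crossentropy_np(predictions, targets).mean())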
Example #4
code_log_sigma = linear([l2_enc], [n_hid], proj_dim=n_code,
                        name='code_log_sigma', random_state=random_state)
kl = gaussian_log_kl([code_mu], [code_log_sigma]).mean()
sample_state = np.random.RandomState(2177)
samp = gaussian_log_sample([code_mu], [code_log_sigma], name='samp',
                           random_state=sample_state)

# decode path aka p
l1_dec = softplus([samp], [n_code], proj_dim=n_hid, name='l1_dec',
                  random_state=random_state)
l2_dec = softplus([l1_dec], [n_hid], proj_dim=n_hid, name='l2_dec',
                  random_state=random_state)
out = sigmoid([l2_dec], [n_hid], proj_dim=X.shape[1], name='out',
              random_state=random_state)

nll = binary_crossentropy(out, X_sym).mean()
# See https://arxiv.org/pdf/1406.5298v2.pdf, eq 5
# log p(x | z) = -nll so swap sign
# want to minimize cost in optimization so multiply by -1
# cost = -1 * (-nll - kl)
cost = nll + kl
params = list(get_params().values())
grads = theano.grad(cost, params)

learning_rate = 0.0003
opt = adam(params, learning_rate)
updates = opt.updates(params, grads)

fit_function = theano.function([X_sym], [nll, kl, nll + kl], updates=updates)
cost_function = theano.function([X_sym], [nll + kl])
encode_function = theano.function([X_sym], [code_mu, code_log_sigma])
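
The kl term in these examples is the analytic KL divergence between the approximate posterior N(mu, sigma^2) and a standard normal prior, which has a closed form (Kingma & Welling, "Auto-Encoding Variational Bayes"). A NumPy sketch of that closed form, assuming code_log_sigma is the log standard deviation; if the library stores the log variance, 2 * log_sigma becomes log_sigma and exp(2 * log_sigma) becomes exp(log_sigma):

import numpy as np

def gaussian_kl_np(mu, log_sigma):
    # KL(N(mu, sigma^2) || N(0, 1)) per example, summed over latent dimensions
    return -0.5 * np.sum(1 + 2 * log_sigma - mu ** 2 - np.exp(2 * log_sigma),
                         axis=-1)

mu = np.zeros((5, 50))
log_sigma = np.zeros((5, 50))
print(gaussian_kl_np(mu, log_sigma))  # zeros: the posterior equals the prior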
Example #5
                        'l1_dec',
                        n_dec_layer,
                        random_state=random_state)
l2_dec = softplus_layer([l1_dec],
                        graph,
                        'l2_dec',
                        n_dec_layer,
                        random_state=random_state)
l3_dec = softplus_layer([l2_dec],
                        graph,
                        'l3_dec',
                        n_dec_layer,
                        random_state=random_state)
out = sigmoid_layer([l3_dec], graph, 'out', n_input, random_state=random_state)

nll = binary_crossentropy(out, X_sym).mean()
# log p(x) = -nll so swap sign
# want to minimize cost in optimization so multiply by -1
base_cost = -1 * (-nll - kl)

# -log q(y | x) is negative log likelihood already
alpha = 0.1
err = categorical_crossentropy(y_pred, y_sym).mean()
cost = base_cost + alpha * err

params, grads = get_params_and_grads(graph, cost)

learning_rate = 0.0001
opt = adam(params, learning_rate)
updates = opt.updates(params, grads)
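
The categorical_crossentropy term is the -log q(y | x) classifier penalty mentioned in the comment above, and alpha trades it off against the generative cost. A small NumPy sketch with made-up values, just to show the arithmetic (the numbers are illustrative, not from the library):

import numpy as np

def categorical_crossentropy_np(y_pred, y_true_onehot, eps=1e-8):
    # negative log probability assigned to the correct class, per example
    p = np.clip(y_pred, eps, 1.0)
    return -np.sum(y_true_onehot * np.log(p), axis=-1)

y_pred = np.array([[0.7, 0.2, 0.1],
                   [0.1, 0.8, 0.1]])
y_true = np.array([[1., 0., 0.],
                   [0., 1., 0.]])
alpha = 0.1
base_cost = 1.5  # stand-in for nll + kl
print(base_cost + alpha * categorical_crossentropy_np(y_pred, y_true).mean())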
Example #6
valid_itr = minibatch_iterator([X, y], minibatch_size, make_mask=True, axis=1)
datasets_list = [X_mb, X_mb_mask, y_mb, y_mb_mask]
names_list = ["X", "X_mask", "y", "y_mask"]
X_sym, X_mask_sym, y_sym, y_mask_sym = add_datasets_to_graph(
    datasets_list, names_list, graph, list_of_test_values=datasets_list)

n_hid = 256
n_out = 8

h = location_attention_tanh_recurrent_layer(
    [X_sym], [y_sym], X_mask_sym, y_mask_sym, n_hid, graph, 'l1_att_rec',
    random_state=random_state)

X_hat = sigmoid_layer([h], graph, 'output', proj_dim=n_out,
                      random_state=random_state)
cost = binary_crossentropy(X_hat, X_sym)
cost = masked_cost(cost, X_mask_sym).mean()
params, grads = get_params_and_grads(graph, cost)
opt = adadelta(params)
updates = opt.updates(params, grads)
fit_function = theano.function([X_sym, X_mask_sym, y_sym, y_mask_sym],
                               [cost], updates=updates)
valid_function = theano.function([X_sym, X_mask_sym, y_sym, y_mask_sym], [cost])

checkpoint_dict = {}
checkpoint_dict["fit_function"] = fit_function
checkpoint_dict["valid_function"] = valid_function
TL = TrainingLoop(fit_function, valid_function, train_itr, valid_itr,
                  checkpoint_dict=checkpoint_dict,
                  list_of_train_output_names=["train_cost"],
                  valid_output_name="valid_cost")
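
masked_cost presumably zeroes the per-step cost wherever the mask is 0, so padded timesteps in the variable-length sequences do not influence training. A rough NumPy sketch of that behavior, assuming the cost and mask broadcast elementwise (a sketch of the idea, not the library's code):

import numpy as np

def masked_cost_np(per_step_cost, mask):
    # Zero out entries at padded positions before any averaging.
    return per_step_cost * mask

per_step_cost = np.array([[0.5, 0.7, 0.9],
                          [0.4, 0.6, 0.8]])
mask = np.array([[1., 1., 1.],
                 [1., 1., 0.]])  # second sequence is padded at its last step
print(masked_cost_np(per_step_cost, mask).mean())
# A common variant divides by mask.sum() instead of taking a flat mean,
# so the average runs only over valid steps.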
Example #7
def test_binary_crossentropy():
    cost = binary_crossentropy(.99 * X_sym, X_sym)
    theano.function([X_sym], cost, mode="FAST_COMPILE")
Example #8
def test_vae():
    minibatch_size = 10
    random_state = np.random.RandomState(1999)
    graph = OrderedDict()

    X_sym = add_datasets_to_graph([X], ["X"], graph)

    l1_enc = softplus_layer([X_sym],
                            graph,
                            'l1_enc',
                            proj_dim=100,
                            random_state=random_state)
    mu = linear_layer([l1_enc],
                      graph,
                      'mu',
                      proj_dim=50,
                      random_state=random_state)
    log_sigma = linear_layer([l1_enc],
                             graph,
                             'log_sigma',
                             proj_dim=50,
                             random_state=random_state)
    samp = gaussian_log_sample_layer([mu], [log_sigma],
                                     graph,
                                     'gaussian_log_sample',
                                     random_state=random_state)
    l1_dec = softplus_layer([samp],
                            graph,
                            'l1_dec',
                            proj_dim=100,
                            random_state=random_state)
    out = sigmoid_layer([l1_dec],
                        graph,
                        'out',
                        proj_dim=X.shape[1],
                        random_state=random_state)

    kl = gaussian_log_kl([mu], [log_sigma], graph, 'gaussian_kl').mean()
    cost = binary_crossentropy(out, X_sym).mean() + kl
    params, grads = get_params_and_grads(graph, cost)
    learning_rate = 0.00000
    opt = sgd(params)
    updates = opt.updates(params, grads, learning_rate)

    fit_function = theano.function([X_sym], [cost],
                                   updates=updates,
                                   mode="FAST_COMPILE")

    cost_function = theano.function([X_sym], [cost], mode="FAST_COMPILE")

    checkpoint_dict = {}
    train_indices = np.arange(len(X))
    valid_indices = np.arange(len(X))
    early_stopping_trainer(fit_function,
                           cost_function,
                           checkpoint_dict, [X],
                           minibatch_size,
                           train_indices,
                           valid_indices,
                           fit_function_output_names=["cost"],
                           cost_function_output_name="valid_cost",
                           n_epochs=1)