Example no. 1 (score: 0)
def objective(params):
    """Hyperparameter-search objective: train the LSTM-VAE once and return its mean training loss.

    params: iterable of four values (encode_l1, encode_dropout, decode_l1, decode_dropout).
    Returns: float — mean of the losses recorded by the module-level `history` callback.
    Relies on module-level globals: create_lstm_vae, input_dim, timesteps,
    batch_size, intermediate_dim, latent_dim, epsilon_std, x, epochs, history, np.
    """
    encode_l1, encode_dropout, decode_l1, decode_dropout = params
    # NOTE(review): the four unpacked hyperparameters are never passed to
    # create_lstm_vae below — the model is built entirely from globals, so the
    # search cannot actually explore them. Presumably they should be forwarded;
    # confirm against create_lstm_vae's signature.
    # load model
    vae, enc, gen = create_lstm_vae(input_dim,
        timesteps=timesteps,
        batch_size=batch_size,
        intermediate_dim=intermediate_dim,
        latent_dim=latent_dim,
        epsilon_std=epsilon_std)

    # train model: autoencoder target is the input itself
    vae.fit(x, x, epochs=epochs, callbacks=[history])
    return np.mean(history.losses)
Example no. 2 (score: 0)
def main():
    """Train an LSTM-VAE on z-axis coordinate drawings and save diff sketches each round.

    Relies on module-level globals: get_coord_drawings_z_axis, create_lstm_vae,
    sketcher, timesteps, batch_size, np.
    """
    # Accumulate 2048 draws from the coordinate generator into one flat list.
    samples = []
    coord_gen = get_coord_drawings_z_axis()
    for _ in range(2048):
        batch_x, _ = next(coord_gen)
        samples.extend(batch_x)

    x_input = np.asarray(samples)
    print(x_input.shape)
    y_input = x_input  # autoencoder: target equals input
    input_dim = 5  # 13

    vae, enc, gen = create_lstm_vae(input_dim,
                                    timesteps=timesteps,
                                    batch_size=batch_size,
                                    intermediate_dim=32,
                                    latent_dim=100,
                                    epsilon_std=1.)

    # Four training rounds; after each, predict and dump a visual diff batch.
    for round_idx in range(4):
        vae.fit(x_input, x_input, epochs=4, batch_size=batch_size)
        preds = vae.predict(x_input, batch_size=batch_size)

        # pick a column to plot.
        print("[plotting...]")
        print("x: %s, preds: %s" % (x_input.shape, preds.shape))
        tt = preds
        # Softmax over channels 2:5 (printed for inspection only).
        exp_logits = np.exp(tt[:, :, 2:5])
        probs = exp_logits / np.sum(exp_logits, axis=-1, keepdims=True)
        print(probs[0])
        print(tt[0])
        print(y_input[0])
        # Rescale the first 16 sequences from [-1, 1] to pixel space and save.
        tot = sketcher.save_batch_diff_z_axis(list((1 + tt[0:16]) * 128),
                                              list((1 + y_input[0:16]) * 128),
                                              "./",
                                              str(round_idx) + "FINAL")
Example no. 3 (score: 0)
    # Tail of a truncated helper (its `def` line is outside this view):
    # builds overlapping sliding windows of length `timesteps` over `data`.
    dataX = []
    for i in range(len(data) - timesteps - 1):
        # Window i covers rows [i, i+timesteps) across all feature columns.
        x = data[i:(i+timesteps), :]
        dataX.append(x)
    return np.array(dataX) # samples x time-steps x features


if __name__ == "__main__":
    # Train an LSTM-VAE on the dataset and plot one feature column of the
    # reconstruction against the data.
    x = get_data()
    # Model dimensions come straight from the data tensor.
    input_dim = x.shape[-1]  # 13 features
    timesteps = x.shape[1]  # 3 timesteps
    batch_size = 1

    vae, enc, gen = create_lstm_vae(input_dim,
                                    timesteps=timesteps,
                                    batch_size=batch_size,
                                    intermediate_dim=32,
                                    latent_dim=100,
                                    epsilon_std=1.)

    vae.fit(x, x, epochs=20)
    preds = vae.predict(x, batch_size=batch_size)

    # pick a column to plot.
    print("[plotting...]")
    print("x: %s, preds: %s" % (x.shape, preds.shape))
    for series, series_label in ((x, 'data'), (preds, 'predict')):
        plt.plot(series[:, 0, 3], label=series_label)
    plt.legend()
    plt.show()
Example no. 4 (score: 0)
if __name__ == "__main__":

    # Load a character-level text dataset from data/fra.txt (3000 samples).
    timesteps_max, enc_tokens, characters, char2id, id2char, x, x_decoder = get_text_data(
        num_samples=3000, data_path="data/fra.txt")

    print(x.shape, "Creating model...")

    # Model dimensions are derived from the encoded input tensor.
    input_dim = x.shape[-1]
    timesteps = x.shape[-2]
    batch_size = 1
    latent_dim = 191
    intermediate_dim = 353
    epochs = 40

    vae, enc, gen, stepper = create_lstm_vae(input_dim,
                                             batch_size=batch_size,
                                             intermediate_dim=intermediate_dim,
                                             latent_dim=latent_dim)
    print("Training model...")

    # Two inputs, one target — presumably x_decoder is the teacher-forcing
    # decoder input; confirm against get_text_data.
    vae.fit([x, x_decoder], x, epochs=epochs, verbose=1)

    print("Fitted, predicting...")

    def decode(s):
        """Decode state `s` back into a character sequence via the gen/stepper models."""
        return inference.decode_sequence(s, gen, stepper, input_dim, char2id,
                                         id2char, timesteps_max)

    for _ in range(5):

        # Pick two random sample indices; the loop body continues beyond this view.
        id_from = np.random.randint(0, x.shape[0] - 1)
        id_to = np.random.randint(0, x.shape[0] - 1)
Example no. 5 (score: 0)
    # Fragment of a larger training routine (truncated at both ends of this view).
    # Default training hyperparameters.
    batch_size = 1
    penalty = 0.001
    lr = 0.001
    dr = 0.5
    period = 10  # checkpoint period

    # Dataset-specific overrides for the 'germany' dataset.
    if dataname == 'germany':
        penalty = 0
        lr = 0.00005

    vae, enc, gen = create_lstm_vae(nb_features,
                                    n_pre=n_pre,
                                    n_post=n_post,
                                    batch_size=batch_size,
                                    intermediate_dim=32,
                                    latent_dim=200,
                                    initialization='glorot_normal',
                                    activation='linear',
                                    lr=lr,
                                    penalty=penalty,
                                    dropout=dr,
                                    epsilon_std=1.)

    # Checkpoint path embeds epoch and validation loss via Keras formatting.
    filepath = "results/{}/{}".format(
        dataname, analysis) + "/weights.{epoch:02d}-{val_loss:.3f}.hdf5"
    # NOTE(review): `period` is deprecated in newer Keras (replaced by
    # `save_freq`) — confirm against the installed version.
    checkpointer = ModelCheckpoint(filepath=filepath,
                                   monitor='val_loss',
                                   verbose=1,
                                   period=period,
                                   save_best_only=True)

    # Log training metrics to CSV (statement continues past this view).
    csv_logger = CSVLogger('results/{}/{}/training_log_{}_{}.csv'.format(