Code Example #1
from torch.utils.data import DataLoader

def train(args):
    # Build the training set and a shuffled loader over it
    trainset = TrainDataset()
    trainloader = DataLoader(trainset,
                             batch_size=args.batch_size,
                             shuffle=True)
    # Move the model to the target device and run its training loop
    vae = VAE().to(DEVICE)
    vae.fit(trainloader, n_epochs=args.num_epochs, lr=args.lr)
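
The TrainDataset, VAE, and DEVICE symbols above are project-specific and not shown. As a rough sketch of what a fit method like this usually implements (assuming the model's forward pass returns the reconstruction together with the latent mean and log-variance; that interface is an assumption, not taken from the original code):

# Hypothetical sketch of VAE.fit: a standard ELBO training loop.
# forward() returning (recon, mu, logvar) is an assumed interface.
import torch
import torch.nn.functional as F

def fit(self, trainloader, n_epochs, lr):
    optimizer = torch.optim.Adam(self.parameters(), lr=lr)
    for epoch in range(n_epochs):
        for x in trainloader:
            x = x.to(DEVICE)
            recon, mu, logvar = self(x)
            # ELBO: reconstruction error plus KL divergence to the unit Gaussian
            recon_loss = F.mse_loss(recon, x, reduction="sum")
            kl = -0.5 * torch.sum(1 + logvar - mu.pow(2) - logvar.exp())
            loss = recon_loss + kl
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()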
Code Example #2
def main():
    # Set parameters
    vae_epoch = 2
    can_epoch = 1000
    batch_size = 64
    latent_dim = 10
    beta_eeg = 5.0
    train = True

    # Read data sets
    data_root = "/home/zainkhan/bci-representation-learning"
    eeg_train, eeg_test, pupil_train, pupil_test, sub_cond = utils.read_single_trial_datasets(
        data_root)

    if train:
        # Train VAE
        vae = VAE(beta=beta_eeg, latent_dim=latent_dim)
        vae.compile(optimizer=keras.optimizers.Adam())
        vae.fit(eeg_train, epochs=vae_epoch, batch_size=batch_size)

        # Save VAE so the load branch below can restore it
        vae.encoder.save("vae_encoder")
        vae.decoder.save("vae_decoder")

        # Train CAN
        can = CAN(
            vae=vae,
            can_data=pupil_train,
            vae_data=eeg_train,
            latent_dim=latent_dim,
            epochs=can_epoch,
            batch_size=batch_size,
        )
        can.compile(optimizer=keras.optimizers.Adam(), run_eagerly=True)
        can.fit(pupil_train,
                epochs=can_epoch,
                batch_size=batch_size,
                shuffle=False)

        # Save CAN
        can.encoder.save("can_encoder")
        can.decoder.save("can_decoder")
    else:
        # Load all encoders/decoders
        vae = VAE(beta=beta_eeg, latent_dim=latent_dim)
        vae.encoder = keras.models.load_model("vae_encoder")
        vae.decoder = keras.models.load_model("vae_decoder")

        can = CAN(vae=vae, vae_data=eeg_train, latent_dim=latent_dim)
        can.encoder = keras.models.load_model("can_encoder")
        can.decoder = keras.models.load_model("can_decoder")

    # VAE predictions
    encoded_data = vae.encoder.predict(eeg_test)
    decoded_data = vae.decoder.predict(encoded_data)
    fn = utils.get_filename("predictions/", "test-eeg")
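
The VAE and CAN classes above are not included in the snippet. A minimal self-contained sketch of a Keras beta-VAE with a custom train_step, which is one plausible shape for the VAE(beta=..., latent_dim=...) interface used here; the layer sizes and the mean-squared reconstruction loss are assumptions, not the project's actual class:

# Minimal beta-VAE sketch (assumed interface, not the project's actual class).
import tensorflow as tf
from tensorflow import keras

class VAE(keras.Model):
    def __init__(self, beta, latent_dim, input_dim=64, **kwargs):
        super().__init__(**kwargs)
        self.beta = beta
        self.stats = keras.layers.Dense(2 * latent_dim)  # z_mean || z_log_var
        self.decode = keras.layers.Dense(input_dim)

    def train_step(self, data):
        with tf.GradientTape() as tape:
            z_mean, z_log_var = tf.split(self.stats(data), 2, axis=-1)
            # Reparameterization trick: z = mu + sigma * eps
            eps = tf.random.normal(tf.shape(z_mean))
            z = z_mean + tf.exp(0.5 * z_log_var) * eps
            recon = self.decode(z)
            recon_loss = tf.reduce_mean(tf.square(data - recon))
            kl_loss = -0.5 * tf.reduce_mean(
                1.0 + z_log_var - tf.square(z_mean) - tf.exp(z_log_var))
            loss = recon_loss + self.beta * kl_loss  # beta weights the KL term
        grads = tape.gradient(loss, self.trainable_weights)
        self.optimizer.apply_gradients(zip(grads, self.trainable_weights))
        return {"loss": loss, "recon_loss": recon_loss, "kl_loss": kl_loss}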
Code Example #3
def vae_train():
    # load train
    x_train = load_dataset('adi')

    # load test normal and anomaly
    test_anomaly = load_dataset('vans')
    test_normal = load_dataset('adi_test')

    # define train and validation images for training
    trains = x_train[10:]
    valid = x_train[:10]
    print(trains.shape, valid.shape, test_anomaly.shape, test_normal.shape)

    # try to plot
    plt.imshow(x_train[10].reshape(256, 256))
    plt.gray()
    plt.show()

    # train: allow GPU memory to grow on demand (TF1-style); the config only
    # takes effect once it is attached to the backend session
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    tf.keras.backend.set_session(tf.Session(config=config))

    def step_decay(epoch):
        initial_lrate = 0.0001
        decay_rate = 0.5
        decay_steps = 8.0
        lrate = initial_lrate * math.pow(decay_rate,
                                         math.floor((1 + epoch) / decay_steps))
        return lrate

    callback = []
    callback.append(
        HistoryCheckpoint(filepath='tb/LearningCurve_{history}.png',
                          verbose=1,
                          period=300))
    callback.append(LearningRateScheduler(step_decay))

    vae = VAE()
    model, loss = vae.vae_net()  # returns the Keras model and its loss tensor
    #model.load_weights("vae_model.h5")

    model.add_loss(loss)
    model.compile(optimizer=Adam(lr=0.0001))
    model.summary()

    try:
        model.fit(trains,
                  batch_size=20,
                  epochs=300,
                  callbacks=callback,
                  validation_data=(valid, None))
    finally:
        model.save('weight/vae_model.h5')
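
One detail worth noting in step_decay above: because of the 1 + epoch term, the learning rate halves at epochs 7, 15, 23, and so on rather than at clean multiples of 8. A standalone check (same formula, lifted out of the training script):

import math

def step_decay(epoch, initial_lrate=0.0001, decay_rate=0.5, decay_steps=8.0):
    return initial_lrate * math.pow(decay_rate,
                                    math.floor((1 + epoch) / decay_steps))

for epoch in (0, 6, 7, 14, 15, 23):
    print(epoch, step_decay(epoch))
# -> 0.0001, 0.0001, 5e-05, 5e-05, 2.5e-05, 1.25e-05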
Code Example #4
# Optimizer and learning_rate schedule
lr_0 = .001
decay_rate = .998
lr_decay = lambda t: lr_0 * decay_rate**t
lr_schedule = tf.keras.callbacks.LearningRateScheduler(lr_decay)
optimizer = tf.keras.optimizers.Adam()

# Compile Model
model.compile(optimizer = optimizer,
              loss = cost_function,
              metrics = ['accuracy'])

# Train model
history = model.fit(dataset, dataset,
                    batch_size = batch_size,
                    epochs = epochs,
                    callbacks = [lr_schedule],
                    validation_data = (val_dataset, val_dataset))



### Plot Training Metrics
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
training_loss = history.history['loss']
training_accuracy = history.history['accuracy']
val_loss = history.history['val_loss']
val_accuracy = history.history['val_accuracy']


plt.figure(1)
plt.plot(training_loss, 'b', label='Training')
plt.plot(val_loss, 'r', label='Validation')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()
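
In this example Adam() is created with its default learning rate, which is fine because the LearningRateScheduler callback overwrites it at the start of every epoch with lr_0 * decay_rate**t. With decay_rate = 0.998 the decay is gentle; a quick check of a few epochs:

# Quick check of the exponential schedule lr_0 * decay_rate**t
lr_0, decay_rate = 0.001, 0.998
for t in (0, 50, 100, 500):
    print(t, lr_0 * decay_rate ** t)
# -> 0.001, ~0.000905, ~0.000819, ~0.000367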
Code Example #5
# Reference
[1] Kingma, Diederik P., and Max Welling.
"Auto-encoding variational bayes."
https://arxiv.org/abs/1312.6114
'''

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from VAE import VAE
from Encoder import Encoder
from Decoder import Decoder
from Parameters import x_train, x_test, y_test, latent_dim, input_shape, epochs, batch_size
from keras.utils import plot_model
from Util import plot_results
if __name__ == '__main__':
    encoder = Encoder(input_data=input_shape)
    decoder = Decoder(input_data=(latent_dim, ))
    models = (encoder, decoder)
    data = (x_test, y_test)
    vae = VAE(input_data=input_shape, encoder=encoder, decoder=decoder)
    vae.compile(optimizer='adam')
    vae.summary()
    plot_model(vae, to_file='vae_mlp.png', show_shapes=True)
    vae.fit(x=x_train,
            y=None,
            epochs=epochs,
            batch_size=batch_size,
            validation_data=(x_test, None))
    vae.save_weights('vae_mlp_mnist.h5')
    plot_results(models, data, batch_size=batch_size, model_name="vae_mlp")
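
Note that vae.compile(optimizer='adam') passes no loss argument; this only works because the VAE class attaches its loss tensor internally, as the standard Keras VAE example does with add_loss. A self-contained sketch of that pattern, using illustrative layer sizes rather than the values imported from Parameters:

# Sketch of a Keras VAE that supplies its own loss via add_loss, so that
# compile() needs no loss argument. Sizes here are illustrative only.
import keras
from keras import backend as K
from keras.layers import Dense, Input, Lambda

original_dim, latent_dim = 784, 2
inputs = Input(shape=(original_dim,))
h = Dense(64, activation='relu')(inputs)
z_mean = Dense(latent_dim)(h)
z_log_var = Dense(latent_dim)(h)

def sample(args):
    mu, log_var = args
    eps = K.random_normal(shape=K.shape(mu))
    return mu + K.exp(0.5 * log_var) * eps  # reparameterization trick

z = Lambda(sample)([z_mean, z_log_var])
outputs = Dense(original_dim, activation='sigmoid')(Dense(64, activation='relu')(z))

vae = keras.Model(inputs, outputs)
recon = keras.losses.binary_crossentropy(inputs, outputs) * original_dim
kl = -0.5 * K.sum(1 + z_log_var - K.square(z_mean) - K.exp(z_log_var), axis=-1)
vae.add_loss(K.mean(recon + kl))
vae.compile(optimizer='adam')  # loss comes from add_loss above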