# Example 1
    # Snippet: load a pretrained DEM (deep energy model) for CIFAR-10 from an
    # autoencoder checkpoint folder plus a pretrained-RBM parameter file.
    from autoencoder import AutoEncoder
    from dem_trainer import DEMTrainer
    import cifar10_ae
    import gibbs_sampler
    import utils

    import keras.backend as K
    import os
    import h5py
    import numpy as np

    # Fixed seed + fresh TF session registered with Keras for reproducibility.
    np.random.seed(66699)
    sess = utils.create_session()
    K.set_session(sess)

    # NOTE(review): `Cifar10Wrapper` and `DEM` are used below but not imported
    # in this snippet — presumably imported elsewhere in the full file; verify.
    dataset = Cifar10Wrapper.load_default()
    ae_folder = 'prod/cifar10_ae3_relu_6/'
    # Encoder/decoder weights live in the AE folder; RBM params in a subfolder
    # named after its training scheme (2000 hidden units, lr 0.001, PCD-25).
    encoder_weights_file = os.path.join(ae_folder, 'encoder.h5')
    decoder_weights_file = os.path.join(ae_folder, 'decoder.h5')
    rbm_params_file = os.path.join(
        ae_folder,
        'ptrbm_scheme1/ptrbm_hid2000_lr0.001_pcd25/epoch_500_rbm.h5')

    # encoder_weights_file = '/home/hhu/Developer/dem/prod/cifar10_ae3_relu_6/test_ae_fe_const_balance/epoch_500_encoder.h5'
    # decoder_weights_file = encoder_weights_file.replace('encoder.', 'decoder.')
    # rbm_params_file = encoder_weights_file.replace('encoder.', 'rbm.')

    # Assemble the DEM from the encode/decode graph definitions and the three
    # weight files loaded above.
    dem = DEM.load_from_param_files(dataset.x_shape, cifar10_ae.RELU_MAX,
                                    cifar10_ae.encode, encoder_weights_file,
                                    cifar10_ae.decode, decoder_weights_file,
                                    rbm_params_file)
# Example 2
    output_dir = os.path.join(parent_dir, rbm_dir)
    train_config.dump_log(output_dir)

    trainer = RBMPretrainer(sess, dataset, rbm, decoder, vis_fn, output_dir)
    trainer.train(train_config, sampler, sampler_generator)
    trainer.dump_log(output_dir)
    return output_dir


if __name__ == '__main__':
    # Fixed seed + fresh TF session registered with Keras for reproducibility.
    np.random.seed(66699)
    sess = utils.create_session()
    K.set_session(sess)

    # Folder holding the trained autoencoder (name encodes the ReLU clip max).
    ae_folder = 'prod/cifar10_ae2_relu_%d' % cifar10_ae.RELU_MAX
    ae = AutoEncoder(Cifar10Wrapper.load_default(), cifar10_ae.encode,
                     cifar10_ae.decode, cifar10_ae.RELU_MAX, ae_folder)
    ae.build_models(ae_folder)  # load model

    # CIFAR-10 already passed through the encoder; samples must be flat
    # vectors (rank-1 x_shape) to feed the RBM.
    encoded_dataset = Cifar10Wrapper.load_from_h5(
        os.path.join(ae_folder, 'encoded_cifar10.h5'))
    assert len(encoded_dataset.x_shape) == 1

    num_hid = 2000
    output_folder = os.path.join(ae_folder, 'test_pretrain')
    # weights_file = os.path.join(
    #     output_folder, 'ptrbm_hid2000_lr0.1_cd1', 'epoch_100_rbm.h5')
    # Resume from an existing RBM checkpoint (hard-coded absolute path) rather
    # than initializing a fresh RBM — see the commented alternative below.
    weights_file = '/home/hhu/Developer/dem/prod/cifar10_ae2_relu_6/ptrbm_scheme0/ptrbm_hid2000_lr0.1_cd1/epoch_100_rbm.h5'
    rbm = RBM(None, None, weights_file)
    # rbm = RBM(encoded_dataset.x_shape[0], num_hid, None)
# Example 3
def compare_dataset():
    """Load the two encoded CIFAR-10 datasets and return them as a pair.

    Returns:
        Tuple of (dataset from 'prod/test_relu6',
                  dataset from 'prod/cifar10_ae2_relu_6').
    """
    first = Cifar10Wrapper.load_from_h5('prod/test_relu6/encoded_cifar10.h5')
    second = Cifar10Wrapper.load_from_h5(
        'prod/cifar10_ae2_relu_6/encoded_cifar10.h5')
    return first, second
# Example 4
import os
import keras
from keras.layers import Input
from keras.models import Model
from keras.utils.visualize_util import plot
from keras.callbacks import LearningRateScheduler
from keras.preprocessing.image import ImageDataGenerator

import utils
import autoencoder
from dataset_wrapper import Cifar10Wrapper
import cifar10_ae


def evalute_encoder(encode_fn, relu_max, weights_file, dataset, dataset_cls):
    """Encode a dataset with a saved encoder and plot the feature distribution.

    Builds the encoder from ``weights_file``, encodes the train/test splits of
    ``dataset``, wraps the results in ``dataset_cls``, and writes a data
    distribution plot next to the weights file.

    Args:
        encode_fn: function defining the encoder graph.
        relu_max: ReLU clip value used when the encoder was trained.
        weights_file: path to the saved encoder weights (.h5).
        dataset: dataset wrapper with train_xs/train_ys/test_xs/test_ys.
        dataset_cls: wrapper class used to hold the encoded splits.

    Returns:
        The encoded dataset. (Previously the result was built and then
        discarded after plotting; returning it is backward-compatible and
        lets callers reuse the encoding.)
    """
    # Decoder half is not needed for evaluation, hence decode_fn=None.
    encoder = autoencoder.build_model(dataset.x_shape, relu_max, encode_fn,
                                      None, weights_file)

    encoded_train_xs = encoder.predict(dataset.train_xs)
    encoded_test_xs = encoder.predict(dataset.test_xs)
    encoded_dataset = dataset_cls(encoded_train_xs, dataset.train_ys,
                                  encoded_test_xs, dataset.test_ys)
    # Save the plot alongside the weights file for easy association.
    plot_path = weights_file + '.encoded_dataset.png'
    encoded_dataset.plot_data_dist(plot_path)
    return encoded_dataset


if __name__ == '__main__':
    # Evaluate the pretrained CIFAR-10 encoder (ReLU clipped at 6) and plot
    # the distribution of its encoded features next to the weights file.
    evalute_encoder(cifar10_ae.encode, 6,
                    'prod/cifar10_new_ae768_relu6/encoder.h5',
                    Cifar10Wrapper.load_default(), Cifar10Wrapper)
# Example 5
                    default=None,
                    help='Where to store samples and models')

if __name__ == '__main__':
    # Parse CLI options; the seed is then forced to a fixed value so runs
    # are reproducible regardless of what the user passed.
    opt = parser.parse_args()
    opt.manualSeed = 666999
    print(opt)

    np.random.seed(opt.manualSeed)
    torch.manual_seed(opt.manualSeed)
    torch.cuda.manual_seed(opt.manualSeed)

    assert opt.experiment is not None, 'specify output dir to avoid overwriting.'
    if not os.path.exists(opt.experiment):
        os.makedirs(opt.experiment)
    # Persist the full run config for later reference. Fix: the previous
    # inline `print(opt, file=open(...))` never closed the file handle;
    # `with` guarantees the handle is flushed and closed.
    with open(os.path.join(opt.experiment, 'configs.txt'), 'w') as config_file:
        print(opt, file=config_file)

    # Let cuDNN auto-tune convolution algorithms (fast for fixed input sizes).
    cudnn.benchmark = True

    dataset = Cifar10Wrapper.load_default(opt.batch_size)
    dem = DEM(opt)
    sampler = Sampler(opt)
    print(dem.net_f)
    print(sampler.net_g)

    opt.max_steps = 25
    dem.train(opt, dataset, sampler)

    # if opt.net_f and opt.net_g:
    #     dem.eval(dataset.train_xs, dataset.test_xs)
# Example 6
        ]},
}


if __name__ == '__main__':
    # Fixed seed + fresh TF session registered with Keras for reproducibility.
    np.random.seed(66699)
    sess = utils.create_session()
    K.set_session(sess)

    # Select which dataset/autoencoder pair to load below.
    data = 'cifar'

    if data == 'cifar':
        ae_folder = 'prod/cifar10_ae3_relu_%d' % cifar10_ae.RELU_MAX
        # ae_folder = 'prod/cifar10_new_ae%d_relu%d' % (
        #     cifar10_ae.LATENT_DIM, cifar10_ae.RELU_MAX)
        ae = AutoEncoder(Cifar10Wrapper.load_default(),
                         cifar10_ae.encode, cifar10_ae.decode,
                         cifar10_ae.RELU_MAX, ae_folder)
        encoded_dataset = Cifar10Wrapper.load_from_h5(
            os.path.join(ae_folder, 'encoded_cifar10.h5'))
    elif data =='stl':
        ae_folder = '/home/hengyuah/dem/prod/stl10_ae_1024_relu6'
        ae = AutoEncoder(STL10Wrapper.load_default(),
                         stl_ae.encode, stl_ae.decode,
                         stl_ae.RELU_MAX, ae_folder)
        encoded_dataset = STL10Wrapper.load_from_h5(
            os.path.join(ae_folder, 'encoded_stl10.h5'))
    # NOTE(review): no `else` branch — if `data` is anything other than
    # 'cifar' or 'stl', `ae` and `encoded_dataset` are never bound and the
    # next line raises NameError. Harmless while `data` is hard-coded above.

    ae.build_models(ae_folder) # load model
    # Encoded samples must be flat vectors (rank-1 x_shape) for the RBM stage.
    assert len(encoded_dataset.x_shape) == 1