Example #1
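    # Tail of the pretraining routine in this example: run training with the
    # supplied config and samplers, write out the training log, and return
    # the output directory.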
    trainer.train(train_config, sampler, sampler_generator)
    trainer.dump_log(output_dir)
    return output_dir


# Imports assumed by this example: the standard-library and Keras imports are
# certain, while the project-local module paths below are guesses based on the
# identifiers used in the code.
import os

import numpy as np
from keras import backend as K

# import utils                                 # create_session, TrainConfig
# import cifar10_ae                            # encode/decode, RELU_MAX
# from autoencoder import AutoEncoder          # path assumed
# from dataset_wrapper import Cifar10Wrapper   # path assumed
# from rbm import RBM                          # path assumed


if __name__ == '__main__':
    np.random.seed(66699)
    sess = utils.create_session()
    K.set_session(sess)

    # Rebuild the autoencoder defined in cifar10_ae and load its saved weights.
    ae_folder = 'prod/cifar10_ae2_relu_%d' % cifar10_ae.RELU_MAX
    ae = AutoEncoder(Cifar10Wrapper.load_default(), cifar10_ae.encode,
                     cifar10_ae.decode, cifar10_ae.RELU_MAX, ae_folder)
    ae.build_models(ae_folder)  # load the saved model weights from ae_folder

    # CIFAR-10 data already encoded by the autoencoder into flat feature vectors.
    encoded_dataset = Cifar10Wrapper.load_from_h5(
        os.path.join(ae_folder, 'encoded_cifar10.h5'))
    assert len(encoded_dataset.x_shape) == 1

    num_hid = 2000
    output_folder = os.path.join(ae_folder, 'test_pretrain')
    # Load RBM weights saved by an earlier run; the commented-out lines show the
    # alternatives of building the path under output_folder or starting from a
    # freshly initialized RBM.
    # weights_file = os.path.join(
    #     output_folder, 'ptrbm_hid2000_lr0.1_cd1', 'epoch_100_rbm.h5')
    weights_file = '/home/hhu/Developer/dem/prod/cifar10_ae2_relu_6/ptrbm_scheme0/ptrbm_hid2000_lr0.1_cd1/epoch_100_rbm.h5'
    rbm = RBM(None, None, weights_file)
    # rbm = RBM(encoded_dataset.x_shape[0], num_hid, None)

    # train_config = utils.TrainConfig(
    #     lr=0.1, batch_size=100, num_epoch=100, use_pcd=False, cd_k=1)
    train_config = utils.TrainConfig(lr=0.01,
                                     batch_size=100,
                                     num_epoch=200,
Example #2
def compare_dataset():
    """Load the CIFAR-10 encodings from two different output folders for comparison."""
    d1 = Cifar10Wrapper.load_from_h5('prod/test_relu6/encoded_cifar10.h5')
    d2 = Cifar10Wrapper.load_from_h5(
        'prod/cifar10_ae2_relu_6/encoded_cifar10.h5')

    return d1, d2
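
A minimal usage sketch for compare_dataset. Only load_from_h5 and x_shape appear in the examples above, so the element-wise check is left as a commented-out placeholder with an assumed attribute name:

d1, d2 = compare_dataset()

# x_shape is the only wrapper attribute the examples above rely on.
print('encoded feature shapes:', d1.x_shape, d2.x_shape)

# Hypothetical element-wise comparison; `train_xs` is a placeholder attribute
# name, not one confirmed by the examples above.
# import numpy as np
# print('mean |diff|:', np.abs(d1.train_xs - d2.train_xs).mean())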