Example #1
0
def train_shallow(beta):
    """Train the shallow dense MNIST VAE for a given KL weight ``beta``.

    NOTE(review): relies on module-level names `experiment`, `utils` and
    `ShallowDenseMNIST` defined elsewhere in this module.
    """
    # fixed run hyperparameters
    input_shape = (1, 28, 28)
    epochs = 20
    batch_size = 1
    filters = 32
    kernel_size = 6
    pre_latent_size = 128
    latent_size = 32

    # hyperparameter dictionary -> unique run/log directory name
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'beta': beta,
        'filters': filters,
        'kernel_size': kernel_size,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }
    hp_string = utils.build_hyperparameter_string('shallow', hp_dictionary)
    log_dir = './summaries/' + experiment + '/' + hp_string + '/'

    # build the VAE architecture
    vae = ShallowDenseMNIST(input_shape,
                            log_dir,
                            filters=filters,
                            kernel_size=kernel_size,
                            pre_latent_size=pre_latent_size,
                            latent_size=latent_size,
                            beta=beta)

    # compile with a fixed Adam learning rate
    from keras import optimizers
    vae.compile(optimizer=optimizers.Adam(lr=1e-4))

    # MNIST inputs only (labels discarded), wrapped as generators
    (X_train, _), (X_test, _) = utils.load_mnist()
    train_gen = utils.make_generator(X_train, batch_size=batch_size)
    test_gen = utils.make_generator(X_test, batch_size=batch_size)

    vae.print_model_summaries()

    # one full pass over each dataset per epoch
    vae.fit_generator(train_gen,
                      epochs=epochs,
                      steps_per_epoch=len(X_train) // batch_size,
                      validation_data=test_gen,
                      validation_steps=len(X_test) // batch_size)
def main():
    """Reload a trained Frey-face convolutional-latent VAE and draw posterior samples.

    NOTE(review): depends on module-level `utils`, `sampling` and
    `FreyOptimalConvolutionalLatentExperimentFullyConnectedFilterVAE`.
    """
    # architecture hyperparameters -- NOTE(review): epochs=10 and
    # latent_channels=1 disagree with the saved run name below
    # (epochs_20, latent_channels_64); confirm which is intended.
    input_shape = (1, 28, 20)
    filters = 32
    latent_channels = 1
    kernel_size = 2
    beta = 1.0  # entangled latent space
    epochs = 10
    batch_size = 1

    # fixed pretrained run to reload
    experiment = 'experiment_optimal_network_convolutional_latent_frey'
    run = 'cvae_frey_entangled_with_fully_connected_filters_same_borders_15_May_13_50_05_batch_size_1_beta_1.0_epochs_20_filters_32_kernel_size_2_latent_channels_64_loss_vae_loss_optimizer_adam'
    log_dir = './summaries/' + experiment + '/' + run + '/'

    # rebuild the architecture, then restore its weights
    vae = FreyOptimalConvolutionalLatentExperimentFullyConnectedFilterVAE(
        input_shape,
        log_dir,
        filters=filters,
        latent_channels=latent_channels,
        kernel_size=kernel_size,
        beta=beta)
    vae.load_model()

    # sub-models (decoder/encoder unused by the posterior sampling below)
    model = vae.get_model()
    decoder = vae.get_decoder()
    encoder = vae.get_encoder()

    # Frey faces dataset, labels ignored
    (X_train, _), (X_test, _) = utils.load_frey()
    train_generator = utils.make_generator(X_train, batch_size=batch_size)
    test_generator = utils.make_generator(X_test, batch_size=batch_size)
    train_size = len(X_train)
    test_size = len(X_test)

    # other visualisations (reconstructions, filters, prior samples) are
    # available in `sampling` but disabled here

    # draw samples from the posterior, displaying every 5th iteration
    num_iter = 1000
    sampling.sample_posterior(X_test, model, num_iter, show_every=5)
Example #3
0
def seeds_maker():
    """
    Generate crawl seeds.

    Builds every request URL for each car series, covering each city and
    each month (although the result pages always show the current month).
    Each seed written has the shape:
    seed = {
        'brand_id': xxx,
        'brand': xxxx,
        'serise_id': xxx,
        'serise': xxxx,
        'url': xxxxx,
        'check_city': xxxx,
        'check_year': xxxx,
        'check_month': xxxx,
        'cookie': {},
        'cookie_status': 0,
        'data': [],
    }
    """
    initial_file(seed_file)  # truncate/prepare the seed output file
    epoh = int(file_content(epoh_file))  # current epoch counter from disk
    seed_demo.update({'epoh': epoh + 1})  # tag new seeds with the next epoch
    serise_list = make_generator(serise_file, blank=blank)
    for each in serise_list:
        # NOTE(review): positional fields presumably map to
        # (serise_id, serise, brand, brand_id, extra) -- confirm against
        # construct_seed's signature and the serise_file column layout.
        construct_seed(each[2], each[1], each[-2], each[0], each[-1])
    overwrite_file(epoh_file, str(epoh + 1))  # persist the bumped epoch
    return
Example #4
0
def train_convolutional_autoencoder():
    """Train a convolutional autoencoder on MNIST, logging under ./summaries/.

    NOTE(review): uses module-level `experiment`, `utils`, `optimizers`
    and `ConvolutionalAutoencoder` defined elsewhere in this module.
    """
    # run hyperparameters
    input_shape = (1, 28, 28)
    epochs = 15
    batch_size = 1
    lr = 1e-4

    # hyperparameter dictionary -> unique run/log directory name
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'lr': lr,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }
    hp_string = utils.build_hyperparameter_string('autoencoder_mnist',
                                                  hp_dictionary)
    log_dir = './summaries/' + experiment + '/' + hp_string + '/'

    # build and compile the model
    autoencoder = ConvolutionalAutoencoder(input_shape, log_dir)
    autoencoder.compile(optimizer=optimizers.Adam(lr=lr))

    # MNIST inputs only (labels discarded), wrapped as generators
    (X_train, _), (X_test, _) = utils.load_mnist()
    train_gen = utils.make_generator(X_train, batch_size=batch_size)
    test_gen = utils.make_generator(X_test, batch_size=batch_size)

    autoencoder.print_model_summaries()

    # train, validating on the test generator every epoch
    autoencoder.fit_generator(train_gen,
                              epochs=epochs,
                              steps_per_epoch=len(X_train) // batch_size,
                              validation_data=test_gen,
                              validation_steps=len(X_test) // batch_size)
Example #5
0
def main():
    """Reload a trained dense MNIST autoencoder and plot reconstructions.

    NOTE(review): uses module-level `experiment`, `utils`, `sampling`,
    `plt` and `DenseAutoencoder` defined elsewhere in this module.
    """
    # architecture / data parameters
    img_channels = 1
    input_shape = (img_channels, 28, 28)
    epochs = 50
    batch_size = 1
    lr = 1e-4

    name = 'autoencoder_mnist'

    # NOTE(review): this dictionary (and `name` above) is unused here --
    # the log directory comes from the fixed saved run below.
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'lr': lr,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }

    # fixed pretrained run to reload
    run = 'autoencoder_mnist_31_May_21_44_51_batch_size_1_epochs_15_loss_vae_loss_lr_0.0001_optimizer_adam'
    log_dir = './summaries/' + experiment + '/' + run + '/'

    # rebuild the architecture, then restore its weights
    autoencoder = DenseAutoencoder(input_shape, log_dir)
    autoencoder.load_model()

    # sub-models (decoder/encoder unused by the plotting below)
    model = autoencoder.get_model()
    decoder = autoencoder.get_decoder()
    encoder = autoencoder.get_encoder()

    # MNIST inputs only (labels discarded)
    (X_train, _), (X_test, _) = utils.load_mnist()
    train_generator = utils.make_generator(X_train, batch_size=batch_size)
    test_generator = utils.make_generator(X_test, batch_size=batch_size)

    # side-by-side originals and reconstructions
    sampling.plot_original_and_reconstructions(X_test,
                                               model,
                                               input_shape=input_shape)
    plt.show()
Example #6
0
def main():
    """Reload a trained Frey-face VAE (convolutional latent, no batch norm)
    and show an original next to its reconstruction.

    NOTE(review): uses module-level `experiment`, `utils`, `sampling` and
    `FreyConvolutionalLatentSpaceNoBatchNormVAE`.
    """
    # architecture hyperparameters -- must agree with the saved run below
    input_shape = (1, 28, 20)
    filters = 32
    latent_filters = 1
    kernel_size = 2
    pool_size = 2
    lr = 1e-4
    beta = 1.0
    batch_size = 1

    # fixed pretrained run to reload
    run = 'cvae_frey_20_May_17_07_04_batch_size_1_beta_1_epochs_20_filters_32_kernel_size_2_latent_filters_1_loss_vae_loss_lr_0.0001_optimizer_adam'
    log_dir = './summaries/' + experiment + '/' + run + '/'

    # rebuild the architecture, then restore its weights
    vae = FreyConvolutionalLatentSpaceNoBatchNormVAE(
        input_shape,
        log_dir,
        filters=filters,
        latent_filters=latent_filters,
        kernel_size=kernel_size,
        pool_size=pool_size,
        beta=beta)
    vae.load_model()

    # sub-models (decoder/encoder unused by the reconstruction demo below)
    model = vae.get_model()
    decoder = vae.get_decoder()
    encoder = vae.get_encoder()

    # Frey faces dataset, labels ignored
    (X_train, _), (X_test, _) = utils.load_frey()
    train_generator = utils.make_generator(X_train, batch_size=batch_size)
    test_generator = utils.make_generator(X_test, batch_size=batch_size)
    train_size = len(X_train)
    test_size = len(X_test)

    # encode a test image and decode it back for visual comparison
    sampling.encode_decode_sample(X_test, model)
def get_all_serise():
    """
    Fetch all car series for every known brand and save any new ones.

    Reads brand rows from ``brands_file``, requests the series listing for
    each brand id, parses the JSON response, and stores series not already
    recorded in ``serise_file``.
    :return:
    """
    id_set = make_set(serise_file, 2, '\t')  # series ids already on disk, to skip duplicates
    all_brands = make_generator(brands_file, blank='\t')
    session = RequestAPI()
    for each in all_brands:
        # each[0] presumably is the brand id, each[1] the brand name -- TODO confirm
        url = serise_url.format(each[0])
        html = session.receive_and_request(url=url,
                                           headers=headers,
                                           method='GET')
        if html != 'null_html':  # sentinel returned on a failed request
            data = parse_json(html)
            if data != []:
                save_serise_data(each[0], each[1], data, id_set)
    # NOTE(review): everything below appears to be an unrelated fragment
    # pasted from a VAE training script -- `input_shape`, `log_dir`,
    # `filters`, `latent_filters`, `kernel_size`, `pool_size`, `epochs`
    # and `batch_size` are never defined in this function, so this code
    # would raise NameError if reached. TODO: confirm and remove.
    # make VAE
    vae = ConvolutionalLatentDeepVAE(input_shape,
                                     log_dir,
                                     filters=filters,
                                     latent_filters=latent_filters,
                                     kernel_size=kernel_size,
                                     pool_size=pool_size)

    # compile VAE
    from keras import optimizers
    optimizer = optimizers.Adam(lr=1e-3)
    vae.compile(optimizer=optimizer)

    # get dataset
    (X_train, _), (X_test, _) = utils.load_mnist()
    train_generator = utils.make_generator(X_train, batch_size=batch_size)
    test_generator = utils.make_generator(X_test, batch_size=batch_size)
    train_size = len(X_train)
    test_size = len(X_test)

    # print summaries
    vae.print_model_summaries()

    # fit VAE
    steps_per_epoch = int(train_size / batch_size)
    validation_steps = int(test_size / batch_size)
    vae.fit_generator(train_generator,
                      epochs=epochs,
                      steps_per_epoch=steps_per_epoch,
                      validation_data=test_generator,
                      validation_steps=validation_steps)
def train_reconstruction_only_frey_network_with_image_latent_space(latent_filters):
    """Train the Frey-face VAE whose latent space is image-shaped.

    ``latent_filters`` sets the number of channels in the convolutional
    latent space; everything else is fixed below.

    NOTE(review): uses module-level `experiment`, `utils` and
    `FreyConvolutionalLatentSpaceNoBatchNormVAE`.
    """
    # fixed run hyperparameters
    input_shape = (1, 28, 20)
    filters = 32
    kernel_size = 2
    pool_size = 2
    beta = 1.0
    lr = 1e-4
    epochs = 20
    batch_size = 1

    # hyperparameter dictionary -> unique run/log directory name
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'filters': filters,
        'latent_filters': latent_filters,
        'kernel_size': kernel_size,
        'beta': beta,
        'lr': lr,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }
    hp_string = utils.build_hyperparameter_string('cvae_frey', hp_dictionary)
    log_dir = './summaries/' + experiment + '/' + hp_string + '/'

    # build the VAE architecture
    vae = FreyConvolutionalLatentSpaceNoBatchNormVAE(input_shape,
                                                     log_dir,
                                                     filters=filters,
                                                     latent_filters=latent_filters,
                                                     kernel_size=kernel_size,
                                                     pool_size=pool_size,
                                                     beta=beta)

    # compile with Adam at the configured learning rate
    from keras import optimizers
    vae.compile(optimizer=optimizers.Adam(lr=lr))

    # Frey faces dataset (labels ignored), wrapped as generators
    (X_train, _), (X_test, _) = utils.load_frey()
    train_gen = utils.make_generator(X_train, batch_size=batch_size)
    test_gen = utils.make_generator(X_test, batch_size=batch_size)

    vae.print_model_summaries()

    # one full pass over each dataset per epoch
    vae.fit_generator(train_gen,
                      epochs=epochs,
                      steps_per_epoch=len(X_train) // batch_size,
                      validation_data=test_gen,
                      validation_steps=len(X_test) // batch_size)
Example #10
0
def run_convolutional_latent_space_with_same_number_of_parameters():
    """Train the Frey-face VAE with a convolutional latent space sized to
    match the parameter count of the reference network.

    Latent shape: (128, 5, 3)
    Number of parameters in network: 119,521
    Number of parameters in latent space: 32,896

    NOTE(review): `beta` is placed in the hyperparameter dictionary but is
    not passed to the VAE constructor -- confirm whether
    `FreyConvolutionalLatentVAE` accepts/needs it.
    """
    # fixed run hyperparameters
    input_shape = (1, 28, 20)
    epochs = 1
    batch_size = 1
    beta = 1.0
    filters = 32
    kernel_size = 2
    latent_channels = 128
    pool_size = 2

    # hyperparameter dictionary -> unique run/log directory name
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'beta': beta,
        'filters': filters,
        'kernel_size': kernel_size,
        'latent_channels': latent_channels,
        'pool_size': pool_size,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }
    hp_string = utils.build_hyperparameter_string('cvae_frey_convolutional_latent',
                                                  hp_dictionary)
    log_dir = './summaries/' + hp_string + '/'

    # build the VAE architecture
    vae = FreyConvolutionalLatentVAE(input_shape,
                                     log_dir,
                                     filters=filters,
                                     kernel_size=kernel_size,
                                     latent_channels=latent_channels,
                                     pool_size=pool_size)

    # compile with a fixed Adam learning rate
    from keras import optimizers
    vae.compile(optimizer=optimizers.Adam(lr=1e-3))

    vae.print_model_summaries()

    # Frey faces dataset (labels ignored), wrapped as generators
    (X_train, _), (X_test, _) = utils.load_frey()
    train_gen = utils.make_generator(X_train, batch_size=batch_size)
    test_gen = utils.make_generator(X_test, batch_size=batch_size)

    # one full pass over each dataset per epoch
    vae.fit_generator(train_gen,
                      epochs=epochs,
                      steps_per_epoch=len(X_train) // batch_size,
                      validation_data=test_gen,
                      validation_steps=len(X_test) // batch_size)
def train_entangled_frey_network_with_fully_connected_filters_same_borders_no_pooling_less_filters():
    """Train the Frey-face VAE variant with fully-connected filters, 'same'
    borders, no pooling and a reduced filter count (beta=1, entangled).

    NOTE(review): uses module-level `utils` and
    `FreyOptimalConvolutionalLatentExperimentFullyConnectedFilterVAE`.
    """
    # fixed run hyperparameters
    input_shape = (1, 28, 20)
    filters = 8
    latent_channels = 16
    kernel_size = 2
    beta = 1.0  # entangled latent space
    epochs = 20
    batch_size = 1

    # hyperparameter dictionary -> unique run/log directory name
    hp_dictionary = {
        'epochs': epochs,
        'batch_size': batch_size,
        'beta': beta,
        'filters': filters,
        'latent_channels': latent_channels,
        'kernel_size': kernel_size,
        'loss': 'vae_loss',
        'optimizer': 'adam'
    }
    hp_string = utils.build_hyperparameter_string(
        'cvae_frey_entangled_with_fully_connected_filters_same_borders_no_pooling_less_filters',
        hp_dictionary)
    log_dir = './summaries/experiment_optimal_network_convolutional_latent_frey/' + hp_string + '/'

    # build the VAE architecture
    vae = FreyOptimalConvolutionalLatentExperimentFullyConnectedFilterVAE(
        input_shape,
        log_dir,
        filters=filters,
        latent_channels=latent_channels,
        kernel_size=kernel_size,
        beta=beta)

    # compile with a fixed Adam learning rate
    from keras import optimizers
    vae.compile(optimizer=optimizers.Adam(lr=1e-3))

    # Frey faces dataset (labels ignored), wrapped as generators
    (X_train, _), (X_test, _) = utils.load_frey()
    train_gen = utils.make_generator(X_train, batch_size=batch_size)
    test_gen = utils.make_generator(X_test, batch_size=batch_size)

    vae.print_model_summaries()

    # one full pass over each dataset per epoch
    vae.fit_generator(train_gen,
                      epochs=epochs,
                      steps_per_epoch=len(X_train) // batch_size,
                      validation_data=test_gen,
                      validation_steps=len(X_test) // batch_size)
Example #12
0
def loads_seed_in_generator():
    """Return a generator that yields crawl seeds from ``seed_file``.

    The caller advances it one seed at a time via ``next()``.
    """
    return make_generator(seed_file, '')