Example #1
            if batch_count % 1000 == 0:
                # every 1000 batches, decode the fixed latent vectors and save a 16x16 grid of samples
                save_as = samples_dir + '/' + model_name + '_SAMPLES{}.png'.format(batch_count)
                sample_data = sampling_function(fixed_hidden_data)[0]
                sample_data = np.asarray(sample_data)
                color_grid_vis(inverse_transform(sample_data).transpose([0, 2, 3, 1]), (16, 16), save_as)

                # dump the running moment-cost history alongside the sample images
                np.save(file=samples_dir + '/' + model_name + '_MOMENT_COST',
                        arr=np.asarray(moment_cost_list))

if __name__ == "__main__":

    batch_size = 128
    num_epochs = 100
    # load the ImageNet training stream from the project's data loader
    _, data_stream = imagenet(batch_size=batch_size)


    num_hiddens   = 100     # size of the hidden (latent) code
    learning_rate = 1e-3
    l2_weight     = 1e-10   # L2 regularization strength

    generator_optimizer = Adagrad(lr=sharedX(learning_rate),
                                  regularizer=Regularizer(l2=l2_weight))

    # encode the hyperparameters into the experiment name (regularizer and learning rate
    # are written as their negative base-10 exponents)
    model_test_name = model_name \
                      + '_HIDDEN{}'.format(int(num_hiddens)) \
                      + '_REG{}'.format(int(-np.log10(l2_weight))) \
                      + '_LR{}'.format(int(-np.log10(learning_rate)))

    train_model(model_name=model_test_name,
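
The color_grid_vis helper used in Example #1 is not shown in this excerpt. A minimal stand-in, assuming the samples arrive as float images in [0, 1] with shape (N, H, W, C) after inverse_transform and the transpose above, and using imageio purely for illustration, could look like this:

import numpy as np
from imageio import imwrite

def save_image_grid(images, rows, cols, path):
    # images: (N, H, W, C) floats in [0, 1]; tile the first rows*cols images into one PNG
    n, h, w, c = images.shape
    grid = np.zeros((rows * h, cols * w, c), dtype=images.dtype)
    for idx in range(min(n, rows * cols)):
        r, col = divmod(idx, cols)
        grid[r * h:(r + 1) * h, col * w:(col + 1) * w] = images[idx]
    imwrite(path, (grid * 255).astype(np.uint8))
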
Example #2
                # snapshot the generator, its batch-norm parameters, and the energy model parameters to disk
                save_model(tensor_params_list=generator_params + generator_bn_params + energy_params,
                           save_to=save_as)


if __name__ == "__main__":

    # experiment configuration
    model_config_dict = OrderedDict()
    model_config_dict['batch_size']          = 128
    model_config_dict['num_display']         = 16*16   # 256 samples shown per grid
    model_config_dict['hidden_distribution'] = 1.
    model_config_dict['epochs']              = 200

    #################
    # LOAD DATA SET #
    #################
    _, data_stream = imagenet(batch_size=model_config_dict['batch_size'])

    # hyperparameter grids to sweep over
    expert_size_list = [1024]
    hidden_size_list = [100]
    num_filters_list = [128]
    lr_list          = [1e-3]
    dropout_list     = [False,]
    lambda_eng_list  = [1e-10]
    lambda_gen_list  = [1e-10]

    for lr in lr_list:
        for num_filters in num_filters_list:
            for hidden_size in hidden_size_list:
                for expert_size in expert_size_list:
                    for dropout in dropout_list:
                        for lambda_eng in lambda_eng_list:
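
The nested loops in Example #2 are truncated here; they sweep every combination of the hyperparameter lists above. Purely as a sketch (not code from the original script), the same sweep can be written with itertools.product:

from itertools import product

for lr, num_filters, hidden_size, expert_size, dropout, lambda_eng in product(
        lr_list, num_filters_list, hidden_size_list,
        expert_size_list, dropout_list, lambda_eng_list):
    # build the per-run model name and launch training, as the truncated loop body does
    ...
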
Example #3
import os

# imagenet, transform, and the activations module are assumed to come from the
# project's local library, as in the other examples
b1 = 0.5             # momentum term of Adam
nc = 3               # number of image channels
ny = 10              # number of classes
nbatch = 128         # batch size
npx = 32             # image width/height in pixels
nz = 256             # dimensionality of the latent z vector
ndf = 128            # base number of discriminator filters
ngf = 128            # base number of generator filters
nx = nc * npx * npx  # total dimensionality of a flattened image
niter = 30           # epochs at the initial learning rate
niter_decay = 30     # additional epochs with learning-rate decay
lr = 0.0002          # initial learning rate
# ntrain = 100000
ntrain = 1281167     # size of the full ImageNet training set

tr_data, te_data, tr_stream, val_stream, te_stream = imagenet(ntrain=ntrain)

# grab a fixed batch of held-out examples and scale it to the model's input range
te_handle = te_data.open()
vaX, vaY = te_data.get_data(te_handle, slice(0, 10000))
vaX = transform(vaX)

# output locations for model checkpoints and sample grids
desc = 'imagenet_gan_pretrain_128f_relu_lrelu_7l_3x3_256z'
model_dir = 'models/%s' % desc
samples_dir = 'samples/%s' % desc
if not os.path.exists(model_dir):
    os.makedirs(model_dir)
if not os.path.exists(samples_dir):
    os.makedirs(samples_dir)

# nonlinearities used when building the generator and discriminator
relu = activations.Rectify()
sigmoid = activations.Sigmoid()
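
transform and inverse_transform in these examples are project-local helpers that are not shown. A common DCGAN-style convention, and only an assumption here, is to scale uint8 pixels into [-1, 1] for the model and map generated samples back into [0, 1] for display:

import numpy as np

def transform(X):
    # assumption: X arrives as uint8 images with shape (N, nc, npx, npx); scale to [-1, 1]
    return np.asarray(X, dtype='float32') / 127.5 - 1.

def inverse_transform(X):
    # map model outputs in [-1, 1] back into [0, 1] for visualization
    return (np.asarray(X) + 1.) / 2.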