	config.weight_init_std = 0.001
	config.weight_initializer = "Normal"
	config.nonlinearity = "relu"
	config.optimizer = "Adam"
	config.learning_rate = 0.0002
	config.momentum = 0.5
	config.gradient_clipping = 5
	config.weight_decay = 0

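	# decoder: two 1000-unit hidden layers mapping back to an ndim_x-dimensional output with a sigmoid at the end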
	decoder = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
	decoder.add(Linear(None, 1000))
	decoder.add(Activation(config.nonlinearity))
	decoder.add(Linear(None, 1000))
	decoder.add(Activation(config.nonlinearity))
	decoder.add(Linear(None, config.ndim_x))
	decoder.add(sigmoid())

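	# discriminator: merges two inputs, adds Gaussian noise (std=0.3) for regularization, and ends in a 2-unit linear output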
	discriminator = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
	discriminator.add(Merge(num_inputs=2, out_size=1000, nobias=True))
	discriminator.add(gaussian_noise(std=0.3))
	discriminator.add(Activation(config.nonlinearity))
	discriminator.add(Linear(None, 1000))
	discriminator.add(Activation(config.nonlinearity))
	discriminator.add(Linear(None, 1000))
	discriminator.add(Activation(config.nonlinearity))
	discriminator.add(Linear(None, 2))

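	# generator: fully-connected network on an ndim_x-dimensional input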
	generator = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
	generator.add(Linear(config.ndim_x, 1000))
	generator.add(Activation(config.nonlinearity))
	generator.add(Linear(None, 1000))

Example 2

    generator.add(
        Deconvolution2D(None,  # in_channels placeholder; the call's leading arguments are cut off in the source
                        128,
                        ksize=4,
                        stride=2,
                        pad=paddings.pop(0),
                        use_weightnorm=config.use_weightnorm))
    generator.add(BatchNormalization(128))
    generator.add(Activation(config.nonlinearity))
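    # final deconvolution produces a 3-channel output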
    generator.add(
        Deconvolution2D(128,
                        3,
                        ksize=4,
                        stride=2,
                        pad=paddings.pop(0),
                        use_weightnorm=config.use_weightnorm))
    if config.distribution_output == "sigmoid":
        generator.add(sigmoid())
    if config.distribution_output == "tanh":
        generator.add(tanh())

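    # serialize the hyperparameters and layer definitions so the generator can be rebuilt from JSON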
    params = {
        "config": config.to_dict(),
        "model": generator.to_dict(),
    }

    with open(generator_sequence_filename, "w") as f:
        json.dump(params, f, indent=4, sort_keys=True, separators=(',', ': '))

generator_params = params

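# rebuild the GAN from the saved discriminator/generator specs and load weights from the model directory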
gan = GAN(discriminator_params, generator_params)
gan.load(args.model_dir)

Example 3

	config.learning_rate = 0.0002
	config.momentum = 0.5
	config.gradient_clipping = 10
	config.weight_decay = 0

	# generative model: two weight-normalized 500-unit hidden layers with batch normalization
	model = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
	model.add(Linear(config.ndim_input, 500, use_weightnorm=config.use_weightnorm))
	model.add(BatchNormalization(500))
	model.add(Activation(config.nonlinearity))
	model.add(Linear(None, 500, use_weightnorm=config.use_weightnorm))
	model.add(BatchNormalization(500))
	model.add(Activation(config.nonlinearity))
	model.add(Linear(None, config.ndim_output, use_weightnorm=config.use_weightnorm))
	if config.distribution_output == "sigmoid":
		model.add(sigmoid())
	if config.distribution_output == "tanh":
		model.add(tanh())

	params = {
		"config": config.to_dict(),
		"model": model.to_dict(),
	}

	with open(generative_model_filename, "w") as f:
		json.dump(params, f, indent=4, sort_keys=True, separators=(',', ': '))

params_generative_model = params

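# the DDGM pairs the energy model with this generative model; load any saved weights from args.model_dir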
ddgm = DDGM(params_energy_model, params_generative_model)
ddgm.load(args.model_dir)