# (continuation of a generator.add(Deconvolution2D(...)) call opened above:
# upsampling layer with stride 2, consuming the next precomputed padding)
stride=2, pad=paddings.pop(0), use_weightnorm=config.use_weightnorm))
generator.add(BatchNormalization(128))
generator.add(Activation(config.nonlinearity))
# Final deconvolution: 128 channels -> 3 channels, 4x4 kernel, stride 2.
generator.add(
    Deconvolution2D(128, 3, ksize=4, stride=2, pad=paddings.pop(0), use_weightnorm=config.use_weightnorm))
# Squash generator output into the range matching the configured output
# distribution (sigmoid -> [0, 1], tanh -> [-1, 1]).
if config.distribution_output == "sigmoid":
    generator.add(sigmoid())
if config.distribution_output == "tanh":
    generator.add(tanh())
# Serializable description of the generator: hyperparameters + architecture.
params = {
    "config": config.to_dict(),
    "model": generator.to_dict(),
}
# Persist the architecture so the model can be re-instantiated later.
with open(generator_sequence_filename, "w") as f:
    json.dump(params, f, indent=4, sort_keys=True, separators=(',', ': '))
generator_params = params
# Assemble the GAN from the two serialized halves and restore saved weights.
gan = GAN(discriminator_params, generator_params)
gan.load(args.model_dir)
# -1 means CPU; any other value selects a GPU device (body continues below).
if args.gpu_device != -1:
config.gradient_clipping = 10
config.weight_decay = 0

# Feature extractor: three progressively narrower noisy hidden layers,
# then a tanh-bounded projection onto the expert dimension.
feature_extractor = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
in_size = config.ndim_input
for hidden_units in (1000, 500, 250):
    feature_extractor.add(Linear(in_size, hidden_units, use_weightnorm=config.use_weightnorm))
    feature_extractor.add(Activation(config.nonlinearity))
    feature_extractor.add(gaussian_noise(std=0.3))
    in_size = None  # later layers infer their input size
feature_extractor.add(Linear(None, config.num_experts, use_weightnorm=config.use_weightnorm))
feature_extractor.add(tanh())

# Experts: a single square linear map over the expert activations.
experts = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
experts.add(Linear(config.num_experts, config.num_experts, use_weightnorm=config.use_weightnorm))

# b: scalar head computed directly from the raw input, no bias term.
b = Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)
b.add(Linear(config.ndim_input, 1, nobias=True))

# Serializable description of the whole model.
params = {
    "config": config.to_dict(),
    "feature_extractor": feature_extractor.to_dict(),
    "experts": experts.to_dict(),
    "b": b.to_dict(),
}
# Optimization hyperparameters.
config.nonlinearity = "relu"
config.optimizer = "Adam"
config.learning_rate = 0.001
config.momentum = 0.5
config.gradient_clipping = 5
config.weight_decay = 0

# Decoder: two 1000-unit hidden layers, then a tanh-bounded mapping to x.
decoder = Sequential()
for _ in range(2):
    decoder.add(Linear(None, 1000))
    decoder.add(Activation(config.nonlinearity))
    # decoder.add(BatchNormalization(1000))  # batchnorm deliberately disabled
decoder.add(Linear(None, config.ndim_x))
decoder.add(tanh())

# Discriminator: merges two inputs into one 1000-unit representation,
# adds input noise, then stacks hidden layers before a 2-way output.
discriminator = Sequential()
discriminator.add(Merge(num_inputs=2, out_size=1000, nobias=True))
discriminator.add(gaussian_noise(std=0.3))
discriminator.add(Activation(config.nonlinearity))
# discriminator.add(BatchNormalization(1000))  # batchnorm deliberately disabled
for _ in range(2):
    discriminator.add(Linear(None, 1000))
    discriminator.add(Activation(config.nonlinearity))
    # discriminator.add(BatchNormalization(1000))  # batchnorm deliberately disabled
discriminator.add(Linear(None, 2))

generator = Sequential()
# Hyperparameters: Normal init with weight normalization, ELU activations,
# Adam with a small learning rate.
config.weight_init_std = 0.05
config.weight_initializer = "Normal"
config.use_weightnorm = True
config.nonlinearity = "elu"
config.optimizer = "Adam"
config.learning_rate = 0.0002
config.momentum = 0.5
config.gradient_clipping = 10
config.weight_decay = 0

def _shared_sequential():
    # Fresh Sequential carrying the shared weight-initialization settings.
    return Sequential(weight_initializer=config.weight_initializer, weight_init_std=config.weight_init_std)

# Feature extractor: two 128-unit layers with a tanh-bounded output.
feature_extractor = _shared_sequential()
feature_extractor.add(Linear(config.ndim_input, 128, use_weightnorm=config.use_weightnorm))
feature_extractor.add(Activation(config.nonlinearity))
feature_extractor.add(Linear(None, 128, use_weightnorm=config.use_weightnorm))
feature_extractor.add(tanh())

# Experts: linear map from the 128-dim features to one value per expert.
experts = _shared_sequential()
experts.add(Linear(128, config.num_experts, use_weightnorm=config.use_weightnorm))

# b: scalar head computed directly from the raw input, no bias term.
b = _shared_sequential()
b.add(Linear(config.ndim_input, 1, nobias=True))

# Serializable description of the whole model.
params = {
    "config": config.to_dict(),
    "feature_extractor": feature_extractor.to_dict(),
    "experts": experts.to_dict(),
    "b": b.to_dict(),
}