discriminator_params = params

# specify generator
generator_sequence_filename = args.model_dir + "/generator.json"

if os.path.isfile(generator_sequence_filename):
	print "loading", generator_sequence_filename
	with open(generator_sequence_filename, "r") as f:
		try:
			params = json.load(f)
		except Exception:
			raise Exception("could not load {}".format(generator_sequence_filename))
else:
	config = GeneratorParams()
	config.ndim_input = ndim_z
	config.distribution_output = "tanh"
	config.use_weightnorm = False
	config.weight_init_std = 0.001
	config.weight_initializer = "Normal"
	config.nonlinearity = "relu"
	config.optimizer = "Adam"
	config.learning_rate = 0.0001
	config.momentum = 0.5
	config.gradient_clipping = 10
	config.weight_decay = 0

	# model
	# compute projection width
	input_size = get_in_size_of_deconv_layers(image_width, num_layers=4,
ndim_latent_code = 50

# specify discriminator
discriminator_sequence_filename = args.model_dir + "/discriminator.json"

if os.path.isfile(discriminator_sequence_filename):
	print "loading", discriminator_sequence_filename
	with open(discriminator_sequence_filename, "r") as f:
		try:
			params = json.load(f)
		except Exception:
			raise Exception("could not load {}".format(discriminator_sequence_filename))
else:
	config = DiscriminatorParams()
	config.ndim_input = image_width * image_height
	config.clamp_lower = -0.01
	config.clamp_upper = 0.01
	config.num_critic = 5
	config.weight_init_std = 0.001
	config.weight_initializer = "Normal"
	config.use_weightnorm = False
	config.nonlinearity = "leaky_relu"
	config.optimizer = "rmsprop"
	config.learning_rate = 0.0001
	config.momentum = 0.5
	config.gradient_clipping = 1
	config.weight_decay = 0
	config.use_feature_matching = False
	config.use_minibatch_discrimination = False
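
# --- illustrative sketch (not part of the original file) -------------------------------
# clamp_lower / clamp_upper and num_critic above are the usual WGAN critic settings:
# the critic is trained num_critic times per generator update, and after each critic
# update every weight is clipped into [clamp_lower, clamp_upper] to keep the critic
# roughly Lipschitz. The helper below shows only that clipping step, assuming the
# critic exposes its weights as plain numpy arrays; the trainer in this repository
# may apply the clamp through its own optimizer hooks instead.
import numpy as np

def clip_critic_weights(weights, clamp_lower=-0.01, clamp_upper=0.01):
	# clip every weight array in place into [clamp_lower, clamp_upper]
	for w in weights:
		np.clip(w, clamp_lower, clamp_upper, out=w)
	return weights
# ----------------------------------------------------------------------------------------
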
				   separators=(',', ': '))

# specify generator
generator_sequence_filename = args.model_dir + "/generator.json"

if os.path.isfile(generator_sequence_filename):
	print "loading", generator_sequence_filename
	with open(generator_sequence_filename, "r") as f:
		try:
			generator_params = json.load(f)
		except Exception:
			raise Exception("could not load {}".format(generator_sequence_filename))
else:
	config = GeneratorParams()
	config.ndim_input = 256
	config.ndim_output = 2
	config.num_mixture = args.num_mixture
	config.distribution_output = "universal"
	config.use_weightnorm = False
	config.weight_std = 0.01
	config.weight_initializer = "Normal"
	config.nonlinearity = "relu"
	config.optimizer = "adam"
	config.learning_rate = 0.0001
	config.momentum = 0.5
	config.gradient_clipping = 1
	config.weight_decay = 0

	# generator
	generator = Sequential()
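
# --- illustrative sketch (not part of the original file) -------------------------------
# ndim_output = 2 and num_mixture above suggest the usual 2-D "ring of Gaussians" toy
# target used in GAN mode-collapse experiments. The sampler below is one common way to
# generate such data, assuming the modes sit evenly spaced on a circle of the given
# radius; the dataset used by this repository may be generated differently.
import numpy as np

def sample_gaussian_mixture(batchsize, num_mixture=8, radius=2.0, std=0.1):
	# pick a mixture component for every sample
	component = np.random.randint(0, num_mixture, size=batchsize)
	angles = 2.0 * np.pi * component / num_mixture
	# component means evenly spaced on a circle, plus isotropic Gaussian noise
	means = np.stack([radius * np.cos(angles), radius * np.sin(angles)], axis=1)
	return (means + std * np.random.randn(batchsize, 2)).astype(np.float32)
# ----------------------------------------------------------------------------------------
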
				   separators=(',', ': '))

# specify generator
generator_sequence_filename = args.model_dir + "/generator.json"

if os.path.isfile(generator_sequence_filename):
	print "loading", generator_sequence_filename
	with open(generator_sequence_filename, "r") as f:
		try:
			generator_params = json.load(f)
		except Exception:
			raise Exception("could not load {}".format(generator_sequence_filename))
else:
	config = GeneratorParams()
	config.ndim_input = ndim_latent_code
	config.ndim_output = image_width * image_height
	config.distribution_output = "tanh"
	config.use_weightnorm = False
	config.weight_std = 0.01
	config.weight_initializer = "Normal"
	config.nonlinearity = "relu"
	config.optimizer = "adam"
	config.learning_rate = 0.0001
	config.momentum = 0.5
	config.gradient_clipping = 1
	config.weight_decay = 0

	# generator
	generator = Sequential()
	generator.add(