Code example #1
import json
import os

# specify generator; args, GeneratorParams, and the layer helpers used below
# are defined elsewhere in the project's model.py
generator_sequence_filename = os.path.join(args.model_dir, "generator.json")

if os.path.isfile(generator_sequence_filename):
    print "loading", generator_sequence_filename
    with open(generator_sequence_filename, "r") as f:
        try:
            params = json.load(f)
        except ValueError:  # json.load raises ValueError on malformed JSON
            raise Exception(
                "could not load {}".format(generator_sequence_filename))
else:
    # no saved config found; fall back to default hyperparameters
    config = GeneratorParams()
    config.ndim_input = ndim_z
    config.distribution_output = "tanh"
    config.use_weightnorm = False
    config.weight_init_std = 0.001
    config.weight_initializer = "Normal"
    config.nonlinearity = "relu"
    config.optimizer = "Adam"
    config.learning_rate = 0.0001
    config.momentum = 0.5
    config.gradient_clipping = 10
    config.weight_decay = 0

    # model
    # compute the projection width: the spatial size the generator must start
    # from so that four deconvolution layers upsample it back to image_width
    input_size = get_in_size_of_deconv_layers(image_width,
                                              num_layers=4,
                                              ksize=4,
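
The call above is cut off in the original listing, so the remaining arguments are unknown. As a purely illustrative sketch, here is what a helper with this name plausibly computes: it inverts the transposed-convolution output-size formula out = stride*(in - 1) + ksize - 2*pad over num_layers layers to find the spatial size the generator's projection must start from. The stride and padding defaults below are assumptions, not values taken from the project:

# hypothetical reimplementation -- stride=2 and pad=1 are assumed defaults
def get_in_size_of_deconv_layers(target_size, num_layers, ksize, stride=2, pad=1):
    size = target_size
    for _ in range(num_layers):
        # invert the deconv output-size formula:
        # out = stride*(in - 1) + ksize - 2*pad
        size = (size - ksize + 2 * pad) // stride + 1
    return size

print(get_in_size_of_deconv_layers(64, num_layers=4, ksize=4))  # 64 -> 32 -> 16 -> 8 -> 4
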
Code example #2
File: model.py  Project: takerum/LSGAN
import json
import os

generator_sequence_filename = os.path.join(args.model_dir, "generator.json")

if os.path.isfile(generator_sequence_filename):
    print "loading", generator_sequence_filename
    with open(generator_sequence_filename, "r") as f:
        try:
            generator_params = json.load(f)
        except ValueError:  # json.load raises ValueError on malformed JSON
            raise Exception(
                "could not load {}".format(generator_sequence_filename))
else:
    config = GeneratorParams()
    config.ndim_input = 256
    config.ndim_output = 2
    config.num_mixture = args.num_mixture
    config.distribution_output = "universal"
    config.use_weightnorm = False
    config.weight_std = 0.01
    config.weight_initializer = "Normal"
    config.nonlinearity = "relu"
    config.optimizer = "adam"
    config.learning_rate = 0.0001
    config.momentum = 0.5
    config.gradient_clipping = 1
    config.weight_decay = 0

    # generator
    generator = Sequential()
    generator.add(
        Linear(config.ndim_input, 128, use_weightnorm=config.use_weightnorm))
    # generator.add(BatchNormalization(128))
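
Neither branch in the excerpt shows the config being written back to disk. As a hedged sketch (not from the source), this is how the freshly built config could be persisted so that the os.path.isfile branch above picks it up on the next run; it assumes GeneratorParams is a plain attribute container, so vars() yields a JSON-serializable dict:

# hedged sketch: persist the config built in the else-branch
# (assumes GeneratorParams stores only JSON-serializable attributes)
generator_params = vars(config)
with open(generator_sequence_filename, "w") as f:
    json.dump(generator_params, f, indent=4, sort_keys=True)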