Example #1
    # Use a feedable placeholder for the discriminator learning rate when it
    # is adapted during training; otherwise use the fixed configured rate.
    if config.adaptive_rate:
        lr = tf.placeholder(tf.float32, shape=())
    else:
        lr = config.learning_rate

    # Separate Adam optimizers: a fixed rate for the generator and the
    # (possibly feedable) rate from above for the discriminator.
    gen_optimizer = tf.train.AdamOptimizer(config.gen_learning_rate, beta1=0.5, beta2=0.9)
    disc_optimizer = tf.train.AdamOptimizer(lr, beta1=0.5, beta2=0.9)

    # get_clipper may return either a clipper alone or a (clipper, sampler)
    # pair; when a sampler is present, attach its data loader.
    clipper_ret = get_clipper(config.clipper, config)
    if isinstance(clipper_ret, tuple):
        clipper, sampler = clipper_ret
        sampler.set_data_loader(sample_data_loader)
        sampler.keep_memory = False
    else:
        clipper = clipper_ret
        sampler = None

    scheduler = get_scheduler(config.scheduler, config)
    def callback_before_train(_0, _1, _2):
        # Log the clipper configuration once before training starts.
        print(clipper.info())
    supervisor = BasicSupervisorMNIST(config, clipper, scheduler, sampler=sampler,
                                      callback_before_train=callback_before_train)
    if config.adaptive_rate:
        supervisor.put_key("lr", lr)
    print(gan_data_loader)
    train(config, gan_data_loader, mnist.generator_forward, mnist.discriminator_forward,
          gen_optimizer=gen_optimizer,
          disc_optimizer=disc_optimizer, accountant=accountant,
          supervisor=supervisor, n_samples=n_samples)
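
Example #1 registers the feedable learning-rate placeholder with the
supervisor under the key "lr". Below is a minimal standalone sketch of how
such a placeholder-driven rate works in a TF1-style session, assuming a
simple decay schedule for illustration (the supervisor's actual feeding
logic is not shown in the snippet above):

import tensorflow as tf

# Feedable scalar learning rate, as in Example #1.
lr = tf.placeholder(tf.float32, shape=())
opt = tf.train.AdamOptimizer(lr, beta1=0.5, beta2=0.9)

# Toy objective so the sketch is runnable end to end.
w = tf.Variable(1.0)
step = opt.minimize(tf.square(w))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for t in range(100):
        # Assumed schedule for illustration only; the real supervisor may
        # derive the rate from training statistics instead.
        sess.run(step, feed_dict={lr: 1e-3 / (1.0 + 0.01 * t)})
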
Example #2
    # NOTE: `lr` and `gen_optimizer` are defined earlier in the source
    # function (see Example #1 above); this snippet is an excerpt.
    disc_optimizer = tf.train.AdamOptimizer(lr, beta1=config.beta1,
                                            beta2=config.beta2)

    clipper_ret = get_clipper(config.clipper, config)
    if isinstance(clipper_ret, tuple):
        clipper, sampler = clipper_ret
        # LSUN images come from a background loader; start prefetching so
        # samples are available to the sampler.
        sampler.set_data_loader(LSUNLoader(config.sample_dir, block_size=18, max_blocks=256,
                                           num_workers=2, actions=lsun_process_actions()))
        sampler.data_loader.start_fetch()
    else:
        clipper = clipper_ret
        sampler = None
    scheduler = get_scheduler(config.scheduler, config)

    def callback_before_train(_0, _1, _2):
        print(clipper.info())

    supervisor = BasicSupervisorLSUN(config, clipper, scheduler,
                                     sampler=sampler, callback_before_train=callback_before_train)
    if config.adaptive_rate:
        supervisor.put_key("lr", lr)

    try:
        train(config, data_loader, generator_forward, discriminator_forward,
              disc_optimizer=disc_optimizer,
              gen_optimizer=gen_optimizer, accountant=accountant, supervisor=supervisor)
    finally:
        # Always stop the background fetch threads, even if training fails.
        data_loader.stop_fetch()
        if sampler is not None:
            sampler.data_loader.stop_fetch()
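
Example #2 pairs start_fetch() with stop_fetch() in a finally block so the
loader's worker threads are shut down even when training raises. Below is a
minimal sketch of a loader honoring that contract, using a hypothetical
PrefetchLoader class (LSUNLoader's internals are not shown in the snippet):

import queue
import threading

class PrefetchLoader:
    """Hypothetical loader with the start_fetch/stop_fetch lifecycle that
    Example #2 relies on: a worker thread fills a bounded queue until the
    loader is explicitly stopped."""

    def __init__(self, produce_batch, capacity=8):
        self._produce_batch = produce_batch  # callable returning one batch
        self._queue = queue.Queue(maxsize=capacity)
        self._stop = threading.Event()
        self._worker = None

    def start_fetch(self):
        self._worker = threading.Thread(target=self._run, daemon=True)
        self._worker.start()

    def _run(self):
        while not self._stop.is_set():
            batch = self._produce_batch()
            try:
                self._queue.put(batch, timeout=0.1)
            except queue.Full:
                continue  # queue full: re-check the stop flag

    def next_batch(self):
        return self._queue.get()

    def stop_fetch(self):
        self._stop.set()
        if self._worker is not None:
            self._worker.join()

# Usage mirrors the try/finally pattern in Example #2.
loader = PrefetchLoader(lambda: [0] * 64)
loader.start_fetch()
try:
    batch = loader.next_batch()
finally:
    loader.stop_fetch()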