Example #1
    # --- Training setup (fragment; enclosing function header not visible) ---
    # Route log output to <model_dir>/output/train.log and fix all RNG seeds
    # for reproducibility.
    set_logger(os.path.join(args.model_dir, 'output/train.log'))
    set_seeds(args.seed)

    # Prepare dataset
    logging.info('Preparing dataset...')
    # Build the corpus capped at the requested vocabulary size, then shrink
    # args.vocab_size in place if the corpus actually has fewer tokens.
    # NOTE(review): this mutates `args`, which later code (and the second
    # fragment below) re-reads — confirm that is intentional.
    corpus = Corpus(args.data_dir, n_tokens=args.vocab_size)
    args.vocab_size = min(args.vocab_size, corpus.vocab_size)
    # Training pipeline of (source, target) pairs, batched by params.batch_size.
    dataset = tf.data.Dataset.from_tensor_slices((corpus.train_source, corpus.train_target)).batch(params.batch_size)

    # Models: an autoencoder plus a GAN pair (discriminator/generator) —
    # presumably an ARAE-style text-GAN setup; confirm against the paper/repo.
    autoencoder = Seq2Seq(params, args)
    discriminator = Discriminator(params)
    generator = Generator(params)

    # All three components are trainable in this phase.
    autoencoder.trainable = True
    discriminator.trainable = True
    generator.trainable = True

    # Optimizers: plain SGD for the autoencoder, Adam (with params.beta1)
    # for the adversarial pair. The RMSprop line is a kept alternative.
    ae_optim = tf.keras.optimizers.SGD(params.lr_ae)
    #ae_optim = tf.keras.optimizers.RMSprop(params.lr_ae)
    disc_optim = tf.keras.optimizers.Adam(params.lr_disc, params.beta1)
    gen_optim = tf.keras.optimizers.Adam(params.lr_gen, params.beta1)

    # Bundle models/optimizers as tuples for checkpointing.
    models = autoencoder, discriminator, generator
    optimizers = ae_optim, disc_optim, gen_optim

    # TensorBoard-style summary writer rooted at the model directory.
    tb_writer = SummaryWriter(logdir=args.model_dir)

    # Restore the latest checkpoint (if any) for models and optimizers
    # from <model_dir>/ckpts.
    ckpts = Checkpoints(models, optimizers, os.path.join(args.model_dir, 'ckpts'))
    ckpts.restore()
    # --- Evaluation / AnoGAN setup (fragment; mirrors the training setup
    # above but freezes all models). Enclosing function header not visible;
    # the function likely continues past this excerpt (no ckpts.restore()
    # call is visible here — confirm it follows).
    set_seeds(args.seed)

    # Prepare dataset
    logging.info('Preparing dataset...')
    # Rebuild the corpus and re-clamp args.vocab_size, same as in training.
    corpus = Corpus(args.data_dir, n_tokens=args.vocab_size)
    args.vocab_size = min(args.vocab_size, corpus.vocab_size)
    # Test pipeline of (source, target, label) triples; batch size is a
    # hard-coded 32 here, unlike the params.batch_size used for training —
    # presumably deliberate for evaluation, but verify.
    test_dataset = tf.data.Dataset.from_tensor_slices(
        (corpus.test_source, corpus.test_target, corpus.test_label)).batch(32)

    # Models — fresh instances, to be populated from a checkpoint.
    autoencoder = Seq2Seq(params, args)
    discriminator = Discriminator(params)
    generator = Generator(params)

    # Freeze everything: this phase only runs inference/scoring.
    autoencoder.trainable = False
    discriminator.trainable = False
    generator.trainable = False

    # Optimizers are still constructed even though the models are frozen —
    # presumably only so Checkpoints can restore their slot variables;
    # TODO confirm against the Checkpoints implementation.
    ae_optim = tf.keras.optimizers.SGD(params.lr_ae)
    #ae_optim = tf.keras.optimizers.RMSprop(params.lr_ae)
    disc_optim = tf.keras.optimizers.Adam(params.lr_disc, params.beta1)
    gen_optim = tf.keras.optimizers.Adam(params.lr_gen, params.beta1)

    models = autoencoder, discriminator, generator
    optimizers = ae_optim, disc_optim, gen_optim

    # Summaries for this phase go to a separate 'anogan' subdirectory so
    # they don't mix with the training logs.
    tb_writer = SummaryWriter(logdir=os.path.join(args.model_dir, 'anogan'))

    # Checkpoint manager pointed at the same <model_dir>/ckpts as training.
    ckpts = Checkpoints(models, optimizers,
                        os.path.join(args.model_dir, 'ckpts'))