Example #1
0
    # Normalize pixel values to [-1, 1] to match a tanh generator output.
    # Assumes train_images holds values in [0, 255] — TODO confirm with the
    # loading code above this fragment.
    train_images = (train_images - 127.5) / 127.5

    # use tf.data.Dataset to create batches and shuffle --> data pipeline to TF model
    train_dataset = tf.data.Dataset.from_tensor_slices(train_images)
    train_dataset = train_dataset.shuffle(buffer_size=BUFFER_SIZE)
    train_dataset = train_dataset.batch(batch_size=BATCH_SIZE)

    # NOTE(review): Dataset.output_shapes is TF 1.x-only; under TF 2.x this
    # line would need tf.compat.v1.data.get_output_shapes(train_dataset) or
    # train_dataset.element_spec — confirm the TF version this targets.
    print("Shape of batches: {}".format(train_dataset.output_shapes))

    # ----- MODEL ----- #
    # Generator / Discriminator are project-defined Keras-style models
    # (defined elsewhere in this file — not visible in this fragment).
    g = Generator()
    d = Discriminator()

    # defun gives 10s per epoch performance boost
    # (tf.contrib.eager.defun traces the eager call into a graph function;
    # tf.contrib was removed in TF 2.x, where tf.function is the equivalent.)
    g.call = tf.contrib.eager.defun(g.call)
    d.call = tf.contrib.eager.defun(d.call)

    # Optimizers
    # Separate Adam optimizers so generator and discriminator can use
    # independent learning rates (D_LEARNING_RATE / G_LEARNING_RATE are
    # module-level constants defined outside this fragment).
    d_optimizer = tf.train.AdamOptimizer(learning_rate=D_LEARNING_RATE)
    g_optimizer = tf.train.AdamOptimizer(learning_rate=G_LEARNING_RATE)
    # Disabled checkpointing code below: kept as a bare string literal so it
    # never executes. Re-enable by removing the surrounding triple quotes.
    """
    # Checkpoints
    checkpoint_dir = output_dir
    checkpoint_prefix = os.path.join(checkpoint_dir, "ckpt")
    checkpoint = tf.train.Checkpoint(
        g_optimizer=g_optimizer,
        d_optimizer=d_optimizer,
        g=g,
        d=d
    )
    """