Example 1
print("Starting training...")
num_epochs = 200
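# Label vectors for the adversarial step: gen_label is passed to the
# generator update, while adv_label marks the concatenated discriminator
# batch (first 100 generated images as class 0, last 100 ground-truth
# images as class 1).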
gen_label = numpy.zeros(100).astype(numpy.int32)
adv_label = numpy.zeros(200).astype(numpy.int32)
adv_label[100:200] = 1
for epoch in range(num_epochs):

    # In each epoch, we do a full pass over the training data:

    train_err = 0
    adv_err = 0
    train_batches = 0
    start_time = time.time()

    for batch in mini_batch.iterate_minibatches_pair(flower_corrupt_train,
                                                     flower_truth_train,
                                                     100,
                                                     shuffle=True):
        inputs, targets = batch

        if epoch < 50:
            # For the first 50 epochs, only the plain training update runs.
            train_err += train_fn(inputs, targets)
        else:
            # Afterwards, update the generator and then feed the generated
            # images stacked on top of the ground-truth images to the
            # discriminator, with adv_label marking which half is which.
            err, gen_pic = generator_fn(inputs, targets, gen_label)
            train_err += err

            adv_feed = numpy.concatenate((gen_pic, targets), axis=0)
            adv_err += discriminator_fn(adv_feed, adv_label)

        train_batches += 1
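Both examples rely on mini_batch.iterate_minibatches_pair, whose definition is not shown here. Judging from how it is called (two aligned arrays, a batch size of 100, optional shuffling, and (inputs, targets) unpacking), it presumably behaves like the standard Lasagne-tutorial minibatch iterator; the body below is a sketch under that assumption, not the original implementation.

import numpy

def iterate_minibatches_pair(inputs, targets, batch_size, shuffle=False):
    # Assumed behaviour: walk two aligned arrays in parallel and yield
    # (inputs, targets) slices of batch_size rows, optionally shuffled.
    assert len(inputs) == len(targets)
    indices = numpy.arange(len(inputs))
    if shuffle:
        numpy.random.shuffle(indices)
    for start in range(0, len(inputs) - batch_size + 1, batch_size):
        batch_idx = indices[start:start + batch_size]
        yield inputs[batch_idx], targets[batch_idx]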
Example 2
import time

import theano

import mini_batch  # project helper providing iterate_minibatches_pair

# Compile a function that maps an input batch to the network's
# segmentation output (test_seg); it is used below to view reconstructions.
view_fn = theano.function([input_var], test_seg)

# The training loop
print("Starting training...")
num_epochs = 50
for epoch in range(num_epochs):

    # In each epoch, we do a full pass over the training data:

    train_err = 0
    train_batches = 0
    start_time = time.time()

    for batch in mini_batch.iterate_minibatches_pair(flower_train,
                                                     segment_train,
                                                     100,
                                                     shuffle=True):

        inputs, targets = batch
        train_err += train_fn(inputs, targets)
        train_batches += 1

    # Then we print the results for this epoch:

    print("Epoch {} of {} took {:.3f}s".format(epoch + 1, num_epochs,
                                               time.time() - start_time))

    print("  training loss:\t\t{:.6f}".format(train_err / train_batches))

    ################### View reconstruction ###################
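The example is cut off right after the visualization header. A minimal sketch of what the reconstruction-viewing step could look like, assuming a validation array flower_val in (N, C, H, W) layout (the array name is an assumption; view_fn is the function compiled above):

import matplotlib.pyplot as plt

# Assumption: flower_val holds validation images in (N, C, H, W) layout.
sample = flower_val[:1]
pred_seg = view_fn(sample)          # forward pass through the trained network

# Show the input image next to its predicted segmentation.
fig, axes = plt.subplots(1, 2, figsize=(8, 4))
axes[0].imshow(sample[0].transpose(1, 2, 0))   # CHW -> HWC for display
axes[0].set_title("input")
axes[1].imshow(pred_seg[0].squeeze(), cmap="gray")
axes[1].set_title("predicted segmentation")
plt.tight_layout()
plt.show()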