Example #1
        features = tf.train.Features(feature=examples)
        example = tf.train.Example(features=features)
        writer.write(example.SerializeToString())

    count += batchsize

    print("Finished %0.2f percentage storing dataset" %
          (count * 100 / float(nb_samples)))

writer.close()
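
# _float32_feature_list is used in this excerpt but not defined here. A minimal
# sketch of what such a helper presumably looks like: it wraps a flat sequence
# of floats in a tf.train.Feature holding a FloatList (TF 1.x API, matching
# tf.python_io.TFRecordWriter above).
import tensorflow as tf

def _float32_feature_list(floats):
    return tf.train.Feature(float_list=tf.train.FloatList(value=floats))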
''' VAL SET '''
nb_samples = 5000
batchsize = 200

with sess.as_default():
    generator = val_generator(batchsize)
    writer = tf.python_io.TFRecordWriter('weights/nasnet_large_val.tfrecord')

count = 0
for _ in range(nb_samples // batchsize):
    x_batch, y_batch = next(generator)

    with sess.as_default():
        x_batch = model.predict(x_batch, batchsize, verbose=1)

    for i, (x, y) in enumerate(zip(x_batch, y_batch)):
        examples = {
            'features': _float32_feature_list(x.flatten()),
            'scores': _float32_feature_list(y.flatten()),
        }
        features = tf.train.Features(feature=examples)
        example = tf.train.Example(features=features)
        writer.write(example.SerializeToString())

    count += batchsize

    print("Finished storing %0.2f%% of the dataset" %
          (count * 100 / float(nb_samples)))

writer.close()
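
Example #2

# The model trained below is constructed earlier in the source file and is not
# shown in this excerpt. A minimal sketch, assuming a NIMA-style head (a 10-way
# softmax over score bins) on top of Inception ResNet V2; the input size,
# dropout rate and pooling choice are assumptions, not taken from the excerpt.
import os

from keras.applications.inception_resnet_v2 import InceptionResNetV2
from keras.callbacks import ModelCheckpoint
from keras.layers import Dense, Dropout
from keras.models import Model
from keras.optimizers import Adam

base_model = InceptionResNetV2(input_shape=(224, 224, 3), include_top=False,
                               pooling='avg', weights=None)
x = Dropout(0.75)(base_model.output)      # assumed dropout rate
x = Dense(10, activation='softmax')(x)    # distribution over 10 score bins
model = Model(base_model.input, x)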
model.summary()
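
# earth_mover_loss is referenced below but not defined in this excerpt. A sketch
# of the standard NIMA earth mover's distance (EMD) loss between two score
# distributions, assuming the Keras backend; an illustration, not necessarily
# the exact source implementation.
from keras import backend as K

def earth_mover_loss(y_true, y_pred):
    # compare the cumulative distributions of true and predicted scores
    cdf_true = K.cumsum(y_true, axis=-1)
    cdf_pred = K.cumsum(y_pred, axis=-1)
    emd = K.sqrt(K.mean(K.square(cdf_true - cdf_pred), axis=-1))
    return K.mean(emd)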
optimizer = Adam(lr=1e-3)
model.compile(optimizer, loss=earth_mover_loss)

# load weights from trained model if it exists
if os.path.exists('weights/inception_resnet_weights.h5'):
    model.load_weights('weights/inception_resnet_weights.h5')

# load pre-trained NIMA(Inception ResNet V2) classifier weights
# if os.path.exists('weights/inception_resnet_pretrained_weights.h5'):
#     model.load_weights('weights/inception_resnet_pretrained_weights.h5', by_name=True)

checkpoint = ModelCheckpoint('weights/inception_resnet_weights.h5',
                             monitor='val_loss',
                             verbose=1,
                             save_weights_only=True,
                             save_best_only=True,
                             mode='min')
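
# TensorBoardBatch is not a stock Keras callback; it appears to be a custom
# callback from the source project that logs metrics after every batch rather
# than every epoch. A minimal sketch of one possible implementation, assuming
# the TF 1.x summary API:
import tensorflow as tf
from keras.callbacks import TensorBoard

class TensorBoardBatch(TensorBoard):
    def __init__(self, *args, **kwargs):
        super(TensorBoardBatch, self).__init__(*args, **kwargs)
        self.global_step = 0

    def on_batch_end(self, batch, logs=None):
        # write each batch-level metric (e.g. loss) as a scalar summary
        for name, value in (logs or {}).items():
            if name in ('batch', 'size'):
                continue
            summary = tf.Summary(value=[tf.Summary.Value(tag=name,
                                                         simple_value=float(value))])
            self.writer.add_summary(summary, self.global_step)
        self.global_step += 1
        self.writer.flush()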
tensorboard = TensorBoardBatch()
callbacks = [checkpoint, tensorboard]

batchsize = 100
epochs = 20
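
# train_generator and val_generator are defined elsewhere in the source project.
# A hypothetical stand-in illustrating the interface fit_generator expects: an
# endless iterator of (image_batch, target_batch) tuples, where each target is
# a normalized 10-bin score histogram (the shape assumed by the softmax head
# and earth_mover_loss above). The name, image size and bin count are
# assumptions.
import numpy as np

def _dummy_score_generator(batchsize, image_size=224, n_bins=10):
    while True:
        x = np.random.rand(batchsize, image_size, image_size, 3).astype('float32')
        y = np.random.rand(batchsize, n_bins).astype('float32')
        y /= y.sum(axis=1, keepdims=True)   # each row sums to 1
        yield x, y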

model.fit_generator(train_generator(batchsize=batchsize),
                    steps_per_epoch=(250000 // batchsize),
                    epochs=epochs,
                    verbose=1,
                    callbacks=callbacks,
                    validation_data=val_generator(batchsize=batchsize),
                    validation_steps=(5000 // batchsize))