Example #1
0
def train(model_type, epochs, batch_size, logdir):
    """Train a CNN variant on CIFAR-10 and log metrics to TensorBoard.

    Args:
        model_type: 'normal' for a plain CNN, 'reparam' for the
            reparameterization variant; any other value selects flipout.
        epochs: number of training epochs.
        batch_size: mini-batch size for both the train and test pipelines.
        logdir: directory for TensorBoard event files.

    Returns:
        None.
    """
    ds_train, ds_test = tfds.load('cifar10',
                                  split=['train', 'test'],
                                  as_supervised=True)

    # CIFAR-10 split sizes (fixed by the dataset).
    num_samples = 50000
    num_samples_test = 10000

    autotune = tf.data.experimental.AUTOTUNE

    # Shuffle over the full training set, then batch -> preprocess ->
    # repeat once per epoch -> prefetch.
    ds_train = (ds_train
                .shuffle(num_samples)
                .batch(batch_size)
                .map(preprocess, num_parallel_calls=autotune)
                .repeat(epochs)
                .prefetch(buffer_size=autotune))

    # Test pipeline: no shuffling, otherwise the same transformations.
    ds_test = (ds_test
               .batch(batch_size)
               .map(preprocess, num_parallel_calls=autotune)
               .repeat(epochs)
               .prefetch(buffer_size=autotune))

    # Scale the KL term by the dataset size so the divergence is averaged
    # per training example (standard ELBO weighting).
    def kl_div_fn(q, p, _):
        return tfd.kl_divergence(q, p) / num_samples

    if model_type == 'normal':
        model = CNN(num_classes=10)
    elif model_type == 'reparam':
        model = ReparamCNN(num_classes=10, kernel_divergence_fn=kl_div_fn)
    else:
        model = FlipOutCNN(num_classes=10, kernel_divergence_fn=kl_div_fn)

    # Set input_shape explicitly (before compile) to instantiate model.losses
    model.build(input_shape=[None, 32, 32, 3])

    model.compile(
        optimizers.Adam(),
        loss=losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name='accuracy')])

    model.fit(ds_train,
              epochs=epochs,
              callbacks=[tf.keras.callbacks.TensorBoard(log_dir=logdir)],
              validation_data=ds_test,
              steps_per_epoch=num_samples // batch_size,
              validation_steps=num_samples_test // batch_size)

    return None
# Normalize features: convert the list to an array scaled into [0, 1].
X = np.array(X) / 255.0

# Labels: reshape to a column vector, then one-hot encode.
y = to_categorical(np.array(y).reshape(-1, 1))

# Hold out 20% of the data for testing (fixed seed for reproducibility).
trainX, testX, trainY, testY = train_test_split(X,
                                                y,
                                                test_size=0.2,
                                                random_state=1)

print(trainX.shape, trainY.shape)
print(testX.shape, testY.shape)

# Build and compile the CNN model.
cnn = CNN(size, numOfLabels)
model = cnn.build()
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# Train, then evaluate on the held-out split.
model.fit(trainX, trainY, epochs=10, batch_size=BATCH)
_, acc = model.evaluate(testX, testY)

print("accuracy: %.2f%%" % (acc * 100))

# Persist the trained model to the path given on the command line.
model.save(args['save'] + '.h5')