Example #1
        batch_size,
        w_sam_map=w_sample,
        train_callbacks=[checkpoint])
    train_losses += tmp_train_losses
    val_losses += tmp_val_losses
    y_pred = classifier.predict(x_valid)
    f2samples = fbeta_score(np.array(y_valid),
                            y_pred > 0.2,
                            beta=2,
                            average='samples')
    print("F2 samples = {}".format(f2samples))

# <markdowncell>

# ## Load Best Weights

# Here you should load back in the best weights that were automatically saved by ModelCheckpoint during training.

# <codecell>
classifier.load_weights(model_filepath + ".hdf5")
print("Weights loaded")
p_pred = classifier.predict(x_valid)

Y = np.array(y_valid)

thrs = optimise_f2_thresholds(Y, p_pred, y_map)
y_pred = p_pred > thrs[np.newaxis, :]

f2samples = fbeta_score(Y, y_pred, beta=2, average="samples")
print("F2 samples = {}".format(f2samples))

R = recall_score(Y, y_pred, average=None)     # per-class recall
P = precision_score(Y, y_pred, average=None)  # per-class precision
A = np.equal(Y.astype('bool'), y_pred).sum(axis=0).astype('float') / Y.shape[0]  # per-class accuracy
C = Y.sum(axis=0)                             # per-class support (positive counts)
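
# <markdowncell>

# The helper `optimise_f2_thresholds` used above is not defined in this excerpt. Below is a minimal sketch of what it plausibly does — a greedy per-class threshold search that maximises the samples-averaged F2 score, tuning one label's threshold at a time while holding the others fixed. The signature and the `y_map` argument come from the call above; the implementation details are assumptions.

# <codecell>

import numpy as np
from sklearn.metrics import fbeta_score

def optimise_f2_thresholds(y_true, p_pred, y_map, resolution=100):
    n_labels = len(y_map)
    thresholds = np.full(n_labels, 0.2)  # start from the 0.2 default used above

    def f2(thrs):
        return fbeta_score(y_true, p_pred > thrs[np.newaxis, :],
                           beta=2, average='samples')

    # Greedy coordinate search: tune one label's threshold at a time,
    # holding the others fixed
    for i in range(n_labels):
        best_thr, best_score = thresholds[i], -1.0
        for t in np.linspace(0.0, 1.0, resolution, endpoint=False):
            thresholds[i] = t
            score = f2(thresholds)
            if score > best_score:
                best_thr, best_score = t, score
        thresholds[i] = best_thr
    return thresholds
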
Example #2
        validation_split_size=validation_split_size,
        train_callbacks=[checkpoint])
    train_losses += tmp_train_losses
    val_losses += tmp_val_losses

# <markdowncell>

# ## Load Best Weights

# <markdowncell>

# Here you should load back in the best weights that were automatically saved by ModelCheckpoint during training.

# <codecell>

classifier.load_weights("weights.best.hdf5")
print("Weights loaded")

# <markdowncell>

# ## Monitor the results

# <markdowncell>

# Check that we are not overfitting by plotting the losses on the training and validation sets.

# <codecell>

plt.plot(train_losses, label='Training loss')
plt.plot(val_losses, label='Validation loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.legend()
plt.show()

classifier = AmazonKerasClassifier()
classifier.add_conv_layer(img_resize)   # convolutional feature extractor sized for the resized images
classifier.add_flatten_layer()
classifier.add_ann_layer(len(y_map))    # dense head with one output per label

train_losses, val_losses = [], []
epochs_arr = [20, 5, 5]
learn_rates = [0.001, 0.0001, 0.00001]
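# Staged schedule: a long pass at a high learning rate, then short
# fine-tuning passes at progressively lower rates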
for learn_rate, epochs in zip(learn_rates, epochs_arr):
    tmp_train_losses, tmp_val_losses, fbeta_score = classifier.train_model(x_train, y_train, learn_rate, epochs, 
                                                                           batch_size, validation_split_size=validation_split_size, 
                                                                           train_callbacks=[checkpoint])
    train_losses += tmp_train_losses
    val_losses += tmp_val_losses

# Reload the best checkpoint saved during the staged training above
classifier.load_weights("weights.best.hdf5")
print("Weights loaded")

print(fbeta_score)  # score returned by the last training stage

# Free the training data before loading the large test set
del x_train, y_train
gc.collect()

x_test, x_test_filename = data_helper.preprocess_test_data(test_jpeg_dir, img_resize)
# Predict the labels of our x_test images
predictions = classifier.predict(x_test)

del x_test
gc.collect()

# The test images come in two directories; predict on the additional set as well
x_test, x_test_filename_additional = data_helper.preprocess_test_data(test_jpeg_additional, img_resize)
new_predictions = classifier.predict(x_test)
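
# <markdowncell>

# A minimal sketch of how the two prediction arrays could be turned into label strings — stacking both test chunks, thresholding the class probabilities, and mapping column indices back through `y_map` (assumed here to map a column index to its label name). The 0.2 threshold mirrors the one used on the validation set above; this continuation is an assumption, not part of the original excerpt.

# <codecell>

import numpy as np

# Stack the predictions from both test chunks and pair them with filenames
all_predictions = np.vstack([predictions, new_predictions])
all_filenames = list(x_test_filename) + list(x_test_filename_additional)

# Threshold each class probability and map column indices to label names
predicted_labels = [' '.join(y_map[i] for i, p in enumerate(row) if p > 0.2)
                    for row in all_predictions]

print(list(zip(all_filenames, predicted_labels))[:3])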