Example #1
def objective(trial):
    # Choose an EfficientNet variant (B0-B7) for this trial and build it from scratch.
    arch = get_arch_type(trial)
    efficientnets = {
        'b0': EfficientNetB0, 'b1': EfficientNetB1, 'b2': EfficientNetB2,
        'b3': EfficientNetB3, 'b4': EfficientNetB4, 'b5': EfficientNetB5,
        'b6': EfficientNetB6, 'b7': EfficientNetB7,
    }
    model = efficientnets[arch](weights=None, classes=n_classes, input_shape=in_shape)

    loss = tfk.losses.CategoricalCrossentropy()
    opt = get_optimizer(trial)
    alpha = get_alpha(trial)

    hist = training_mixup(model, train_ds, test_ds, loss, opt, n_epochs, batch_size, n_classes, alpha,
                          output_best_weights=False)

    test_acc = np.array(hist['test_acc'])

    # Return 1 - best test accuracy so that minimizing the objective maximizes accuracy.
    return 1 - test_acc.max()
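
The helpers get_arch_type, get_optimizer, and get_alpha are not shown in these listings. Assuming the trial object comes from Optuna, they would be thin wrappers around its suggest API; the following is only an illustrative sketch, with made-up search spaces and parameter names:

import tensorflow.keras as tfk

def get_arch_type(trial):
    # Hypothetical: pick one EfficientNet variant per trial.
    return trial.suggest_categorical('arch', ['b0', 'b1', 'b2', 'b3', 'b4', 'b5', 'b6', 'b7'])

def get_optimizer(trial):
    # Hypothetical: sample an optimizer type and a learning rate on a log scale.
    name = trial.suggest_categorical('optimizer', ['adam', 'sgd'])
    lr = trial.suggest_float('lr', 1e-5, 1e-2, log=True)
    return tfk.optimizers.Adam(lr) if name == 'adam' else tfk.optimizers.SGD(lr)

def get_alpha(trial):
    # Hypothetical: sample the mixup interpolation strength.
    return trial.suggest_float('alpha', 0.1, 0.4)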
Example #2
def objective(trial):
    # Fixed DenseNet121 architecture trained from scratch; only the optimizer and mixup alpha are tuned.
    model = DenseNet121(weights=None, classes=n_classes, input_shape=in_shape)
    loss = tfk.losses.CategoricalCrossentropy()
    opt = get_optimizer(trial)
    alpha = get_alpha(trial)

    hist = training_mixup(model, train_ds, test_ds, loss, opt, n_epochs, batch_size, n_classes, alpha,
                          output_best_weights=False)

    test_acc = np.array(hist['test_acc'])

    return 1 - test_acc.max()
Example #3
def objective(trial):
    model = InceptionResNetV2(classes=n_classes, input_shape=in_shape)
    loss = tfk.losses.CategoricalCrossentropy()
    opt = get_optimizer(trial)
    alpha = get_alpha(trial)

    # Only the trainable weights of model.layers[1] are passed on to be updated.
    train_weights = model.layers[1].trainable_weights

    hist = training_mixup(model, train_ds, test_ds, loss, opt, n_epochs, batch_size, n_classes, alpha,
                          output_best_weights=False, train_weights=train_weights)

    test_acc = np.array(hist['test_acc'])

    return 1 - test_acc.max()
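
All three objectives return 1 minus the best test accuracy, so they are meant to be minimized. Assuming Optuna provides the trial object, a study could drive any of them roughly as follows (the trial count is an arbitrary placeholder):

import optuna

# Minimize 1 - best test accuracy over a fixed number of trials.
study = optuna.create_study(direction='minimize')
study.optimize(objective, n_trials=50)

print('Best value (1 - test accuracy):', study.best_value)
print('Best hyperparameters:', study.best_params)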
Example #4
# Save the test split for later evaluation (protocol 4 handles large arrays),
# then free the in-memory arrays.
with open('test_ds.pkl', 'wb') as fp:
    pickle.dump((x_test, y_test), fp, protocol=4)

del x_train, y_train, x_test, y_test

# Tuned configurations: (model, learning rate, mixup alpha, pretrained flag).
models_and_params = {
    'vgg16': (models.wrapper.VGG16(weights=None, classes=3, input_shape=in_shape), 6.33e-5, 0.129, False),
    'vgg16_pretrained': (models.wrapper_T.VGG16(classes=3, input_shape=in_shape), 5.66e-5, 0.121, True),
    'vgg16_ft': (models.wrapper_T.VGG16(classes=3, input_shape=in_shape), 1.51e-5, 0.241, False),
}

# Loss
loss = tfk.losses.CategoricalCrossentropy()

# Train and validate each configuration with its tuned hyperparameters
for model_name in models_and_params:
    model, lr, alpha, pretrained = models_and_params[model_name]
    opt = tfk.optimizers.Adam(lr)

    weight_name = 'best_param_{}'.format(model_name)
    if not pretrained:
        hist = training_mixup(model, train_ds, test_ds, loss, opt, n_epochs, batch_size, n_classes, alpha=alpha,
                              output_best_weights=True, weight_name=weight_name)
    else:
        train_weights = model.layers[1].trainable_variables
        hist = training_mixup(model, train_ds, test_ds, loss, opt, n_epochs, batch_size, n_classes, alpha=alpha,
                              output_best_weights=True, weight_name=weight_name, train_weights=train_weights)

    plot_learning_curve('history_{}.png'.format(model_name), history=hist, epochs=n_epochs)
    pd.DataFrame(hist).to_csv('history_{}.csv'.format(model_name))
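
The test split pickled at the top of this example can be restored later, for instance to evaluate the saved best weights; a minimal sketch of reloading it:

import pickle

# Restore the arrays that were dumped with protocol 4 above.
with open('test_ds.pkl', 'rb') as fp:
    x_test, y_test = pickle.load(fp)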
Example #5
in_shape = x_train.shape[1:]
del x_train, y_train, x_test, y_test

# Model
# model = SimpleCNN(in_shape, n_out=3)
# model = VGG16(weights=None, classes=3, input_shape=in_shape)
# model = ResNet50(weights=None, classes=3, input_shape=in_shape)
# model = InceptionV3(weights=None, classes=3, input_shape=in_shape)
# model = InceptionV3(classes=3, input_shape=in_shape)
model = ResNet50(classes=n_classes, input_shape=in_shape)

# Loss
loss = tfk.losses.CategoricalCrossentropy()

# Optimizer
opt = tfk.optimizers.Adam(lr)

# Training
hist = training_mixup(model,
                      train_ds,
                      test_ds,
                      loss,
                      opt,
                      n_epochs,
                      batch_size,
                      n_classes,
                      alpha=0.2)

pd.DataFrame(hist).to_csv('history.csv')
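
Every example passes an alpha value to training_mixup. Its implementation is not shown here, but in the standard mixup formulation alpha parameterizes a Beta(alpha, alpha) distribution that blends each batch with a shuffled copy of itself, roughly as in this sketch (the helper name is illustrative, not the original function):

import numpy as np

def mixup_batch(x, y, alpha=0.2):
    # Standard mixup: convex combination of a batch with a shuffled copy of itself.
    lam = np.random.beta(alpha, alpha)
    idx = np.random.permutation(len(x))
    x_mixed = lam * x + (1 - lam) * x[idx]
    y_mixed = lam * y + (1 - lam) * y[idx]  # y must be one-hot encoded
    return x_mixed, y_mixed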