Example no. 1
# snippet starts mid-call: the checkpoint path (WEIGHTS_BEST) and the monitored
# metric are assumptions
checkpoint = ModelCheckpoint(WEIGHTS_BEST,
                             monitor='loss',
                             save_best_only=True,
                             save_weights_only=True,
                             mode='min',
                             period=1)
csv_logger = CSVLogger(TRAINING_LOG, append=True)
tb = TensorBoard(log_dir=LOGS_DIR,
                 histogram_freq=0,
                 write_graph=True,
                 write_images=False)

callbacks_list = [lrate, checkpoint, csv_logger, tb]

# sgd optimizer with lr multipliers
multisgd = MultiSGD(lr=base_lr,
                    momentum=momentum,
                    decay=0.0,
                    nesterov=False,
                    lr_mult=lr_mult)

# start training
model.compile(loss=losses,
              loss_weights=loss_weights,
              optimizer=multisgd,
              metrics=["accuracy"])

model.fit_generator(train_di,
                    steps_per_epoch=train_samples // batch_size,
                    epochs=max_iter,
                    callbacks=callbacks_list,
                    validation_data=val_di,
                    validation_steps=val_samples // batch_size)

# data iterator used below; this call also starts mid-snippet, so the iterator
# class name and dataset path are assumptions
di = DataIterator(TRAIN_DATASET,
                  data_shape=(3, 368, 368),
                  label_shape=(57, 46, 46),
                  split_point=38,
                  batch_size=batch_size,
                  shuffle=True)

x, y1, y2 = di.next()

print("x  : ", x.shape)
print("y1 : ", y1.shape)
print("y2 : ", y2.shape)

# as suggested in: https://github.com/fchollet/keras/issues/5920

last_layer_variables = list()
for layer in model.layers:
    #print(layer.weights)
    if layer.name == 'prediction':
        last_layer_variables.extend(layer.weights)
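
# Optional sanity check, not in the original snippet: list the collected weights,
# assuming the output layer really is named 'prediction' in this model.
for w in last_layer_variables:
    print(w.name)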

multisgd = MultiSGD(lr=base_lr,
                    momentum=momentum,
                    decay=weight_decay,
                    nesterov=False,
                    exception_vars=last_layer_variables,
                    multiplier=0.1)

model.compile(loss='mean_squared_error',
              optimizer=multisgd,
              metrics=['accuracy'])
Example no. 3
    labels = [   # list head assumed; the snippet starts mid-list
        Input(shape=(image_shape[0] // 8, image_shape[1] // 8, 38)),
        Input(shape=(image_shape[0] // 8, image_shape[1] // 8, 19)),
        Input(shape=(image_shape[0] // 8, image_shape[1] // 8, 38)),
        Input(shape=(image_shape[0] // 8, image_shape[1] // 8, 19)),
        Input(shape=(image_shape[0] // 8, image_shape[1] // 8, 38)),
        Input(shape=(image_shape[0] // 8, image_shape[1] // 8, 19))
    ]
    params = model.trainable_weights
    loss = total_eucl_loss(model.output,
                           labels,
                           len(labels),
                           batch_size=batch_size)
    # opt = keras.optimizers.SGD(lr=1e-5, momentum=0.9, decay=1e-6, nesterov=True).get_updates(params, [], loss)
    opt = MultiSGD(lr=1e-5,
                   momentum=0.9,
                   decay=1e-6,
                   nesterov=True,
                   lr_mult=optimizer_lr_mult(model)).get_updates(params, [], loss)
    train_model = K.function(model.input + labels + [K.learning_phase()],
                             [loss], opt)
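
    # Hypothetical usage of the training function above (not in the original
    # snippet): pass the model's input array(s), the six label arrays, and a
    # trailing 1 that sets K.learning_phase() to training mode; K.function
    # returns a list of outputs, so element 0 is the loss, e.g.:
    #
    #   batch_loss = train_model(input_arrays + label_arrays + [1])[0]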

    # define generators
    t_generator = Generator("images/train_dataset_2014.npy")
    v_generator = Generator("images/valid_dataset_2014.npy")

    # train the model
    for epoch in range(epochs):
        t_total_coss = 0
        t_steps = t_generator.samples_length // batch_size
        for step in range(t_steps):