# Assumed imports for this excerpt (input_image and the final layer tensor
# `model` are defined earlier, outside this listing):
import numpy as np
from PIL import Image
from keras.models import Model
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator

model = Model(inputs=input_image, outputs=model)
model.compile(optimizer=Adam(lr=0.001), loss='binary_crossentropy', metrics=['accuracy'])

## we will have to train in batches because the whole dataset won't fit in memory
# NB: featurewise_center only takes effect after datagen.fit() on a data sample;
# without that call Keras warns and skips the centering, leaving just the rescale.
datagen = ImageDataGenerator(featurewise_center=True, rescale=1./255)
training = datagen.flow_from_directory("sampled_tiles/junk_no_junk/training/",
        class_mode="binary", batch_size=32, target_size=(256, 256), color_mode="grayscale")

valid = datagen.flow_from_directory("sampled_tiles/junk_no_junk/validation/",
        class_mode="binary", batch_size=32, target_size=(256, 256), color_mode="grayscale")

# validation_steps=800 revisits batches when the validation iterator holds fewer
# than 800 of them; omit it to evaluate each validation batch once per epoch.
history = model.fit_generator(training, epochs=15, validation_data=valid,
        validation_steps=800)

with open("models/junk_no_junk_10_29_256px_1024_56k_subset.json", 'w') as out:
    out.write(model.to_json())
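
# Sketch: to_json() stores only the architecture, so restoring the model later
# also needs the weights (the weights filename below is an illustrative
# assumption, not part of the original script):
from keras.models import model_from_json
model.save_weights("models/junk_no_junk_weights.h5")
with open("models/junk_no_junk_10_29_256px_1024_56k_subset.json") as f:
    restored = model_from_json(f.read())
restored.load_weights("models/junk_no_junk_weights.h5")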

next_batch = valid.next()
b1, b1_labels = next_batch
b1_pred = model.predict_on_batch(b1)
for q, img_arr in enumerate(b1):
    # Undo the 1/255 rescale and drop the channel axis before saving each tile
    # as a PNG named with both its true class and its predicted score.
    img = np.uint8(img_arr * 255).reshape(256, 256)
    Image.fromarray(img).save("junk_test/test_" + str(q) + "_class_" +
                              str(b1_labels[q]) + "_label_" + str(b1_pred[q][0]) + ".png")
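
# Sketch: an overall validation score via evaluate_generator (the Keras 2 API
# used elsewhere in this example); len(valid) gives the number of batches:
val_loss, val_acc = model.evaluate_generator(valid, steps=len(valid))
print("validation loss %.4f, accuracy %.4f" % (val_loss, val_acc))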



Example #2
# Assumed imports for this example (results_path is defined elsewhere):
import keras
from keras import regularizers
from keras.applications.resnet50 import ResNet50
from keras.callbacks import EarlyStopping
from keras.layers import Dense, Flatten
from keras.models import Model
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator

def counter_model(x_train_all, x_val_all, y_train_all, y_val_all):
    x_aug = ImageDataGenerator(
        rotation_range=180,
        width_shift_range=0.1,
        height_shift_range=0.1,
        horizontal_flip=True,
        vertical_flip=True,
    )

    # fit() computes featurewise statistics (mean, std, and ZCA components),
    # but only when featurewise_center, featurewise_std_normalization, or
    # zca_whitening is enabled; with the purely geometric augmentation above
    # this call is a no-op.
    x_aug.fit(x_train_all)

    # ImageNet-pretrained ResNet50 backbone, without its classification head,
    # followed by a small fully connected regression head.
    res_model = ResNet50(weights='imagenet',
                         include_top=False,
                         input_shape=(320, 320, 3))
    x = res_model.output
    x = Flatten(name='flatten')(x)
    x = Dense(1024, activation='relu')(x)
    x = Dense(512,
              activation='relu',
              activity_regularizer=regularizers.l2(0.02))(x)
    # Single linear output: the leaf count is predicted as a regression target.
    leaf_pred = Dense(1)(x)

    eps = 50
    csv_logger = keras.callbacks.CSVLogger(results_path + '/training.log',
                                           separator=',')
    early_stop = EarlyStopping(monitor='val_loss',
                               min_delta=0.03,
                               mode='min',
                               patience=8)

    model = Model(inputs=res_model.input, outputs=leaf_pred)

    model.compile(optimizer=Adam(lr=0.0001), loss='mse')
    #fitted_model = model.fit(x_train_all, y_train_all, epochs=eps, batch_size=16, validation_split=0.1, callbacks= [csv_logger])
    fitted_model = model.fit_generator(x_aug.flow(x_train_all,
                                                  y_train_all,
                                                  batch_size=6),
                                       steps_per_epoch=812,
                                       epochs=eps,
                                       validation_data=(x_val_all, y_val_all),
                                       callbacks=[csv_logger, early_stop])
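
    # Sketch: instead of hard-coding 812 steps, the value can be derived from
    # the training-set size and the batch size of 6, e.g.:
    # steps_per_epoch = int(np.ceil(len(x_train_all) / 6.0))  # needs numpy as np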
    # A previously saved model could be restored instead with:
    # model = load_model(results_path + '/the_model.h5')

    ## Saving model parameters
    # To inspect backbone layer indices:
    # for i, layer in enumerate(res_model.layers):
    #     print(i, layer.name)

    model_json = model.to_json()
    # Persist the architecture JSON as well (the filename is an illustrative
    # assumption; only the HDF5 file is saved below):
    with open(results_path + '/the_model.json', 'w') as json_file:
        json_file.write(model_json)
    model.save(results_path + '/the_model.h5')

    # Optional loss plot; with early stopping the run may end before `eps`
    # epochs, so index by the length of the recorded history:
    # import matplotlib.pyplot as plt
    # n_epochs = len(fitted_model.history['loss'])
    # plt.title('Leaf Counting Loss')
    # plt.plot(range(1, n_epochs + 1), fitted_model.history['loss'], label='Training', color='k')
    # plt.plot(range(1, n_epochs + 1), fitted_model.history['val_loss'], label='Validation', color='r')
    # plt.xticks(range(1, n_epochs + 1))
    # plt.xlabel('Epochs')
    # plt.ylabel('Loss')
    # plt.legend(loc='best')
    # plt.savefig(results_path + '/counter_network_train.png')

    return model
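
A minimal usage sketch, assuming images are preloaded as a float array and leaf
counts as a vector (x_all, y_all, and the split below are illustrative, not
from the original):

from sklearn.model_selection import train_test_split

# x_all: (N, 320, 320, 3) images, y_all: (N,) integer leaf counts -- assumed inputs
x_train_all, x_val_all, y_train_all, y_val_all = train_test_split(
    x_all, y_all, test_size=0.1, random_state=42)
trained = counter_model(x_train_all, x_val_all, y_train_all, y_val_all)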