Example #1
from keras.layers import Input, Dense, concatenate, multiply
from keras.models import Model
from keras.optimizers import SGD

import densenet121  # project-local DenseNet with a `weights_path` argument

# `model_unet` and the INPUT_SHAPE / SPECIES_CLASSES / COVER_CLASSES
# constants are defined elsewhere in the project.


def build_model_densenet121_with_mask():
    img_input = Input(INPUT_SHAPE, name='data')

    # Pretrained U-Net that predicts a single-channel fish mask.
    mask_model = model_unet(INPUT_SHAPE)
    mask_model.load_weights(
        '../output/checkpoints/fish_mask_unet/model_fish_unet2/checkpoint-best-064-0.0476.hdf5'
    )

    # Broadcast the mask over the three RGB channels and zero out the
    # background before feeding the image to DenseNet.
    mask = mask_model(img_input)
    mask3 = concatenate([mask, mask, mask], axis=3)
    masked_image = multiply([img_input, mask3])

    base_model = densenet121.DenseNet(
        img_input=img_input,
        reduction=0.5,
        weights_path='../input/densenet121_weights_tf.h5',
        classes=1000)
    # Popping from `base_model.layers` does not rewire the graph that
    # `base_model(masked_image)` would execute, so drop the final
    # classifier (the last two layers) by rebuilding on an earlier tensor.
    truncated = Model(base_model.input, base_model.layers[-3].output)
    base_model_output = truncated(masked_image)

    species_dense = Dense(len(SPECIES_CLASSES),
                          activation='softmax',
                          name='cat_species')(base_model_output)
    cover_dense = Dense(len(COVER_CLASSES),
                        activation='softmax',
                        name='cat_cover')(base_model_output)

    model = Model(inputs=img_input, outputs=[species_dense, cover_dense])
    sgd = SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(optimizer=sgd,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    return model
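With two named softmax heads, the single 'categorical_crossentropy' string above is applied to each output and the two losses are summed with equal weight. The dict forms make per-head losses and weights explicit. A minimal usage sketch, assuming a hypothetical train_gen that yields (images, {'cat_species': one_hot, 'cat_cover': one_hot}) batches:

model = build_model_densenet121_with_mask()
model.compile(optimizer=SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True),
              loss={'cat_species': 'categorical_crossentropy',
                    'cat_cover': 'categorical_crossentropy'},
              loss_weights={'cat_species': 1.0, 'cat_cover': 1.0},
              metrics=['accuracy'])
model.fit_generator(train_gen, steps_per_epoch=100, epochs=10)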
Example #2
def build_model_densenet_121():
    # Uses the same imports and project constants as Example #1.
    img_input = Input(INPUT_SHAPE, name='data')
    base_model = densenet121.DenseNet(
        img_input=img_input,
        reduction=0.5,
        weights_path='../input/densenet121_weights_tf.h5',
        classes=1000)
    # The two pops only shorten the `layers` list; the underlying graph is
    # untouched. That is enough here because only `layers[-1].output` is
    # read, which now refers to the layer just before the classifier.
    base_model.layers.pop()
    base_model.layers.pop()

    species_dense = Dense(len(SPECIES_CLASSES),
                          activation='softmax',
                          name='cat_species')(base_model.layers[-1].output)
    cover_dense = Dense(len(COVER_CLASSES),
                        activation='softmax',
                        name='cat_cover')(base_model.layers[-1].output)

    model = Model(inputs=img_input, outputs=[species_dense, cover_dense])
    sgd = SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(optimizer=sgd,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    return model
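Index-based truncation like the pops above breaks silently if the architecture gains or loses a layer. A more robust sketch anchors on a layer name instead (the name 'pool5' is a placeholder assumption; check base_model.summary() for the actual name in this densenet121 module):

features = base_model.get_layer('pool5').output  # hypothetical layer name
species_dense = Dense(len(SPECIES_CLASSES), activation='softmax',
                      name='cat_species')(features)
cover_dense = Dense(len(COVER_CLASSES), activation='softmax',
                    name='cat_cover')(features)
model = Model(inputs=base_model.input, outputs=[species_dense, cover_dense])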
Example #3
import h5py
from keras import backend as K
from keras.layers import GlobalAveragePooling2D
from keras.models import Model
from keras.preprocessing.image import ImageDataGenerator


def dense121():
    weights_path = '/your path/wh_code/densenet/densenet121_weights_tf.h5'
    fileDir = '/your path/'  # base directory for the cached feature file
    image_size = (224, 224)

    # DenseNet-121 used as a fixed feature extractor: replace the 1000-way
    # classifier output with global average pooling.
    base_model = densenet121.DenseNet(reduction=0.5,
                                      classes=1000,
                                      weights_path=weights_path)
    model = Model(base_model.input,
                  GlobalAveragePooling2D()(base_model.output))

    gen = ImageDataGenerator(
        preprocessing_function=densenet121.preprocess_input)

    # shuffle=False keeps the prediction order aligned with
    # `generator.classes`. Several alternative crop variants of the
    # train/test directories were tried here; only the train3p/test pair
    # is kept. Steps below are ceil(n_images / batch_size).
    test_generator = gen.flow_from_directory("/your path/test",
                                             image_size,
                                             shuffle=False,
                                             batch_size=8,
                                             class_mode=None)  # 10593 images
    train_generator = gen.flow_from_directory("/your path/train3p",
                                              image_size,
                                              shuffle=False,
                                              batch_size=8)  # 55273 images

    train = model.predict_generator(train_generator, 6910,
                                    verbose=True)  # ceil(55273 / 8)
    test = model.predict_generator(test_generator, 1325,
                                   verbose=True)  # ceil(10593 / 8)

    # Cache the bottleneck features and labels so a small classifier head
    # can be trained without re-running the convolutional base.
    with h5py.File(fileDir + "wh_code/nocut/224_3p-dense121.h5", "w") as h:
        h.create_dataset("train", data=train)
        h.create_dataset("test", data=test)
        h.create_dataset("label", data=train_generator.classes)

    K.clear_session()
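The point of caching the pooled features to HDF5 is that a lightweight head can then be trained in seconds. A read-back sketch (not from the original code), assuming the file written above:

import h5py
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout

with h5py.File("/your path/wh_code/nocut/224_3p-dense121.h5", "r") as h:
    X_train = np.array(h["train"])
    y_train = np.array(h["label"])
    X_test = np.array(h["test"])

# Integer labels from flow_from_directory, so use the sparse loss.
head = Sequential([
    Dropout(0.5, input_shape=(X_train.shape[1],)),
    Dense(int(y_train.max()) + 1, activation='softmax'),
])
head.compile(optimizer='adam',
             loss='sparse_categorical_crossentropy',
             metrics=['accuracy'])
head.fit(X_train, y_train, batch_size=128, epochs=10, validation_split=0.1)
preds = head.predict(X_test, batch_size=128)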
Example #4

import os
from shutil import copyfile

import keras
from keras.callbacks import ModelCheckpoint, TensorBoard

# densenet121, ResNet, simple_cnn, get_densenet_2d_channel_last_2dense,
# mean_angular_error, mean_sqrt_error, Scale, ConnectMap, normalize,
# my_function and Connection_map_plot are project-local imports.


def train_ps_cnn(epochs, datapath, net_type, log_save_path,
                 continue_from_weight, keep_zero_map_rate,
                 update_zero_map_path, update_zero_map_array, lr, lr_s,
                 training_generator, validation_generator, kernel_regu):
    normal_classes = 3
    continue_from_model = None

    kernel_constraint = keras.constraints.NonNeg()
    weight_decay = 0

    loss_funcs = keras.losses.mean_squared_error
    metric_funcs = [mean_angular_error]
    input_shape = (14,14,1)

    if continue_from_model is None:
        if net_type == "DenseNet":
            model = densenet121.DenseNet(nb_dense_block=3, growth_rate=32, nb_filter=64, reduction=0.0,
                                         dropout_rate=0.2, weight_decay=1e-4, classes=normal_classes, weights_path=None,
                                         input_shape=input_shape)
            model.compile(loss=loss_funcs,
                          optimizer=keras.optimizers.Adam(),
                          metrics=metric_funcs)
        elif net_type == "ResNet":
            model = ResNet(input_shape=input_shape, n=3, classes=normal_classes)
            model.compile(loss=loss_funcs,
                          optimizer=keras.optimizers.Adam(lr=lr),
                          metrics=metric_funcs)
        elif net_type == "Simple":
            model = simple_cnn(input_shape,
                               classes=normal_classes,
                               weights_path=continue_from_weight,
                               kernel_regu=kernel_regu,
                               kernel_constraint=kernel_constraint,
                               weight_decay=weight_decay)
            model.compile(loss=loss_funcs,
                          optimizer=keras.optimizers.Adam(lr=lr),
                          metrics=metric_funcs)
        elif net_type == "pscnn":
            model = get_densenet_2d_channel_last_2dense(input_shape[0], input_shape[1],
                                                        classes=normal_classes,
                                                        weights_path=continue_from_weight,
                                                        kernel_regu=kernel_regu,
                                                        kernel_constraint=kernel_constraint,
                                                        weight_decay=weight_decay)
            model.compile(loss=loss_funcs,
                          optimizer=keras.optimizers.Adam(lr=lr),
                          metrics=metric_funcs)
        else:
            # Fail fast instead of falling through with `model` undefined.
            raise ValueError("Unknown net_type: %s" % net_type)
    else:
        model = keras.models.load_model(
            continue_from_model,
            custom_objects={"Scale": Scale,
                            "mean_angular_error": mean_angular_error,
                            "mean_sqrt_error": mean_sqrt_error,
                            "ConnectMap": ConnectMap,
                            "normalize": normalize})
        model.compile(loss=loss_funcs,
                      optimizer=keras.optimizers.Adam(lr=lr),
                      metrics=metric_funcs)

    # Optionally sparsify or overwrite the learned "zero map" (a
    # project-specific mask stored in the model weights) before training.
    if keep_zero_map_rate != 1:
        ws = model.get_weights()
        my_function.update_ZeroMap(ws, keep_zero_map_rate)
        model.set_weights(ws)

    if update_zero_map_path is not None:
        ws = model.get_weights()
        my_function.update_ZeroMap_from_npy(ws, update_zero_map_path)
        model.set_weights(ws)

    if update_zero_map_array is not None:
        ws = model.get_weights()
        # ws[1] is assumed to hold the 14x14 zero-map weight tensor.
        ws[1] = update_zero_map_array.reshape((1, 14, 14, 1))
        model.set_weights(ws)

    # Prepare the model saving directory.
    save_dir = os.path.join(os.getcwd(), 'saved_models')
    model_name = net_type + '.{epoch:03d}.weights.h5'
    if not os.path.isdir(save_dir):
        os.makedirs(save_dir)
    if not os.path.isdir(log_save_path):
        os.makedirs(log_save_path)
    filepath = os.path.join(save_dir, model_name)

    # Prepare callbacks for model saving and for learning rate adjustment.
    checkpoint = ModelCheckpoint(filepath=filepath,
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=False,
                                 save_weights_only=True)
    tensorboard = TensorBoard(log_dir=log_save_path)
    call_lists = [tensorboard, checkpoint]
    if kernel_regu is not None:
        save_connect_map = Connection_map_plot(log_save_path)
        call_lists.append(save_connect_map)

    if lr_s is not None:
        call_lists.append(lr_s)

    if continue_from_model is None and continue_from_weight is None:
        initial_epoch = 0
    else:
        # Recover the epoch from a checkpoint name like
        # "DenseNet.015.weights.h5".
        initial_epoch = int(
            os.path.basename(continue_from_weight).split('.')[1])

    # Snapshot this script alongside the logs for reproducibility.
    copyfile(__file__,
             os.path.join(log_save_path, "note%03d.py" % initial_epoch))

    model.fit_generator(generator=training_generator,
                        validation_data=validation_generator,
                        steps_per_epoch=len(training_generator),
                        validation_steps=len(validation_generator),
                        epochs=epochs,
                        use_multiprocessing=True,
                        workers=3,
                        max_queue_size=10,
                        verbose=1,
                        initial_epoch=initial_epoch,
                        callbacks=call_lists)
    model.save(os.path.join(log_save_path, net_type + '.%03d.h5' % epochs))
    model.save_weights(os.path.join(log_save_path, net_type + '.%03d.weights.h5' % epochs))
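Because fit_generator is called with use_multiprocessing=True, the two generators must be keras.utils.Sequence instances rather than plain Python generators. A minimal sketch of a compatible Sequence over in-memory arrays (ArraySequence and the X / y arrays are illustrative, not from the original code):

import numpy as np
import keras

class ArraySequence(keras.utils.Sequence):
    def __init__(self, X, y, batch_size=32):
        self.X, self.y, self.batch_size = X, y, batch_size

    def __len__(self):
        # Number of batches per epoch.
        return int(np.ceil(len(self.X) / float(self.batch_size)))

    def __getitem__(self, idx):
        # Slice out one batch; the last batch may be short.
        s = slice(idx * self.batch_size, (idx + 1) * self.batch_size)
        return self.X[s], self.y[s]

# e.g. train_ps_cnn(..., training_generator=ArraySequence(X_train, y_train),
#                   validation_generator=ArraySequence(X_val, y_val), ...)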