Code example #1
def build_model_densenet_161():
    img_input = Input(INPUT_SHAPE, name='data')
    base_model = densenet161.DenseNet(
        img_input=img_input,
        reduction=0.5,
        weights_path='../input/densenet161_weights_tf.h5',
        classes=1000)
    # Drop the Dense(1000) ImageNet classifier and its softmax so the new
    # task-specific heads attach to the pooled features.
    base_model.layers.pop()
    base_model.layers.pop()

    species_dense = Dense(len(SPECIES_CLASSES),
                          activation='softmax',
                          name='cat_species')(base_model.layers[-1].output)
    cover_dense = Dense(len(COVER_CLASSES),
                        activation='softmax',
                        name='cat_cover')(base_model.layers[-1].output)
    # output = concatenate([species_dense, cover_dense], axis=0)

    model = Model(inputs=img_input, outputs=[species_dense, cover_dense])
    sgd = SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(optimizer=sgd,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    return model
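Because the model has two named softmax heads, fit expects one one-hot label array per output, keyed by the name arguments of the Dense layers. A minimal usage sketch, assuming x_train, y_species, and y_cover are prepared elsewhere (these names are illustrative, not part of the original):

# Usage sketch: the dict keys must match the output layer names above.
model = build_model_densenet_161()
model.fit(x_train,
          {'cat_species': y_species, 'cat_cover': y_cover},
          batch_size=32,
          epochs=10,
          validation_split=0.1)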
Code example #2
def build_model_densenet_with_mask():
    img_input = Input(INPUT_SHAPE, name='data')
    mask_model = model_unet(INPUT_SHAPE)
    mask_model.load_weights(
        '../output/checkpoints/fish_mask_unet/model_fish_unet2/checkpoint-best-064-0.0476.hdf5'
    )

    mask = mask_model(img_input)
    mask3 = concatenate([mask, mask, mask], axis=3)
    masked_image = multiply([img_input, mask3])

    base_model = densenet161.DenseNet(
        img_input=img_input,
        reduction=0.5,
        weights_path='../input/densenet161_weights_tf.h5',
        classes=1000)
    # Drop the Dense(1000) classifier and its softmax, then wrap what is left
    # in a new Model: popping layers alone does not change what base_model
    # returns when it is called on masked_image.
    base_model.layers.pop()
    base_model.layers.pop()
    feature_model = Model(base_model.input, base_model.layers[-1].output)
    base_model_output = feature_model(masked_image)
    species_dense = Dense(len(SPECIES_CLASSES),
                          activation='softmax',
                          name='cat_species')(base_model_output)
    cover_dense = Dense(len(COVER_CLASSES),
                        activation='softmax',
                        name='cat_cover')(base_model_output)

    model = Model(inputs=img_input, outputs=[species_dense, cover_dense])
    sgd = SGD(lr=1e-3, decay=1e-6, momentum=0.9, nesterov=True)
    model.compile(optimizer=sgd,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])

    return model
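The masking sub-graph tiles the single-channel U-Net mask to three channels with concatenate and applies it by element-wise multiply, so the DenseNet only sees the pixels the mask keeps. A small shape/behaviour check on dummy tensors (illustrative only; INPUT_SHAPE is assumed to be (224, 224, 3)):

# Stand-alone check of the mask-and-multiply sub-graph on dummy data.
import numpy as np
from keras.layers import Input, concatenate, multiply
from keras.models import Model

img = Input((224, 224, 3))
mask = Input((224, 224, 1))                        # stand-in for mask_model(img_input)
mask3 = concatenate([mask, mask, mask], axis=3)    # (224, 224, 3)
masked = multiply([img, mask3])                    # element-wise, zeroes the background
check = Model([img, mask], masked)
# An all-zero mask wipes the image completely, so the sum is 0.0.
print(check.predict([np.ones((1, 224, 224, 3)), np.zeros((1, 224, 224, 1))]).sum())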
Code example #3
def dense161():
    weights_path = '/your path/wh_code/densenet/densenet161_weights_tf.h5'
    image_size = (224, 224)
    base_model = densenet161.DenseNet(reduction=0.5,
                                      classes=1000,
                                      weights_path=weights_path)
    #base_model = densenet161.densenet161_model(img_rows=224, img_cols=224, color_type=3)
    model = Model(base_model.input,
                  GlobalAveragePooling2D()(base_model.output))
    gen = ImageDataGenerator(
        preprocessing_function=densenet161.preprocess_input)
    print(base_model.output)
    #gen = ImageDataGenerator()
    #train_generator = gen.flow_from_directory("/your path/train_cut2p", image_size, shuffle=False, batch_size=8)#37189

    #train_generator = gen.flow_from_directory("/your path/traindata2_pre", image_size, shuffle=False, batch_size=8)#20227
    #val_generator = gen.flow_from_directory("/your path/val_cutpre", image_size, shuffle=False, batch_size=8)#3117
    #train_generator = gen.flow_from_directory("/your path/train", image_size, shuffle=False, batch_size=8)#18686

    #train_generator = gen.flow_from_directory("/your path/train_cut2p", image_size, shuffle=False, batch_size=8)#37189
    #train_generator = gen.flow_from_directory("/your path/train_cut", image_size, shuffle=False, batch_size=8)#18686
    #train_generator = gen.flow_from_directory("/your path/train",image_size, shuffle=False, batch_size=8)#18686
    #train_generator = gen.flow_from_directory("/your path/train_cut3p", image_size, shuffle=False, batch_size=8)#55298
    test_generator = gen.flow_from_directory("/your path/test",
                                             image_size,
                                             shuffle=False,
                                             batch_size=8,
                                             class_mode=None)  #10593
    #test_generator = gen.flow_from_directory("/your path/test_cut", image_size, shuffle=False, batch_size=8, class_mode=None)#10593
    #train_generator = gen.flow_from_directory("/your path/train4p", image_size, shuffle=False, batch_size=8)#73049
    #train_generator = gen.flow_from_directory("/your path/train_cut4p", image_size, shuffle=False, batch_size=8)#72981
    #train_generator = gen.flow_from_directory("/your path/train_cut5p", image_size, shuffle=False, batch_size=8)#90253
    #train_generator = gen.flow_from_directory("/your path/train2p", image_size, shuffle=False, batch_size=8)#37201
    train_generator = gen.flow_from_directory("/your path/train3p",
                                              image_size,
                                              shuffle=False,
                                              batch_size=8)  #55273
    #train = model.predict_generator(train_generator,2336,verbose=True)#18686
    #train = model.predict_generator(train_generator,4649,verbose=True)#37189
    #train = model.predict_generator(train_generator,6913,verbose=True)#55298
    #train = model.predict_generator(train_generator,6908,verbose=True)#55261
    #val = model.predict_generator(val_generator, 390,verbose=True)#3117
    #train = model.predict_generator(train_generator,4651,verbose=True)#37201
    train = model.predict_generator(train_generator, 6910,
                                    verbose=True)  #55273
    #train = model.predict_generator(train_generator,9127,verbose=True)#73014
    #train = model.predict_generator(train_generator,9123,verbose=True)#72981
    #train = model.predict_generator(train_generator,11282,verbose=True)#90253
    test = model.predict_generator(test_generator, 1325, verbose=True)  #10593

    #print(test.shape)
    # fileDir is assumed to be defined elsewhere as the output directory prefix.
    with h5py.File(fileDir + "wh_code/nocut/224_3p-dense161.h5", "w") as h:
        h.create_dataset("train", data=train)
        h.create_dataset("test", data=test)
        h.create_dataset("label", data=train_generator.classes)
        #h.create_dataset("val", data=val)
        #h.create_dataset("trainlabel", data=train_generator.classes)
        #h.create_dataset("vallabel", data=val_generator.classes)

    K.clear_session()
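The hard-coded step counts passed to predict_generator are simply ceil(image_count / batch_size); the image counts appear in the trailing comments. A quick check of the two values actually used above:

# Sanity check of the predict_generator step counts used above.
import math
batch_size = 8
print(math.ceil(55273 / batch_size))  # 6910 steps cover the 55273 train3p images
print(math.ceil(10593 / batch_size))  # 1325 steps cover the 10593 test images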
Code example #4
from keras.models import *
from keras.layers import *
from keras.preprocessing.image import *
import h5py
import densenet161
from keras import backend as K

weights_path = '/your path/wh_code/densenet/densenet161_weights_tf.h5'
image_size = (224, 224)
base_model = densenet161.DenseNet(reduction=0.5,
                                  classes=1000,
                                  weights_path=weights_path)
model = Model(base_model.input, GlobalAveragePooling2D()(base_model.output))
gen = ImageDataGenerator(preprocessing_function=densenet161.preprocess_input)
test_generator = gen.flow_from_directory("/your path/test",
                                         image_size,
                                         shuffle=False,
                                         batch_size=8,
                                         class_mode=None)
train_generator = gen.flow_from_directory("/your path/train3p",
                                          image_size,
                                          shuffle=False,
                                          batch_size=8)
train = model.predict_generator(train_generator, 6910, verbose=True)
test = model.predict_generator(test_generator, 1325, verbose=True)

# print(test.shape)
# fileDir is assumed to be defined elsewhere as the output directory prefix.
with h5py.File(fileDir + "wh_code/nocut/224_3p-dense161.h5", "w") as h:
    h.create_dataset("train", data=train)
    h.create_dataset("test", data=test)
    h.create_dataset("label", data=train_generator.classes)