def __init__(self, model_file, num_classes, input_image_size):
    """Build (or load) a MobileNet feature extractor and attach a softmax head.

    Args:
        model_file: path of the saved feature-extractor model on disk.
        num_classes: number of output classes for the final Dense layer.
        input_image_size: expected input shape tuple; only (224, 224, 3)
            is accepted for a cached model (anything else forces a re-download).

    Side effects: may download ImageNet weights and save them to `model_file`.
    """
    # Re-download when there is no cached file OR the requested input size is
    # not the canonical 224x224x3 (NOTE(review): in that second case the model
    # is still built with `input_image_size` and saved over `model_file` —
    # presumably intentional, but confirm against callers).
    if not gfile.Exists(model_file) or input_image_size != (224, 224, 3):
        print("need download the model")
        mobile_net = MobileNet(weights='imagenet', input_shape=input_image_size)
        # Truncate at the global-average-pooling layer so the model emits a
        # flat feature vector instead of classification logits.
        self.mobile_net_model = models.Model(
            inputs=mobile_net.input,
            outputs=mobile_net.get_layer('global_average_pooling2d').output)
        print("save the downloaded model for reuse")
        mobile_net.save(model_file)
    else:
        self.mobile_net_model = models.load_model(model_file)
    classes = num_classes
    # Stand-alone one-layer classifier over precomputed 1024-dim features
    # (1024 matches MobileNet's pooled feature width — TODO confirm for
    # non-default alpha values).
    self.inputs = layers.Input(shape=(1024, ))
    self.outputs = layers.Dense(classes, activation='softmax', name='final_output')(self.inputs)
    self.one_layer_model = models.Model(inputs=[self.inputs], outputs=[self.outputs])
    # End-to-end model: feature extractor + a (separate) softmax head.
    # NOTE(review): this Dense layer is a distinct instance from the one in
    # `one_layer_model`, despite sharing the name 'final_output'.
    final_output = layers.Dense(classes, activation='softmax', name='final_output')(self.mobile_net_model.output)
    self.final_model = models.Model(inputs=self.mobile_net_model.inputs, outputs=final_output)
def precision_m(y_true, y_pred): true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1))) predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1))) precision = true_positives / (predicted_positives + K.epsilon()) return precision def f1_m(y_true, y_pred): precision = precision_m(y_true, y_pred) recall = recall_m(y_true, y_pred) return 2 * ((precision * recall) / (precision + recall + K.epsilon())) model.save('C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_rmsprop_1.h5') # 컴파일, 훈련 op = RMSprop(lr=1e-3) batch_size = 8 es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1) lr = ReduceLROnPlateau(monitor='val_loss', vactor=0.5, patience=10, verbose=1) path = 'C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_rmsprop_1.h5' mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True) model.compile(optimizer=op, loss="sparse_categorical_crossentropy",
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], aaa) print(x_train.shape, y_train.shape) # (3628, 128, 862, 1) (3628,) print(x_test.shape, y_test.shape) # (908, 128, 862, 1) (908,) model = MobileNet( include_top=True, input_shape=(128, 862, 1), classes=2, pooling=None, weights=None, ) model.summary() # model.trainable = False model.save('C:/nmb/nmb_data/h5/5s/mobilenet_nadam_1.h5') # 컴파일, 훈련 op = Nadam(lr=1e-3) batch_size = 8 es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1) lr = ReduceLROnPlateau(monitor='val_loss', vactor=0.5, patience=10, verbose=1) path = 'C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_nadam_1.h5' mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True) model.compile(optimizer=op, loss="sparse_categorical_crossentropy",
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], aaa) print(x_train.shape, y_train.shape) # (3628, 128, 862, 1) (3628,) print(x_test.shape, y_test.shape) # (908, 128, 862, 1) (908,) model = MobileNet( include_top=True, input_shape=(128, 862, 1), classes=2, pooling=None, weights=None, ) model.summary() # model.trainable = False model.save('C:/nmb/nmb_data/h5/5s/mobilenet_sgd_1.h5') # 컴파일, 훈련 op = SGD(lr=1e-2) batch_size = 8 es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1) lr = ReduceLROnPlateau(monitor='val_loss', vactor=0.5, patience=10, verbose=1) path = 'C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_sgd_1.h5' mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True) model.compile(optimizer=op, loss="sparse_categorical_crossentropy",
def precision_m(y_true, y_pred): true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1))) predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1))) precision = true_positives / (predicted_positives + K.epsilon()) return precision def f1_m(y_true, y_pred): precision = precision_m(y_true, y_pred) recall = recall_m(y_true, y_pred) return 2 * ((precision * recall) / (precision + recall + K.epsilon())) model.save('C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_rmsprop_f80_m20_2.h5') # 컴파일, 훈련 op = RMSprop(lr=1e-3) batch_size = 8 es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1) lr = ReduceLROnPlateau(monitor='val_loss', vactor=0.5, patience=10, verbose=1) path = 'C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_rmsprop_f80_m20_2.h5' mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True) model.compile(optimizer=op, loss="sparse_categorical_crossentropy",
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2], aaa) x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], aaa) print(x_train.shape, y_train.shape) # (3628, 128, 862, 1) (3628,) print(x_test.shape, y_test.shape) # (908, 128, 862, 1) (908,) model = MobileNet( include_top=True, input_shape=(128, 862, 1), classes=2, pooling=None, weights=None, ) model.summary() # model.trainable = False model.save('C:\\nmb\\nmb_data\\h5\\pre_train\\mobilenet_rmsprop_.h5') # 컴파일, 훈련 op = RMSprop(lr=1e-4) batch_size = 32 es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1) lr = ReduceLROnPlateau(monitor='val_loss', vactor=0.5, patience=10, verbose=1) path = 'C:\\nmb\\nmb_data\\h5\\pre_train\\mobilenet_rmsprop_1e4.h5' mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True) tb = TensorBoard(log_dir='C:/nmb/nmb_data/graph/' + 'mobilenet_rmsprop_1e4' + "/", histogram_freq=0, write_graph=True,
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], aaa) print(x_train.shape, y_train.shape) # (3628, 128, 862, 1) (3628,) print(x_test.shape, y_test.shape) # (908, 128, 862, 1) (908,) model = MobileNet( include_top=True, input_shape=(128, 862, 1), classes=2, pooling=None, weights=None, ) model.summary() # model.trainable = False model.save('C:/nmb/nmb_data/h5/5s/mobilenet_adadelta_1.h5') # 컴파일, 훈련 op = Adadelta(lr=1e-3) batch_size = 8 es = EarlyStopping(monitor='val_loss', patience=20, restore_best_weights=True, verbose=1) lr = ReduceLROnPlateau(monitor='val_loss', vactor=0.5, patience=10, verbose=1) path = 'C:/nmb/nmb_data/h5/5s/mobilenet/mobilenet_adadelta_1.h5' mc = ModelCheckpoint(path, monitor='val_loss', verbose=1, save_best_only=True) model.compile(optimizer=op, loss="sparse_categorical_crossentropy",