def train():
    """Train the 3-D MobileNet variant on the UT-interaction segmented sets.

    Builds the dataset from both segmented set directories, compiles the
    model with RMSprop, and runs generator-based training with the
    project's standard callbacks.  All helpers (get_callbacks, search_file,
    DataSet, MobileNet, RMSprop) come from the surrounding project/Keras.
    """
    # Augmentation settings for the training generator.  The trailing
    # values mirror earlier experiments (rotation 7/5, shifts 1/3, zoom 1/1).
    aug_cfg = {
        'flag': True,
        'rg': 25,
        'wrg': 0.25,
        'hrg': 0.25,
        'zoom': 0.25,
    }
    cbs = get_callbacks('mynet_v4_bias', patience=30)

    # Collect clip paths and labels from both segmented sets; the second
    # call extends the lists returned by the first.
    filepaths, labels = search_file('set1/segmented_set1')
    filepaths, labels = search_file('set2/segmented_set2', paths=filepaths, y=labels)

    dataset = DataSet(
        nframe=30,
        fstride=6,
        name='UT interaction',
        size=[224, 224, 3],
        filepaths=filepaths,
        y=labels,
        kernel_size=4,
    )
    dataset.make_set(op='msqr', name='train')
    dataset.make_set(op='msqr', name='valid')

    # RMSprop was chosen over the previously tried Adam and Nesterov-SGD.
    optimizer = RMSprop(lr=0.001, rho=0.9, decay=0.01)

    net = MobileNet(alpha=1.0, shape=[29, 56, 56, 1], nframe=29)
    net.compile(
        optimizer=optimizer,
        loss='categorical_crossentropy',
        metrics=['accuracy'],
    )
    net.summary()

    net.fit_generator(
        generator=dataset.train_gen(batch_size=5, aug_config=aug_cfg),
        steps_per_epoch=100,
        epochs=300,
        validation_data=dataset.valid_gen(),
        verbose=1,
        # NOTE(review): passed uncalled — presumably a property on DataSet;
        # confirm it is not a method that should be invoked.
        validation_steps=dataset.getVlen,
        callbacks=cbs,
    )
def training_model(model_name='mobilenet'):
    """Build the selected MobileNet variant and train it as a regressor.

    Compiles with Adam and MSE loss (MAE/MSE metrics) and trains from the
    project's generator pair for 40 epochs.

    :param model_name: one of 'mobilenet', 'mobilenet_dih',
        'mobilenet_dih_r' selecting the architecture to build.
    """
    train_gen, valid_gen, tconfig = get_gen_tconfig()
    callbacks = get_callbacks('mobilenet05_short_adam03_dr35_v3', patience=4)
    # Select the architecture.  NOTE(review): if model_name matches none of
    # the branches, `model` is never bound and the compile call below raises
    # NameError — consider a final `else: raise ValueError(...)`.
    if model_name == 'mobilenet':
        print('MobileNet')
        model = MobileNet(config=tconfig, alpha=1.0)
        model.summary()
    elif model_name == 'mobilenet_dih':
        print('MobileNetDih')
        model = MobileNetDih4(config=tconfig, alpha=1)
        model.summary()
    elif model_name == 'mobilenet_dih_r':
        print('MobileNetDihR')
        model = MobileNetDR(config=tconfig, alpha=0.5)
        model.summary()
    # Adam kept active; Adadelta/SGD lines below are prior experiments.
    opt = Adam(lr=1e-3, beta_1=0.9, beta_2=0.999)
    #opt = Adadelta(lr=1e-1, rho=0.95, decay=0.1)
    #opt = SGD(lr=1e-7, momentum=0.9, decay=0., nesterov=True)
    model.compile(optimizer=opt, loss='mse', metrics=['mae', 'mse'])
    #model.load_weights('mobilenet_05shortd01_catcros_resize_b16.hdf5')
    model.fit_generator(generator=train_gen,
                        steps_per_epoch=1000,
                        epochs=40,
                        validation_data=valid_gen,
                        verbose=2,
                        validation_steps=500,
                        callbacks=callbacks)
    #opt = Adam(lr=1e-3, beta_1=0.9, beta_2=0.999)
    #opt = Adadelta(lr=1e-1, rho=0.95, decay=0.1)
# NOTE(review): the triple quote below opens a module-level string literal,
# apparently used to disable the duplicate training_model definition that
# follows; it is closed by the later `# """` line.  Verify intent — a
# duplicate `def` would otherwise silently shadow this one.
"""
def training_model(model_name='mobilenet'):
    """Build the selected MobileNet variant and train it as a binary classifier.

    Loads image arrays via get_data(), trains with Keras ImageDataGenerator
    augmentation for 40 epochs, then reloads saved weights and continues for
    10 more epochs.

    NOTE(review): this is a duplicate definition of training_model; the
    earlier version appears to be disabled by a module-level triple-quoted
    string — confirm which one is meant to be live.

    :param model_name: one of 'mobilenet', 'mobilenet_dih',
        'mobilenet_dih_r' selecting the architecture to build.
    """
    train_img, valid_img, train_y, valid_y = get_data()
    callbacks = get_callbacks('mobilenet_10fulld01_b16', patience=2)
    # Select the architecture.  NOTE(review): no else branch — an unmatched
    # model_name leaves `model` unbound and compile() raises NameError.
    if model_name == 'mobilenet':
        print('MobileNet')
        model = MobileNet(alpha=1.)
        model.summary()
    elif model_name == 'mobilenet_dih':
        print('MobileNetDih')
        model = MobileNetDih4(alpha=1.)
        model.summary()
    elif model_name == 'mobilenet_dih_r':
        print('MobileNetDihR')
        model = MobileNetDR(alpha=1.)
        model.summary()
    # Adam kept active; commented lines are prior optimizer experiments.
    opt = Adam(lr=1e-3, beta_1=0.9, beta_2=0.999)
    #opt = Adadelta(lr=1e-1, rho=0.95, decay=0.1)
    #opt = SGD(lr=1e-7, momentum=0.9, decay=0., nesterov=True)
    model.compile(optimizer=opt, loss='binary_crossentropy', metrics=['accuracy'])
    #model.load_weights('mobilenet_05shortd01_catcros_resize_b16.hdf5')
    # Heavy augmentation: near-full rotation plus 0.5x-2x zoom and flips.
    gen = ImageDataGenerator(rotation_range=359,
                             zoom_range=[0.5, 2],
                             width_shift_range=0.1,
                             height_shift_range=0.1,
                             vertical_flip=True,
                             horizontal_flip=True)
    # 16x oversampling per epoch relative to the raw training-set size.
    model.fit_generator(
        gen.flow(np.array(train_img), np.array(train_y), batch_size=BATCH_SIZE),
        steps_per_epoch=16 * len(train_y) // BATCH_SIZE,
        epochs=40,
        validation_data=[np.array(valid_img), np.array(valid_y)],
        verbose=1,
        callbacks=callbacks)
    # """
    #opt = Adam(lr=1e-3, beta_1=0.9, beta_2=0.999)
    #opt = Adadelta(lr=1e-1, rho=0.95, decay=0.1)
    # NOTE(review): `opt` is reassigned to SGD here but model.compile() is
    # never called again, so this second fit still runs with the Adam
    # optimizer compiled above — likely a bug; recompile if SGD is intended.
    opt = SGD(lr=0.05, momentum=0.9, decay=0., nesterov=True)
    # NOTE(review): weight file name has no extension — confirm the path.
    model.load_weights('mobilenet_10shortd01_b16_sgd')
    model.fit_generator(
        gen.flow(np.array(train_img), np.array(train_y), batch_size=BATCH_SIZE),
        steps_per_epoch=16 * len(train_y) // BATCH_SIZE,
        epochs=10,
        validation_data=[np.array(valid_img), np.array(valid_y)],
        verbose=1,
        callbacks=callbacks)
# net = applications.mobilenet_v2.MobileNetV2(include_top=False, pooling='avg', weights='imagenet', # input_shape = (223,223,3)) # net = applications.nasnet.NASNetMobile(input_shape=(223, 223, 3), include_top=False, weights='imagenet', # pooling='avg') # print(len(net.layers)) # model = Sequential() # model.add(net) # model.add(Dense(2, activation='softmax')) # for layer in net.layers[:-45]: # layer.trainable = False model = MobileNet((64, 64, 3), 200) model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy']) model.summary() # exit(0) print("Compile model done!") earlyStopping = EarlyStopping(monitor='val_acc', patience=20, verbose=1) filepath = "models/imagenet_clf_model_test.h5" mcp_save = ModelCheckpoint(filepath, save_best_only=True, monitor='val_acc') reduce_lr = ReduceLROnPlateau('val_acc', factor=0.5, patience=4, verbose=1) train_data_dir = 'data/train' validation_data_dir = 'data/validation' batch_size = 128 train_datagen = ImageDataGenerator(