def training_model(model_name='mobilenet'):
    """Build, compile and train the selected MobileNet-style regression model.

    Parameters
    ----------
    model_name : str
        One of 'mobilenet', 'mobilenet_dih' or 'mobilenet_dih_r'.
        Any other value leaves ``model`` unbound and raises NameError
        at the ``summary()`` call — fail fast on a typo.

    Side effects: prints the chosen architecture and its summary, then
    trains for 40 epochs with the callbacks returned by get_callbacks().
    """
    train_gen, valid_gen, tconfig = get_gen_tconfig()
    callbacks = get_callbacks('mobilenet05_short_adam03_dr35_v3', patience=4)

    # Architecture selection; alpha is the width multiplier per variant.
    if model_name == 'mobilenet':
        print('MobileNet')
        model = MobileNet(config=tconfig, alpha=1.0)
    elif model_name == 'mobilenet_dih':
        print('MobileNetDih')
        model = MobileNetDih4(config=tconfig, alpha=1)
    elif model_name == 'mobilenet_dih_r':
        print('MobileNetDihR')
        model = MobileNetDR(config=tconfig, alpha=0.5)
    model.summary()

    # Adam kept after earlier experiments with Adadelta and SGD;
    # regression setup: MSE loss, MAE/MSE tracked as metrics.
    opt = Adam(lr=1e-3, beta_1=0.9, beta_2=0.999)
    model.compile(optimizer=opt, loss='mse', metrics=['mae', 'mse'])

    # NOTE(review): a previous classification variant of this function and
    # an SGD fine-tuning stage were embedded here inside a triple-quoted
    # string; the fine-tuning tail referenced names (gen, train_img, ...)
    # defined only inside that dead string and would always raise
    # NameError, so both have been removed.
    model.fit_generator(generator=train_gen,
                        steps_per_epoch=1000,
                        epochs=40,
                        validation_data=valid_gen,
                        verbose=2,
                        validation_steps=500,
                        callbacks=callbacks)
def train():
    """Train the video MobileNet on the UT-Interaction segmented sets.

    Builds the dataset from set1/set2, compiles the model with RMSprop
    and categorical cross-entropy, and runs 300 epochs of generator
    training with augmentation.
    """
    # Augmentation knobs consumed by the training generator.
    aug_cfg = {
        'flag': True,
        'rg': 25,       # rotation range (degrees); earlier tries: 7, 5
        'wrg': 0.25,    # width shift; earlier tries: 1, 3
        'hrg': 0.25,    # height shift; earlier tries: 1, 3
        'zoom': 0.25,   # zoom range; earlier tries: 1, 1
    }
    callbacks = get_callbacks('mynet_v4_bias', patience=30)

    # Collect clip paths/labels from both segmented sets, then build the
    # frame dataset and materialize train/valid splits.
    paths, y = search_file('set1/segmented_set1')
    paths, y = search_file('set2/segmented_set2', paths=paths, y=y)
    dataset = DataSet(nframe=30, fstride=6, name='UT interaction',
                      size=[224, 224, 3], filepaths=paths, y=y,
                      kernel_size=4)
    dataset.make_set(op='msqr', name='train')
    dataset.make_set(op='msqr', name='valid')

    # RMSprop kept after experiments with Adam and Nesterov SGD.
    optimizer = RMSprop(lr=0.001, rho=0.9, decay=0.01)
    model = MobileNet(alpha=1.0, shape=[29, 56, 56, 1], nframe=29)
    model.compile(optimizer=optimizer,
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.summary()

    model.fit_generator(
        generator=dataset.train_gen(batch_size=5, aug_config=aug_cfg),
        steps_per_epoch=100,
        epochs=300,
        validation_data=dataset.valid_gen(),
        verbose=1,
        validation_steps=dataset.getVlen,
        callbacks=callbacks)
# --- Training/validation pipelines and model fitting ----------------------
# NOTE(review): train_datagen, model, batch_size, train_data_dir,
# validation_data_dir, earlyStopping, mcp_save and reduce_lr are assumed
# to be defined earlier in this file — confirm against the full script.
validation_datagen = ImageDataGenerator(rescale=1. / 255)

train_generator = train_datagen.flow_from_directory(
    train_data_dir,
    target_size=(64, 64),
    batch_size=batch_size,
    class_mode='categorical')
validation_generator = validation_datagen.flow_from_directory(
    validation_data_dir,
    target_size=(64, 64),
    batch_size=batch_size,
    class_mode='categorical',
    shuffle=False)  # stable order so predictions align with file order

# Compensate for class imbalance in the loss.
class_weights = class_weight.compute_class_weight(
    'balanced', np.unique(train_generator.classes), train_generator.classes)
# Keras expects class_weight as a {class_index: weight} dict, not the raw
# ndarray sklearn returns — map array positions to class indices.
class_weights = dict(enumerate(class_weights))
print(class_weights)
print(train_generator.class_indices)

# One pass over every file per epoch (floor-divided into batches).
step_per_epoch = len(glob.glob(os.path.join(train_data_dir, '*/*'))) // batch_size
validation_steps = len(glob.glob(os.path.join(validation_data_dir, '*/*'))) // batch_size
print(step_per_epoch)
print(validation_steps)

model.fit_generator(generator=train_generator,
                    epochs=50,
                    verbose=1,
                    class_weight=class_weights,
                    steps_per_epoch=step_per_epoch,
                    callbacks=[earlyStopping, mcp_save, reduce_lr],
                    validation_data=validation_generator,
                    validation_steps=validation_steps)