# Checkpoint/log interval in epochs (consumed elsewhere in this script).
Interval_Epoch = 50

# Compile the RPN with its two heads: objectness classification and box regression.
# NOTE(review): `lr=` is the legacy Keras optimizer argument; tf.keras 2.x renamed
# it to `learning_rate=` — confirm against the Keras version this project pins.
model_rpn.compile(loss={
    'classification': cls_loss(),
    'regression': smooth_l1()
}, optimizer=keras.optimizers.Adam(lr=lr))

# Compile the combined RPN + detector model. The detector head layers are named
# with the class count suffixed (e.g. 'dense_class_21'), so the loss-dict keys
# are built with str.format to match those layer names.
model_all.compile(loss={
    'classification': cls_loss(),
    'regression': smooth_l1(),
    'dense_class_{}'.format(NUM_CLASSES): class_loss_cls,
    'dense_regress_{}'.format(NUM_CLASSES): class_loss_regr(NUM_CLASSES - 1)
}, optimizer=keras.optimizers.Adam(lr=lr))

# Training / validation data generators: the annotation list is split at
# num_train; each Generator yields batches at the given input resolution.
gen = Generator(bbox_util, lines[:num_train], NUM_CLASSES, Batch_size,
                input_shape=[input_shape[0], input_shape[1]]).generate()
gen_val = Generator(bbox_util, lines[num_train:], NUM_CLASSES, Batch_size,
                    input_shape=[input_shape[0], input_shape[1]]).generate()
# Generator over the FULL annotation list (no train/val split here).
# NOTE(review): `solid=True` presumably fixes the input image size — confirm
# against Generator's definition; this rebinds `gen` from the earlier split setup.
gen = Generator(bbox_util, lines, NUM_CLASSES, solid=True)
rpn_train = gen.generate()

log_dir = "logs"
# Training parameter setup: wire a TensorBoard callback to the combined model
# so the manual training loop below can emit summaries.
# NOTE(review): binding the callback to the name `logging` shadows the stdlib
# `logging` module for the rest of this file.
logging = TensorBoard(log_dir=log_dir)
callback = logging
callback.set_model(model_all)

# Compile the RPN heads (objectness classification + box regression).
# NOTE(review): legacy `lr=` argument — tf.keras 2.x uses `learning_rate=`.
model_rpn.compile(loss={
    'regression': smooth_l1(),
    'classification': cls_loss()
}, optimizer=keras.optimizers.Adam(lr=Learning_rate))

# Compile the detector (classifier) head; losses are positional over its two
# outputs, and accuracy is tracked only on the class-probability output.
model_classifier.compile(
    loss=[class_loss_cls, class_loss_regr(NUM_CLASSES - 1)],
    metrics={'dense_class_{}'.format(NUM_CLASSES): 'accuracy'},
    optimizer=keras.optimizers.Adam(lr=Learning_rate))

# NOTE(review): 'sgd'/'mae' look like placeholder settings — model_all appears
# to be compiled only so its weights can be saved/loaded, not trained directly;
# confirm against the training loop that follows.
model_all.compile(optimizer='sgd', loss='mae')

# Initialize training-loop bookkeeping.
iter_num = 0
train_step = 0
# Per-iteration loss buffer: EPOCH_LENGTH rows x 5 tracked loss components.
losses = np.zeros((EPOCH_LENGTH, 5))
rpn_accuracy_rpn_monitor = []
rpn_accuracy_for_epoch = []
start_time = time.time()
# Best (lowest) loss seen so far; start at +infinity so the first epoch wins.
# NOTE(review): the `np.Inf` alias was removed in NumPy 2.0 — use `np.inf`
# if the project upgrades NumPy.
best_loss = np.Inf
# Mapping from numbers to classes.
print('Starting training')