# NOTE(review): mangled single-line fragment of a YOLO training driver; the text is
# TRUNCATED mid-call (`model.fit_generator(..., epochs=(train_epochs),` never closes),
# so it is kept byte-identical rather than reformatted.
# Observations grounded in this line:
#   - compiles with a pass-through loss `lambda y_true, y_pred: y_pred` — the model
#     presumably emits its own loss via a 'yolo_loss' layer (common Keras YOLO pattern;
#     TODO confirm against create_model).
#   - uses deprecated `fit_generator` and `Adam(lr=...)`; the parallel block later in
#     this file uses `model.fit` — this looks like an older copy of the same logic.
#   - resume path unpickles './plot/test_loss/test_loss.txt' to recover the epoch count.
print('loaded!') model.summary() model.compile(optimizer=tf.keras.optimizers.Adam(lr=cfg.TRAIN.LR_INIT), loss={ 'yolo_loss': lambda y_true, y_pred: y_pred }) auto_lr = AutoLr(warmup_steps) mAP = MAP() checkpoint = SelfModelCheckPoint(model_path, 10000) callbacks = [auto_lr, checkpoint, mAP] if not cfg.YOLO.TrainContinueFlag: model.fit_generator(trainset.loda_data(), epochs=train_epochs, steps_per_epoch=train_steps, validation_data=testset.loda_data(), validation_steps=val_steps, callbacks=callbacks) else: test_loss_save_dir = './plot/test_loss' test_loss_save_path = test_loss_save_dir + '/test_loss.txt' if os.path.exists(test_loss_save_path): with open(test_loss_save_path, 'rb') as test_loss_f: test_loss_list = pickle.load(test_loss_f) current_epochs = len(test_loss_list) model.fit_generator(trainset.loda_data(), epochs=(train_epochs),
"""Visual sanity-check script: draw ground-truth boxes for 10 training samples.

Pulls batches from the 'train' Dataset, treats the per-scale label tensors as
"predictions", post-processes them into boxes, and writes annotated images to
0.jpg ... 9.jpg in the current directory.

Fixes vs. original:
  - removed `print(label[].shape)` — `label[]` is a SyntaxError, the file could
    not even be imported as written;
  - removed the debugging leftover `assert 0`, which made the entire rest of the
    loop body unreachable;
  - `list([...])` simplified to a plain list literal.
"""
from core.dataset import Dataset
import tensorflow as tf
import core.utils as utils
from core.config import cfg
import tqdm
import cv2

draw_dataset = Dataset('train')
data_product = draw_dataset.loda_data()

for i in tqdm.tqdm(range(10)):
    sample = next(data_product)
    # The generator yields a tuple whose first element packs
    # (?, label, image_size, image) — presumably; TODO confirm against Dataset.loda_data.
    sample = sample[0]
    label = sample[1]
    image_size = sample[2]
    image = sample[3]

    # One label tensor per detection scale (3 scales in YOLOv3-style heads).
    pred_bbox = [label[0], label[1], label[2]]
    # Flatten each scale to (num_boxes, box_dims) and stack them all together.
    pred_bbox = [tf.reshape(x, (-1, tf.shape(x)[-1])) for x in pred_bbox]
    pred_bbox = tf.concat(pred_bbox, axis=0)

    # 416 is presumably the network input size, 0.3 the score threshold — TODO confirm.
    bboxes = utils.postprocess_boxes(pred_bbox, image_size, 416, 0.3)
    bboxes = utils.nms(bboxes, cfg.TEST.IOU_THRESHOLD, method="nms")

    image = utils.draw_bbox(image, bboxes)
    cv2.imwrite(str(i) + '.jpg', image)
# NOTE(review): mangled single-line training driver; the fragment ends with a dangling
# `else:` whose body lies beyond this chunk, so the code is kept byte-identical.
# Observations grounded in this line:
#   - loads weights only when a TF `checkpoint` index file exists in model_dir;
#   - the compile loss `lambda y_true, y_pred: y_pred` passes the model output straight
#     through — the model presumably computes its own loss in a 'yolo_loss' layer
#     (standard Keras-YOLO trick; TODO confirm against create_model);
#   - `Adam(lr=...)` uses the deprecated `lr` alias — modern tf.keras expects
#     `learning_rate` (TODO confirm the TF version pinned by this project);
#   - resume branch: epoch count is recovered as len() of a pickled per-epoch
#     test-loss list at ./plot/test_loss/test_loss.txt, then passed as initial_epoch
#     so fit() continues rather than restarts.
model = create_model() if os.path.exists(os.path.join(model_dir, 'checkpoint')): model.load_weights(model_path) print('loaded!') model.summary() model.compile(optimizer=tf.keras.optimizers.Adam(lr=cfg.TRAIN.LR_INIT), loss={'yolo_loss': lambda y_true, y_pred: y_pred}) auto_lr = AutoLr(warmup_steps) mAP = MAP() checkpoint = SelfModelCheckPoint(model_path, 10000) callbacks = [auto_lr, checkpoint, mAP] if not cfg.YOLO.TrainContinueFlag: model.fit(trainset.loda_data(), epochs=train_epochs, steps_per_epoch=train_steps, validation_data=testset.loda_data(), validation_steps=val_steps, callbacks=callbacks) else: test_loss_save_dir = './plot/test_loss' test_loss_save_path = test_loss_save_dir + '/test_loss.txt' if os.path.exists(test_loss_save_path): with open(test_loss_save_path, 'rb') as test_loss_f: test_loss_list = pickle.load(test_loss_f) current_epochs = len(test_loss_list) model.fit(trainset.loda_data(), epochs=(train_epochs), initial_epoch=current_epochs, steps_per_epoch=train_steps, validation_data=testset.loda_data(), validation_steps=val_steps, callbacks=callbacks) else: