Example #1
    #------------------------------------------------------#
    #   The backbone feature-extraction network is generic;
    #   using pretrained weights speeds up training.
    #   Init_Epoch is the starting epoch.
    #   Interval_Epoch is the epoch at which this intermediate
    #   training stage ends.
    #   Epoch is the total number of training epochs.
    #   If you see OOM / out-of-GPU-memory errors, reduce Batch_size.
    #------------------------------------------------------#
    if True:
        lr = 1e-4
        Batch_size = 2
        Init_Epoch = 0
        Interval_Epoch = 50

        model_rpn.compile(
            loss={
                'classification': cls_loss(),
                'regression': smooth_l1()
            },
            optimizer=keras.optimizers.Adam(lr=lr))
        model_all.compile(
            loss={
                'classification': cls_loss(),
                'regression': smooth_l1(),
                'dense_class_{}'.format(NUM_CLASSES): class_loss_cls,
                'dense_regress_{}'.format(NUM_CLASSES): class_loss_regr(NUM_CLASSES - 1)
            },
            optimizer=keras.optimizers.Adam(lr=lr))
    # Steps per epoch: one pass over the training set,
    # set according to the size of the dataset
    EPOCH_LENGTH = len(lines)

    gen = Generator(bbox_util, lines, NUM_CLASSES, solid=True)
    rpn_train = gen.generate()
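    # NOTE (assumption, not in the original listing): in this style of
    # example, each item yielded by gen.generate() is a tuple such as
    # (image_batch, rpn_targets, gt_boxes), so one batch could be
    # inspected with:
    #   X, Y, boxes = next(rpn_train)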
    log_dir = "logs"
    # Training settings
    logging = TensorBoard(log_dir=log_dir)
    callback = logging
    callback.set_model(model_all)
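    # Because training here is driven manually (train_on_batch rather than
    # fit), scalars reach TensorBoard through the standard Keras callback
    # API: after callback.set_model(...), callback.on_epoch_end(epoch, logs)
    # writes each entry of the logs dict as a scalar summary. A usage
    # sketch appears in the loop outline at the end of this listing.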

    model_rpn.compile(
        loss={
            'regression': smooth_l1(),
            'classification': cls_loss()
        },
        optimizer=keras.optimizers.Adam(lr=lr))
    model_classifier.compile(
        loss=[class_loss_cls, class_loss_regr(NUM_CLASSES - 1)],
        metrics={'dense_class_{}'.format(NUM_CLASSES): 'accuracy'},
        optimizer=keras.optimizers.Adam(lr=lr))
    # model_all ties the RPN and classifier heads together; this compile is
    # a placeholder (model_all is not trained with 'sgd'/'mae' directly).
    model_all.compile(optimizer='sgd', loss='mae')

    # Initialize bookkeeping variables
    iter_num = 0
    train_step = 0
    losses = np.zeros((EPOCH_LENGTH, 5))
    rpn_accuracy_rpn_monitor = []
    rpn_accuracy_for_epoch = []
    start_time = time.time()
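    # A minimal sketch of the alternating training loop these variables are
    # set up for, in the spirit of common keras-frcnn training scripts.
    # Assumptions (not in the original listing): gen.generate() yields
    # (X, Y, boxes), and the code that converts RPN output into ROI targets
    # is elided; all names below are illustrative only.
    #
    # for epoch in range(Init_Epoch, Interval_Epoch):
    #     for X, Y, boxes in rpn_train:
    #         # One gradient step on the RPN; train_on_batch returns
    #         # [total_loss, classification_loss, regression_loss].
    #         loss_rpn = model_rpn.train_on_batch(X, Y)
    #         losses[iter_num, 0] = loss_rpn[1]
    #         losses[iter_num, 1] = loss_rpn[2]
    #
    #         # Turn RPN predictions into ROIs, then one step on the head.
    #         P_rpn = model_rpn.predict_on_batch(X)
    #         # ... convert P_rpn + boxes into (rois, y_cls, y_regr) ...
    #         # loss_class = model_classifier.train_on_batch([X, rois], [y_cls, y_regr])
    #         # losses[iter_num, 2:4] = loss_class[1:3]
    #
    #         iter_num += 1
    #         train_step += 1
    #         if iter_num == EPOCH_LENGTH:
    #             print('Epoch {} took {:.1f}s'.format(epoch, time.time() - start_time))
    #             callback.on_epoch_end(epoch, logs={'total_loss': float(np.mean(losses[:, :4]))})
    #             iter_num = 0
    #             start_time = time.time()
    #             break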