Example #1
    # The start of this snippet is cut off in the source; the ModelCheckpoint call
    # is reconstructed here with a placeholder checkpoint path (assumption).
    checkpoint = ModelCheckpoint(
        'logs/ep{epoch:03d}-val_loss{val_loss:.3f}.h5',
        monitor='val_loss',
        save_weights_only=True,
        save_best_only=False,
        period=1)
    reduce_lr = ReduceLROnPlateau(monitor='val_loss',
                                  factor=0.5,
                                  patience=2,
                                  verbose=1)
    early_stopping = EarlyStopping(monitor='val_loss',
                                   min_delta=0,
                                   patience=6,
                                   verbose=1)

    BATCH_SIZE = 2
    gen = Generator(bbox_util, BATCH_SIZE, lines[:num_train],
                    lines[num_train:], (input_shape[0], input_shape[1]),
                    NUM_CLASSES)

    # Freeze the first 174 layers so their weights are not updated during this stage.
    for i in range(174):
        model.layers[i].trainable = False

    model.compile(loss={
        'regression': smooth_l1(),
        'classification': focal()
    },
                  optimizer=keras.optimizers.Adam(lr=1e-4, clipnorm=0.001))

    model.fit_generator(
        gen.generate(True),
        steps_per_epoch=num_train // BATCH_SIZE,
        validation_data=gen.generate(False),
        # The rest of this call is cut off in the source; the arguments below are a
        # plausible completion (epoch count and callback list are assumptions).
        validation_steps=(len(lines) - num_train) // BATCH_SIZE,
        epochs=50,
        callbacks=[checkpoint, reduce_lr, early_stopping])
Example #2
    #------------------------------------------------------#
    #   The backbone's features are generic, so freezing it speeds up
    #   training and also keeps its weights from being destroyed early on.
    #   Init_Epoch is the starting epoch.
    #   Freeze_Epoch is the last epoch of the frozen stage.
    #   Epoch is the total number of training epochs.
    #   (A sketch of the unfreeze stage follows this example.)
    #------------------------------------------------------#
    if True:
        Init_epoch = 0
        Freeze_epoch = 50
        # batch_size: how many samples are fed to the network per step
        batch_size = 8
        # maximum learning rate
        learning_rate_base = 1e-3

        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

        model.compile(loss={
                    'bbox_reg'  : box_smooth_l1(),
                    'cls'       : conf_loss(),
                    'ldm_reg'   : ldm_smooth_l1()
                },optimizer=keras.optimizers.Adam(lr=learning_rate_base)
        )

        model.fit(gen.generate(False), 
                steps_per_epoch=gen.get_len()//batch_size,
                verbose=1,
                epochs=Freeze_epoch,
                initial_epoch=Init_epoch,
                callbacks=[logging, checkpoint, reduce_lr, early_stopping])
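
The comment block at the top of Example #2 describes training in two stages: a frozen stage up to Freeze_epoch and a fine-tuning stage up to the total epoch count. Neither example shows the second stage, so the following is a minimal sketch of it, reusing the names from Example #2; the total epoch count, the reduced learning rate, and the reuse of the same callbacks are assumptions.

    # Unfreeze stage (sketch, assumptions noted above): make the backbone trainable
    # again, recompile so the change takes effect, then continue training.
    for layer in model.layers:
        layer.trainable = True

    model.compile(loss={
                'bbox_reg'  : box_smooth_l1(),
                'cls'       : conf_loss(),
                'ldm_reg'   : ldm_smooth_l1()
            }, optimizer=keras.optimizers.Adam(lr=learning_rate_base / 10))  # lower LR for fine-tuning (assumption)

    model.fit(gen.generate(False),
              steps_per_epoch=gen.get_len() // batch_size,
              verbose=1,
              epochs=100,                # total epochs ("Epoch"); value assumed
              initial_epoch=Freeze_epoch,
              callbacks=[logging, checkpoint, reduce_lr, early_stopping])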