Example #1
        Freeze_epoch = 50
        learning_rate_base = 1e-3

        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

        model.compile(loss={
            'bbox_reg': box_smooth_l1(weights=cfg['loc_weight']),
            'cls': conf_loss(),
            'ldm_reg': ldm_smooth_l1()
        },
                      optimizer=keras.optimizers.Adam(lr=learning_rate_base))

        model.fit_generator(
            gen,
            steps_per_epoch=gen.get_len() // batch_size,
            verbose=1,
            epochs=Freeze_epoch,
            initial_epoch=Init_epoch,
            callbacks=[logging, checkpoint, reduce_lr, early_stopping])

    # Unfreeze the backbone layers before the second training stage
    for i in range(freeze_layers):
        model.layers[i].trainable = True

    if True:
        batch_size = 4
        Freeze_epoch = 50
        Epoch = 100
        learning_rate_base = 1e-4

        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)
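
The callbacks passed to fit_generator above (logging, checkpoint, reduce_lr, early_stopping) are created earlier in the training script and are not shown in this excerpt. A minimal sketch of how they are typically built from the standard Keras callback classes; the log directory, checkpoint filename pattern, and patience values below are assumptions, not values taken from the original code.

from keras.callbacks import (TensorBoard, ModelCheckpoint, ReduceLROnPlateau,
                             EarlyStopping)

# Write TensorBoard logs for the training curves (log directory is assumed)
logging = TensorBoard(log_dir='logs')
# Save the weights every epoch; the filename pattern is an assumption
checkpoint = ModelCheckpoint('logs/ep{epoch:03d}-loss{loss:.3f}.h5',
                             monitor='loss',
                             save_weights_only=True,
                             save_best_only=False)
# Halve the learning rate when the training loss stops improving
reduce_lr = ReduceLROnPlateau(monitor='loss', factor=0.5, patience=2, verbose=1)
# Stop training if the loss has not improved for several epochs
early_stopping = EarlyStopping(monitor='loss', min_delta=0, patience=6, verbose=1)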
Example #2
        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

        model.compile(loss={
            'bbox_reg': box_smooth_l1(weights=cfg['loc_weight']),
            'cls': conf_loss(),
            'ldm_reg': ldm_smooth_l1()
        },
                      optimizer=keras.optimizers.Adam(lr=learning_rate_base))

        model.fit_generator(
            gen,
            steps_per_epoch=gen.get_len() // batch_size,
            verbose=1,
            epochs=Freeze_epoch,
            initial_epoch=Init_epoch,
            # Enabling multithreading speeds up data loading.
            # workers=4,
            # use_multiprocessing=True,
            callbacks=[
                logging, checkpoint, reduce_lr, early_stopping, loss_history
            ])

    for i in range(freeze_layers):
        model.layers[i].trainable = True

    if True:
        batch_size = 4
        Freeze_epoch = 50
        Epoch = 100
        learning_rate_base = 1e-4
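
Unlike the first example, this snippet also passes a loss_history callback, which is not defined in the excerpt; it is presumably a custom callback that records the loss after each epoch. A minimal sketch under that assumption (the class name, file name, and format below are illustrative):

import os

import keras


class LossHistory(keras.callbacks.Callback):
    # Assumed minimal loss logger: appends the epoch loss to a text file.
    def __init__(self, log_dir='logs'):
        super(LossHistory, self).__init__()
        self.log_dir = log_dir
        self.losses = []
        os.makedirs(self.log_dir, exist_ok=True)

    def on_epoch_end(self, epoch, logs=None):
        loss = (logs or {}).get('loss', 0.0)
        self.losses.append(loss)
        with open(os.path.join(self.log_dir, 'epoch_loss.txt'), 'a') as f:
            f.write('%d,%f\n' % (epoch, loss))


loss_history = LossHistory('logs')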
Example #3
                              epoch_size, gen, Freeze_epoch, cfg)
        else:
            model.compile(
                loss={
                    'bbox_reg': box_smooth_l1(weights=cfg['loc_weight']),
                    'cls': conf_loss(),
                    'ldm_reg': ldm_smooth_l1()
                },
                optimizer=keras.optimizers.Adam(lr=learning_rate_base))

            model.fit_generator(
                generator=gen,
                steps_per_epoch=epoch_size,
                epochs=Freeze_epoch,
                initial_epoch=Init_epoch,
                use_multiprocessing=num_workers > 1,
                workers=num_workers,
                callbacks=[
                    logging, checkpoint, reduce_lr, early_stopping,
                    loss_history
                ])

    if Freeze_Train:
        for i in range(freeze_layers):
            model.layers[i].trainable = True

    if True:
        #----------------------------------------------------#
        #   Unfreeze-stage training parameters
        #   At this point the model's backbone is no longer frozen, so the feature-extraction network will change
        #   GPU memory usage is higher, and all of the network's parameters are updated
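
The third excerpt is cut off right after this comment block. Following the pattern of the first two examples, the unfreeze stage normally recompiles the model with the lower learning rate and continues training from Freeze_epoch up to Epoch; the sketch below mirrors that pattern and is not the original continuation of this file.

batch_size = 4
learning_rate_base = 1e-4

gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

model.compile(loss={
    'bbox_reg': box_smooth_l1(weights=cfg['loc_weight']),
    'cls': conf_loss(),
    'ldm_reg': ldm_smooth_l1()
},
              optimizer=keras.optimizers.Adam(lr=learning_rate_base))

# Resume from the end of the freeze stage and train the whole network
model.fit_generator(gen,
                    steps_per_epoch=gen.get_len() // batch_size,
                    verbose=1,
                    epochs=Epoch,
                    initial_epoch=Freeze_epoch,
                    callbacks=[
                        logging, checkpoint, reduce_lr, early_stopping,
                        loss_history
                    ])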