Example #1
import tensorflow as tf

def train_step(imgs, targets1, targets2, targets3, net, optimizer):
    with tf.GradientTape() as tape:
        # Forward pass, then compute the three losses: box regression,
        # classification confidence, and landmark regression.
        prediction = net(imgs, training=True)
        loss_value1 = box_smooth_l1()(targets1, prediction[0])
        loss_value2 = conf_loss()(targets2, prediction[1])
        loss_value3 = ldm_smooth_l1()(targets3, prediction[2])
        loss_value = loss_value1 + loss_value2 + loss_value3
    # Backward pass: differentiate the summed loss and update the weights.
    grads = tape.gradient(loss_value, net.trainable_variables)
    optimizer.apply_gradients(zip(grads, net.trainable_variables))
    return loss_value
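
As a usage note, here is a minimal sketch of how this train_step could be driven from a custom loop. The names `dataset` and `net`, the epoch count, and the optimizer settings are illustrative assumptions, not code from the project; box_smooth_l1, conf_loss, and ldm_smooth_l1 are the loss constructors used above.

# Hypothetical driver loop; `dataset` is assumed to yield batches shaped
# as (imgs, targets1, targets2, targets3), matching train_step's signature.
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)
for epoch in range(50):
    for imgs, targets1, targets2, targets3 in dataset:
        loss_value = train_step(imgs, targets1, targets2, targets3, net, optimizer)
    print("epoch %d: last-batch loss = %.4f" % (epoch, float(loss_value)))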
Example #2
    #   Init_Epoch is the starting epoch
    #   Freeze_Epoch is the last epoch of the frozen-training phase
    #   Epoch is the total number of training epochs
    #------------------------------------------------------#
    if True:
        Init_epoch = 0
        Freeze_epoch = 50
        # batch_size: number of samples fed to the network per step
        batch_size = 8
        # maximum learning rate
        learning_rate_base = 1e-3

        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

        model.compile(loss={
                    'bbox_reg'  : box_smooth_l1(),
                    'cls'       : conf_loss(),
                    'ldm_reg'   : ldm_smooth_l1()
                }, optimizer=keras.optimizers.Adam(learning_rate=learning_rate_base)
        )

        model.fit(gen.generate(False), 
                steps_per_epoch=gen.get_len()//batch_size,
                verbose=1,
                epochs=Freeze_epoch,
                initial_epoch=Init_epoch,
                callbacks=[logging, checkpoint, reduce_lr, early_stopping])

    # Unfreeze the previously frozen layers for the fine-tuning phase
    for i in range(freeze_layers):
        model.layers[i].trainable = True

    if True:
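
Example #2 is cut off at the second `if True:` block, but the pattern is the standard two-phase transfer-learning recipe: freeze the first `freeze_layers` layers, train only the new heads, then unfreeze everything and fine-tune. A minimal sketch of the freezing step that would precede the frozen phase above (an assumption inferred from the unfreeze loop, not code from the excerpt):

    # Assumed freezing step before the frozen-training phase: the first
    # `freeze_layers` layers keep their pretrained weights fixed.
    for i in range(freeze_layers):
        model.layers[i].trainable = False
    # Note: changes to .trainable only take effect once the model is
    # (re)compiled, which the frozen phase above does via model.compile.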
Example #3
    #   It can also prevent the weights from being wrecked early in training.
    #   Init_Epoch is the starting epoch
    #   Freeze_Epoch is the last epoch of the frozen-training phase
    #   Epoch is the total number of training epochs
    #   If you hit an OOM error or run out of GPU memory, reduce Batch_size
    #------------------------------------------------------#
    if True:
        batch_size = 8
        Init_epoch = 0
        Freeze_epoch = 50
        learning_rate_base = 1e-3

        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

        model.compile(
            loss={
                'bbox_reg': box_smooth_l1(weights=cfg['loc_weight']),
                'cls': conf_loss(),
                'ldm_reg': ldm_smooth_l1()
            },
            optimizer=keras.optimizers.Adam(learning_rate=learning_rate_base))

        # Note: fit_generator is deprecated in recent Keras; model.fit
        # accepts generators/Sequences directly, as in Example #2 above.
        model.fit_generator(
            gen,
            steps_per_epoch=gen.get_len() // batch_size,
            verbose=1,
            epochs=Freeze_epoch,
            initial_epoch=Init_epoch,
            callbacks=[logging, checkpoint, reduce_lr, early_stopping])

    # Unfreeze the previously frozen layers for the fine-tuning phase
    for i in range(freeze_layers):
        model.layers[i].trainable = True
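
These excerpts stop before the fine-tuning phase itself. A hedged sketch of what that second call typically looks like in this two-phase recipe: a lower learning rate, often a smaller batch_size (per the OOM note above), and epochs continuing from Freeze_epoch. All values below are illustrative assumptions, not the project's actual settings; the API calls mirror Example #2.

    # Hypothetical fine-tuning phase (all values illustrative):
    if True:
        batch_size = 4                 # smaller, since all layers now train
        learning_rate_fine = 1e-4      # lower LR to avoid wrecking the weights
        Epoch = 100                    # assumed total epochs across both phases

        gen = Generator(training_dataset_path, img_dim, batch_size, bbox_util)

        model.compile(loss={
            'bbox_reg': box_smooth_l1(weights=cfg['loc_weight']),
            'cls': conf_loss(),
            'ldm_reg': ldm_smooth_l1()
        }, optimizer=keras.optimizers.Adam(learning_rate=learning_rate_fine))

        model.fit(gen.generate(False),
                steps_per_epoch=gen.get_len() // batch_size,
                verbose=1,
                epochs=Epoch,
                initial_epoch=Freeze_epoch,
                callbacks=[logging, checkpoint, reduce_lr, early_stopping])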