Example #1
            # Restore best-result bookkeeping from the loaded checkpoint
            best_result = checkpoint['best_result']
            best_epoch = checkpoint['best_epoch']
    # ----- END RESUME ---------

    logger.info(
        "-------------------Train start :{}  {}  {}-------------------".format(
            cfg.BACKBONE.TYPE, cfg.MODULE.TYPE, cfg.TRAIN.COMBINER.TYPE))

    for epoch in range(start_epoch, epoch_number + 1):
        scheduler.step()
        train_acc, train_loss = train_model(
            trainLoader,
            model,
            epoch,
            epoch_number,
            optimizer,
            combiner,
            criterion,
            cfg,
            logger,
            writer=writer,
        )
        model_save_path = os.path.join(
            model_dir,
            "epoch_{}.pth".format(epoch),
        )
        if epoch % cfg.SAVE_STEP == 0:
            torch.save(
                {
                    'state_dict': model.state_dict(),
                    'epoch': epoch,
                    'best_result': best_result,
                    'best_epoch': best_epoch,
                    'scheduler': scheduler.state_dict(),
                    'optimizer': optimizer.state_dict()
                }, model_save_path)
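
The RESUME block truncated at the top of this example presumably mirrors the checkpoint dict saved here. A minimal loading sketch, assuming a hypothetical resume_model_path (not shown in the excerpt) and the keys written by torch.save above:

import torch

# Hypothetical resume path; the excerpt does not show where it comes from.
checkpoint = torch.load(resume_model_path, map_location='cpu')
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
scheduler.load_state_dict(checkpoint['scheduler'])
start_epoch = checkpoint['epoch'] + 1   # resume from the next epoch
best_result = checkpoint['best_result']
best_epoch = checkpoint['best_epoch']
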
Example #2
        "-------------------Train start :{}  {}-------------------".format(
            cfg.BACKBONE.TYPE, cfg.MODULE.TYPE))

    for epoch in range(start_epoch, epoch_number + 1):

        scheduler.step()

        # if epoch>=180:
        #     for param_group in optimizer.param_groups:
        #         param_group['lr'] = 0.00005

        # Report the current learning rate of the first parameter group
        lr = next(iter(optimizer.param_groups))['lr']
        print("learning rate is ", lr)

        train_acc, train_loss = train_model(trainLoader, model, epoch,
                                            epoch_number, device, optimizer,
                                            criterion, cfg, logger)
        model_save_path = os.path.join(
            model_dir,
            "epoch_{}.pth".format(epoch),
        )
        if epoch % cfg.SAVE_STEP == 0:
            torch.save(
                {
                    'state_dict': model.state_dict(),
                    'epoch': epoch,
                    'best_result': best_result,
                    'best_epoch': best_epoch,
                    'scheduler': scheduler.state_dict(),
                    'optimizer': optimizer.state_dict()
                }, model_save_path)
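
Both examples call scheduler.step() at the top of the epoch loop, which matches the pre-1.1 PyTorch API; since PyTorch 1.1 the scheduler is expected to step after the optimizer updates, otherwise the first value of the learning-rate schedule is skipped. A minimal reordering sketch of the loop above, using the same names as the excerpt:

for epoch in range(start_epoch, epoch_number + 1):
    train_acc, train_loss = train_model(trainLoader, model, epoch,
                                        epoch_number, device, optimizer,
                                        criterion, cfg, logger)
    # Step the LR scheduler after the epoch's optimizer updates (PyTorch >= 1.1)
    scheduler.step()
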