Code example #1
File: train.py  Project: SakuraRiven/DCLNet
import os
import sys
import time

import torch
from torch import optim
from torch.nn import DataParallel
from torch.optim import lr_scheduler
from torch.utils.data import DataLoader

# Project-local modules (import paths assumed, not shown in the excerpt):
# from model import ResNetUNet
# from dataset import SynthTextDataset, FinetuneDataset
# from loss import Loss_OHEM


def train(cfg):
    # Build the model and optionally warm-start it from a checkpoint.
    model = ResNetUNet(pretrain=cfg.imagenet_pretrain, backbone=cfg.backbone)
    if cfg.pretrained_pth:
        model.load_state_dict(
            torch.load(cfg.pretrained_pth, map_location='cpu'))
    if torch.cuda.device_count() > 1:
        model = DataParallel(model)
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    model.to(device)

    trainset = SynthTextDataset(
        cfg) if cfg.mode == 'pretrain' else FinetuneDataset(cfg)
    train_loader = DataLoader(trainset,
                              batch_size=cfg.batch_size,
                              shuffle=True,
                              num_workers=cfg.num_workers,
                              pin_memory=False,
                              drop_last=cfg.drop_last)

    batch_num = len(train_loader)  # mini-batches per epoch; respects drop_last
    criterion = Loss_OHEM()

    optimizer = optim.Adam(model.parameters(),
                           lr=cfg.high_lr,
                           weight_decay=cfg.weight_decay)
    # Cosine anneal from high_lr down to low_lr over the whole run; T_max is in
    # optimizer steps because scheduler.step() is called once per mini-batch below.
    scheduler = lr_scheduler.CosineAnnealingLR(optimizer,
                                               cfg.epoch_iter * batch_num,
                                               cfg.low_lr)

    model.train()
    for epoch in range(cfg.epoch_iter):
        epoch_loss = 0
        epoch_time = time.time()
        for i, (img, text, ignore, rho, theta) in enumerate(train_loader):
            img, text, ignore, rho, theta = list(
                map(lambda x: x.to(device), [img, text, ignore, rho, theta]))
            pred_cls, pred_rho, pred_theta = model(img)
            loss = criterion(text, ignore, rho, theta, pred_cls, pred_rho,
                             pred_theta)

            epoch_loss += loss.item()
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            scheduler.step()

            print('Epoch is [{}/{}], mini-batch is [{}/{}], batch_loss is {:.8f}'.format(
                epoch + 1, cfg.epoch_iter, i + 1, batch_num, loss.item()))
            sys.stdout.flush()

        if (epoch + 1) % cfg.save_interval == 0:
            # Unwrap DataParallel if present so the checkpoint loads on any setup.
            state_dict = (model.module.state_dict()
                          if isinstance(model, DataParallel)
                          else model.state_dict())
            torch.save(
                state_dict,
                os.path.join(cfg.pths_path,
                             'model_epoch_{}.pth'.format(epoch + 1)))
            print(time.asctime(time.localtime(time.time())))

        print('epoch_loss is {:.8f}, epoch_time is {:.8f}'.format(
            epoch_loss / batch_num,
            time.time() - epoch_time))
        print(time.asctime(time.localtime(time.time())))
        print('=' * 50)
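The cfg object consumed by train is never defined in the excerpt. Every field below is read somewhere in the function above, so a minimal stand-in can be sketched as follows; the concrete values (including the backbone name) are illustrative placeholders, not the project's defaults.

from types import SimpleNamespace

# Minimal stand-in for cfg: field names are taken from the function body
# above; the values are placeholders, not the project's defaults.
cfg = SimpleNamespace(
    imagenet_pretrain=True,   # initialize the backbone from ImageNet weights
    backbone='resnet50',
    pretrained_pth='',        # optional checkpoint to warm-start from
    mode='pretrain',          # 'pretrain' -> SynthTextDataset, else FinetuneDataset
    batch_size=8,
    num_workers=4,
    drop_last=True,
    high_lr=1e-3,             # starting LR for cosine annealing
    low_lr=1e-5,              # eta_min that the cosine schedule decays to
    weight_decay=1e-5,
    epoch_iter=600,           # total number of epochs
    save_interval=50,         # checkpoint every N epochs
    pths_path='./pths',
)
train(cfg)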
Code example #2
import torch

# `args` (an argparse namespace) and the project-local helpers used below
# (utils, LungSegDataSet, ResNetUNet, and the per-epoch train function) are
# assumed to be defined at module level in the original file.


def main():
    device = utils.get_device()
    utils.set_seed(args.seed, device)  # set random seed

    dataset = LungSegDataSet(args.datapath)

    net = ResNetUNet(n_class=1).to(device)

    optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad,
                                        net.parameters()),
                                 lr=1e-4)
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
                                                step_size=25,
                                                gamma=0.1)
    trainloader = torch.utils.data.DataLoader(dataset,
                                              batch_size=args.bstrain,
                                              shuffle=True,
                                              num_workers=args.nworkers)
    for epoch in range(args.maxepoch):
        net = train(epoch, net, trainloader, optimizer, device)
        scheduler.step()  # step after the epoch's optimizer updates (PyTorch >= 1.1 order)
    net = net.to('cpu')
    state = net.state_dict()
    torch.save(state, 'lungseg_net2.pth')
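The per-epoch train helper invoked in the loop above is not part of the excerpt; only its signature and the fact that it returns the network are visible. A minimal sketch of what such a helper might look like, assuming a single-channel binary mask target and BCE-with-logits loss (both assumptions):

import torch.nn.functional as F

def train(epoch, net, trainloader, optimizer, device):
    # Sketch only: one pass over the loader with a binary segmentation loss.
    # The signature and return value match the call site above; the body is assumed.
    net.train()
    running_loss = 0.0
    for img, mask in trainloader:
        img, mask = img.to(device), mask.to(device)
        optimizer.zero_grad()
        logits = net(img)  # (N, 1, H, W) since the model was built with n_class=1
        loss = F.binary_cross_entropy_with_logits(logits, mask)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
    print('epoch {}: mean loss {:.6f}'.format(epoch, running_loss / len(trainloader)))
    return net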
Code example #3
        # The excerpt opens mid-way through a dict of dataloaders; only the
        # validation entry is visible.
        DataLoader(val_set,
                   batch_size=args['batch_size'],
                   shuffle=False,
                   num_workers=args['n_work'],
                   pin_memory=args['pin'])
    }

    model = ResNetUNet(args['num_class']).to(device)

    # if args['initialization'] == 'xavier':
    # 	model._initialize_()
    # elif args['initialization'] == 'kaiming':
    # 	model._kaiming_initialize_()

    if args['optimizer_choice'] == "SGD":
        optimizer_ft = torch.optim.SGD(model.parameters(),
                                       lr=0.001,
                                       momentum=0.9)

    elif args['optimizer_choice'] == "Adam":
        optimizer_ft = torch.optim.Adam(filter(lambda p: p.requires_grad,
                                               model.parameters()),
                                        lr=1e-2)
    else:
        sys.exit('Unknown optimizer_choice: expected "SGD" or "Adam"')

    if args['lr_scheduler'] == "ReduceLROnPlateau":
        exp_lr_scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
            optimizer_ft,
            mode='min',
            factor=0.1,
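The excerpt breaks off inside the ReduceLROnPlateau constructor. Unlike the StepLR and CosineAnnealingLR schedules in the examples above, this scheduler must be stepped with a monitored metric; a typical usage pattern (a sketch with hypothetical helpers, not this project's code) looks like:

# Sketch only: ReduceLROnPlateau is stepped with a validation metric rather
# than unconditionally. train_one_epoch and evaluate are hypothetical helpers.
for epoch in range(num_epochs):
    train_one_epoch(model, optimizer_ft)
    val_loss = evaluate(model)
    exp_lr_scheduler.step(val_loss)  # LR shrinks by `factor` once val_loss plateaus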
Code example #4
File: run.py  Project: hoainv99/seg_lung
from tools.config import Cfg
from train import Trainer
from model import ResNetUNet
from torch.optim import Adam
import torch
config = Cfg.load_config_from_file('config/config.yml')
params = {
    'print_every': 10,
    'valid_every': 5 * 10,
    'iters': 100000,
    'n_classes': 2
}

config['trainer'].update(params)
model = ResNetUNet(config['trainer']['n_classes'])
optimizer = Adam(model.parameters(), lr=1e-6)
trainer = Trainer(config, model, optimizer, pretrained=False)
x, y = next(iter(trainer.train_data_loader))  # fetch one batch as a quick sanity check
trainer.train()
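The next(iter(...)) line above pulls a single batch before training starts, which serves as a smoke test of the data pipeline. To make the check report something, one could print the batch shapes; the (N, C, H, W) image layout is an assumption about this dataset:

x, y = next(iter(trainer.train_data_loader))
# Assumed layout: images (N, C, H, W), targets (N, ...).
print('images:', tuple(x.shape), 'targets:', tuple(y.shape))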