# Exemplo n.º 1  (scraped example-page header; commented out so it is not parsed as code)
# 0  (scraped artifact — example score/vote count from the source page)
# ------------ val data ------------
# Validation input pipeline: resize -> tensor conversion -> channel-wise
# normalization.  NOTE(review): Compose/Resize/ToTensor/Normalize,
# Dataset_Type, Dataset_Path, resize_shape, mean, std, dataset_name,
# device, exp_cfg, DataLoader, optim, PolyLR, LaneNet and torch are all
# defined/imported earlier in the file (not visible in this chunk).
transform_val = Compose(Resize(resize_shape), ToTensor(),
                        Normalize(mean=mean, std=std))
val_dataset = Dataset_Type(Dataset_Path[dataset_name], "val", transform_val)
# Dataset supplies its own collate function — presumably because lane
# annotations are variable-length per image; confirm against the dataset class.
val_loader = DataLoader(val_dataset,
                        batch_size=8,
                        collate_fn=val_dataset.collate,
                        num_workers=4)

# ------------ preparation ------------
net = LaneNet(pretrained=True, **exp_cfg['model'])  # model hyper-params come from the experiment config
net = net.to(device)
net = torch.nn.DataParallel(net)  # single-process multi-GPU replication

# SGD; lr/momentum/weight-decay etc. are taken from exp_cfg['optim'].
optimizer = optim.SGD(net.parameters(), **exp_cfg['optim'])
# Polynomial LR decay with power 0.9 over exp_cfg['MAX_ITER'] iterations.
lr_scheduler = PolyLR(optimizer, 0.9, exp_cfg['MAX_ITER'])
best_val_loss = 1e6  # sentinel "infinity"; lowered whenever validation improves


def train(epoch):
    """Run one training epoch over ``train_loader``.

    NOTE(review): this definition is truncated in this snippet — the body
    continues past the last visible line; only the setup is shown here.

    Args:
        epoch: epoch index, used only for the progress printout here.
    """
    print("Train Epoch: {}".format(epoch))
    net.train()  # enable training-mode behavior (dropout, batch-norm updates)
    # Running sums of the loss terms, accumulated per batch below (not visible).
    train_loss = 0
    train_loss_bin_seg = 0  # presumably binary-segmentation loss term — confirm against LaneNet
    train_loss_var = 0      # presumably embedding variance (pull) term
    train_loss_dist = 0     # presumably embedding distance (push) term
    train_loss_reg = 0      # presumably regularization term

    progressbar = tqdm(range(len(train_loader)))
# Exemplo n.º 2  (scraped example-page header; commented out so it is not parsed as code)
# 0  (scraped artifact — example score/vote count from the source page)
# NOTE(review): the next two lines are an orphaned fragment — the opening of
# the DataLoader(...) call they belong to was lost when this snippet was
# extracted.  They are kept byte-identical here.
                        collate_fn=val_dataset.collate,
                        num_workers=4)

# Tests
# Optional quick-test mode: shrink both loaders to a fraction of the data.
testing = False
test_size = 0.1  # fraction of each loader kept when testing is enabled
if testing:
    train_loader = Subsample(train_loader, test_size)
    val_loader = Subsample(val_loader, test_size)

# ------------ preparation ------------
net = LaneNet(pretrained=True, **exp_cfg['model'])  # model hyper-params come from the experiment config
net = net.to(device)
net = torch.nn.DataParallel(net)  # single-process multi-GPU replication

# Adam chosen over SGD (original choice kept commented out below);
# hyper-params are taken from exp_cfg['optim'].
optimizer = optim.Adam(net.parameters(), **exp_cfg['optim'])
# optimizer = optim.SGD(net.parameters(), **exp_cfg['optim'])
# Polynomial LR decay with power 0.9 over exp_cfg['MAX_ITER'] iterations.
lr_scheduler = PolyLR(optimizer, 0.9, exp_cfg['MAX_ITER'])
best_val_loss = 1e6  # sentinel "infinity"; lowered whenever validation improves


def train(epoch):
    """Run one training epoch over ``train_loader``.

    NOTE(review): this definition is truncated in this snippet — the body
    continues past the last visible line; only the setup is shown here.

    Args:
        epoch: epoch index, used only for the progress printout here.
    """
    print("Train Epoch: {}".format(epoch))
    net.train()  # enable training-mode behavior (dropout, batch-norm updates)
    # Running sums of the loss terms, accumulated per batch below (not visible).
    train_loss = 0
    train_loss_bin_seg = 0  # presumably binary-segmentation loss term — confirm against LaneNet
    train_loss_var = 0      # presumably embedding variance (pull) term
    train_loss_dist = 0     # presumably embedding distance (push) term
    train_loss_reg = 0      # presumably regularization term

    progressbar = tqdm(range(len(train_loader)))