limit_val_num_samples=100 if debug else None)

# accumulation_steps = 8

prepare_batch = prepare_batch_fp32


# Image denormalization function to plot predictions with images
def img_denormalize(nimg):
    img = denormalize(nimg, mean=mean, std=std)
    # The network input has 5 channels; keep only the first three (assumed RGB) for plotting
    return img[:3, :, :]


#################### Model ####################

model = LWRefineNet(num_channels=5, num_classes=num_classes)

#################### Solver ####################

num_epochs = 100

criterion = nn.CrossEntropyLoss(weight=torch.tensor([0.1, 3.0]))
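# Note: the class weights above (0.1 for class 0, 3.0 for class 1) strongly emphasize
# the positive class; reading class 0 as background is an interpretation, not stated here.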

lr = 0.05
weight_decay = 5e-4
momentum = 0.9
nesterov = True
optimizer = optim.SGD(model.parameters(),
                      lr=1.0,
                      momentum=momentum,
                      weight_decay=weight_decay,
                      nesterov=nesterov)
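
# Note: the optimizer lr is fixed at 1.0 while a base rate lr = 0.05 is defined above,
# which suggests a scheduler that returns absolute learning-rate values. A minimal
# sketch of that pattern (assumed; the decay exponent and iteration count are illustrative):
#
#   from torch.optim.lr_scheduler import LambdaLR
#   total_iterations = num_epochs * len(train_loader)
#   lr_scheduler = LambdaLR(
#       optimizer,
#       lr_lambda=lambda it: lr * (1.0 - it / total_iterations) ** 0.9)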

#################### Example 2 ####################

    val_batch_size=val_batch_size,
    pin_memory=True,
    train_sampler=train_sampler,
    limit_train_num_samples=100 if debug else None,
    limit_val_num_samples=100 if debug else None)

accumulation_steps = 2

prepare_batch = prepare_batch_fp32

# Image denormalization function to plot predictions with images
img_denormalize = partial(denormalize, mean=mean, std=std)

#################### Model ####################

model = LWRefineNet(num_channels=3, num_classes=num_classes)

#################### Solver ####################

num_epochs = 50

criterion = nn.CrossEntropyLoss(weight=torch.tensor([1.0, 1.0]))

lr = 0.001
weight_decay = 1e-4
optimizer = optim.Adam(model.parameters(), lr=1.0, weight_decay=weight_decay)

le = len(train_loader)


def lambda_lr_scheduler(iteration, lr0, n, a):
    # Polynomial decay of the base learning rate (assumed form, consistent with
    # the optimizer lr of 1.0 acting as a multiplier)
    return lr0 * (1.0 - 1.0 * iteration / n) ** a
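
# A minimal hookup sketch (assumptions: the scheduler supplies the absolute learning
# rate, n spans all training iterations, and a = 0.9 controls the decay shape):
#
#   from functools import partial
#   from torch.optim.lr_scheduler import LambdaLR
#   lr_scheduler = LambdaLR(
#       optimizer,
#       lr_lambda=partial(lambda_lr_scheduler, lr0=lr, n=num_epochs * le, a=0.9))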

#################### Example 3 ####################

std = (5.0, 5.0, 5.0)
max_value = 1.0

transforms = A.Compose(
    [A.Normalize(mean=mean, std=std, max_pixel_value=max_value),
     ToTensorV2()])
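
# Usage sketch (assumption: inputs are HxWxC float arrays scaled consistently with
# max_pixel_value; albumentations pipelines take and return dicts):
#
#   out = transforms(image=image)
#   tensor = out["image"]  # CxHxW tensor, normalized as (x - mean * max) / (std * max)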

_, data_loader, _ = get_train_val_loaders(
    train_ds,
    val_ds,
    train_transforms=transforms,
    val_transforms=transforms,
    batch_size=batch_size,
    num_workers=num_workers,
    val_batch_size=batch_size,
    pin_memory=True,
)

prepare_batch = inference_prepare_batch_f32

# Image denormalization function to plot predictions with images
img_denormalize = partial(denormalize, mean=mean, std=std)

#################### Model ####################

model = LWRefineNet(num_channels=3, num_classes=num_classes)
run_uuid = "bf1fa0a668cd4d4da7de6f2c77b6bebb"
weights_filename = "checkpoint_model_28000.pth"
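# Loading sketch (assumptions: the checkpoint file holds a plain state_dict and has
# already been fetched locally from the run identified by run_uuid):
#
#   state_dict = torch.load(weights_filename, map_location="cpu")
#   model.load_state_dict(state_dict)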

has_targets = True

#################### Example 4 ####################

    pin_memory=True,
    train_sampler=train_sampler,
    limit_train_num_samples=100 if debug else None,
    limit_val_num_samples=100 if debug else None
)

accumulation_steps = 2

prepare_batch = prepare_batch_fp32

# Image denormalization function to plot predictions with images
img_denormalize = partial(denormalize, mean=mean, std=std)

#################### Model ####################

model = LWRefineNet(num_channels=3, num_classes=num_classes)

#################### Solver ####################

num_epochs = 100

names = ["cross entropy loss", "jaccard loss"]
xentropy = nn.CrossEntropyLoss(weight=torch.tensor([0.1, 2.0]))
jaccard_loss = SoftmaxJaccardWithLogitsLoss()
criterion = SumOfLosses([xentropy, jaccard_loss],
                        coeffs=[1.0, 2.0],
                        names=names,
                        total_loss_name="supervised batch loss")

output_names = names + ["supervised batch loss", ]
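# With coeffs=[1.0, 2.0], the combined criterion is effectively
#   1.0 * cross_entropy + 2.0 * jaccard_loss,
# assuming SumOfLosses computes a weighted sum and logs each term under the names above.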

lr = 0.045
weight_decay = 5e-4
momentum = 0.9