def _build_trainer(config_file: Dict) -> "Trainer":
    """Build train/val DataLoaders from *config_file*, run training, return the trainer.

    Args:
        config_file: Config mapping with ``"train"`` and ``"val"`` sub-configs
            (each consumable by ``PairedDataset.from_config``) and a
            ``"batch_size"`` entry.

    Returns:
        The ``Trainer`` instance after ``train()`` has completed.
    """
    train = DataLoader(
        PairedDataset.from_config(config_file["train"]),
        config_file["batch_size"],
        shuffle=True,
        drop_last=False,
    )
    # Validation data must not be shuffled: evaluation metrics are
    # order-independent, and a fixed order keeps runs deterministic
    # and comparable across epochs/checkpoints.
    val = DataLoader(
        PairedDataset.from_config(config_file["val"]),
        config_file["batch_size"],
        shuffle=False,
        drop_last=False,
    )
    trainer = Trainer(config_file, train=train, val=val)
    trainer.train()
    return trainer
# Build the train/val transform pipelines from the config.
train_transform = get_transforms(config["train"]["transform"])
val_transform = get_transforms(config["val"]["transform"])

train_ds = PascalCRFSegmentationDataset(
    config["train"]["path"],
    transform=train_transform,
    image_set="train",
    masks_folder=config["train"]["masks"],
    scale_factor=config["crf"]["scale_factor"],
)
# NOTE(review): image_set="validation" — the usual Pascal VOC split name is
# "val"; confirm against PascalCRFSegmentationDataset's accepted values.
val_ds = PascalCRFSegmentationDataset(
    config["val"]["path"],
    transform=val_transform,
    image_set="validation",
    masks_folder=config["val"]["masks"],
    scale_factor=config["crf"]["scale_factor"],
)

# Worker count is configurable; 12 remains the default for existing configs.
num_workers = config.get("num_workers", 12)
train_dl = torch.utils.data.DataLoader(
    train_ds,
    batch_size=config["batch_size"],
    shuffle=True,
    num_workers=num_workers,
)
# Do not shuffle validation data: evaluation is order-independent, and a
# fixed order keeps validation runs deterministic.
val_dl = torch.utils.data.DataLoader(
    val_ds,
    batch_size=config["batch_size"],
    shuffle=False,
    num_workers=num_workers,
)

trainer = Trainer(config, train_dl, val_dl)
trainer.train()