def run(self):
    """Segment a single NIfTI instance with a pretrained model.

    Loads the model checkpoint named in the config, builds a one-sample
    dataloader from the configured NIfTI paths, segments the single batch
    and writes the result to ``segmentation.nii.gz`` in ``self.tmpdir``.
    """
    myconfig = self.config

    # Check the patch size isn't too large; if so, make it smaller.
    patchsize = set_patch_size(myconfig)

    # Load the pretrained model from the source step.
    loc_model = myconfig["Model path"]

    dataset = SingleInstanceDataset(
        myconfig['Nifti paths'],
        brainmask_path=myconfig['Brainmask path'],
        transform=ToTensor())
    dataloader = DataLoader(dataset, batch_size=1, num_workers=0,
                            shuffle=False)
    # Plot and save samples of the (single) mini-batch for inspection.
    logDataLoader(dataloader, self.tmpdir)

    evaluator = StandardEvaluator.loadFromCheckpoint(loc_model)

    sample_batched = next(iter(dataloader))
    images = sample_batched["data"]
    header_sources = sample_batched["header_source"]
    resultpaths = [os.path.join(self.tmpdir, 'segmentation.nii.gz')]
    evaluator.segmentNifti(images, header_sources, patchsize, resultpaths)
    # Fixed garbled log message (was: 'Nifti image segmented for.').
    self.logger.info('Nifti image segmented.')
    self.tearDown()
def getDataloader(self):
    """Build training and testing dataloaders for the Brats18 dataset.

    Returns:
        tuple: ``(trainloader, testloader)``; ``testloader`` is ``None``
        when the config specifies no test splits.
    """
    n_workers = self.config['Num Workers']
    batchsize = self.config['Batch size']
    sequences = self.config["Sequences"]
    loc = os.path.join(self.datadir, self.config["Nifti Source"])

    # Locate, organize and split the dataset.
    dataset = Brats18.fromFile(loc)

    # Data specifics
    splits = self.config['Splits']
    testsplits = self.config['Testsplits']
    dataset.saveSplits(self.tmpdir)

    # Check the patch size isn't too large; if so, make it smaller.
    # (Consistent with the other getDataloader variants, which clamp via
    # set_patch_size instead of using the raw config value.)
    targetsize = tuple(set_patch_size(self.config))
    imgsize = targetsize

    # Input transforms: random patch crop, then conversion to tensors.
    transform = Compose([RandomCrop(output_size=imgsize), ToTensor()])

    # Prepare the training set loader.
    trainset = dataset.getDataset(splits, sequences, transform=transform)
    trainset.saveListOfPatients(os.path.join(self.tmpdir, 'trainset.json'))
    self.logger.info('Generating patches with input size ' + str(imgsize)
                     + ' and outputsize ' + str(targetsize))
    trainloader = DataLoader(trainset, batch_size=batchsize,
                             num_workers=n_workers, shuffle=True)

    # Prepare the testing set loader (optional).
    if len(testsplits) > 0:
        testset = dataset.getDataset(testsplits, sequences,
                                     transform=transform)
        testloader = DataLoader(testset, batch_size=batchsize,
                                num_workers=n_workers, shuffle=True)
    else:
        testloader = None

    # Plot and save samples of a mini-batch.
    logDataLoader(trainloader, self.tmpdir)
    return trainloader, testloader
def getDataloader(self):
    """Build training and testing dataloaders for the configured dataset.

    Supports the Brats18, BTD and Hippocampus datasets, optional split
    files or cross-validation splits, whole-tumor binarization, and a
    brainmask segmentation target.

    Returns:
        tuple: ``(trainloader, testloader)``; ``testloader`` is ``None``
        when the config specifies no test splits.

    Raises:
        ValueError: if ``config['Dataset']`` names an unsupported dataset.
    """
    batchsize = self.config['Batch size']
    loc = os.path.join(self.datadir, self.config["Nifti Source"])
    sequences = (self.config["Sequences"]
                 if "Sequences" in self.config else None)

    dataset_name = self.config['Dataset']
    if dataset_name == 'Brats18':
        dataset = Brats18.fromFile(loc)
    elif dataset_name == 'BTD':
        # Any value other than an explicit False enables the brainmask.
        dataset = BTD.fromFile(loc,
                               brainmask=self.config['Brainmask'] != False)
    elif dataset_name == 'Hippocampus':
        dataset = Hippocampus.fromFile(loc)
    else:
        # Previously an unknown name fell through and crashed later with
        # an opaque NameError on 'dataset'; fail fast with a clear error.
        raise ValueError('Unknown dataset: ' + str(dataset_name))

    # Optional predefined splits or cross-validation splits.
    if "Splits from File" in self.config:
        dataset.setSplits(self.config["Splits from File"])
    elif "Crossval Splits" in self.config:
        dataset.createCVSplits(self.config["Crossval Splits"])

    #### Data specifics
    splits = self.config['Splits']
    testsplits = self.config['Testsplits']
    dataset.saveSplits(self.tmpdir)

    # Check the patch size isn't too large; if so, make it smaller.
    targetsize = tuple(set_patch_size(self.config))
    imgsize = targetsize

    transforms = [RandomCrop(output_size=imgsize), ToTensor()]
    if 'Whole Tumor' in self.config and self.config["Whole Tumor"]:
        # Collapse all tumor labels into a single binary segmentation.
        transforms = [BinarySegmentation()] + transforms
    transform = Compose(transforms)

    # When the target is the brainmask, use the dedicated dataset view
    # (computed once; the original evaluated this condition twice).
    brainmask_target = ('Target' in self.config
                        and self.config['Target'] == 'Brainmask')
    if brainmask_target:
        trainset = dataset.getBrainmaskDataset(splits, sequences,
                                               transform=transform)
    else:
        trainset = dataset.getDataset(splits, sequences,
                                      transform=transform)
    trainset.saveListOfPatients(os.path.join(self.tmpdir, 'trainset.json'))
    self.logger.info('Generating patches with input size ' + str(imgsize)
                     + ' and outputsize ' + str(targetsize))

    n_workers = self.config['Num Workers']
    trainloader = DataLoader(trainset, batch_size=batchsize,
                             num_workers=n_workers, shuffle=True)

    if len(testsplits) > 0:
        if brainmask_target:
            testset = dataset.getBrainmaskDataset(testsplits, sequences,
                                                  transform=transform)
        else:
            testset = dataset.getDataset(testsplits, sequences,
                                         transform=transform)
        testloader = DataLoader(testset, batch_size=batchsize,
                                num_workers=n_workers, shuffle=True)
    else:
        testloader = None

    # Plot and save samples of a mini-batch.
    logDataLoader(trainloader, self.tmpdir)
    return trainloader, testloader
def getDataloader(self):
    """Build train/test dataloaders, forwarding preprocessing config.

    Like the other ``getDataloader`` variants, but passes the config as
    ``preprocess_config`` so the normalization technique, Otsu-ROI flag
    and resampling factor reach the dataset, and supports additional
    datasets (LipoData, LitsData, ErgoData).

    Returns:
        tuple: ``(trainloader, testloader)``; ``testloader`` is ``None``
        when the config specifies no test splits.

    Raises:
        ValueError: if ``config['Dataset']`` names an unsupported dataset.
    """
    batchsize = self.config['Batch size']
    loc = os.path.join(self.datadir, self.config["Nifti Source"])
    sequences = (self.config["Sequences"]
                 if "Sequences" in self.config else None)

    # Report the specialized preprocessing settings loaded from file.
    print("normalization technique is:", self.config["technique"])
    print("using otsu threshold for normalization:",
          self.config["using_otsu_ROI"])
    print("resampling factor is:", self.config["resampling_factor"])

    dataset_name = self.config['Dataset']
    if dataset_name == 'Brats18':
        dataset = Brats18.fromFile(loc)
    elif dataset_name == 'BTD':
        # Any value other than an explicit False enables the brainmask.
        dataset = BTD.fromFile(loc,
                               brainmask=self.config['Brainmask'] != False)
    elif dataset_name == 'Hippocampus':
        dataset = Hippocampus.fromFile(loc)
    elif dataset_name == 'LipoData':
        dataset = LipoData.fromFile(loc)
    elif dataset_name == 'LitsData':
        dataset = LitsData.fromFile(loc)
    elif dataset_name == 'ErgoData':
        dataset = ErgoData.fromFile(loc)
    else:
        # Previously an unknown name fell through and crashed later with
        # an opaque NameError on 'dataset'; fail fast with a clear error.
        raise ValueError('Unknown dataset: ' + str(dataset_name))

    # Optional predefined splits or cross-validation splits.
    if "Splits from File" in self.config:
        dataset.setSplits(self.config["Splits from File"])
    elif "Crossval Splits" in self.config:
        dataset.createCVSplits(self.config["Crossval Splits"])

    #### Data specifics
    splits = self.config['Splits']
    testsplits = self.config['Testsplits']
    dataset.saveSplits(self.tmpdir)

    # Check the patch size isn't too large; if so, make it smaller.
    targetsize = tuple(set_patch_size(self.config))
    imgsize = targetsize

    transforms = [RandomCrop(output_size=imgsize), ToTensor()]
    if 'Whole Tumor' in self.config and self.config["Whole Tumor"]:
        # Collapse all tumor labels into a single binary segmentation.
        transforms = [BinarySegmentation()] + transforms
    transform = Compose(transforms)

    # When the target is the brainmask, use the dedicated dataset view
    # (computed once; the original evaluated this condition twice).
    brainmask_target = ('Target' in self.config
                        and self.config['Target'] == 'Brainmask')
    if brainmask_target:
        trainset = dataset.getBrainmaskDataset(splits, sequences,
                                               transform=transform)
    else:
        trainset = dataset.getDataset(splits, sequences,
                                      transform=transform,
                                      preprocess_config=self.config)
    trainset.saveListOfPatients(os.path.join(self.tmpdir, 'trainset.json'))
    self.logger.info('Generating patches with input size ' + str(imgsize)
                     + ' and outputsize ' + str(targetsize))

    n_workers = self.config['Num Workers']
    now = datetime.now()
    print('train loader is initializing', now)
    trainloader = DataLoader(trainset, batch_size=batchsize,
                             num_workers=n_workers, shuffle=True)
    later = datetime.now()
    print('train loader is done initializing', later - now)

    if len(testsplits) > 0:
        if brainmask_target:
            testset = dataset.getBrainmaskDataset(testsplits, sequences,
                                                  transform=transform)
        else:
            testset = dataset.getDataset(testsplits, sequences,
                                         transform=transform,
                                         preprocess_config=self.config)
        testloader = DataLoader(testset, batch_size=batchsize,
                                num_workers=n_workers, shuffle=True)
    else:
        testloader = None

    # Plot and save samples of a mini-batch.
    logDataLoader(trainloader, self.tmpdir)
    return trainloader, testloader