def __init__(self, activation: str = None, reduction: Reduction = Reduction('mean')):
    super().__init__('jaccard', func=multiclass_jaccard, activation=activation, reduction=reduction)
def __init__(self, name: str, func: callable, activation: str = None, reduction: Reduction = Reduction('mean')):
    super().__init__(name, func, activation)
    self._reduction = reduction
def __init__(self, activation: str = None, reduction: Reduction = Reduction('mean')):
    super().__init__('dice', func=multiclass_dice, activation=activation, reduction=reduction)
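# Usage sketch for the two metrics above. Illustrative only: the tensors and
# the `calc` call are assumptions about the metric base-class API, not part
# of the code shown in this section.
import torch

preds = torch.rand(4, 3, 64, 64)                   # hypothetical sigmoid outputs, (B, C, H, W)
target = (torch.rand(4, 3, 64, 64) > 0.5).float()  # hypothetical binary masks per class

jaccard = MulticlassJaccardMetric(activation=None, reduction=Reduction('mean'))
dice = MulticlassDiceMetric(activation=None, reduction=Reduction('mean'))
# Assumption: the base metric class exposes calc(output, target) -> float
jaccard_value = jaccard.calc(preds, target)
dice_value = dice.calc(preds, target)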
def __init__(self, fold_indices: dict):
    model = self.create_model().cuda()
    indices_dir = os.path.join('data', 'indices')

    train_dts = []
    for indices in fold_indices['train']:
        train_dts.append(create_augmented_dataset(
            is_train=True,
            indices_path=os.path.join(indices_dir, indices + '.npy')))

    val_dts = create_augmented_dataset(
        is_train=False,
        indices_path=os.path.join(indices_dir, fold_indices['val'] + '.npy'))

    self._train_data_producer = DataProducer(
        DatasetsContainer(train_dts), batch_size=self.batch_size,
        num_workers=8).global_shuffle(True).pin_memory(True)
    self._val_data_producer = DataProducer(
        val_dts, batch_size=self.batch_size,
        num_workers=8).global_shuffle(True).pin_memory(True)

    self.train_stage = TrainStage(self._train_data_producer,
                                  SegmentationMetricsProcessor('train'))
    self.val_stage = ValidationStage(self._val_data_producer,
                                     SegmentationMetricsProcessor('validation'))

    loss = BCEDiceLoss(0.5, 0.5, reduction=Reduction('mean')).cuda()
    optimizer = Adam(params=model.parameters(), lr=1e-4)

    super().__init__(model, [self.train_stage, self.val_stage], loss, optimizer)
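# Construction sketch for the training setup above. The enclosing class name
# and the fold file stems are hypothetical; only __init__ is shown in this
# section. Each entry names a .npy indices file expected under data/indices/.
folds = {'train': ['fold0', 'fold1', 'fold2'], 'val': 'fold3'}
config = SegmentationTrainConfig(folds)  # assumed enclosing class name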
def __init__(self, eps: float = 1, activation: str = None, reduction: Reduction = Reduction('mean')):
    super().__init__()
    self._activation = Activation(activation)
    self._reduction = reduction
    self._eps = eps
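# Presumed semantics of the stored fields (the forward pass is not shown here):
# eps smooths the standard soft-Dice ratio so that empty masks do not divide
# by zero, i.e. dice = (2 * (p * t).sum() + eps) / (p.sum() + t.sum() + eps),
# with loss = 1 - dice, reduced across the batch by `reduction`. This is the
# conventional soft-Dice formulation, stated as an assumption about DiceLoss.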
def test_dice(self):
    for batch_reduction in ['sum', 'mean']:
        for multiclass_reduction in ['sum', 'mean']:
            for eps in [1, 1e-7]:
                with self.subTest(batch_reduction=batch_reduction,
                                  multiclass_reduction=multiclass_reduction,
                                  eps=eps):
                    self._test_loss(
                        DiceLoss(eps=eps, reduction=Reduction(batch_reduction)),
                        MulticlassSegmentationLoss(
                            DiceLoss(eps=eps, reduction=Reduction(batch_reduction)),
                            reduction=Reduction(multiclass_reduction)),
                        batch_reduction=batch_reduction,
                        multiclass_reduction=multiclass_reduction,
                        eps=eps)
def __init__(self, stage_name: str, activation: str = None, reduction: Reduction = Reduction('mean')):
    super().__init__()
    self.add_metrics_group(
        MetricsGroup(stage_name)
        .add(MulticlassJaccardMetric(activation=activation, reduction=reduction))
        .add(MulticlassDiceMetric(activation=activation, reduction=reduction)))
def __init__(self, bce_w: float, dice_w: float, eps: float = 1, activation: str = None,
             reduction: Reduction = Reduction('mean'), class_weights: list = None):
    if class_weights is None:
        bce_loss = torch.nn.BCELoss()
    else:
        bce_loss = torch.nn.BCELoss(torch.tensor(class_weights))
    dice_loss = DiceLoss(eps=eps, activation=activation, reduction=reduction)
    super().__init__([bce_loss, dice_loss], [bce_w, dice_w])
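# Minimal usage sketch for BCEDiceLoss. Assumptions: the composed loss is an
# nn.Module whose forward takes (output, target), outputs are already
# sigmoid-activated probabilities in [0, 1] (as torch.nn.BCELoss requires),
# and the result is the weighted sum 0.5 * bce + 0.5 * dice.
import torch

criterion = BCEDiceLoss(0.5, 0.5, reduction=Reduction('mean'))
output = torch.rand(2, 3, 32, 32, requires_grad=True)  # hypothetical probabilities
target = (torch.rand(2, 3, 32, 32) > 0.5).float()      # hypothetical binary masks
loss = criterion(output, target)
loss.backward()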
def __init__(self, base_loss: Module, reduction: Reduction = Reduction('sum')):
    super().__init__()
    self._base_loss = base_loss
    self._reduction = reduction
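# Composition sketch: test_dice above builds exactly this pairing. A binary
# DiceLoss handles each class channel, and MulticlassSegmentationLoss combines
# the per-class values according to its own reduction (an assumption about
# forward(), which is not shown in this section).
per_class = MulticlassSegmentationLoss(DiceLoss(eps=1e-7, reduction=Reduction('mean')),
                                       reduction=Reduction('sum'))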