Example #1
    def create(
        self,
        arch,
        num_output_channels,
        num_input_channels,
        loss,
        lr,
        optimizer,
        lrsch,
        momentum=0.9,
        weight_decay=5e-4,
        pretrained=False,
        size_input=388,
        num_classes=8,
        backbone='preactresnet',
        num_filters=32,
        breal='real',
        alpha=2,
        beta=2,
    ):
        """
        Create
            -arch (string): architecture
            -loss (string):
            -lr (float): learning rate
            -optimizer (string) :
            -lrsch (string): scheduler learning rate
            -pretrained (bool)
        """
        cfg_opt = {'momentum': momentum, 'weight_decay': weight_decay}
        #cfg_scheduler={ 'step_size':100, 'gamma':0.1  }
        cfg_scheduler = {'mode': 'min', 'patience': 10}
        cfg_model = {'num_filters': num_filters}

        self.num_classes = num_classes

        super(ClassNeuralNet, self).create(
            arch,
            num_output_channels,
            num_input_channels,
            loss,
            lr,
            optimizer,
            lrsch,
            pretrained,
            cfg_opt=cfg_opt,
            cfg_scheduler=cfg_scheduler,
            cfg_model=cfg_model,
        )
        self.size_input = size_input
        self.backbone = backbone
        self.num_filters = num_filters

        self.topk = nloss.TopkAccuracy()

        self.logger_train = Logger('Train', ['loss', 'loss_bce'], ['topk'],
                                   self.plotter)
        self.logger_val = Logger('Val  ', ['loss', 'loss_bce'], ['topk'],
                                 self.plotter)
        self.breal = breal
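The cfg_opt and cfg_scheduler dictionaries carry keyword arguments for the optimizer and learning-rate scheduler; the mode/patience keys match PyTorch's ReduceLROnPlateau. A minimal sketch of how such config dicts are typically unpacked (the base-class wiring is not shown in this snippet, so the mapping below is an assumption):

import torch

# Illustrative only: how cfg_opt / cfg_scheduler dicts typically map onto
# torch.optim objects; the actual wiring lives in the (unshown) base class.
model = torch.nn.Linear(10, 8)  # stand-in for the real architecture
cfg_opt = {'momentum': 0.9, 'weight_decay': 5e-4}
cfg_scheduler = {'mode': 'min', 'patience': 10}

optimizer = torch.optim.SGD(model.parameters(), lr=1e-3, **cfg_opt)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, **cfg_scheduler)

# A plateau scheduler is stepped with the monitored metric after validation.
val_loss = 0.42
scheduler.step(val_loss)
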
Example #2
    def create(
            self,
            arch,
            num_output_channels,
            num_input_channels,
            loss,
            lr,
            optimizer,
            lrsch,
            momentum=0.9,
            weight_decay=5e-4,
            pretrained=False,
            topk=(1, ),
            size_input=128,
    ):
        """
        Create
        Args:
            arch (string): architecture
            num_output_channels,
            num_input_channels,
            loss (string):
            lr (float): learning rate
            momentum,
            optimizer (string) :
            lrsch (string): scheduler learning rate
            pretrained (bool)
        """

        cfg_opt = {'momentum': momentum, 'weight_decay': weight_decay}
        cfg_scheduler = {'step_size': 100, 'gamma': 0.1}

        super(NeuralNetClassifier, self).create(
            arch,
            num_output_channels,
            num_input_channels,
            loss,
            lr,
            optimizer,
            lrsch,
            pretrained,
            cfg_opt=cfg_opt,
            cfg_scheduler=cfg_scheduler,
        )

        self.size_input = size_input
        self.accuracy = nloss.TopkAccuracy(topk)
        self.cnf = nloss.ConfusionMeter(self.num_output_channels,
                                        normalized=True)
        self.visheatmap = gph.HeatMapVisdom(env_name=self.nameproject)

        # Set the graphic visualization
        self.metrics_name = ['top{}'.format(k) for k in topk]
        self.logger_train = Logger('Trn', ['loss'], self.metrics_name,
                                   self.plotter)
        self.logger_val = Logger('Val', ['loss'], self.metrics_name,
                                 self.plotter)
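nloss.TopkAccuracy is assumed to implement the usual top-k accuracy; a self-contained sketch of that metric in plain PyTorch (the function name and helpers below are illustrative, not the library's API):

import torch

def topk_accuracy(output, target, topk=(1,)):
    """Fraction of samples whose true label is among the k highest-scoring classes."""
    maxk = max(topk)
    # Indices of the top-k predictions per sample, shape (batch, maxk).
    _, pred = output.topk(maxk, dim=1, largest=True, sorted=True)
    correct = pred.eq(target.view(-1, 1).expand_as(pred))
    return [correct[:, :k].any(dim=1).float().mean().item() * 100.0 for k in topk]

# Example: 4 samples, 8 classes.
logits = torch.randn(4, 8)
labels = torch.tensor([0, 3, 7, 2])
print(topk_accuracy(logits, labels, topk=(1, 5)))
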
Example #3
    def create(
        self,
        arch,
        num_output_channels,
        num_input_channels,
        loss,
        lr,
        optimizer,
        lrsch,
        momentum=0.9,
        weight_decay=5e-4,
        pretrained=False,
        th=0.0,
        size_input=32,
    ):
        """
        Create
        Args:
            arch (string): architecture
            num_output_channels, 
            num_input_channels,  
            loss (string):
            lr (float): learning rate
            momentum,
            optimizer (string) : 
            lrsch (string): scheduler learning rate
            pretrained (bool)
        """

        cfg_opt = {'momentum': momentum, 'weight_decay': weight_decay}
        cfg_scheduler = {'step_size': 50, 'gamma': 0.1}

        super(NeuralNetClassifier, self).create(
            arch,
            num_output_channels,
            num_input_channels,
            loss,
            lr,
            optimizer,
            lrsch,
            pretrained,
            cfg_opt=cfg_opt,
            cfg_scheduler=cfg_scheduler,
        )

        self.size_input = size_input
        self.accuracy = nloss.MultAccuracyV1(th)
        self.f_score = nloss.F_score(threshold=th, beta=2)

        #self.cnf = nloss.ConfusionMeter( self.num_output_channels, normalized=True )
        #self.visheatmap = gph.HeatMapVisdom( env_name=self.nameproject )

        # Set the graphic visualization
        self.logger_train = Logger('Trn', ['loss'], ['acc', 'f1'],
                                   self.plotter)
        self.logger_val = Logger('Val', ['loss'], ['acc', 'f1'], self.plotter)
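nloss.F_score(threshold=th, beta=2) presumably computes a thresholded multi-label F-beta score, where beta=2 weights recall more heavily than precision; a minimal sketch of that formula (function name, batching, and the thresholding convention are assumptions):

import torch

def f_beta_score(logits, target, threshold=0.0, beta=2.0, eps=1e-9):
    """Multi-label F-beta: recall weighted beta times as heavily as precision."""
    # Whether the library thresholds raw scores or sigmoid probabilities is an
    # assumption; here the threshold is applied to the raw scores.
    pred = (logits > threshold).float()
    tp = (pred * target).sum(dim=1)
    precision = tp / (pred.sum(dim=1) + eps)
    recall = tp / (target.sum(dim=1) + eps)
    b2 = beta ** 2
    return ((1 + b2) * precision * recall / (b2 * precision + recall + eps)).mean()

# Example: 4 samples, 5 labels.
scores = torch.randn(4, 5)
labels = torch.randint(0, 2, (4, 5)).float()
print(f_beta_score(scores, labels, threshold=0.0, beta=2.0))
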
Example #4
    def create(
        self,
        arch,
        num_output_channels,
        num_input_channels,
        loss,
        lr,
        optimizer,
        lrsch,
        momentum=0.9,
        weight_decay=5e-4,
        pretrained=False,
        size_input=388,
        num_classes=8,
    ):
        """
        Create    
        Args:        
            -arch (string): architecture
            -num_output_channels,
            -num_input_channels,  
            -loss (string):
            -lr (float): learning rate
            -optimizer (string) : 
            -lrsch (string): scheduler learning rate
            -pretrained (bool)
            -
        """        
        super(AttentionGMMNeuralNet, self).create( 
            arch, 
            num_output_channels,
            num_input_channels,        
            loss,
            lr,
            optimizer,
            lrsch,
            momentum,
            weight_decay,
            pretrained,
            size_input,
            num_classes,          
        )

        self.logger_train = Logger('Train',
                                   ['loss', 'loss_gmm', 'loss_bce', 'loss_att'],
                                   ['topk', 'gmm'], self.plotter)
        self.logger_val = Logger('Val  ',
                                 ['loss', 'loss_gmm', 'loss_bce', 'loss_att'],
                                 ['topk', 'gmm'], self.plotter)
Example #5
    def create(
            self,
            arch,
            num_output_channels,
            num_input_channels,
            loss,
            lr,
            momentum,
            optimizer,
            lrsch,
            pretrained=False,
            topk=(1, ),
    ):
        """
        Create
        Args:
            @arch (string): architecture
            @num_output_channels, 
            @num_input_channels,  
            @loss (string):
            @lr (float): learning rate
            @momentum,
            @optimizer (string) : 
            @lrsch (string): scheduler learning rate
            @pretrained (bool)
        """
        super(NeuralNetClassifier,
              self).create(arch, num_output_channels, num_input_channels, loss,
                           lr, momentum, optimizer, lrsch, pretrained)
        self.accuracy = nloss.Accuracy(topk)
        self.cnf = nloss.ConfusionMeter(self.num_output_channels,
                                        normalized=True)
        self.visheatmap = gph.HeatMapVisdom(env_name=self.nameproject)

        # Set the graphic visualization
        self.metrics_name = ['top{}'.format(k) for k in topk]
        self.logger_train = Logger('Trn', ['loss'], self.metrics_name,
                                   self.plotter)
        self.logger_val = Logger('Val', ['loss'], self.metrics_name,
                                 self.plotter)
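nloss.ConfusionMeter(num_classes, normalized=True) is assumed to accumulate a confusion matrix whose rows are normalized per true class; a compact sketch of that bookkeeping (illustrative only, not the library's API):

import torch

def confusion_matrix(pred_labels, true_labels, num_classes, normalized=True):
    """Rows are true classes, columns are predicted classes."""
    cm = torch.zeros(num_classes, num_classes)
    for t, p in zip(true_labels.tolist(), pred_labels.tolist()):
        cm[t, p] += 1
    if normalized:
        # Normalize each row so it sums to 1; clamp avoids division by zero.
        cm = cm / cm.sum(dim=1, keepdim=True).clamp(min=1)
    return cm

# Example with 3 classes.
preds = torch.tensor([0, 2, 1, 1])
truth = torch.tensor([0, 2, 2, 1])
print(confusion_matrix(preds, truth, num_classes=3))
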
Example #6
    def create(
        self,
        arch,
        num_output_channels,
        num_input_channels,
        loss,
        lr,
        momentum,
        optimizer,
        lrsch,
        pretrained=False,
        size_input=388,
    ):
        """
        Create            
            -arch (string): architecture
            -loss (string):
            -lr (float): learning rate
            -optimizer (string) : 
            -lrsch (string): scheduler learning rate
            -pretrained (bool)
        """
        super(SegmentationNeuralNet,
              self).create(arch, num_output_channels, num_input_channels, loss,
                           lr, momentum, optimizer, lrsch, pretrained)
        self.size_input = size_input

        self.accuracy = nloss.Accuracy()
        self.dice = nloss.Dice()

        # Set the graphic visualization
        self.logger_train = Logger('Train', ['loss'], ['accs', 'dices'],
                                   self.plotter)
        self.logger_val = Logger('Val  ', ['loss'], ['accs', 'dices'],
                                 self.plotter)

        self.visheatmap = gph.HeatMapVisdom(env_name=self.nameproject,
                                            heatsize=(100, 100))
        self.visimshow = gph.ImageVisdom(env_name=self.nameproject,
                                         imsize=(100, 100))
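nloss.Dice() presumably measures overlap between predicted and ground-truth segmentation masks; a short sketch of a per-channel soft Dice coefficient (the exact reduction used by the library is an assumption):

import torch

def dice_coefficient(prob, target, eps=1e-6):
    """Soft Dice per channel: 2*|A∩B| / (|A|+|B|), averaged over the batch.
    prob and target are expected to have shape (batch, channels, H, W)."""
    intersection = (prob * target).sum(dim=(2, 3))
    denom = prob.sum(dim=(2, 3)) + target.sum(dim=(2, 3))
    return ((2 * intersection + eps) / (denom + eps)).mean(dim=0)
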
Example #7
    def create(self,
               arch,
               num_output_channels,
               num_input_channels,
               loss,
               lr,
               optimizer,
               lrsch,
               momentum=0.9,
               weight_decay=5e-4,
               pretrained=False,
               size_input=388,
               cascade_type='none'):
        """
        Create
        Args:
            -arch (string): architecture
            -num_output_channels, 
            -num_input_channels, 
            -loss (string):
            -lr (float): learning rate
            -optimizer (string) : 
            -lrsch (string): scheduler learning rate
            -pretrained (bool)
            
        """

        cfg_opt = {'momentum': momentum, 'weight_decay': weight_decay}
        cfg_scheduler = {'step_size': 100, 'gamma': 0.1}

        super(SegmentationNeuralNet, self).create(arch,
                                                  num_output_channels,
                                                  num_input_channels,
                                                  loss,
                                                  lr,
                                                  optimizer,
                                                  lrsch,
                                                  pretrained,
                                                  cfg_opt=cfg_opt,
                                                  cfg_scheduler=cfg_scheduler)
        self.size_input = size_input
        self.num_output_channels = num_output_channels
        self.cascade_type = cascade_type
        self.segs_per_forward = 7

        if self.cascade_type == 'none':
            self.step = self.default_step
        elif self.cascade_type == 'ransac':
            self.step = self.ransac_step
        elif self.cascade_type == 'ransac2':
            self.step = self.ransac_step2
        elif self.cascade_type == 'simple':
            self.step = self.cascate_step
        else:
            # Raising a bare string is invalid in Python 3; raise an exception instead.
            raise ValueError('Cascade type not found: {}'.format(cascade_type))

        self.accuracy = nloss.Accuracy()
        if num_output_channels == 2:
            dice_dim = (1, )
        elif num_output_channels == 4:
            dice_dim = (1, 2, 3)
        else:
            # dice_dim would otherwise be undefined for other channel counts.
            raise ValueError('Unsupported num_output_channels: {}'.format(num_output_channels))

        self.dice = nloss.Dice(dice_dim)

        # Set the graphic visualization
        self.logger_train = Logger('Train', ['loss'], ['accs', 'dices'],
                                   self.plotter)
        self.logger_val = Logger('Val  ', ['loss'], ['accs', 'dices', 'PQ'],
                                 self.plotter)

        self.visheatmap = gph.HeatMapVisdom(env_name=self.nameproject,
                                            heatsize=(256, 256))
        self.visimshow = gph.ImageVisdom(env_name=self.nameproject,
                                         imsize=(256, 256))
        if self.half_precision:
            self.scaler = torch.cuda.amp.GradScaler()
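When half_precision is set, the snippet creates a torch.cuda.amp.GradScaler; a hedged sketch of how such a scaler is conventionally used inside a training step (the model, criterion, and optimizer below are placeholders, not part of this class):

import torch

scaler = torch.cuda.amp.GradScaler()

def train_step(model, criterion, optimizer, images, targets):
    optimizer.zero_grad()
    # The forward pass runs in mixed precision; the loss is scaled before
    # backward so small fp16 gradients do not underflow.
    with torch.cuda.amp.autocast():
        outputs = model(images)
        loss = criterion(outputs, targets)
    scaler.scale(loss).backward()
    scaler.step(optimizer)   # unscales gradients, then calls optimizer.step()
    scaler.update()
    return loss.item()
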