Example #1
    def build_final_activation(self, model_config=None):
        model_config = self.get(
            'model') if model_config is None else model_config
        # The model config has a single top-level key: the model class name,
        # mapping to that model's keyword arguments.
        model_class = list(model_config.keys())[0]
        final_layer = None
        # 'final_activation' may be False (no final activation at all), True
        # (only the trailing Conv3D added below) or a {layer_name: kwargs} mapping.
        final_activation_config = model_config[model_class].pop(
            'final_activation', True)
        if not final_activation_config:
            return
        elif final_activation_config is True:
            final_activation_config = {}
        for layer_name, kwargs in final_activation_config.items():
            layer_class = locate(layer_name, [
                'embeddingutils.models.submodules',
                'inferno.extensions.layers.convolutional'
            ])
            final_layer = layer_class(**kwargs)

        # Always start with a channel-preserving 3x3x3 convolution; if a layer
        # was configured above, append it after that convolution.
        model_config[model_class]['final_activation'] = \
            Conv3D(in_channels=model_config[model_class]['out_channels'],
                   out_channels=model_config[model_class]['out_channels'],
                   kernel_size=3)

        if final_layer is not None:
            model_config[model_class]['final_activation'] = nn.Sequential(
                model_config[model_class]['final_activation'], final_layer)
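
A minimal usage sketch, assuming a dict-shaped config; the model class name 'UNet3D', the layer name 'Sigmoid' and all values below are illustrative assumptions, not taken from the original project:

    # Hypothetical 'model' section, expressed as the dict the method mutates:
    model_config = {
        'UNet3D': {                       # single top-level key: the model class name
            'out_channels': 16,
            # {layer_name: kwargs}; the layer is resolved via locate() from the
            # listed modules and appended after the channel-preserving Conv3D
            'final_activation': {'Sigmoid': {}},
        },
    }
    # After build_final_activation(model_config), the entry becomes roughly
    # nn.Sequential(Conv3D(16, 16, kernel_size=3), Sigmoid()).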
Example #2
    def build_final_activation(self, model_config=None):
        model_config = self.get(
            'model') if model_config is None else model_config
        model_class = list(model_config.keys())[0]

        # In this variant, 'final_activation' is simply the name of a torch.nn module.
        final_activation = model_config[model_class].pop(
            'final_activation', None)
        if final_activation is None:
            return
        final_activation = locate(final_activation, ['torch.nn'])
        model_config[model_class]['final_activation'] = \
            final_activation()
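
Here the config value is expected to be a plain string naming a torch.nn module, which is instantiated with no arguments. A hedged sketch (the model class name is an assumption):

    model_config = {
        'UNet3D': {
            # resolved from torch.nn and constructed as nn.Sigmoid()
            'final_activation': 'Sigmoid',
        },
    }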
Example #3
    def inferno_build_metric(self):
        metric_config = self.get('trainer/metric')
        frequency = metric_config.pop('evaluate_every', (25, 'iterations'))

        self.trainer.evaluate_metric_every(frequency)
        if metric_config:
            # Expect exactly one remaining entry: metric class name -> kwargs.
            assert len(metric_config) == 1
            for class_name, kwargs in metric_config.items():
                cls = locate(class_name)
                kwargs['offsets'] = self.get('global/offsets')
                print(f'Building metric of class "{cls.__name__}"')
                metric = cls(**kwargs)
                self.trainer.build_metric(metric)
        # Restore the frequency that was popped above so the config stays intact.
        self.set('trainer/metric/evaluate_every', frequency)
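
A sketch of the 'trainer/metric' section this method could read; the metric class path, its kwargs and the offsets below are illustrative assumptions only:

    # Hypothetical contents of self.get('trainer/metric'):
    metric_config = {
        'evaluate_every': (100, 'iterations'),   # popped first, written back via self.set()
        # exactly one metric entry: class path -> constructor kwargs
        'mypackage.metrics.ArandErrorFromEmbedding': {'average': True},
    }
    # self.get('global/offsets') is injected into those kwargs as 'offsets'
    # before the metric class is instantiated.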
Example #4
    def parse_and_wrap_losses(self, config, transforms, losses, weights,
                              loss_names):
        # Top-level 'weight' acts as the default weight for every loss below.
        default_weight = config.pop('weight', 1)
        for class_name, kwargs in config.items():
            loss_names.append(kwargs.pop('name', class_name))
            weights.append(kwargs.pop('weight', default_weight))
            print(f'Adding {loss_names[-1]} with weight {weights[-1]}')
            loss_class = locate(class_name, [
                'embeddingutils.loss', 'SegTags.loss',
                'inferno.extensions.criteria.set_similarity_measures',
                'torch.nn'
            ])
            # WeightedLoss subclasses need a handle on the trainer.
            if issubclass(loss_class, WeightedLoss):
                kwargs['trainer'] = self.trainer
            losses.append(
                LossWrapper(criterion=loss_class(**kwargs),
                            transforms=transforms))
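
A hedged sketch of a loss section this parser could walk; the class names, kwargs and weights are illustrative only, assuming SorensenDiceLoss resolves from the listed inferno module and MSELoss from torch.nn:

    # Top-level 'weight' is the default; each remaining key is a class name
    # resolved via locate(), with 'name' and 'weight' popped before construction.
    loss_config = {
        'weight': 1.0,
        'SorensenDiceLoss': {'name': 'dice', 'weight': 0.5, 'channelwise': True},
        'MSELoss': {'name': 'mse'},      # falls back to the default weight of 1.0
    }
    losses, weights, loss_names = [], [], []
    # Called as a method, e.g.:
    # self.parse_and_wrap_losses(loss_config, transforms=None, losses=losses,
    #                            weights=weights, loss_names=loss_names)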
Example #5
    def inferno_build_metric(self):
        metric_config = self.get('trainer/metric')
        frequency = metric_config.pop('evaluate_every', (25, 'iterations'))
        self.trainer.evaluate_metric_every(frequency)
        if metric_config:
            # Expect exactly one remaining entry: metric class name -> kwargs.
            assert len(metric_config) == 1
            for class_name, kwargs in metric_config.items():
                cls = locate(class_name)
                if issubclass(cls, SFRPMetric):
                    kwargs['trainer'] = self.trainer
                print(f'Building metric of class "{cls.__name__}"')
                metric = cls(**kwargs)
                if hasattr(self, 'metric_pre'):
                    # Run predictions and targets through the preprocessing hook
                    # before handing them to the metric.
                    pre = self.metric_pre()
                    self.trainer.build_metric(lambda prediction, target:
                                              metric(*pre(prediction, target)))
                else:
                    self.trainer.build_metric(metric)
        # Restore the frequency that was popped above so the config stays intact.
        self.set('trainer/metric/evaluate_every', frequency)
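
The metric_pre hook is optional; when it exists, the metric is wrapped so that predictions and targets pass through the preprocessor first. A minimal sketch of such a hook, assuming it lives on the same mixin (the channel slicing is purely illustrative):

    def metric_pre(self):
        def pre(prediction, target):
            # e.g. drop auxiliary prediction channels before the metric sees them
            return prediction[:, :1], target
        return pre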