Code example #1
    def __init__(self, cfg: Config) -> None:
        super().__init__()  # type: ignore

        self.logger: Union[LoggerCollection, WandbLogger, Any]
        self.wandb: Run

        self.cfg = cfg

        self.model: LightningModule = instantiate(self.cfg.experiment.model,
                                                  self.cfg)

        self.criterion = MSELoss()

        # Metrics
        self.train_mse = MeanSquaredError()
        self.train_mae = MeanAbsoluteError()
        self.val_mse = MeanSquaredError()
        self.val_mae = MeanAbsoluteError()
        self.test_mse = MeanSquaredError()
        self.test_mae = MeanAbsoluteError()
        self.test_results = []
        # Work out which feature column corresponds to the target parameter
        train_params = self.cfg.experiment.synop_train_features
        target_param = self.cfg.experiment.target_parameter
        all_params = add_param_to_train_params(train_params, target_param)
        feature_names = list(list(zip(*all_params))[1])
        self.target_param_index = feature_names.index(target_param)
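
The index lookup above assumes each entry of synop_train_features is a (key, name) pair with the human-readable name in the second position; a toy illustration of that assumption (the keys and names below are made up):

all_params = [('t2m', 'temperature'), ('rh', 'humidity'), ('ws', 'wind_speed')]
feature_names = list(list(zip(*all_params))[1])        # ['temperature', 'humidity', 'wind_speed']
target_param_index = feature_names.index('humidity')   # -> 1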
Code example #2
    def __init__(self, hparams):
        super().__init__()
        self.save_hyperparameters()

        # Hyperparameters
        self.hparams = hparams
        self.lr = hparams.lr

        # Networks
        self.mask_refine_net = MaskRefineNet(hparams.mask_refine_weights)
        self.mask_refine_net.freeze()
        self.edge_complete_net = EdgeCompleteNet(hparams.edge_complete_weights)
        self.edge_complete_net.freeze()
        self.net = load_network(hparams)

        # Losses
        self.l1_loss = nn.L1Loss()
        self.gradient_loss = GradientLoss(type=hparams.gradient_loss_type)
        self.perceptual_loss = PerceptualLoss()

        # Metrics
        self.train_psnr = PSNR()
        self.val_metrics = MetricCollection([PSNR(), MeanAbsoluteError()])
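
For context, a minimal training step for a module like this could combine the losses declared above and update the PSNR metric. This is only a sketch: the batch layout (image, mask, target), the loss weights, and the call signatures of GradientLoss and PerceptualLoss are assumptions, not part of the original code.

    def training_step(self, batch, batch_idx):
        # Hypothetical batch layout: corrupted image, text mask, clean target
        image, mask, target = batch
        output = self.net(image, mask)

        # Illustrative weighted sum of the losses declared in __init__
        loss = (self.l1_loss(output, target)
                + 0.5 * self.gradient_loss(output, target)
                + 0.1 * self.perceptual_loss(output, target))

        self.train_psnr(output, target)
        self.log('train_loss', loss)
        return loss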
Code example #3
def test_v1_5_metric_regress():
    """Deprecated regression and image metrics still compute correct values but warn that they will be removed in v1.5.0."""
    ExplainedVariance.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        ExplainedVariance()

    MeanAbsoluteError.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        MeanAbsoluteError()

    MeanSquaredError.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        MeanSquaredError()

    MeanSquaredLogError.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        MeanSquaredLogError()

    target = torch.tensor([3, -0.5, 2, 7])
    preds = torch.tensor([2.5, 0.0, 2, 8])
    explained_variance._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        res = explained_variance(preds, target)
    assert torch.allclose(res, torch.tensor(0.9572), atol=1e-4)

    x = torch.tensor([0., 1, 2, 3])
    y = torch.tensor([0., 1, 2, 2])
    mean_absolute_error._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        assert mean_absolute_error(x, y) == 0.25

    mean_relative_error._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        assert mean_relative_error(x, y) == 0.125

    mean_squared_error._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        assert mean_squared_error(x, y) == 0.25

    mean_squared_log_error._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        res = mean_squared_log_error(x, y)
    assert torch.allclose(res, torch.tensor(0.0207), atol=1e-4)

    PSNR.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        PSNR()

    R2Score.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        R2Score()

    SSIM.__init__._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        SSIM()

    preds = torch.tensor([[0.0, 1.0], [2.0, 3.0]])
    target = torch.tensor([[3.0, 2.0], [1.0, 0.0]])
    psnr._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        res = psnr(preds, target)
    assert torch.allclose(res, torch.tensor(2.5527), atol=1e-4)

    target = torch.tensor([3, -0.5, 2, 7])
    preds = torch.tensor([2.5, 0.0, 2, 8])
    r2score._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        res = r2score(preds, target)
    assert torch.allclose(res, torch.tensor(0.9486), atol=1e-4)

    preds = torch.rand([16, 1, 16, 16])
    target = preds * 0.75
    ssim._warned = False
    with pytest.deprecated_call(match='It will be removed in v1.5.0'):
        res = ssim(preds, target)
    assert torch.allclose(res, torch.tensor(0.9219), atol=1e-4)
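
These deprecated classes and functionals moved to the torchmetrics package; a minimal sketch of the replacement API (assuming torchmetrics is installed) reproduces the same MAE/MSE values without the deprecation warning:

import torch
from torchmetrics import MeanAbsoluteError, MeanSquaredError

x = torch.tensor([0., 1, 2, 3])
y = torch.tensor([0., 1, 2, 2])

assert MeanAbsoluteError()(x, y) == 0.25  # same value as the deprecated functional above
assert MeanSquaredError()(x, y) == 0.25   # same value as the deprecated functional above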
Code example #4
class BaseS2SRegressor(pl.LightningModule):
    def __init__(self, cfg: Config) -> None:
        super().__init__()  # type: ignore

        self.logger: Union[LoggerCollection, WandbLogger, Any]
        self.wandb: Run

        self.cfg = cfg

        self.model: LightningModule = instantiate(self.cfg.experiment.model,
                                                  self.cfg)

        self.criterion = MSELoss()

        # Metrics
        self.train_mse = MeanSquaredError()
        self.train_mae = MeanAbsoluteError()
        self.val_mse = MeanSquaredError()
        self.val_mae = MeanAbsoluteError()
        self.test_mse = MeanSquaredError()
        self.test_mae = MeanAbsoluteError()
        self.test_results = []
        # Work out which feature column corresponds to the target parameter
        train_params = self.cfg.experiment.synop_train_features
        target_param = self.cfg.experiment.target_parameter
        all_params = add_param_to_train_params(train_params, target_param)
        feature_names = list(list(zip(*all_params))[1])
        self.target_param_index = feature_names.index(target_param)

    # -----------------------------------------------------------------------------------------------
    # Default PyTorch Lightning hooks
    # -----------------------------------------------------------------------------------------------
    def on_fit_start(self) -> None:
        """
        Hook before `trainer.fit()`.

        Attaches current wandb run to `self.wandb`.
        """
        if isinstance(self.logger, LoggerCollection):
            for logger in self.logger:  # type: ignore
                if isinstance(logger, WandbLogger):
                    self.wandb = logger.experiment  # type: ignore
        elif isinstance(self.logger, WandbLogger):
            self.wandb = self.logger.experiment  # type: ignore

    def on_save_checkpoint(self, checkpoint: Dict[str, Any]) -> None:
        """
        Hook on checkpoint saving.

        Adds the experiment config to the checkpoint file.
        """
        checkpoint['cfg'] = self.cfg

    # ----------------------------------------------------------------------------------------------
    # Optimizers
    # ----------------------------------------------------------------------------------------------
    def configure_optimizers(
        self
    ) -> Union[Optimizer, Tuple[List[Optimizer],
                                List[_LRScheduler]]]:  # type: ignore
        """
        Define system optimization procedure.

        See https://pytorch-lightning.readthedocs.io/en/latest/common/lightning_module.html#configure-optimizers.

        Returns
        -------
        Union[Optimizer, Tuple[List[Optimizer], List[_LRScheduler]]]
            Single optimizer or a combination of optimizers with learning rate schedulers.
        """
        optimizer: Optimizer = instantiate(self.cfg.optim.optimizer,
                                           params=self.parameters(),
                                           _convert_='all')

        if self.cfg.optim.scheduler is not None:
            # if self.cfg.optim.scheduler._target_ == "torch.optim.lr_scheduler.LambdaLR":
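            # Presumably lambda_lr yields a per-epoch multiplicative LR factor:
            # warm up from starting_lr to base_lr over warmup_epochs, then decay
            # towards final_lr over decay_epochs (inferred from the argument names).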
            lambda_lr = instantiate(self.cfg.optim.lambda_lr,
                                    warmup_epochs=self.cfg.optim.warmup_epochs,
                                    decay_epochs=self.cfg.optim.decay_epochs,
                                    starting_lr=self.cfg.optim.starting_lr,
                                    base_lr=self.cfg.optim.optimizer.lr,
                                    final_lr=self.cfg.optim.final_lr)

            scheduler: _LRScheduler = instantiate(  # type: ignore
                self.cfg.optim.scheduler,
                optimizer=optimizer,
                lr_lambda=lambda epoch: lambda_lr.transformer_lr_scheduler(
                    epoch),
                _convert_='all',
                verbose=True)

            print(optimizer, scheduler)
            return [optimizer], [scheduler]
        else:
            print(optimizer)
            return optimizer

    def _reduce(self, outputs: List[Any], key: str):
        return torch.stack([out[key] for out in outputs]).mean().detach()

    # ----------------------------------------------------------------------------------------------
    # Loss
    # ----------------------------------------------------------------------------------------------
    def calculate_loss(self, outputs: torch.Tensor,
                       targets: torch.Tensor) -> torch.Tensor:
        """
        Compute loss value of a batch.

        In this simple case just forwards computation to default `self.criterion`.

        Parameters
        ----------
        outputs : torch.Tensor
            Network outputs (predicted values).
        targets : torch.Tensor
            Ground-truth target values.

        Returns
        -------
        torch.Tensor
            Loss value.
        """
        return self.criterion(outputs, targets)

    def training_epoch_end(self, outputs: List[Any]) -> None:
        """
        Log training metrics.

        Parameters
        ----------
        outputs : list[Any]
            List of dictionaries returned by `self.training_step` with batch metrics.
        """
        step = self.current_epoch + 1

        metrics = {
            'epoch': float(step),
            'train_rmse': math.sqrt(float(self.train_mse.compute().item())),
            'train_mae': float(self.train_mae.compute().item())
        }

        self.train_mse.reset()
        self.train_mae.reset()

        # Average additional metrics over all batches
        for key in outputs[0]:
            metrics[key] = float(self._reduce(outputs, key).item())

        self.logger.log_metrics(metrics, step=step)

    def validation_epoch_end(self, outputs: List[Any]) -> None:
        """
        Log validation metrics.

        Parameters
        ----------
        outputs : list[Any]
            List of dictionaries returned by `self.validation_step` with batch metrics.
        """
        step = self.current_epoch + 1 if not self.trainer.running_sanity_check else self.current_epoch  # type: ignore

        metrics = {
            'epoch': float(step),
            'val_rmse': math.sqrt(float(self.val_mse.compute().item())),
            'val_mae': float(self.val_mae.compute().item())
        }

        self.val_mse.reset()
        self.val_mae.reset()

        # Average additional metrics over all batches
        for key in outputs[0]:
            metrics[key] = float(self._reduce(outputs, key).item())

        self.logger.log_metrics(metrics, step=step)
        self.log("ptl/val_loss", metrics['val_rmse'])

    def test_epoch_end(self, outputs: List[Any]) -> None:
        """
        Log test metrics.

        Parameters
        ----------
        outputs : list[Any]
            List of dictionaries returned by `self.test_step` with batch metrics.
        """
        step = self.current_epoch + 1 if not self.trainer.running_sanity_check else self.current_epoch  # type: ignore

        metrics = {
            'epoch': float(step),
            'test_rmse': math.sqrt(float(self.test_mse.compute().item())),
            'test_mae': float(self.test_mae.compute().item())
        }

        self.test_mse.reset()
        self.test_mae.reset()

        self.logger.log_metrics(metrics, step=step)

        # save results to view
        labels = [
            item for sublist in [x['labels'] for x in outputs]
            for item in sublist
        ]

        inputs_dates = [
            item for sublist in [x['inputs_dates'] for x in outputs]
            for item in sublist
        ]

        labels_dates = [
            item for sublist in [x['targets_dates'] for x in outputs]
            for item in sublist
        ]

        out = [
            item for sublist in [x['output'] for x in outputs]
            for item in sublist
        ]

        inputs = [
            item for sublist in [x['input'] for x in outputs]
            for item in sublist
        ]

        self.test_results = {
            'labels': copy.deepcopy(labels),
            'output': copy.deepcopy(out),
            'inputs': copy.deepcopy(inputs),
            'inputs_dates': copy.deepcopy(inputs_dates),
            'targets_dates': copy.deepcopy(labels_dates)
        }
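
For completeness, a concrete regressor built on this base class would typically be driven as follows. Everything here is illustrative: the subclass name, the config fields, and the wandb project name are assumptions, and the subclass is expected to provide training_step / validation_step / test_step and its dataloaders.

import pytorch_lightning as pl
from pytorch_lightning.loggers import WandbLogger

def run(cfg: Config) -> None:
    model = MyS2SRegressor(cfg)  # hypothetical subclass of BaseS2SRegressor
    trainer = pl.Trainer(max_epochs=cfg.optim.max_epochs,  # assumed config field
                         logger=WandbLogger(project='s2s-regression'))
    trainer.fit(model)
    trainer.test(model)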