예제 #1
0
def test_convert_params():
    """Test conversion of params to a dict.

    Covers a plain dict (unchanged), ``None`` (becomes an empty dict), and an
    ``argparse.Namespace`` (becomes a dict of its attributes).
    """
    # A regular dict must pass through unchanged.
    # Use `is` for exact-type checks: `type(x) == T` is the E721 anti-pattern.
    params = {"foo": "bar", 1: 23}
    assert type(params) is dict
    params = _convert_params(params)
    assert type(params) is dict
    assert params["foo"] == "bar"
    assert params[1] == 23

    # ``None`` must be converted to an empty dict.
    params = None
    assert type(params) is not dict
    params = _convert_params(params)
    assert type(params) is dict
    assert params == {}

    # An ``argparse.Namespace`` built by the Trainer parser must be converted
    # to a dict keyed by argument name.
    opt = "--max_epochs 1".split(" ")
    parser = ArgumentParser()
    parser = Trainer.add_argparse_args(parent_parser=parser)
    params = parser.parse_args(opt)

    assert type(params) is Namespace
    params = _convert_params(params)
    assert type(params) is dict
    assert params["gpus"] is None
예제 #2
0
 def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
     """Record the given hyperparameters on the underlying experiment."""
     # TODO: HACK figure out where this is being set to true
     self.experiment.debug = self.debug
     flattened = _flatten_dict(_convert_params(params))
     self.experiment.argparse(Namespace(**flattened))
예제 #3
0
def test_sanitize_callable_params():
    """Callable values are not serializable.

    Each callable is therefore given a chance to return something; if the
    returned type is not accepted, its name (or ``None``) is used instead.
    """
    cli_args = "--max_epochs 1".split(" ")
    parser = Trainer.add_argparse_args(parent_parser=ArgumentParser())
    params = parser.parse_args(cli_args)

    def return_something():
        return "something"

    def wrapper_something():
        return return_something

    # Attach callables of each flavour: one returning a string, one returning
    # another callable, and an anonymous lambda returning a lambda.
    params.something = return_something
    params.wrapper_something = wrapper_something
    params.wrapper_something_wo_name = lambda: lambda: "1"

    params = _sanitize_callable_params(_flatten_dict(_convert_params(params)))
    assert params["gpus"] == "None"
    assert params["something"] == "something"
    assert params["wrapper_something"] == "wrapper_something"
    assert params["wrapper_something_wo_name"] == "<lambda>"
예제 #4
0
    def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
        """Log hyperparameters to MLflow, skipping oversized values.

        Args:
            params: hyperparameters as a dict or an ``argparse.Namespace``.
        """
        flattened = _flatten_dict(_convert_params(params))
        for k, v in flattened.items():
            # MLflow rejects parameter values longer than 250 characters, so
            # warn about such entries and skip them instead of failing.
            if len(str(v)) > 250:
                rank_zero_warn(
                    f"Mlflow only allows parameters with up to 250 characters. Discard {k}={v}",
                    category=RuntimeWarning)
            else:
                self.experiment.log_param(self.run_id, k, v)
예제 #5
0
    def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:  # skipcq: PYL-W0221
        r"""
        Log hyper-parameters to the run.

        Hyperparams will be logged under the "<prefix>/hyperparams" namespace.

        Note:

            You can also log parameters by directly using the logger instance:
            ``neptune_logger.experiment["model/hyper-parameters"] = params_dict``.

            In this way you can keep hierarchical structure of the parameters.

        Args:
            params: `dict`.
                Python dictionary structure with parameters.

        Example::

            from pytorch_lightning.loggers import NeptuneLogger

            PARAMS = {
                "batch_size": 64,
                "lr": 0.07,
                "decay_factor": 0.97
            }

            neptune_logger = NeptuneLogger(
                api_key="ANONYMOUS",
                project="common/pytorch-lightning-integration"
            )

            neptune_logger.log_hyperparams(PARAMS)
        """
        sanitized = _sanitize_callable_params(_convert_params(params))
        # Store the params under the (prefixed) parameters namespace of the run.
        target_key = self._construct_path_with_prefix(self.PARAMETERS_KEY)
        self.run[target_key] = sanitized
예제 #6
0
    def log_hyperparams(self,
                        params: Union[Dict[str, Any], Namespace],
                        metrics: Optional[Dict[str, Any]] = None) -> None:
        """Record hyperparameters. TensorBoard logs with and without saved hyperparameters are incompatible, the
        hyperparameters are then not displayed in the TensorBoard. Please delete or move the previously saved logs
        to display the new ones with hyperparameters.

        Args:
            params: a dictionary-like container with the hyperparameters
            metrics: Dictionary with metric names as keys and measured quantities as values
        """
        params = _convert_params(params)

        # Keep a copy of the params on the logger itself.
        if _OMEGACONF_AVAILABLE and isinstance(params, Container):
            self.hparams = OmegaConf.merge(self.hparams, params)
        else:
            self.hparams.update(params)

        # Flatten and sanitize into a form TensorBoard accepts.
        sanitized = self._sanitize_params(_flatten_dict(params))

        # Default metric when none is given; wrap scalars into a dict.
        if metrics is None and self._default_hp_metric:
            metrics = {"hp_metric": -1}
        elif metrics is not None and not isinstance(metrics, dict):
            metrics = {"hp_metric": metrics}

        if metrics:
            self.log_metrics(metrics, 0)
            exp, ssi, sei = hparams(sanitized, metrics)
            writer = self.experiment._get_file_writer()
            for summary in (exp, ssi, sei):
                writer.add_summary(summary)
예제 #7
0
 def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
     """Convert the given hyperparameters to a dict and forward them to the experiment."""
     self.experiment.log_hparams(_convert_params(params))
예제 #8
0
 def log_hyperparams(self, params):
     """Sanitize the given hyperparameters and remember them on the logger."""
     self.logged_params = _sanitize_params(_convert_params(params))
예제 #9
0
 def log_hyperparams(self, params: Union[Dict[str, Any], Namespace]) -> None:
     """Log hyperparameters into the wandb run config, allowing value changes."""
     cleaned = _sanitize_callable_params(_flatten_dict(_convert_params(params)))
     self.experiment.config.update(cleaned, allow_val_change=True)