Example #1
    def __call__(self, manager=None):
        """Executes the evaluator extension.

        Unlike usual extensions, this extension can be executed without passing
        a manager object. This extension reports the performance on validation
        dataset using the :func:`~reporting.report` function.
        Thus, users can use this extension independently from any manager
        by manually configuring a :class:`~reporting.Reporter` object.

        Args:
            manager (~pytorch_pfn_extras.training.ExtensionsManager): Manager
                object that invokes this extension. It can be omitted
                in case of calling this extension manually.

        Returns:
            dict: Result dictionary that contains mean statistics of values
            reported by the evaluation function.

        """
        # set up a reporter
        reporter = reporting.Reporter()
        if self.name is not None:
            prefix = self.name + '/'
        else:
            prefix = ''
        for name, target in self._targets.items():
            reporter.add_observer(prefix + name, target)
            reporter.add_observers(prefix + name, target.named_modules())

        with reporter:
            with torch.no_grad():
                result = self.evaluate()

        reporting.report(result)
        return result
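
As the docstring notes, this extension builds its own Reporter, so it can be invoked without a manager. A minimal sketch of such a standalone call, assuming the public pytorch_pfn_extras Evaluator extension and an iterable that yields (input, target) batches (all names below are illustrative):

import torch
import pytorch_pfn_extras as ppe
from pytorch_pfn_extras.training import extensions

model = torch.nn.Linear(10, 2)
val_batches = [(torch.randn(4, 10), torch.randint(0, 2, (4,)))
               for _ in range(8)]

def eval_fn(x, t):
    # Values reported here are averaged into the returned dictionary.
    loss = torch.nn.functional.cross_entropy(model(x), t)
    ppe.reporting.report({'val/loss': loss.item()})

evaluator = extensions.Evaluator(val_batches, model, eval_func=eval_fn)
result = evaluator()  # no manager needed; runs under torch.no_grad()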
Example #2
    def __init__(
            self,
            handler: 'BaseHandler',
            models: Union[torch.nn.Module, Dict[str, torch.nn.Module]],
            *,
            progress_bar: bool = False,
            metrics: Optional[List['MetricType']] = None,
    ):
        super().__init__()

        if not isinstance(models, dict):
            if not isinstance(models, torch.nn.Module):
                raise ValueError(
                    'model must be an instance of dict or torch.nn.Module')
            self.models = {'main': models}
        else:
            self.models = models

        self.handler = handler
        self._progress_bar = progress_bar
        self._reporter = reporting.Reporter()
        self._metrics = [] if metrics is None else metrics
        for name, model in self.models.items():
            self._reporter.add_observer(name, model)
            self._reporter.add_observers(
                name, model.named_modules())
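
Both this constructor and the one above accept either a bare torch.nn.Module or a name-to-module dict, registering a bare module under the key 'main'. A standalone sketch of that normalization pattern (the helper name is hypothetical):

import torch
from typing import Dict, Union

def normalize_models(
        models: Union[torch.nn.Module, Dict[str, torch.nn.Module]],
) -> Dict[str, torch.nn.Module]:
    # Mirrors the checks above: dicts pass through, a bare module is
    # wrapped, and anything else is rejected.
    if isinstance(models, dict):
        return models
    if not isinstance(models, torch.nn.Module):
        raise ValueError('model must be an instance of dict or torch.nn.Module')
    return {'main': models}

assert 'main' in normalize_models(torch.nn.Linear(2, 2))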
Example #3
    def __init__(
            self,
            models: Union[torch.nn.Module, Dict[str, torch.nn.Module]],
            optimizers: Union[torch.optim.Optimizer, Dict[str, torch.optim.Optimizer]],
            max_epochs: int,
            extensions: Optional[List['extension_module.ExtensionLike']],
            out_dir: str,
            writer: Optional[writing.Writer],
            stop_trigger: 'trigger_module.TriggerLike' = None
    ) -> None:
        if extensions is None:
            extensions = []
        if stop_trigger is None:
            self._stop_trigger = trigger_module.get_trigger(
                (max_epochs, 'epoch'))
        else:
            self._stop_trigger = trigger_module.get_trigger(
                stop_trigger)
        if writer is None:
            writer = writing.SimpleWriter(out_dir=out_dir)
        # triggers are stateful, so we need to make a copy for internal use
        self._internal_stop_trigger = copy.deepcopy(self._stop_trigger)
        self.observation: Dict[str, reporting.ReportValue] = {}
        self._out = out_dir
        self.writer = writer
        self.reporter = reporting.Reporter()
        self._start_extensions_called = False

        if not isinstance(models, dict):
            if not isinstance(models, torch.nn.Module):
                raise ValueError(
                    'model must be an instance of dict or torch.nn.Module')
            self._models = {'main': models}
        else:
            self._models = models
        if not isinstance(optimizers, dict):
            # TODO(ecastill) Optimizer type is not checked because of tests
            # using mocks and other classes
            self._optimizers = {'main': optimizers}
        else:
            self._optimizers = optimizers

        for name, model in self._models.items():
            self.reporter.add_observer(name, model)
            self.reporter.add_observers(
                name, model.named_modules())
        self.max_epochs = max_epochs
        self._start_iteration = 0
        # Deferred: these are set when the training loop actually starts.
        self._start_time: Optional[float] = None
        self._iters_per_epoch: Optional[int] = None
        self._extensions: Dict[str, _ExtensionEntry] = collections.OrderedDict()
        for ext in extensions:
            self.extend(ext)

        # Initialize the writer
        self.writer.initialize(self.out)
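
A minimal sketch of driving such a manager from a hand-written training loop, assuming the public pytorch_pfn_extras.training.ExtensionsManager API (which additionally requires iters_per_epoch); the data and reported key here are synthetic:

import torch
import pytorch_pfn_extras as ppe
from pytorch_pfn_extras.training import extensions

model = torch.nn.Linear(10, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
loader = [(torch.randn(4, 10), torch.randint(0, 2, (4,)))
          for _ in range(100)]
manager = ppe.training.ExtensionsManager(
    model, optimizer, max_epochs=3,     # bare objects become {'main': ...}
    iters_per_epoch=100,
    extensions=[extensions.LogReport()],
    out_dir='result',
)

while not manager.stop_trigger:
    for x, t in loader:
        with manager.run_iteration():   # runs due extensions around the step
            loss = torch.nn.functional.cross_entropy(model(x), t)
            ppe.reporting.report({'main/loss': loss.item()})
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()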
Example #4
    def __init__(
        self,
        models: Union[torch.nn.Module, Mapping[str, torch.nn.Module]],
        optimizers: Union[torch.optim.Optimizer,
                          Mapping[str, torch.optim.Optimizer]],
        max_epochs: int,
        extensions: Optional[Sequence['extension_module.ExtensionLike']],
        out_dir: str,
        writer: Optional[writing.Writer],
        stop_trigger: 'trigger_module.TriggerLike' = None,
        transform_model: _TransformModel = default_transform_model,
        enable_profile: bool = False,
    ) -> None:
        if extensions is None:
            extensions = []
        if stop_trigger is None:
            self._stop_trigger = trigger_module.get_trigger(
                (max_epochs, 'epoch'))
        else:
            self._stop_trigger = trigger_module.get_trigger(stop_trigger)
        if writer is None:
            writer = writing.SimpleWriter(out_dir=out_dir)
        # triggers are stateful, so we need to make a copy for internal use
        self._internal_stop_trigger = copy.deepcopy(self._stop_trigger)
        self.observation: reporting.Observation = {}
        self._out = out_dir
        self.writer = writer
        self.reporter = reporting.Reporter()
        self._transform_model = transform_model
        self._start_extensions_called = False
        self._run_on_error_called = False

        # Indicates whether models can be accessed from extensions in the
        # current iteration.
        # The default value (True) indicates that it is allowed to access
        # models before starting a training loop.
        self._model_available = True

        if isinstance(models, collections.abc.Mapping):
            self._models = models
        else:
            if not isinstance(models, torch.nn.Module):
                raise ValueError(
                    'model must be an instance of dict or torch.nn.Module')
            self._models = {'main': models}
        if isinstance(optimizers, collections.abc.Mapping):
            self._optimizers = optimizers
        else:
            # TODO(ecastill) Optimizer type is not checked because of tests
            # using mocks and other classes
            self._optimizers = {'main': optimizers}

        for name, model in self._models.items():
            # TODO: we should not initialize extensions at this point,
            # so we cannot use `self.models`.
            model = self._transform_model(name, model)
            self.reporter.add_observer(name, model)
            self.reporter.add_observers(name, model.named_modules())
        self.max_epochs = max_epochs
        self._start_iteration = 0
        # Deferred: these are set when the training loop actually starts.
        self._start_time: Optional[float] = None
        self.__iters_per_epoch: Optional[int] = None
        self._extensions: Dict[
            str, extension_module.ExtensionEntry] = collections.OrderedDict()
        for ext in extensions:
            self.extend(ext)

        self._enable_profile = enable_profile
        # Initialize the writer
        self.writer.initialize(self.out)
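
The transform_model hook lets the reporter (and extensions) observe a transformed view of each model. One plausible use, sketched below, is unwrapping DistributedDataParallel so that reported parameter names match the bare module; this is an illustration, not the library's default_transform_model:

import torch

def unwrap_ddp(name: str, model: torch.nn.Module) -> torch.nn.Module:
    # Expose the underlying module when the model was wrapped for
    # distributed training; otherwise return it unchanged.
    if isinstance(model, torch.nn.parallel.DistributedDataParallel):
        return model.module
    return model

# Passed at construction time: transform_model=unwrap_ddp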