Example #1
    def load(self, model_path):
        """
        Load the Estimator state (model and, if present, optimizer) from the provided model_path.
        The model file should be generated by the save method of this estimator, or by
        ``torch.save(state_dict, model_path)``, where `state_dict` can be obtained by
        the ``state_dict()`` method of a pytorch model.

        :param model_path: path to the saved model.
        :return:
        """

        from zoo.pipeline.api.torch import TorchModel
        from bigdl.optim.optimizer import OptimMethod
        import torch
        import os

        try:
            pytorch_model = self.get_model()
            pytorch_model.load_state_dict(torch.load(model_path))
            self.model = TorchModel.from_pytorch(pytorch_model)
        except Exception:
            raise ValueError(
                "Cannot load the PyTorch model. Please check your model path.")

        optim_path = self._get_optimizer_path(model_path)
        if os.path.isfile(optim_path):
            try:
                self.optimizer = OptimMethod.load(optim_path)
            except Exception:
                raise ValueError(
                    "Cannot load the optimizer. Only `bigdl.optim.optimizer."
                    "OptimMethod` is supported for loading.")
        else:
            self.optimizer = None

        self.estimator = SparkEstimator(self.model, self.optimizer,
                                        self.model_dir)
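
This `load` expects either a file written by the estimator's own `save` or a plain `torch.save(state_dict, path)` dump. A minimal usage sketch, assuming an Analytics Zoo Orca PyTorch estimator created via `Estimator.from_torch` (the model, optimizer, and file name below are illustrative, and the exact `from_torch` arguments may vary between Analytics Zoo versions):

    import torch
    import torch.nn as nn
    from zoo.orca.learn.pytorch import Estimator

    net = nn.Linear(4, 2)                        # illustrative model
    torch.save(net.state_dict(), "linear.pt")    # plain state_dict dump

    # Hypothetical estimator setup; argument names follow the Orca PyTorch
    # Estimator factory but may differ between versions.
    est = Estimator.from_torch(model=net,
                               optimizer=torch.optim.SGD(net.parameters(), lr=0.01),
                               loss=nn.MSELoss(),
                               backend="bigdl")

    # Restores the weights; a matching optimizer file next to linear.pt,
    # if present, is loaded as well, otherwise the optimizer is set to None.
    est.load("linear.pt")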
Example #2
    def load_orca_checkpoint(self, path, version, prefix=None):
        """
        Load an existing checkpoint.

        :param path: Path to the existing checkpoint.
        :param version: checkpoint version, which is the suffix of the model.* file,
               e.g., for the model.4 file, the version is 4.
        :param prefix: optimMethod prefix, for example 'optimMethod-TorchModelf53bddcc'
        :return:
        """
        import os
        from bigdl.nn.layer import Model
        from bigdl.optim.optimizer import OptimMethod
        assert prefix is not None, "You should provide optimMethod prefix, " \
                                   "for example 'optimMethod-TorchModelf53bddcc'"
        try:
            self.model = Model.load(
                os.path.join(path, "model.{}".format(version)))
            optimizer = OptimMethod.load(
                os.path.join(path, "{}.{}".format(prefix, version)))
        except Exception:
            raise ValueError(
                "Cannot load PyTorch checkpoint, please check your checkpoint path "
                "and checkpoint type.")
        self.estimator = SparkEstimator(self.model, optimizer, self.model_dir)
Example #3
    def load_orca_checkpoint(self, path, version, prefix=None):
        """
        Load an existing checkpoint.

        :param path: Path to the existing checkpoint.
        :param version: checkpoint version, which is the suffix of the model.* file,
        e.g., for the model.4 file, the version is 4.
        :param prefix: optimMethod prefix, for example 'optimMethod-Sequentialf53bddcc'
        :return:
        """
        from bigdl.nn.layer import Model, Container
        from bigdl.optim.optimizer import OptimMethod
        import os
        try:
            self.model = Model.load(
                os.path.join(path, "model.{}".format(version)))
            assert isinstance(self.model, Container), \
                "The loaded model should be a Container, please check your checkpoint type."
            self.optimizer = OptimMethod.load(
                os.path.join(path, "{}.{}".format(prefix, version)))
        except Exception:
            raise ValueError(
                "Cannot load BigDL checkpoint, please check your checkpoint path "
                "and checkpoint type.")
        self.estimator = SparkEstimator(self.model, self.optimizer,
                                        self.model_dir)
        self.nn_estimator = NNEstimator(self.model, self.loss,
                                        self.feature_preprocessing,
                                        self.label_preprocessing)
        if self.optimizer is not None:
            self.nn_estimator.setOptimMethod(self.optimizer)
        self.nn_model = NNModel(
            self.model, feature_preprocessing=self.feature_preprocessing)
Example #4
    def load_orca_checkpoint(self, path, version=None, prefix=None):
        """
        Load an existing checkpoint. To load a specific checkpoint, please provide both `version`
        and `prefix`. If `version` is None, then the latest checkpoint will be loaded.

        :param path: Path to the existing checkpoint (or directory containing Orca checkpoint
               files).
        :param version: checkpoint version, which is the suffix of the model.* file, e.g., for
               the model.4 file, the version is 4. If it is None, the latest checkpoint is loaded.
        :param prefix: optimMethod prefix, for example 'optimMethod-TorchModelf53bddcc'.
        :return:
        """
        import os
        from bigdl.nn.layer import Model
        from bigdl.optim.optimizer import OptimMethod
        from zoo.orca.learn.utils import find_latest_checkpoint
        from zoo.pipeline.api.torch import TorchModel

        if version is None:
            path, prefix, version = find_latest_checkpoint(path, model_type="pytorch")
            if path is None:
                raise ValueError("Cannot find PyTorch checkpoint, please check your checkpoint"
                                 " path.")
        else:
            assert prefix is not None, "You should provide optimMethod prefix, " \
                                       "for example 'optimMethod-TorchModelf53bddcc'"

        try:
            loaded_model = Model.load(os.path.join(path, "model.{}".format(version)))
            self.model = TorchModel.from_value(loaded_model.value)
            self.optimizer = OptimMethod.load(os.path.join(path, "{}.{}".format(prefix, version)))
        except Exception:
            raise ValueError("Cannot load PyTorch checkpoint, please check your checkpoint path "
                             "and checkpoint type.")
        self.estimator = SparkEstimator(self.model, self.optimizer, self.model_dir)
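
In this variant, omitting `version` lets `find_latest_checkpoint` pick the newest `model.*` file under the directory. A hedged usage sketch, assuming an Orca PyTorch estimator `est` as in Example #1 and a checkpoint directory written during training (the path is illustrative):

    # Resume from the newest checkpoint found under the directory.
    est.load_orca_checkpoint("/tmp/orca_ckpt")

    # Or pin an exact snapshot by giving both the version and the
    # optimMethod prefix that appears in the checkpoint file names.
    est.load_orca_checkpoint("/tmp/orca_ckpt",
                             version=4,
                             prefix="optimMethod-TorchModelf53bddcc")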
Example #5
 def load_orca_checkpoint(self, path, version, prefix=None):
     import os
     from bigdl.nn.layer import Model
     from bigdl.optim.optimizer import OptimMethod
     assert prefix is not None, "You should provide optimMethod prefix, " \
                                "for example 'optimMethod-TorchModelf53bddcc'"
     try:
         self.model = Model.load(os.path.join(path, "model.{}".format(version)))
         optimizer = OptimMethod.load(os.path.join(path, "{}.{}".format(prefix, version)))
     except Exception:
         raise ValueError("Cannot load PyTorch checkpoint, please check your checkpoint path "
                          "and checkpoint type.")
     self.estimator = SparkEstimator(self.model, optimizer, self.model_dir)
Example #6
    def load_orca_checkpoint(self, path, version=None, prefix=None):
        """
        Load an existing checkpoint. To load a specific checkpoint, please provide both `version`
        and `prefix`. If `version` is None, then the latest checkpoint under the specified
        directory will be loaded.

        :param path: Path to the existing checkpoint (or directory containing Orca checkpoint
               files).
        :param version: checkpoint version, which is the suffix of the model.* file, e.g., for
               the model.4 file, the version is 4. If it is None, the latest checkpoint is loaded.
        :param prefix: optimMethod prefix, for example 'optimMethod-Sequentialf53bddcc'
        :return:
        """
        from bigdl.nn.layer import Model, Container
        from bigdl.optim.optimizer import OptimMethod
        from zoo.orca.learn.utils import find_latest_checkpoint
        import os

        if version is None:
            path, prefix, version = find_latest_checkpoint(path,
                                                           model_type="bigdl")
            if path is None:
                raise ValueError(
                    "Cannot find BigDL checkpoint, please check your checkpoint"
                    " path.")
        else:
            assert prefix is not None, "You should provide optimMethod prefix, " \
                                       "for example 'optimMethod-Sequentialf53bddcc'"

        try:
            self.model = Model.load(
                os.path.join(path, "model.{}".format(version)))
            assert isinstance(self.model, Container), \
                "The loaded model should be a Container, please check your checkpoint type."
            self.optimizer = OptimMethod.load(
                os.path.join(path, "{}.{}".format(prefix, version)))
        except Exception:
            raise ValueError(
                "Cannot load BigDL checkpoint, please check your checkpoint path "
                "and checkpoint type.")
        self.estimator = SparkEstimator(self.model, self.optimizer,
                                        self.model_dir)
        self.nn_estimator = NNEstimator(self.model, self.loss,
                                        self.feature_preprocessing,
                                        self.label_preprocessing)
        if self.optimizer is not None:
            self.nn_estimator.setOptimMethod(self.optimizer)
        self.nn_model = NNModel(
            self.model, feature_preprocessing=self.feature_preprocessing)
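
The BigDL-backed variant above follows the same call pattern, but the loaded model must be a `Container` and the optimMethod prefix follows the BigDL model's name (e.g. a `Sequential`). A sketch with illustrative paths, assuming a BigDL-backed Orca estimator `est`:

    # Latest BigDL checkpoint under the directory.
    est.load_orca_checkpoint("/tmp/bigdl_ckpt")

    # A specific snapshot: expects model.4 and optimMethod-Sequentialf53bddcc.4
    # (prefix copied from the files on disk) under the same directory.
    est.load_orca_checkpoint("/tmp/bigdl_ckpt",
                             version=4,
                             prefix="optimMethod-Sequentialf53bddcc")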
Example #7
 def load_orca_checkpoint(self, path, version, prefix=None):
     from bigdl.nn.layer import Model, Container
     from bigdl.optim.optimizer import OptimMethod
     import os
     try:
         self.model = Model.load(os.path.join(path, "model.{}".format(version)))
         assert isinstance(self.model, Container), \
             "The loaded model should be a Container, please check your checkpoint type."
         self.optimizer = OptimMethod.load(os.path.join(path,
                                                        "{}.{}".format(prefix, version)))
     except Exception:
         raise ValueError("Cannot load BigDL checkpoint, please check your checkpoint path "
                          "and checkpoint type.")
     self.estimator = SparkEstimator(self.model, self.optimizer, self.model_dir)
     self.nn_estimator = NNEstimator(self.model, self.loss, self.feature_preprocessing,
                                     self.label_preprocessing)
     if self.optimizer is not None:
         self.nn_estimator.setOptimMethod(self.optimizer)
     self.nn_model = NNModel(self.model, feature_preprocessing=self.feature_preprocessing)
Example #8
 def load(self, checkpoint, loss=None):
     from zoo.orca.learn.utils import find_latest_checkpoint
     from bigdl.nn.layer import Model
     from bigdl.optim.optimizer import OptimMethod
     import os
     if loss is not None:
         from zoo.pipeline.api.torch import TorchLoss
         self.loss = TorchLoss.from_pytorch(loss)
     path, prefix, version = find_latest_checkpoint(checkpoint,
                                                    model_type="pytorch")
     if path is None:
         raise ValueError(
             "Cannot find PyTorch checkpoint, please check your checkpoint path."
         )
     try:
         self.model = Model.load(
             os.path.join(path, "model.{}".format(version)))
         optimizer = OptimMethod.load(
             os.path.join(path, "{}.{}".format(prefix, version)))
     except Exception:
         raise ValueError(
             "Cannot load PyTorch checkpoint, please check your checkpoint path "
             "and checkpoint type.")
     self.estimator = SparkEstimator(self.model, optimizer, self.model_dir)
Example #9
    def load(self,
             checkpoint,
             optimizer=None,
             loss=None,
             feature_preprocessing=None,
             label_preprocessing=None,
             model_dir=None,
             is_checkpoint=False):
        if loss is not None:
            self.loss = loss
        if optimizer is not None:
            self.optimizer = optimizer
        if feature_preprocessing is not None:
            self.feature_preprocessing = feature_preprocessing
        if label_preprocessing is not None:
            self.label_preprocessing = label_preprocessing
        if model_dir is not None:
            self.model_dir = model_dir

        if is_checkpoint:
            from zoo.orca.learn.utils import find_latest_checkpoint
            from zoo.pipeline.api.net import Net
            from bigdl.nn.layer import Model, Container
            from bigdl.optim.optimizer import OptimMethod
            import os
            path, prefix, version = find_latest_checkpoint(checkpoint,
                                                           model_type="bigdl")
            if path is None:
                raise ValueError(
                    "Cannot find BigDL checkpoint, please check your checkpoint path."
                )
            try:
                self.model = Model.load(
                    os.path.join(path, "model.{}".format(version)))
                assert isinstance(self.model, Container), \
                    "The loaded model should be a Container, please check your checkpoint type."
                self.optimizer = OptimMethod.load(
                    os.path.join(path, "{}.{}".format(prefix, version)))
            except Exception:
                raise ValueError(
                    "Cannot load BigDL checkpoint, please check your checkpoint path "
                    "and checkpoint type.")
            self.estimator = SparkEstimator(self.model, self.optimizer,
                                            self.model_dir)
            self.nn_estimator = NNEstimator(self.model, self.loss,
                                            self.feature_preprocessing,
                                            self.label_preprocessing)
            if self.optimizer is not None:
                self.nn_estimator.setOptimMethod(self.optimizer)
            self.nn_model = NNModel(
                self.model, feature_preprocessing=self.feature_preprocessing)
        else:
            from zoo.pipeline.api.net import Net
            self.model = Net.load_bigdl(checkpoint + ".bigdl",
                                        checkpoint + ".bin")

            self.nn_estimator = NNEstimator(self.model, self.loss,
                                            self.feature_preprocessing,
                                            self.label_preprocessing)
            if self.optimizer is None:
                from bigdl.optim.optimizer import SGD
                self.optimizer = SGD()
            self.nn_estimator.setOptimMethod(self.optimizer)
            self.estimator = SparkEstimator(self.model, self.optimizer,
                                            self.model_dir)
            self.nn_model = NNModel(
                self.model, feature_preprocessing=self.feature_preprocessing)
        return self
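
A usage sketch for the two branches above, assuming a BigDL-backed Orca estimator `est` (paths are illustrative): with `is_checkpoint=True` the argument is treated as an Orca checkpoint directory, otherwise as the base name of a model saved as `<name>.bigdl` plus `<name>.bin`:

    # Resume full training state from an Orca checkpoint directory.
    est.load("/tmp/bigdl_ckpt", is_checkpoint=True)

    # Load a standalone saved model; expects /tmp/my_model.bigdl and
    # /tmp/my_model.bin on disk, and falls back to SGD when no optimizer
    # has been provided.
    est.load("/tmp/my_model", is_checkpoint=False)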