Example #1
    @classmethod
    def from_model(
        cls,
        booster: lightgbm.Booster,
        *,
        path: os.PathLike,
        preprocessor: Optional["Preprocessor"] = None,
    ) -> "LightGBMCheckpoint":
        """Create a :py:class:`~ray.air.checkpoint.Checkpoint` that stores a LightGBM
        model.

        Args:
            booster: The LightGBM model to store in the checkpoint.
            path: The directory where the checkpoint will be stored.
            preprocessor: A fitted preprocessor to be applied before inference.

        Returns:
            A :py:class:`LightGBMCheckpoint` containing the specified ``booster``.

        Examples:
            >>> from ray.train.lightgbm import LightGBMCheckpoint
            >>> import lightgbm
            >>>
            >>> booster = lightgbm.Booster()  # doctest: +SKIP
            >>> checkpoint = LightGBMCheckpoint.from_model(booster, path=".")  # doctest: +SKIP # noqa: E501

            You can use a :py:class:`LightGBMCheckpoint` to create a
            :py:class:`~ray.train.lightgbm.LightGBMPredictor` and perform inference.

            >>> from ray.train.lightgbm import LightGBMPredictor
            >>>
            >>> predictor = LightGBMPredictor.from_checkpoint(checkpoint)  # doctest: +SKIP # noqa: E501
        """
        booster.save_model(os.path.join(path, MODEL_KEY))

        if preprocessor:
            save_preprocessor_to_dir(preprocessor, path)

        checkpoint = cls.from_directory(path)

        return checkpoint
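
A minimal, runnable sketch of calling the classmethod above. It assumes Ray Train and LightGBM are installed and follows the keyword signature shown here (booster first, path as a keyword, which differs across Ray versions); the synthetic data and the "lightgbm_checkpoint" directory name are illustrative only.

import os

import lightgbm
import numpy as np
from ray.train.lightgbm import LightGBMCheckpoint

# Train a small booster on synthetic data (illustrative only).
X = np.random.rand(100, 4)
y = (X[:, 0] > 0.5).astype(int)
booster = lightgbm.train(
    {"objective": "binary", "verbose": -1},
    lightgbm.Dataset(X, label=y),
    num_boost_round=5,
)

# Write the model into a directory and wrap that directory as a checkpoint.
ckpt_dir = "lightgbm_checkpoint"  # hypothetical output directory
os.makedirs(ckpt_dir, exist_ok=True)
checkpoint = LightGBMCheckpoint.from_model(booster, path=ckpt_dir)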
Example #2
def to_air_checkpoint(
    path: str,
    booster: lightgbm.Booster,
    preprocessor: Optional["Preprocessor"] = None,
) -> Checkpoint:
    """Convert a pretrained model to AIR checkpoint for serve or inference.

    Args:
        path: The directory where the model and preprocessor are stored.
        booster: A pretrained LightGBM booster.
        preprocessor: A fitted preprocessor. The preprocessing logic will
            be applied at serving/inference time.

    Returns:
        A Ray AIR checkpoint.
    """
    booster.save_model(os.path.join(path, MODEL_KEY))

    if preprocessor:
        save_preprocessor_to_dir(preprocessor, path)

    checkpoint = Checkpoint.from_directory(path)

    return checkpoint
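
Because the helper only writes the booster file named by MODEL_KEY into ``path`` and then wraps the directory, the saved model can also be read back with plain LightGBM. A hedged sketch, assuming ``to_air_checkpoint`` and ``MODEL_KEY`` are in scope from the module defined above:

import os
import tempfile

import lightgbm
import numpy as np

# Train a small booster on illustrative data.
X = np.random.rand(50, 3)
y = np.random.randint(0, 2, size=50)
booster = lightgbm.train(
    {"objective": "binary", "verbose": -1},
    lightgbm.Dataset(X, label=y),
    num_boost_round=3,
)

with tempfile.TemporaryDirectory() as tmpdir:
    checkpoint = to_air_checkpoint(tmpdir, booster)
    # The directory now holds the serialized booster under MODEL_KEY,
    # so LightGBM can load it back directly.
    restored = lightgbm.Booster(model_file=os.path.join(tmpdir, MODEL_KEY))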
Example #3
    def dump(self, model: lgb.Booster) -> FilesContextManager:
        with tempfile.TemporaryDirectory(prefix='ebonite_lightgbm_dump') as f:
            path = os.path.join(f, self.model_path)
            model.save_model(path)
            yield Blobs({self.model_path: LocalFileBlob(path)})
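
Since the temporary directory is removed as soon as the ``with`` block exits, the generator above must be consumed as a context manager so the caller reads the blob before cleanup; that is the role of the FilesContextManager return type in ebonite. Below is a standalone sketch of the same pattern without the ebonite types; the ``contextlib.contextmanager`` decorator and the "model.lgb" file name are assumptions for illustration.

import os
import tempfile
from contextlib import contextmanager
from typing import Iterator

import lightgbm as lgb


@contextmanager
def dump_booster(model: lgb.Booster, file_name: str = "model.lgb") -> Iterator[str]:
    # The saved file exists only while the caller is inside the `with` block.
    with tempfile.TemporaryDirectory(prefix="lightgbm_dump") as tmpdir:
        path = os.path.join(tmpdir, file_name)
        model.save_model(path)
        yield path

# Usage: with dump_booster(booster) as model_path: ...  (read or copy the file here)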