Example #1
Score: 0
File: helpers.py — Project: sreev/lale
    def get_equivalent_lale_op(sklearn_obj, fitted):
        """Convert a scikit-learn estimator into the equivalent lale operator.

        Looks for a hand-written lale wrapper class with the same name in the
        known wrapper modules; if none exists, wraps the raw object with
        ``make_operator``. Nested estimators found among the hyperparameters
        (e.g. an ensemble's base estimator) are converted recursively so the
        whole tree becomes lale operators.

        Parameters
        ----------
        sklearn_obj : a scikit-learn compatible object (has ``get_params``).
        fitted : bool
            When True, return a TrainedIndividualOp; when False, return the
            planned (untrained) operator.

        Returns
        -------
        A lale individual operator configured with ``sklearn_obj``'s params.
        """
        module_names = ["lale.lib.sklearn", "lale.lib.autoai_libs"]
        from lale.operators import TrainedIndividualOp, make_operator

        lale_wrapper_found = False
        class_name = sklearn_obj.__class__.__name__
        for module_name in module_names:
            module = importlib.import_module(module_name)
            try:
                class_ = getattr(module, class_name)
                lale_wrapper_found = True
                break
            except AttributeError:
                continue
        else:
            # No wrapper in any known module: wrap the raw object on the fly.
            class_ = make_operator(sklearn_obj, name=class_name)

        if not fitted:  # If fitted is False, we do not want to return a Trained operator.
            lale_op = class_
        else:
            lale_op = TrainedIndividualOp(class_._name, class_._impl,
                                          class_._schemas)

        # Recursively convert higher-order hyperparameters (nested estimators),
        # otherwise nested sklearn objects would leak into the lale operator.
        orig_hyperparams = sklearn_obj.get_params()
        higher_order = any(
            hasattr(hp_val, "get_params") for hp_val in orig_hyperparams.values()
        )
        if higher_order:
            hyperparams = {
                hp_name: (
                    get_equivalent_lale_op(hp_val, fitted)
                    if hasattr(hp_val, "get_params")
                    else hp_val
                )
                for hp_name, hp_val in orig_hyperparams.items()
            }
        else:
            hyperparams = orig_hyperparams

        class_ = lale_op(**hyperparams)
        if lale_wrapper_found:
            class_._impl_instance()._wrapped_model = copy.deepcopy(sklearn_obj)
        else:  # If there is no lale wrapper, there is no _wrapped_model
            class_._impl = copy.deepcopy(sklearn_obj)
        return class_
Example #2
Score: 0
    def get_equivalent_lale_op(sklearn_obj, fitted):
        """Convert a scikit-learn estimator into the equivalent lale operator.

        Searches the known lale wrapper modules for a class with the same name
        as ``sklearn_obj``; falls back to wrapping the raw object with
        ``make_operator``. Nested estimators among the hyperparameters are
        converted recursively.

        Raises ``ValueError`` when ``sklearn_obj`` is not sklearn-compatible.
        """
        # Guard clause: fail fast on objects that are not sklearn-compatible.
        if sklearn_obj is None or not hasattr(sklearn_obj, "get_params"):
            raise ValueError(
                "The input pipeline has a step that is not scikit-learn compatible."
            )
        from lale.operators import TrainedIndividualOp, make_operator

        candidate_modules = ["lale.lib.sklearn", "lale.lib.autoai_libs"]
        wrapper_name = sklearn_obj.__class__.__name__
        wrapper_found = False
        missing = object()  # sentinel so getattr never raises
        for candidate in candidate_modules:
            mod = importlib.import_module(candidate)
            op_class = getattr(mod, wrapper_name, missing)
            if op_class is not missing:
                wrapper_found = True
                break
        if not wrapper_found:
            op_class = make_operator(sklearn_obj, name=wrapper_name)

        if fitted:
            lale_op = TrainedIndividualOp(
                op_class._name, op_class._impl, op_class._schemas
            )
        else:
            # Not fitted: return the plain (planned) operator, not a Trained one.
            lale_op = op_class

        params = sklearn_obj.get_params()
        if any(hasattr(value, "get_params") for value in params.values()):
            # At least one hyperparameter is itself an estimator: recurse so
            # the nested object also becomes a lale operator.
            params = {
                key: (
                    get_equivalent_lale_op(value, fitted)
                    if hasattr(value, "get_params")
                    else value
                )
                for key, value in params.items()
            }

        configured = lale_op(**params)
        if wrapper_found:
            configured._impl_instance()._wrapped_model = copy.deepcopy(sklearn_obj)
        else:  # without a lale wrapper there is no _wrapped_model attribute
            configured._impl = copy.deepcopy(sklearn_obj)
        return configured
Example #3
Score: 0
File: helpers.py — Project: ozgurgul/lale
    def get_equivalent_lale_op(sklearn_obj, fitted):
        """Convert a scikit-learn estimator into the equivalent lale operator.

        Looks for a hand-written lale wrapper class with the same name in the
        known wrapper modules; if none exists, wraps the raw object with
        ``make_operator``. Nested estimators found among the hyperparameters
        are converted recursively.

        Parameters
        ----------
        sklearn_obj : a scikit-learn compatible object (has ``get_params``).
        fitted : bool
            When True, return a TrainedIndividualOp; when False, return the
            planned (untrained) operator.

        Raises
        ------
        ValueError
            If ``sklearn_obj`` is None or lacks ``get_params`` — fail fast
            with a clear message rather than an AttributeError deep inside.
        """
        if sklearn_obj is None or not hasattr(sklearn_obj, "get_params"):
            raise ValueError(
                "The input pipeline has a step that is not scikit-learn compatible."
            )
        module_names = ["lale.lib.sklearn", "lale.lib.autoai_libs"]
        from lale.operators import TrainedIndividualOp, make_operator

        lale_wrapper_found = False
        class_name = sklearn_obj.__class__.__name__
        for module_name in module_names:
            module = importlib.import_module(module_name)
            try:
                class_ = getattr(module, class_name)
                lale_wrapper_found = True
                break
            except AttributeError:
                continue
        else:
            # No wrapper in any known module: wrap the raw object on the fly.
            class_ = make_operator(sklearn_obj, name=class_name)

        if not fitted:  # If fitted is False, we do not want to return a Trained operator.
            lale_op = class_
        else:
            lale_op = TrainedIndividualOp(class_._name, class_._impl,
                                          class_._schemas)

        # Recursively convert higher-order hyperparameters (nested estimators).
        orig_hyperparams = sklearn_obj.get_params()
        higher_order = any(
            hasattr(hp_val, 'get_params') for hp_val in orig_hyperparams.values()
        )
        if higher_order:
            hyperparams = {}
            for hp_name, hp_val in orig_hyperparams.items():
                if hasattr(hp_val, 'get_params'):
                    nested_op = get_equivalent_lale_op(hp_val, fitted)
                    hyperparams[hp_name] = nested_op
                else:
                    hyperparams[hp_name] = hp_val
        else:
            hyperparams = orig_hyperparams

        class_ = lale_op(**hyperparams)
        if lale_wrapper_found:
            wrapped_model = copy.deepcopy(sklearn_obj)
            class_._impl_instance()._wrapped_model = wrapped_model
        else:  # If there is no lale wrapper, there is no _wrapped_model
            class_._impl = copy.deepcopy(sklearn_obj)
        return class_