Example #1
    def test_constraint_dropping(self):
        from lale.lib.sklearn import LogisticRegression
        from lale.operators import make_operator
        from lale.search.schema2search_space import op_to_search_space

        orig_schemas = LogisticRegression._schemas
        mod_schemas = {
            **orig_schemas,
            "properties": {
                **orig_schemas["properties"],
                "hyperparams": {
                    "allOf": [
                        s if i == 0 else {**s, "forOptimizer": False}
                        for i, s in enumerate(
                            orig_schemas["properties"]["hyperparams"]["allOf"]
                        )
                    ]
                },
            },
        }
        orig_space = op_to_search_space(LogisticRegression)
        mod_op = make_operator(LogisticRegression._impl_class(), mod_schemas)
        mod_space = op_to_search_space(mod_op)
        # dropping constraints makes the search space smaller
        self.assertGreater(len(str(orig_space)), len(str(mod_space)))
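For reference, the full search space can also be inspected directly. A minimal sketch, using only the imports and calls that already appear in the test above:

from lale.lib.sklearn import LogisticRegression
from lale.search.schema2search_space import op_to_search_space

# The unmodified space still contains terms generated from the
# cross-parameter constraints (e.g. solver/penalty compatibility),
# which is why its string form is longer.
space = op_to_search_space(LogisticRegression)
print(str(space))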
Example #2
def op_to_search_space_grids(
        op: PlannedOperator,
        pgo: Optional[PGO] = None,
        data_schema: Dict[str, Any] = {}) -> List[SearchSpaceGrid]:
    search_space = op_to_search_space(op, pgo=pgo, data_schema=data_schema)
    grids = search_space_to_grids(search_space)
    return grids
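A hedged usage sketch, assuming the op_to_search_space_grids function above is in scope and using lale's PCA wrapper: each returned grid maps hyperparameter names to the ranges the optimizer may explore.

from lale.lib.sklearn import PCA

grids = op_to_search_space_grids(PCA)
for grid in grids:
    print(grid)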
Example #3
File: op2hp.py    Project: MSaber9/lale
def hyperopt_search_space(
    op: "PlannedOperator", schema=None, pgo: Optional[PGO] = None, data_schema={}
):
    search_space = op_to_search_space(op, pgo=pgo, data_schema=data_schema)
    if search_space:
        name = op.name()

        if should_print_search_space("true", "all", "backend", "hyperopt"):
            print(
                f"hyperopt search space for {name}: {search_space_to_hp_str(search_space, name)}"
            )
        return search_space_to_hp_expr(search_space, name)
    else:
        return None
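A hedged usage sketch (assumes hyperopt is installed; hyperopt.pyll.stochastic.sample is hyperopt's own helper for drawing a random point from a search expression):

from hyperopt.pyll import stochastic

from lale.lib.sklearn import LogisticRegression

space = hyperopt_search_space(LogisticRegression)
if space is not None:
    # Draw one random hyperparameter configuration from the space.
    print(stochastic.sample(space))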
Example #4
    def test_override_int_param2(self):
        import lale.schemas as schemas
        from lale.lib.sklearn import PCA
        from lale.search.schema2search_space import op_to_search_space
        from lale.search.search_space import SearchSpaceNumber, SearchSpaceObject

        pca = PCA.customize_schema(
            relevantToOptimizer=["iterated_power"],
            iterated_power=schemas.Float(
                minimum=0,
                minimumForOptimizer=1,
                maximum=6,
                maximumForOptimizer=5,
                exclusiveMaximumForOptimizer=False,
                exclusiveMinimumForOptimizer=True,
            ),
        )
        search = op_to_search_space(pca)
        assert isinstance(search, SearchSpaceObject)
        num_space = list(search.choices)[0][0]
        assert isinstance(num_space, SearchSpaceNumber)
        self.assertEqual(num_space.minimum, 1)
        self.assertEqual(num_space.maximum, 5)
        self.assertFalse(num_space.exclusiveMaximum)
        self.assertTrue(num_space.exclusiveMinimum)
Example #5
    def test_override_float_param1(self):
        import lale.schemas as schemas
        from lale.lib.sklearn import PCA
        from lale.search.schema2search_space import op_to_search_space
        from lale.search.search_space import SearchSpaceNumber, SearchSpaceObject

        pca = PCA.customize_schema(
            relevantToOptimizer=["tol"],
            tol=schemas.Float(
                minimum=0.25,
                minimumForOptimizer=0,
                maximum=0.5,
                maximumForOptimizer=1.0,
                exclusiveMaximum=True,
                exclusiveMinimum=False,
            ),
        )
        search = op_to_search_space(pca)
        assert isinstance(search, SearchSpaceObject)
        num_space = list(search.choices)[0][0]
        assert isinstance(num_space, SearchSpaceNumber)
        self.assertEqual(num_space.minimum, 0.25)
        self.assertEqual(num_space.maximum, 0.5)
        self.assertTrue(num_space.exclusiveMaximum)
        self.assertFalse(num_space.exclusiveMinimum)
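Taken together, examples #4 and #5 suggest how the two sets of bounds combine: the optimizer range appears to be the intersection of the base bounds and the *ForOptimizer bounds, i.e. [max(minimum, minimumForOptimizer), min(maximum, maximumForOptimizer)], with each exclusivity flag taken from whichever bound wins the intersection.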
Example #6
def op_to_search_space_grids(
    op: PlannedOperator, pgo: Optional[PGO] = None
) -> List[SearchSpaceGrid]:
    search_space = op_to_search_space(op, pgo=pgo)
    grids = search_space_to_grids(search_space)
    return grids
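This appears to be an earlier revision of the same helper shown in example #2, from before the data_schema parameter was added; the grid construction itself is unchanged.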