Example #1
from ray import tune
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.suggest.flaml import BlendSearch


def run_blendsearch_tune_w_budget(time_budget_s=10):
    """run BlendSearch with given time_budget_s"""
    algo = BlendSearch(
        metric="mean_loss",
        mode="min",
        space={
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
            "activation": tune.choice(["relu", "tanh"]),
        },
    )
    # Pass the wall-clock budget to BlendSearch so it can plan its search accordingly.
    algo.set_search_properties(config={"time_budget_s": time_budget_s})
    algo = ConcurrencyLimiter(algo, max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()
    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        time_budget_s=time_budget_s,
        num_samples=-1,  # -1 means keep sampling until the time budget is exhausted
        config={
            "steps": 100,
        },
    )

    print("Best hyperparameters found were: ", analysis.best_config)
Example #2
    def testBlendSearch(self):
        from ray.tune.suggest.flaml import BlendSearch

        searcher = BlendSearch(space=self.config, metric=self.metric_name, mode="max")

        self._save(searcher)

        searcher = BlendSearch(space=self.config, metric=self.metric_name, mode="max")
        self._restore(searcher)
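The _save and _restore helpers belong to the surrounding test class and are not shown. A plausible sketch, assuming they simply round-trip the searcher through the standard Searcher.save()/Searcher.restore() checkpoint methods (the helper bodies and checkpoint path are assumptions):

    def _save(self, searcher):
        # Hypothetical helper: persist the searcher's state to disk.
        searcher.save("./blendsearch-checkpoint.pkl")

    def _restore(self, searcher):
        # Hypothetical helper: load the saved state into a fresh searcher.
        searcher.restore("./blendsearch-checkpoint.pkl")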
Example #3
def test_blendsearch_tune(smoke_test=True):
    try:
        from ray import tune
        from ray.tune.suggest import ConcurrencyLimiter
        from ray.tune.schedulers import AsyncHyperBandScheduler
        from ray.tune.suggest.flaml import BlendSearch
    except ImportError:
        print("ray[tune] is not installed, skipping test")
        return
    import numpy as np

    algo = BlendSearch()
    algo = ConcurrencyLimiter(algo, max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()
    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        num_samples=10 if smoke_test else 100,
        config={
            "steps": 100,
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
            # This is an ignored parameter.
            "activation": tune.choice(["relu", "tanh"]),
            "test4": np.zeros((3, 1)),
        },
    )

    print("Best hyperparameters found were: ", analysis.best_config)
Example #4
    def testBlendSearch(self):
        from ray.tune.suggest.flaml import BlendSearch

        out = tune.run(
            _invalid_objective,
            search_alg=BlendSearch(
                points_to_evaluate=[
                    {"report": 1.0},
                    {"report": 2.1},
                    {"report": 3.1},
                    {"report": 4.1},
                ]
            ),
            config=self.config,
            metric="_metric",
            mode="max",
            num_samples=16,
            reuse_actors=False,
        )

        best_trial = out.best_trial
        self.assertLessEqual(best_trial.config["report"], 2.0)
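This example and Example #6 optimize an _invalid_objective that is not shown. Below is a hedged sketch consistent with the assertion above (configs above 2.0 yield non-finite or NaN reports, so the best usable trial stays at or below 2.0); the exact thresholds are assumptions.

import numpy as np
from ray import tune


def _invalid_objective(config):
    # Hypothetical objective: large values report unusable results, so a
    # robust searcher should converge on report values of at most 2.0.
    if config["report"] > 4:
        tune.report(float("inf"))
    elif config["report"] > 3:
        tune.report(float("-inf"))
    elif config["report"] > 2:
        tune.report(np.nan)
    else:
        tune.report(float(config["report"]) or 0.1)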
Example #5
    def testConvergenceBlendSearch(self):
        from ray.tune.suggest.flaml import BlendSearch

        np.random.seed(0)
        searcher = BlendSearch()
        analysis = self._testConvergence(searcher, patience=200)

        assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-2)
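_testConvergence comes from the test base class and is not shown. A simplified sketch of what it might do, assuming a one-dimensional quadratic objective with its minimum at x = 0 (the search space, sample count, and absence of an early-stopping stopper are assumptions):

    def _testConvergence(self, searcher, patience=200):
        def objective(config):
            # Convex toy loss whose minimum lies at x == 0.
            tune.report(loss=config["x"] ** 2)

        # Hypothetical harness: let the searcher propose up to `patience`
        # samples and return the resulting analysis object.
        return tune.run(
            objective,
            config={"x": tune.uniform(-10, 10)},
            search_alg=searcher,
            metric="loss",
            mode="min",
            num_samples=patience,
        )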
Example #6
    def testBlendSearch(self):
        from ray.tune.suggest.flaml import BlendSearch

        out = tune.run(
            _invalid_objective,
            search_alg=BlendSearch(),
            config=self.config,
            metric="_metric",
            mode="max",
            num_samples=8,
            reuse_actors=False,
        )

        best_trial = out.best_trial
        self.assertLessEqual(best_trial.config["report"], 2.0)
Example #7
    def set_basic_conf(self):
        space = {
            "height": tune.uniform(-100, 100),
            "width": tune.randint(0, 100),
            "time_budget_s": 10,
        }

        def cost(param, reporter):
            reporter(loss=(param["height"] - 14)**2 - abs(param["width"] - 3))

        search_alg = BlendSearch(
            space=space,
            metric="loss",
            mode="min",
            seed=20,
        )

        return search_alg, cost
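set_basic_conf only builds the searcher and the cost function; the surrounding warm-start test wires them into tune.run. A hedged usage sketch (the concurrency limit and sample count are assumptions):

        search_alg, cost = self.set_basic_conf()
        # Limit to one concurrent trial for reproducibility, then run.
        search_alg = ConcurrencyLimiter(search_alg, max_concurrent=1)
        tune.run(cost, search_alg=search_alg, num_samples=5, verbose=0)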
Example #8
from ray import tune
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.suggest.flaml import BlendSearch


def run_blendsearch_tune(smoke_test=False):
    algo = BlendSearch()
    algo = ConcurrencyLimiter(algo, max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()
    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        num_samples=10 if smoke_test else 100,
        config={
            "steps": 100,
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
            # This is an ignored parameter.
            "activation": tune.choice(["relu", "tanh"])
        })

    print("Best hyperparameters found were: ", analysis.best_config)