Example #1
def setup_tune_scheduler():
    from ray.tune.suggest.skopt import SkOptSearch
    from ray.tune.suggest.suggestion import ConcurrencyLimiter
    from skopt import Optimizer

    exp_metrics = workload.exp_metric()

    search_space, dim_names = workload.create_skopt_space()
    algo = ConcurrencyLimiter(
        SkOptSearch(
            Optimizer(search_space),
            dim_names,
            **exp_metrics,
        ),
        3,
    )

    scheduler = FluidBandScheduler(
        max_res=3,
        reduction_factor=3,
        **exp_metrics,
    )
    return dict(
        search_alg=algo,
        scheduler=scheduler,
        trial_executor=MyRayTrialExecutor(),
        resources_per_trial=com.detect_baseline_resource(),
    )
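This and the following examples unpack workload.exp_metric() into the scheduler and searcher; a plausible shape for that helper, assuming the trainable reports a validation-accuracy metric (the metric name here is an assumption, not part of the original code):

# Hypothetical shape of workload.exp_metric(); the real metric name depends on
# what the trainable reports back to Tune.
def exp_metric():
    return dict(metric="mean_accuracy", mode="max")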
Example #2
def setup_tune_scheduler():
    search_space = workload.create_search_space()

    experiment_metrics = workload.exp_metric()
    scheduler = HyperBandScheduler(time_attr="training_iteration",
                                   max_t=81,
                                   reduction_factor=3,
                                   **experiment_metrics)

    return dict(
        search_alg=VariantGenerator(),
        scheduler=scheduler,
        config=search_space,
        resources_per_trial=com.detect_baseline_resource(),
    )
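Each setup_tune_scheduler() variant only assembles keyword arguments; a minimal sketch of how the returned dict could be handed to Ray Tune (MyTrainable, the sample budget, and the metric name are assumptions):

from ray import tune

# Hypothetical driver: the returned kwargs plug straight into tune.run.
analysis = tune.run(
    MyTrainable,             # stand-in for the workload's actual trainable
    num_samples=50,          # assumed budget; the setup helpers leave this to the caller
    **setup_tune_scheduler(),
)
print(analysis.get_best_config(metric="mean_accuracy", mode="max"))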
Example #3
def setup_tune_scheduler(num_worker):
    search_space = workload.create_search_space()
    # experiment_metrics = workload.exp_metric()
    asha_parallel = AsyncHyperBandSchedulerWithParalelism(
        # set a large max_t such that ASHA will always promote to the next rung,
        # until something reaches target accuracy
        max_t=int(1e10),
        reduction_factor=3,
        **workload.exp_metric())

    return dict(
        scheduler=asha_parallel,
        search_alg=VariantGenerator(max_concurrent=sched_algo()),
        config=search_space,
        resources_per_trial=com.detect_baseline_resource(),
    )
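AsyncHyperBandSchedulerWithParalelism appears to be a project-specific subclass; for comparison, the closest stock equivalent would be Ray's AsyncHyperBandScheduler configured the same way (this substitution is an assumption, not part of the original code):

from ray.tune.schedulers import AsyncHyperBandScheduler

# Stock ASHA configured like the custom scheduler above, for comparison only.
asha = AsyncHyperBandScheduler(
    max_t=int(1e10),
    reduction_factor=3,
    **workload.exp_metric(),
)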
Example #4
def setup_tune_scheduler():
    search_space = workload.create_search_space()

    scheduler = ASHAScheduler(
        # set a large max_t such that ASHA will always promote to the next rung,
        # until something reaches target accuracy
        max_t=int(1000),
        reduction_factor=3,
        **workload.exp_metric(),
    )
    return dict(
        search_alg=VariantGenerator(),
        scheduler=scheduler,
        config=search_space,
        resources_per_trial=com.detect_baseline_resource(),
    )
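Examples #2-#4 reference scheduler classes without their import lines; assuming a stock Ray Tune install, they would come from ray.tune.schedulers (module paths can shift between Ray versions):

# Imports assumed by the HyperBand/ASHA examples above.
from ray.tune.schedulers import ASHAScheduler, HyperBandScheduler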
Example #5
def setup_tune_scheduler():
    ss, custom_explore = workload.create_sample_space()
    search_space = workload.create_search_space()

    scheduler = PopulationBasedTraining(time_attr="training_iteration",
                                        perturbation_interval=5,
                                        hyperparam_mutations=ss,
                                        custom_explore_fn=custom_explore,
                                        **workload.exp_metric())

    return dict(
        scheduler=scheduler,
        config=search_space,
        # num_samples in PBT only sets population
        num_samples=10,
        resources_per_trial=com.detect_baseline_resource(),
    )
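PBT mutates hyperparameters in place, so create_sample_space() returns a mutation space plus an explore callback; a minimal illustration of that shape (the parameter names and ranges are assumptions):

import random

# Hypothetical mutation space in the form PopulationBasedTraining expects:
# callables resample a value, lists are sampled from directly.
ss = {
    "lr": lambda: random.uniform(1e-4, 1e-1),
    "momentum": [0.8, 0.9, 0.99],
}

def custom_explore(config):
    # Clamp a mutated value back into a valid range after perturbation.
    config["lr"] = min(max(config["lr"], 1e-5), 1.0)
    return config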
Example #6
def setup_tune_scheduler():
    # BOHB uses ConfigSpace for their hyperparameter search space
    config_space = workload.create_ch()

    experiment_metrics = workload.exp_metric()
    bohb_search = TuneBOHB(config_space, **experiment_metrics)
    bohb_hyperband = HyperBandForBOHB(
        time_attr="training_iteration",
        max_t=81,
        reduction_factor=3,
        **experiment_metrics
    )

    return dict(
        scheduler=bohb_hyperband,
        search_alg=bohb_search,
        resources_per_trial=com.detect_baseline_resource(),
    )
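workload.create_ch() is expected to return a ConfigSpace object for TuneBOHB; a minimal sketch of what such a space could look like (the hyperparameter names and ranges are illustrative assumptions):

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH

# Hypothetical search space; the real workload.create_ch() defines the actual parameters.
config_space = CS.ConfigurationSpace()
config_space.add_hyperparameter(
    CSH.UniformFloatHyperparameter("lr", lower=1e-4, upper=1e-1, log=True))
config_space.add_hyperparameter(
    CSH.CategoricalHyperparameter("batch_size", choices=[32, 64, 128]))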