def generate_min_point(feature_value, model):
    """Pick the next candidate minimum for the given feature value.

    Evaluates the expected-improvement acquisition over an evaluation pool
    (either the shared pool in ``gd`` or a freshly sampled random one) and
    returns one of the best-scoring points; falls back to the distance-based
    minimum when no point improves.
    """
    best_ei = 0
    best_candidates = []

    # Choose the evaluation pool: reuse the global one unless random
    # sampling of evaluation points has been requested.
    if gd.random_eval_check:
        eval_pool = selecting_random_point(Config.EVAL_POINT_SIZE,
                                           Config.PARAMETER_BOUNDS,
                                           feature_value=feature_value)
    else:
        eval_pool = gd.eval_pool

    # Distance-based estimate: used both as the EI reference value and as
    # the fallback result when no candidate shows improvement.
    min_percentile, min_eval_value = generate_min_point_based_on_distance(
        feature_value)
    explore_factor = 0.01

    for candidate in eval_pool:
        # Candidate point = pool coordinates followed by the feature values.
        point = list(candidate)
        point.extend(feature_value)

        best_ei, best_candidates = bayesian_expected_improvement(
            point, best_ei, best_candidates,
            min_percentile, explore_factor, model)

    if best_ei == 0:
        # No candidate improved on the reference; keep the distance-based one.
        chosen = min_eval_value
    else:
        # Break ties uniformly at random among the best candidates.
        chosen = best_candidates[np.random.randint(0, len(best_candidates))]

    return list(chosen)
# --- Example 2 (separator artifact from the original source; kept as a comment so the file parses) ---
def find_next_threadpool_size(threadpool_and_concurrency_data, percentile_data,
                              trade_off_level, model, concurrency):
    """Choose the next thread-pool size via Bayesian expected improvement.

    Updates the current minimum from the observed data; if no percentile is
    available yet, returns that minimum with the default trade-off level.
    Otherwise scores every point in the evaluation pool and delegates the
    final selection to ``next_x_point_selection``.
    """
    min_threadpool_size, min_percentile = update_min_point(
        threadpool_and_concurrency_data, percentile_data, concurrency, model)

    if min_percentile is None:
        # Nothing to optimize against yet — fall back to the known minimum
        # and reset exploration to its default.
        return min_threadpool_size, Config.DEFAULT_TRADE_OFF_LEVEL

    best_ei = 0
    best_candidates = []

    # Evaluation pool: either the shared global pool or a random sample.
    if gd.random_eval_check:
        eval_pool = selecting_random_point(Config.EVAL_POINT_SIZE,
                                           Config.PARAMETER_BOUNDS,
                                           feature_value=concurrency)
    else:
        eval_pool = gd.eval_pool

    for candidate in eval_pool:
        # Candidate point = pool coordinates followed by concurrency values.
        point = list(candidate)
        point.extend(concurrency)

        best_ei, best_candidates = bayesian_expected_improvement(
            point, best_ei, best_candidates,
            min_percentile, trade_off_level, model)

    next_threadpool_size, trade_off_level = next_x_point_selection(
        best_ei, min_threadpool_size, trade_off_level, best_candidates)

    return next_threadpool_size, trade_off_level
# --- Example 3 (separator artifact from the original source; kept as a comment so the file parses) ---
def main():
    """Run Bayesian optimization to tune the thread-pool size.

    Trains an initial Gaussian-process model on sampled points, then
    iteratively maximizes expected improvement over the pool-size range
    and refits the model with each new observation.  Handles both the
    single-parameter case (pool size only) and the two-parameter case
    (pool size + workload).
    """
    one_parameter = False

    workload_ini = Cg.workload_array

    # Bounds for the Gaussian-process search space.
    thread_pool_max = Cg.thread_pool_max
    thread_pool_min = Cg.thread_pool_min

    max_iterations = Cg.number_of_iterations

    # Number of initial points used to seed the Gaussian process.
    number_of_training_points = Cg.number_of_training_points

    # Initial data collection; branches on dimensionality of the problem.
    if len(workload_ini) == 1:
        one_parameter = True
        x_plot_data, y_plot_data = data_plot.initial_plot()
        x_data, y_data, parameter_history = get_training_points(
            number_of_training_points)
    else:
        workload = workload_config(workload_ini, max_iterations)
        x_plot_data, y_plot_data, z_plot_data = data_plot.initial_2d_plot()
        x_data, y_data, parameter_history = get_training_points(
            number_of_training_points, workload)
        # NOTE(review): reference_array / minimum_ref_array are computed but
        # never used below — presumably kept for debugging; confirm.
        reference_array, minimum_ref_array = min_value_finder.min_array(
            x_plot_data, y_plot_data, z_plot_data)

    # Fit initial data to the Gaussian-process model.
    model = thread_pool_tuning_model(x_data, y_data)

    # Exploration / exploitation trade-off value.
    trade_off_level = 0.1

    # Bayesian-optimization loop.
    for iteration in range(max_iterations):
        if one_parameter:
            minimum = min(y_data)
            x_location = y_data.index(minimum)
            min_x = x_data[x_location]
        else:
            print("workers -", workload[iteration])
            minimum, min_x = min_value_finder.min_point_find(
                x_value=x_data,
                y_value=y_data,
                feature_val=workload[iteration])
            print(minimum)
            print(min_x)

        max_expected_improvement = 0
        max_points = []

        print("trade_off_level -", trade_off_level)
        print("inter -", iteration)

        # Score every candidate pool size with expected improvement.
        for evaluating_pool_size in range(thread_pool_min,
                                          thread_pool_max + 1):
            if one_parameter:
                pool_size = evaluating_pool_size
            else:
                pool_size = [evaluating_pool_size, workload[iteration]]

            max_expected_improvement, max_points = bayesian_opt.bayesian_expected_improvement(
                pool_size, max_expected_improvement, max_points, minimum,
                trade_off_level, model)

        next_x, next_y, trade_off_level = bayesian_opt.next_x_point_selection(
            max_expected_improvement, min_x, trade_off_level, max_points,
            one_parameter)

        print("EI -", max_expected_improvement)
        print("Next x- ", next_x)
        # Append the new observation to the training data.
        parameter_history.append(next_x)
        y_data.append(next_y)
        x_data.append(next_x)
        print("Next y- ", next_y)

        # Refit the Gaussian process with the augmented data.
        model = thread_pool_tuning_model(x_data, y_data)

        if one_parameter:
            data_plot.data_plot(next_x, iteration, model, x_plot_data,
                                y_plot_data, parameter_history, y_data)

        print("-------------------------------------")

        #time.sleep(5)

    # BUG FIX: the original passed the format string and the value as two
    # separate print arguments, printing the literal "%f"; apply %-formatting.
    print("minimum found : %f" % min(y_data))