Example #1
def compare_optimization(setting: SettingNew,
                         opt_methods: List[OptMethod],
                         number_l=1) -> List[float]:
    """Measures time for different optimizations"""
    new = True
    print_x = False

    list_of_bounds: List[float] = []
    list_of_times: List[float] = []
    list_of_approaches: List[str] = []

    for opt in opt_methods:
        start = timer()
        if opt == OptMethod.GRID_SEARCH:
            theta_bounds = [(0.1, 4.0)]

            bound_list = theta_bounds[:]
            for _i in range(1, number_l + 1):
                bound_list.append((0.9, 4.0))

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).grid_search(
                                    bound_list=bound_list, delta=0.1)

        elif opt == OptMethod.PATTERN_SEARCH:
            theta_start = 0.5

            start_list = [theta_start] + [1.0] * number_l

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).pattern_search(
                                    start_list=start_list,
                                    delta=3.0,
                                    delta_min=0.01)

        elif opt == OptMethod.NELDER_MEAD:
            theta_start = 0.5

            start_list = [theta_start] + [1.0] * number_l
            start_simplex = InitialSimplex(
                parameters_to_optimize=number_l + 1).gao_han(
                    start_list=start_list)

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).nelder_mead(
                                    simplex=start_simplex, sd_min=10**(-2))

        elif opt == OptMethod.BASIN_HOPPING:
            theta_start = 0.5

            start_list = [theta_start] + [1.0] * number_l

            bound = OptimizeNew(
                setting_new=setting, new=new,
                print_x=print_x).basin_hopping(start_list=start_list)

        elif opt == OptMethod.SIMULATED_ANNEALING:
            simul_anneal_param = SimAnnealParams()
            theta_start = 0.5

            start_list = [theta_start] + [1.0] * number_l

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).sim_annealing(
                                    start_list=start_list,
                                    sim_anneal_params=simul_anneal_param)

        elif opt == OptMethod.DIFFERENTIAL_EVOLUTION:
            theta_bounds = [(0.1, 4.0)]

            bound_list = theta_bounds[:]
            for _i in range(1, number_l + 1):
                bound_list.append((0.9, 4.0))

            bound = OptimizeNew(
                setting_new=setting, new=new,
                print_x=print_x).diff_evolution(bound_list=bound_list)

        elif opt == OptMethod.BFGS:
            theta_start = 0.5

            start_list = [theta_start] + [1.0] * number_l

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).bfgs(start_list=start_list)

        elif opt == OptMethod.GS_OLD:
            theta_bounds = [(0.1, 4.0)]

            bound_list = theta_bounds[:]
            for _i in range(1, number_l + 1):
                bound_list.append((0.9, 4.0))

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).grid_search_old(
                                    bound_list=bound_list, delta=0.1)

        elif opt == OptMethod.NM_OLD:
            nelder_mead_param = NelderMeadParameters()
            theta_start = 0.5

            start_list = [theta_start] + [1.0] * number_l
            start_simplex = InitialSimplex(
                parameters_to_optimize=number_l + 1).gao_han(
                    start_list=start_list)

            bound = OptimizeNew(setting_new=setting, new=new,
                                print_x=print_x).nelder_mead_old(
                                    simplex=start_simplex,
                                    nelder_mead_param=nelder_mead_param,
                                    sd_min=10**(-2))

        else:
            raise NameError("Optimization parameter {0} is infeasible".format(
                opt.name))

        stop = timer()
        list_of_bounds.append(bound)
        list_of_times.append(stop - start)
        list_of_approaches.append(opt.name)

    print("list_of_approaches: ", list_of_approaches)
    print("list_of_times: ", list_of_times)
    print("list_of_bounds: ")
    return list_of_bounds


if __name__ == '__main__':
    from utils.perform_parameter import PerformParameter
    from nc_operations.perform_enum import PerformEnum
    from nc_service.constant_rate_server import ConstantRate
    from nc_arrivals.markov_modulated import MMOOFluid

    DELAY_4 = PerformParameter(perform_metric=PerformEnum.DELAY, value=0.0001)

    MMOO_1 = MMOOFluid(mu=1.0, lamb=2.2, burst=3.4)
    MMOO_2 = MMOOFluid(mu=3.6, lamb=1.6, burst=0.4)
    CONST_RATE_1 = ConstantRate(rate=2.0)
    CONST_RATE_2 = ConstantRate(rate=0.3)

    SIMPLEX_START = np.array([[0.1], [0.3]])
    # SIMPLEX_START = np.array([[100], [200]])
    SIMPLEX_START_NEW = np.array([[0.1, 2.0], [0.3, 1.2], [0.4, 1.1]])
    SIMPLEX_RAND = InitialSimplex(parameters_to_optimize=1).uniform_dist(
        max_theta=0.6, max_l=2.0)

    NM_PARAM_SET = NelderMeadParameters()

    SETTING = FatCrossPerform(
        arr_list=[MMOO_1, MMOO_2],
        ser_list=[CONST_RATE_1, CONST_RATE_2],
        perform_param=DELAY_4)

    OPTI_OLD = Optimize(setting=SETTING, print_x=True)
    print(OPTI_OLD.grid_search(bound_list=[(0.1, 4.0)], delta=0.1))
    print(OPTI_OLD.pattern_search(start_list=[0.5], delta=3.0, delta_min=0.01))
    print(OPTI_OLD.nelder_mead(simplex=SIMPLEX_RAND))
    # The snippet breaks off in the middle of the next call; the remaining
    # arguments are an assumed completion that reuses the simplex and the
    # Nelder-Mead parameter set defined above.
    print(OPTI_OLD.nelder_mead_old(simplex=SIMPLEX_RAND,
                                   nelder_mead_param=NM_PARAM_SET))
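
A minimal driver for compare_optimization itself is sketched below. It assumes that FatCrossPerform, as constructed in the demo block above, implements the SettingNew interface; the method list and number_l=1 are illustrative choices, not part of the original example.

# Sketch only: reuses MMOO_1, MMOO_2, CONST_RATE_1, CONST_RATE_2 and DELAY_4
# from the demo block above; assumes FatCrossPerform satisfies SettingNew.
SETTING_NEW = FatCrossPerform(arr_list=[MMOO_1, MMOO_2],
                              ser_list=[CONST_RATE_1, CONST_RATE_2],
                              perform_param=DELAY_4)
bounds = compare_optimization(setting=SETTING_NEW,
                              opt_methods=[
                                  OptMethod.GRID_SEARCH,
                                  OptMethod.NELDER_MEAD,
                                  OptMethod.DIFFERENTIAL_EVOLUTION
                              ],
                              number_l=1)
print(bounds)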
Example #3
def compare_mitigator(setting: SettingMitigator,
                      opt_method: OptMethod,
                      number_l=1,
                      print_x=False) -> Tuple[float, float]:
    """Compare standard_bound with the new Lyapunov standard_bound."""

    if opt_method == OptMethod.GRID_SEARCH:
        delta_val = 0.1
        theta_bounds = [(delta_val, 4.0)]

        standard_bound = Optimize(setting=setting,
                                  number_param=1,
                                  print_x=print_x).grid_search(
                                      grid_bounds=theta_bounds,
                                      delta=delta_val)

        bound_array = theta_bounds[:]
        for _i in range(1, number_l + 1):
            bound_array.append((1.0 + delta_val, 4.0))

        h_mit_bound = OptimizeMitigator(setting_h_mit=setting,
                                        number_param=number_l + 1,
                                        print_x=print_x).grid_search(
                                            grid_bounds=bound_array,
                                            delta=delta_val)

    elif opt_method == OptMethod.PATTERN_SEARCH:
        theta_start = 0.5

        start_list = [theta_start]

        standard_bound = Optimize(setting=setting,
                                  number_param=1,
                                  print_x=print_x).pattern_search(
                                      start_list=start_list,
                                      delta=3.0,
                                      delta_min=0.01)

        start_list_new = [theta_start] + [1.0] * number_l

        h_mit_bound = OptimizeMitigator(setting_h_mit=setting,
                                        number_param=number_l + 1,
                                        print_x=print_x).pattern_search(
                                            start_list=start_list_new,
                                            delta=3.0,
                                            delta_min=0.01)

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the h-mitigator bound to the standard bound.
        if h_mit_bound > standard_bound:
            h_mit_bound = standard_bound

    elif opt_method == OptMethod.NELDER_MEAD:
        theta_start = 0.5

        start_list = [theta_start]
        start_simplex = InitialSimplex(parameters_to_optimize=1).gao_han(
            start_list=start_list)

        standard_bound = Optimize(setting=setting,
                                  number_param=1,
                                  print_x=print_x).nelder_mead(
                                      simplex=start_simplex, sd_min=10**(-2))

        start_list_new = [theta_start] + [1.0] * number_l
        start_simplex_new = InitialSimplex(
            parameters_to_optimize=number_l + 1).gao_han(
                start_list=start_list_new)

        h_mit_bound = OptimizeMitigator(setting_h_mit=setting,
                                        number_param=number_l + 1,
                                        print_x=print_x).nelder_mead(
                                            simplex=start_simplex_new,
                                            sd_min=10**(-2))

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the h-mitigator bound to the standard bound.
        if h_mit_bound > standard_bound:
            h_mit_bound = standard_bound

    elif opt_method == OptMethod.BASIN_HOPPING:
        theta_start = 0.5

        start_list = [theta_start]

        standard_bound = Optimize(
            setting=setting, number_param=1,
            print_x=print_x).basin_hopping(start_list=start_list)

        start_list_new = [theta_start] + [1.0] * number_l

        h_mit_bound = OptimizeMitigator(
            setting_h_mit=setting, number_param=number_l + 1,
            print_x=print_x).basin_hopping(start_list=start_list_new)

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the h-mitigator bound to the standard bound.
        if h_mit_bound > standard_bound:
            h_mit_bound = standard_bound

    elif opt_method == OptMethod.DUAL_ANNEALING:
        theta_bounds = [(0.1, 4.0)]

        standard_bound = Optimize(
            setting=setting, number_param=1,
            print_x=print_x).dual_annealing(bound_list=theta_bounds)

        bound_array = theta_bounds[:]
        for _i in range(1, number_l + 1):
            bound_array.append((0.9, 4.0))

        h_mit_bound = OptimizeMitigator(
            setting_h_mit=setting, number_param=number_l + 1,
            print_x=print_x).dual_annealing(bound_list=bound_array)

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the h-mitigator bound to the standard bound.
        if h_mit_bound > standard_bound:
            h_mit_bound = standard_bound

    elif opt_method == OptMethod.DIFFERENTIAL_EVOLUTION:
        theta_bounds = [(0.1, 8.0)]

        standard_bound = Optimize(
            setting=setting, number_param=1,
            print_x=print_x).diff_evolution(bound_list=theta_bounds)

        bound_array = theta_bounds[:]
        for _i in range(1, number_l + 1):
            bound_array.append((0.9, 8.0))

        h_mit_bound = OptimizeMitigator(
            setting_h_mit=setting, number_param=number_l + 1,
            print_x=print_x).diff_evolution(bound_list=bound_array)

    else:
        raise NameError(
            f"Optimization parameter {opt_method.name} is infeasible")

    # Final safeguard for branches without their own clamp: the h-mitigator
    # bound should never exceed the standard bound.
    if h_mit_bound > standard_bound:
        h_mit_bound = standard_bound

    if standard_bound == 0 or h_mit_bound == 0:
        standard_bound = nan
        h_mit_bound = nan

    return standard_bound, h_mit_bound
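
A usage sketch for compare_mitigator; SETTING_MIT is a hypothetical SettingMitigator instance (the repository's FatCrossPerform-style settings are assumed to qualify), and the printout is illustrative.

# Sketch only: SETTING_MIT stands for any SettingMitigator instance.
standard, h_mit = compare_mitigator(setting=SETTING_MIT,
                                    opt_method=OptMethod.GRID_SEARCH,
                                    number_l=1)
print(f"standard bound = {standard}, h-mitigator bound = {h_mit}")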
Example #4
def compare_time(setting: SettingMitigator,
                 opt_method: OptMethod,
                 number_l=1) -> Tuple[float, float]:
    """Compare computation times."""

    if opt_method == OptMethod.GRID_SEARCH:
        bound_array = [(0.1, 4.0)]

        start = timer()
        Optimize(setting=setting,
                 number_param=1).grid_search(grid_bounds=bound_array,
                                             delta=0.1)
        stop = timer()
        time_standard = stop - start

        for _ in range(1, number_l + 1):
            bound_array.append((0.9, 4.0))

        start = timer()
        OptimizeMitigator(setting_h_mit=setting,
                          number_param=number_l + 1).grid_search(
                              grid_bounds=bound_array, delta=0.1)
        stop = timer()
        time_lyapunov = stop - start

    elif opt_method == OptMethod.PATTERN_SEARCH:
        start_list = [0.5]

        start = timer()
        Optimize(setting=setting,
                 number_param=1).pattern_search(start_list=start_list,
                                                delta=3.0,
                                                delta_min=0.01)
        stop = timer()
        time_standard = stop - start

        start_list = [0.5] + [1.0] * number_l

        start = timer()
        OptimizeMitigator(setting_h_mit=setting,
                          number_param=number_l + 1).pattern_search(
                              start_list=start_list, delta=3.0, delta_min=0.01)
        stop = timer()
        time_lyapunov = stop - start

    elif opt_method == OptMethod.NELDER_MEAD:
        start_simplex = InitialSimplex(parameters_to_optimize=1).uniform_dist(
            max_theta=1.0)

        start = timer()
        Optimize(setting=setting,
                 number_param=1).nelder_mead(simplex=start_simplex,
                                             sd_min=10**(-2))
        stop = timer()
        time_standard = stop - start

        start_simplex_new = InitialSimplex(
            parameters_to_optimize=number_l + 1).uniform_dist(max_theta=1.0,
                                                              max_l=2.0)

        start = timer()
        OptimizeMitigator(setting_h_mit=setting,
                          number_param=number_l + 1).nelder_mead(
                              simplex=start_simplex_new, sd_min=10**(-2))
        stop = timer()
        time_lyapunov = stop - start

    elif opt_method == OptMethod.DUAL_ANNEALING:
        bound_array = [(0.1, 4.0)]

        start = timer()
        Optimize(setting=setting,
                 number_param=1).dual_annealing(bound_list=bound_array)
        stop = timer()
        time_standard = stop - start

        for _ in range(1, number_l + 1):
            bound_array.append((0.9, 4.0))

        start = timer()
        OptimizeMitigator(setting_h_mit=setting,
                          number_param=number_l + 1).dual_annealing(
                              bound_list=bound_array)
        stop = timer()
        time_lyapunov = stop - start

    else:
        raise NameError(
            f"Optimization parameter {opt_method.name} is infeasible")

    return time_standard, time_lyapunov
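
A usage sketch comparing the two measured runtimes; as above, SETTING_MIT is a hypothetical SettingMitigator instance.

# Sketch only: SETTING_MIT stands for any SettingMitigator instance.
t_standard, t_lyapunov = compare_time(setting=SETTING_MIT,
                                      opt_method=OptMethod.NELDER_MEAD,
                                      number_l=1)
print(f"standard: {t_standard:.4f} s, h-mitigator: {t_lyapunov:.4f} s")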
Example #5
def compute_improvement(setting: SettingNew,
                        opt_method: OptMethod,
                        number_l=1,
                        print_x=False,
                        show_warn=False) -> Tuple[float, float]:
    """Compare standard_bound with the new Lyapunov bound."""

    if opt_method == OptMethod.GRID_SEARCH:
        theta_bounds = [(0.1, 4.0)]

        standard_bound = Optimize(setting=setting,
                                  print_x=print_x,
                                  show_warn=show_warn).grid_search(
                                      bound_list=theta_bounds, delta=0.1)

        bound_array = theta_bounds[:]
        for _i in range(1, number_l + 1):
            bound_array.append((0.9, 4.0))

        new_bound = OptimizeNew(setting_new=setting,
                                print_x=print_x,
                                show_warn=show_warn).grid_search(
                                    bound_list=bound_array, delta=0.1)

    elif opt_method == OptMethod.PATTERN_SEARCH:
        theta_start = 0.5

        start_list = [theta_start]

        standard_bound = Optimize(setting=setting,
                                  print_x=print_x,
                                  show_warn=show_warn).pattern_search(
                                      start_list=start_list,
                                      delta=3.0,
                                      delta_min=0.01)

        start_list_new = [theta_start] + [1.0] * number_l

        new_bound = OptimizeNew(setting_new=setting,
                                print_x=print_x,
                                show_warn=show_warn).pattern_search(
                                    start_list=start_list_new,
                                    delta=3.0,
                                    delta_min=0.01)

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the new bound to the standard bound.
        if new_bound > standard_bound:
            new_bound = standard_bound

    elif opt_method == OptMethod.NELDER_MEAD:
        theta_start = 0.5

        start_list = [theta_start]
        start_simplex = InitialSimplex(parameters_to_optimize=1).gao_han(
            start_list=start_list)

        standard_bound = Optimize(setting=setting,
                                  print_x=print_x,
                                  show_warn=show_warn).nelder_mead(
                                      simplex=start_simplex, sd_min=10**(-2))

        start_list_new = [theta_start] + [1.0] * number_l
        start_simplex_new = InitialSimplex(
            parameters_to_optimize=number_l + 1).gao_han(
                start_list=start_list_new)

        new_bound = OptimizeNew(setting_new=setting,
                                print_x=print_x,
                                show_warn=show_warn).nelder_mead(
                                    simplex=start_simplex_new, sd_min=10**(-2))

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the new bound to the standard bound.
        if new_bound > standard_bound:
            new_bound = standard_bound

    elif opt_method == OptMethod.BASIN_HOPPING:
        theta_start = 0.5

        start_list = [theta_start]

        standard_bound = Optimize(
            setting=setting, print_x=print_x,
            show_warn=show_warn).basin_hopping(start_list=start_list)

        start_list_new = [theta_start] + [1.0] * number_l

        new_bound = OptimizeNew(
            setting_new=setting, print_x=print_x,
            show_warn=show_warn).basin_hopping(start_list=start_list_new)

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the new bound to the standard bound.
        if new_bound > standard_bound:
            new_bound = standard_bound

    elif opt_method == OptMethod.SIMULATED_ANNEALING:
        simul_anneal_param = SimAnnealParams()
        theta_start = 0.5

        start_list = [theta_start]

        standard_bound = Optimize(setting=setting,
                                  print_x=print_x,
                                  show_warn=show_warn).sim_annealing(
                                      start_list=start_list,
                                      sim_anneal_params=simul_anneal_param)

        start_list_new = [theta_start] + [1.0] * number_l

        new_bound = OptimizeNew(setting_new=setting,
                                print_x=print_x,
                                show_warn=show_warn).sim_annealing(
                                    start_list=start_list_new,
                                    sim_anneal_params=simul_anneal_param)

        # Work around optimizer limitations: the heuristic search can miss
        # the optimum, so clamp the new bound to the standard bound.
        if new_bound > standard_bound:
            new_bound = standard_bound

    elif opt_method == OptMethod.DIFFERENTIAL_EVOLUTION:
        theta_bounds = [(0.1, 8.0)]

        standard_bound = Optimize(
            setting=setting,
            print_x=print_x).diff_evolution(bound_list=theta_bounds)

        bound_array = theta_bounds[:]
        for _i in range(1, number_l + 1):
            bound_array.append((0.9, 8.0))

        new_bound = OptimizeNew(
            setting_new=setting,
            print_x=print_x).diff_evolution(bound_list=bound_array)

    else:
        raise NameError(
            f"Optimization parameter {opt_method.name} is infeasible")

    # Final safeguard for branches without their own clamp: the new bound
    # should never exceed the standard bound.
    if new_bound > standard_bound:
        new_bound = standard_bound

    if standard_bound == 0 or new_bound == 0:
        standard_bound = nan
        new_bound = nan

    return standard_bound, new_bound
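
A usage sketch for compute_improvement, again assuming that a FatCrossPerform setting (as built in Example #1) satisfies SettingNew; the relative-improvement printout is illustrative.

# Sketch only: SETTING_NEW as in the sketch after Example #1.
standard_b, new_b = compute_improvement(setting=SETTING_NEW,
                                        opt_method=OptMethod.GRID_SEARCH,
                                        number_l=1)
if standard_b > 0:
    print(f"relative improvement: {1 - new_b / standard_b:.1%}")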