Example #1
def get_method_options(prob, capital_type):
    """ Returns a dictionary of method options. """
    methods = prob.methods
    all_method_options = {}
    for meth in methods:
        # Set up the options for the current method.
        curr_options = load_options(blackbox_opt_args)
        if meth in ['hyperopt', 'smac', 'gpyopt', 'pdoo']:
            curr_options.redo_evals_for_true_val = True
            if meth == 'hyperopt':
                curr_options.algo = tpe.suggest
                curr_options.space = hp.uniform
        elif meth == 'spearmint':
            e1_file_path = os.path.dirname(os.path.realpath(__file__))
            curr_options.redo_evals_for_true_val = True
            curr_options.noisy = NOISY_EVALS
            curr_options.exp_dir = os.path.join(
                e1_file_path, 'Spearmint/' + prob.study_name.split('-')[0])
            # Point this at your local Spearmint installation.
            curr_options.pkg_dir = 'Insert-location-of-spearmint-dir-here'
        elif meth.startswith('dragonfly-mf'):
            euc_mf_gpb_args = get_all_mf_euc_gp_bandit_args()
            curr_options = load_options(euc_mf_gpb_args)
        elif meth == 'dragonfly':
            euc_gpb_args = get_all_euc_gp_bandit_args()
            curr_options = load_options(euc_gpb_args)
        else:
            # Assume it's one of ml, ml+post_sampling, etc.
            euc_gpb_args = get_all_euc_gp_bandit_args()
            curr_options = load_options(euc_gpb_args)
        curr_options.capital_type = capital_type
        # Add to all_method_options
        all_method_options[meth] = curr_options
    return all_method_options
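
A minimal usage sketch, assuming the surrounding module provides load_options and the various *_args lists, and that prob is a namespace with methods and study_name attributes (both names here are illustrative):

from argparse import Namespace

prob = Namespace(methods=['dragonfly', 'hyperopt'], study_name='branin-demo')
opts = get_method_options(prob, capital_type='num_evals')
assert set(opts) == {'dragonfly', 'hyperopt'}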
Example #2
def get_method_options(prob, capital_type):
  """ Returns the method options. """
  methods = prob.methods
  all_method_options = {}
  for meth in methods:
    if meth == 'rand':
      curr_options = load_options(cp_random_optimiser_args)
    elif meth == 'ga':
      curr_options = load_options(cpga_opt_args)
    elif meth.startswith('dragonfly'):
      if meth.startswith('dragonfly-mf'):
        curr_options = load_options(get_all_mf_cp_gp_bandit_args())
      else:
        curr_options = load_options(get_all_cp_gp_bandit_args())
      meth_parts = meth.split('_')
      if len(meth_parts) == 2:
        curr_options.acq_opt_method = meth_parts[-1]
    elif meth in ['hyperopt', 'gpyopt', 'smac']:
      curr_options = load_options(blackbox_opt_args)
      curr_options.redo_evals_for_true_val = False
    else:
      raise ValueError('Unknown method %s.'%(meth))
    curr_options.capital_type = capital_type
    all_method_options[meth] = curr_options
  return all_method_options
Example #3
def get_method_options(prob, capital_type):
    """ Returns the method options. """
    methods = prob.methods
    all_method_options = {}
    for meth in methods:
        if meth == 'rand':
            curr_options = load_options(cp_random_optimiser_args)
        elif meth == 'ga':
            curr_options = load_options(cpga_opt_args)
        elif meth.startswith('dragonfly'):
            if meth.startswith('dragonfly-mf'):
                curr_options = load_options(get_all_mf_cp_gp_bandit_args())
            else:
                curr_options = load_options(get_all_cp_gp_bandit_args())
            meth_parts = meth.split('_')
            if len(meth_parts) == 2:
                curr_options.acq_opt_method = meth_parts[-1]
        elif meth in ['hyperopt', 'gpyopt', 'smac', 'spearmint']:
            curr_options = load_options(blackbox_opt_args)
            curr_options.redo_evals_for_true_val = True
        else:
            raise ValueError('Unknown method %s.' % (meth))
        # Some additional data we will need for Spearmint
        if meth == 'spearmint':
            # Swap in the following after you download and install Spearmint
            curr_options.exp_dir = '/home/karun/boss/e3_cp/Spearmint/' + \
                                   prob.study_name.split('-')[0]
            curr_options.pkg_dir = '/home/karun/Spearmint/spearmint'
        curr_options.capital_type = capital_type
        all_method_options[meth] = curr_options
    return all_method_options
Example #4
def build_local_cp_ga_optimiser(func_caller,
                                cp_domain,
                                nn_mutate_op,
                                worker_manager,
                                max_capital,
                                mode='asy',
                                orderings=None,
                                options=None,
                                reporter="silent"):
    """ A GA optimiser on Cartesian product space from the function caller. """

    log = logging.getLogger("LocalGA Optimiser")

    if not isinstance(func_caller, ExperimentCaller):
        func_caller = CPFunctionCaller(func_caller,
                                       cp_domain,
                                       domain_orderings=orderings)
    options = load_options(blackbox_opt_args, partial_options=options)
    options.mode = mode
    options.capital_type = 'return_value'

    start_time = timer()
    log.info(
        f"Starting GA optimisation with capital {max_capital} ({options.capital_type})."
    )
    result = LocalCPGAOptimiser(func_caller,
                                nn_mutate_op,
                                worker_manager,
                                options=options,
                                reporter=reporter).optimise(max_capital)
    end_time = timer()
    log.info(f"GA optimisation finished, took {(end_time - start_time):.2f}s.")
    return result
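
A hypothetical invocation sketch; my_objective, my_cp_domain, and my_nn_mutate_op are placeholders for caller-provided objects, and SyntheticWorkerManager is dragonfly's synthetic worker manager:

# Hypothetical usage; the names below are placeholders, not from the source.
# worker_manager = SyntheticWorkerManager(num_workers=1)
# result = build_local_cp_ga_optimiser(
#     my_objective, my_cp_domain, my_nn_mutate_op, worker_manager,
#     max_capital=50, mode='asy', reporter='silent')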
Example #5
    def __init__(self,
                 func_caller,
                 nn_morphs_op,
                 worker_manager=None,
                 options=None,
                 reporter=None,
                 ask_tell_mode=False):
        """ Constructor. """
        self.nn_morphs_op = nn_morphs_op

        options = load_options(blackbox_opt_args, partial_options=options)
        assert options.prev_evaluations is not None
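        # The previous evaluations seed the pivot points; they are then cleared
        # so the parent optimiser does not replay them, and init_capital is set
        # to 0 to skip the usual initialisation phase.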
        self.pivots = [
            qinfo.point for qinfo in options.prev_evaluations.qinfos
        ]
        options.prev_evaluations = None
        options.init_capital = 0

        super().__init__(func_caller,
                         worker_manager,
                         model=None,
                         options=options,
                         reporter=reporter,
                         ask_tell_mode=ask_tell_mode)

        self.nn_idx = \
            [dom.get_type() for dom in self.domain.list_of_domains].index("neural_network")
        self.single_mutation_ops = \
            [get_default_mutation_op(dom) for dom in self.domain.list_of_domains]
        self.points_to_evaluate = None
Example #6
    def __init__(self,
                 func_caller,
                 worker_manager=None,
                 single_mutation_ops=None,
                 single_crossover_ops=None,
                 options=None,
                 reporter=None,
                 ask_tell_mode=False):
        """ Constructor. """
        options = load_options(ga_opt_args, partial_options=options)
        super(CPGAOptimiser, self).__init__(func_caller,
                                            worker_manager,
                                            mutation_op=self._mutation_op,
                                            options=options,
                                            reporter=reporter,
                                            ask_tell_mode=ask_tell_mode)
        self._set_up_single_mutation_ops(single_mutation_ops)
        self._set_up_single_crossover_ops(single_crossover_ops)
Example #7
    def __init__(self,
                 func_caller,
                 worker_manager=None,
                 mutation_op=None,
                 options=None,
                 reporter=None,
                 ask_tell_mode=False):
        options = load_options(ga_opt_args, partial_options=options)
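        # Size the initialisation phase so the first batch of evaluations
        # fills one full GA population.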
        options.init_capital = options.population_size

        super().__init__(func_caller,
                         worker_manager,
                         model=None,
                         options=options,
                         reporter=reporter,
                         ask_tell_mode=ask_tell_mode)
        self.mutation_op = mutation_op
        self.pool = []
Example #8
    def prepare_chemist_options(self, chemist_args, domain_config):
        """ Resets default gp_bandit options with chemist arguments """
        dflt_list_of_options = get_all_cp_gp_bandit_args()
        list_of_options = self.reset_default_options(dflt_list_of_options,
                                                     chemist_args)

        for name, value in domain_config.items():
            list_of_options += [{'name': name, 'default': value}]
        options = load_options(list_of_options, reporter=self.reporter)

        if self.mf_strategy is not None:
            options.mf_strategy = self.mf_strategy
        if isinstance(self.worker_manager, RealWorkerManager):
            options.capital_type = 'realtime'
        elif isinstance(self.worker_manager, SyntheticWorkerManager):
            options.capital_type = 'return_value'
        options.get_initial_qinfos = \
            lambda num: get_cp_domain_initial_qinfos(self.func_caller.domain, num)
        return options
Example #9
def build_aging_cp_ga_optimiser(func_caller,
                                cp_domain,
                                worker_manager,
                                max_capital,
                                mode='asy',
                                orderings=None,
                                single_mutation_ops=None,
                                single_crossover_ops=None,
                                options=None,
                                reporter="silent"):
    """ A GA optimiser on Cartesian product space from the function caller. """

    log = logging.getLogger("AgingGA Optimiser")

    if not isinstance(func_caller, ExperimentCaller):
        func_caller = CPFunctionCaller(func_caller,
                                       cp_domain,
                                       domain_orderings=orderings)
    options = load_options(ga_opt_args, partial_options=options)
    options.mode = mode
    options.capital_type = 'return_value'
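    # Note: these values are set after load_options, so they override any
    # population_size / sample_size supplied via the options argument.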
    options.population_size = 100
    options.sample_size = 25

    start_time = timer()
    log.info(
        f"Starting GA optimisation with capital {max_capital} ({options.capital_type})."
    )
    result = CPGAOptimiser(func_caller,
                           worker_manager,
                           single_mutation_ops=single_mutation_ops,
                           single_crossover_ops=single_crossover_ops,
                           options=options,
                           reporter=reporter).optimise(max_capital)
    end_time = timer()
    log.info(f"GA optimisation finished, took {(end_time - start_time):.2f}s.")
    return result
Example #10
domain_vars = [
    {'type': 'discrete', 'items': place_pruned_graph_list},
    {'type': 'discrete', 'items': enable_bfloat16_sendrecv_list},
    {'type': 'discrete', 'items': do_common_subexpression_elimination_list},
    {'type': 'discrete_numeric', 'items': max_folded_constant_list},
    {'type': 'discrete', 'items': do_function_inlining_list},
    {'type': 'discrete_numeric', 'items': global_jit_level_list},
    {'type': 'discrete', 'items': optimizer_list},
]

dragonfly_args = [
    get_option_specs('report_results_every', False, 2,
                     'Report results every this many iterations.'),
    get_option_specs('init_capital', False, None,
                     'The capital to be used for initialisation.'),
    get_option_specs('init_capital_frac', False, 0.07,
                     'The fraction of the total capital to be used for initialisation.'),
    get_option_specs('num_init_evals', False, 2,
                     'The number of evaluations for initialisation. If <0, will use default.')]

options = load_options(dragonfly_args)
config_params = {'domain': domain_vars}
config = load_config(config_params)
max_num_evals = 60 * 60 * 12  # 12 hours, since capital_type below is 'realtime'
moo_objectives = [runtime_eval, acc_eval]
pareto_opt_vals, pareto_opt_pts, history = multiobjective_maximise_functions(
    moo_objectives, config.domain, max_num_evals,
    capital_type='realtime', config=config, options=options)
f = open("./output.log","w+")
print(pareto_opt_pts,file=f)
print("\n",file=f)
print(pareto_opt_vals,file=f)
print("\n",file=f)
print(history,file=f)
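
A smaller, self-contained sketch of the same multiobjective API on a toy Euclidean domain; the two lambda objectives and the bounds-list domain format are illustrative assumptions, not from the source:

from dragonfly import multiobjective_maximise_functions

toy_objectives = [lambda x: -(x[0] - 0.3) ** 2, lambda x: -(x[0] - 0.7) ** 2]
pareto_vals, pareto_pts, _ = multiobjective_maximise_functions(
    toy_objectives, [[0, 1]], max_capital=20, capital_type='num_evals')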



Example #11
    def get_config(self, budget):
        """ Samples a new configuration.

        This function is called inside BOHB to query a new configuration.

        Parameters
        ----------
        budget : float
            The budget for which this configuration is scheduled.

        Returns
        -------
        config
            A valid configuration with parameters and budget.
        """
        if not self.is_moo:
            return self.get_config_old(budget)

        logger.debug('start sampling a new configuration.')
        if not self.configs:
            logger.warning(
                'self.configs is empty; using a random config instead.')
            sample = self.configspace.sample_configuration()
            sample = ConfigSpace.util.deactivate_inactive_hyperparameters(
                configuration_space=self.configspace,
                configuration=sample.get_dictionary()).get_dictionary()
            sample['TRIAL_BUDGET'] = budget
            return sample
        domain_vars = list()
        for name in self.search_space.keys():
            if isinstance(self.search_space[name][0], (float, int)):
                var_type = 'discrete_numeric'
            else:
                var_type = 'discrete'
            domain_var = {'type': var_type, 'items': self.search_space[name]}
            domain_vars.append(domain_var)
        points = list()
        vals = list()
        true_vals = list()

        print(f"[vincent] self.configs:{self.configs} budget:{budget}")
        print(f"{list(self.search_space.keys())}")
        for conf_array in self.configs[0]:
            first, second = [], []
            for i in range(len(conf_array)):
                item = self.search_space[list(
                    self.search_space.keys())[i]][int(conf_array[i])]
                if isinstance(item, (float, int)):
                    second.append(item)
                else:
                    first.append(item)
            points.append([first, second])
        for idx in range(len(self.losses[0])):
            vals.append([-self.losses[0][idx], -self.runtime[0][idx]])
            true_vals.append([-self.losses[0][idx], -self.runtime[0][idx]])

        print(f"[vincent] len of points:{len(points)}")
        if len(points) > 10:
            vals_array = np.array(vals)
            pareto_index = is_pareto_efficient_simple(vals_array)
            p_idx = []
            np_idx = []
            np_items = []
            for j in range(len(pareto_index)):
                if pareto_index[j]:
                    p_idx.append(j)
                else:
                    np_idx.append(j)
                    np_items.append(vals[j])
            logger.debug('pareto indices: %s', p_idx)
            logger.debug('non-pareto indices: %s', np_idx)

            if len(p_idx) >= 5:
                tmp_idx = p_idx[:5]
                points = [points[i] for i in tmp_idx]
                vals = [vals[i] for i in tmp_idx]
                true_vals = [true_vals[i] for i in tmp_idx]
            else:
                num_diff = 5 - len(p_idx)
                logger.debug('padding with %d non-pareto points', num_diff)
                logger.debug('search space: %s', self.search_space)
                if self.search_space['PREFERENCE'][0] == "accuracy":
                    # vals hold negated losses, so negate again to sort by
                    # descending accuracy.
                    acc_items = [-item[0] for item in np_items]
                    sort_n_idx = np.argsort(acc_items)
                    # sort_n_idx indexes into np_items; map each position back
                    # to an index into the original lists via np_idx.
                    for i in range(num_diff):
                        p_idx.append(np_idx[sort_n_idx[i]])
                    logger.debug('final pareto indices: %s', p_idx)
                    points = [points[i] for i in p_idx]
                    vals = [vals[i] for i in p_idx]
                    true_vals = [true_vals[i] for i in p_idx]
                elif self.search_space['PREFERENCE'][0] == "runtime":
                    time_items = [-item[1] for item in np_items]
                    sort_n_idx = np.argsort(time_items)
                    for i in range(num_diff):
                        p_idx.append(np_idx[sort_n_idx[i]])
                    logger.debug('final pareto indices: %s', p_idx)
                    points = [points[i] for i in p_idx]
                    vals = [vals[i] for i in p_idx]
                    true_vals = [true_vals[i] for i in p_idx]

            # import random
            # idx_list = random.sample(range(len(points)), 10)
            # print(f"[vincent] random selections list idx_list:{idx_list}")
            # points = [points[i] for i in idx_list]
            # vals = [vals[i] for i in idx_list]
            # true_vals = [true_vals[i] for i in idx_list]

        # vals has the form [[acc, -runtime], [acc, -runtime], ...].
        # Load previous evaluations from memory.
        previous_eval = {'qinfos': []}
        for i in range(len(points)):
            tmp = Namespace(point=points[i],
                            val=vals[i],
                            true_val=true_vals[i])
            previous_eval['qinfos'].append(tmp)
        p = Namespace(**previous_eval)
        load_args = [
            get_option_specs('init_capital', False, 1,
                             'The capital to be used for initialisation.'),
            get_option_specs(
                'init_capital_frac', False, None,
                'The fraction of the total capital to be used for initialisation.'
            ),
            get_option_specs(
                'num_init_evals', False, 1,
                'The number of evaluations for initialisation. If <0, will use default.'
            ),
            get_option_specs('prev_evaluations', False, p,
                             'Data for any previous evaluations.')
        ]
        options = load_options(load_args)
        config_params = {'domain': domain_vars}
        config = load_config(config_params)
        max_num_evals = 1
        self.dragonfly_config = None

        def fake_func(x):
            # Capture the first point dragonfly proposes; the objective value
            # itself is irrelevant here.
            if not self.dragonfly_config:
                self.dragonfly_config = x
                logger.debug('x is assigned to self.dragonfly_config: %s',
                             self.dragonfly_config)
            return 0

        moo_objectives = [fake_func, fake_func]
        _, _, _ = multiobjective_maximise_functions(moo_objectives,
                                                    config.domain,
                                                    max_num_evals,
                                                    capital_type='num_evals',
                                                    config=config,
                                                    options=options)
        logger.debug('self.dragonfly_config after dragonfly: %s',
                     self.dragonfly_config)

        ## Alternative: persist previous evaluations to a file and have
        ## dragonfly reload them via 'progress_load_from', instead of passing
        ## them in memory as above.
        # data_to_save = {'points': points, 'vals': vals, 'true_vals': true_vals}
        # temp_save_path = './dragonfly.saved'
        # with open(temp_save_path, 'wb') as save_file_handle:
        #     pickle.dump(data_to_save, save_file_handle)
        # load_args = [
        #     get_option_specs('progress_load_from', False, temp_save_path,
        #                      'Load progress (from possibly a previous run) from this file.')
        # ]
        # options = load_options(load_args)
        # ... then build the config and call multiobjective_maximise_functions
        # exactly as above, and finally remove temp_save_path.

        if not self.dragonfly_config:
            logger.warning(
                'Got an empty config from dragonfly; using a random config instead.')
            sample = self.configspace.sample_configuration()
        else:
            # dragonfly returns values in search-space key order.
            sample = dict(zip(self.search_space.keys(), self.dragonfly_config))

        logger.debug('done sampling a new configuration.')
        sample['TRIAL_BUDGET'] = budget

        logger.debug('sample from get_config: %s', sample)

        return sample
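
is_pareto_efficient_simple is used above but not defined in this snippet; a minimal sketch of the semantics the code relies on, assuming it returns a boolean mask of points not dominated by any other point (larger is better, since vals store negated losses and runtimes):

import numpy as np

def is_pareto_efficient_simple(vals):
    """ Boolean mask marking rows of `vals` that no other row dominates. """
    n = vals.shape[0]
    efficient = np.ones(n, dtype=bool)
    for i in range(n):
        # Row j dominates row i if it is >= everywhere and > somewhere.
        dominated = np.all(vals >= vals[i], axis=1) & np.any(vals > vals[i], axis=1)
        if np.any(dominated):
            efficient[i] = False
    return efficient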
Example #12
def main():
    """ Main function. """
    options = load_options(get_command_line_args(), cmd_line=True)
    # Load domain and objective
    config = load_config_file(options.config)
    is_mf = hasattr(config, 'fidel_space')

    # Load module
    expt_dir = os.path.dirname(
        os.path.abspath(os.path.realpath(options.config)))
    if not os.path.exists(expt_dir):
        raise ValueError("Experiment directory does not exist.")
    sys.path.append(expt_dir)
    obj_module = import_module(config.name, expt_dir)
    sys.path.remove(expt_dir)

    # Set capital
    if options.max_capital < 0:
        raise ValueError(
            'max_capital (time or number of evaluations) must be positive.')

    # Call optimiser
    _print_prefix = 'Maximising' if options.max_or_min == 'max' else 'Minimising'
    call_to_optimise = {
        'single': {
            'max': maximise_function,
            'min': minimise_function
        },
        'single_mf': {
            'max': maximise_multifidelity_function,
            'min': minimise_multifidelity_function
        },
        'multi': {
            'max': multiobjective_maximise_functions,
            'min': multiobjective_minimise_functions
        },
    }
    if not options.is_multi_objective:
        if is_mf:
            print(
                '%s multi-fidelity function on\n Fidelity-Space: %s.\n Domain: %s.\n'
                % (_print_prefix, config.fidel_space, config.domain))
            opt_val, opt_pt, history = call_to_optimise['single_mf'][
                options.max_or_min](obj_module.objective,
                                    fidel_space=None,
                                    domain=None,
                                    fidel_to_opt=config.fidel_to_opt,
                                    fidel_cost_func=obj_module.cost,
                                    max_capital=options.max_capital,
                                    capital_type=options.capital_type,
                                    opt_method=options.opt_method,
                                    config=config,
                                    options=options,
                                    reporter=options.report_progress)
        else:
            print('%s function on Domain: %s.\n' %
                  (_print_prefix, config.domain))
            opt_val, opt_pt, history = call_to_optimise['single'][
                options.max_or_min](obj_module.objective,
                                    domain=None,
                                    max_capital=options.max_capital,
                                    capital_type=options.capital_type,
                                    opt_method=options.opt_method,
                                    config=config,
                                    options=options,
                                    reporter=options.report_progress)
        print('Optimum Value in %d evals: %0.4f' %
              (len(history.curr_opt_points), opt_val))
        print('Optimum Point: %s.' % (opt_pt))
    else:
        if is_mf:
            raise ValueError(
                'Multi-objective multi-fidelity optimisation has not been ' +
                'implemented yet.')
        else:
            # Check format of function caller
            if hasattr(obj_module, 'objectives'):
                objectives_to_pass = obj_module.objectives
                num_objectives = len(objectives_to_pass)
            else:
                num_objectives = obj_module.num_objectives
                objectives_to_pass = (obj_module.compute_objectives,
                                      obj_module.num_objectives)
            print('%s %d multiobjective functions on Domain: %s.\n' %
                  (_print_prefix, num_objectives, config.domain))
            pareto_values, pareto_points, history = \
                call_to_optimise['multi'][options.max_or_min](
                    objectives_to_pass, domain=None,
                    max_capital=options.max_capital,
                    capital_type=options.capital_type,
                    opt_method=options.opt_method,
                    config=config, options=options,
                    reporter=options.report_progress)
        num_pareto_points = len(pareto_points)
        print('Found %d Pareto Points: %s.' %
              (num_pareto_points, pareto_points))
        print('Corresponding Pareto Values: %s.' % (pareto_values))
Example #13
def get_optimiser_options():
    options = load_options(get_all_mf_cp_gp_bandit_args())
    options.acq = "custom_mobo_ts"
    return options
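
The returned namespace can be tweaked further before being handed to a dragonfly optimiser; a brief illustrative sketch:

options = get_optimiser_options()
print(options.acq)                  # 'custom_mobo_ts'
options.capital_type = 'num_evals'  # other fields can be overridden the same way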
Example #14
def main():
    """ Main function. """
    # First load arguments
    all_args = dragonfly_args + get_all_euc_gp_bandit_args() + get_all_cp_gp_bandit_args() \
               + get_all_mf_euc_gp_bandit_args() + get_all_mf_cp_gp_bandit_args() \
               + get_all_euc_moo_gp_bandit_args() + get_all_cp_moo_gp_bandit_args()
    all_args = get_unique_list_of_option_args(all_args)
    options = load_options(all_args, cmd_line=True)

    # Load domain and objective
    config = load_config_file(options.config)
    is_mf = hasattr(config, 'fidel_space')
    expt_dir = os.path.dirname(
        os.path.abspath(os.path.realpath(options.config)))
    if not os.path.exists(expt_dir):
        raise ValueError("Experiment directory does not exist.")
    objective_file_name = config.name
    obj_module = imp.load_source(
        objective_file_name, os.path.join(expt_dir,
                                          objective_file_name + '.py'))

    # Set capital
    options.capital_type = 'return_value'
    if options.budget < 0:
        budget = options.max_capital
    else:
        budget = options.budget
    if budget < 0:
        raise ValueError(
            'Specify the budget via argument budget or max_capital.')
    options.max_capital = budget

    # Call optimiser
    _print_prefix = 'Maximising' if options.max_or_min == 'max' else 'Minimising'
    call_to_optimise = {
        'single': {
            'max': maximise_function,
            'min': minimise_function
        },
        'single_mf': {
            'max': maximise_multifidelity_function,
            'min': minimise_multifidelity_function
        },
        'multi': {
            'max': multiobjective_maximise_functions,
            'min': multiobjective_minimise_functions
        },
    }
    if not options.is_multi_objective:
        if is_mf:
            print('%s function on fidel_space: %s, domain %s.' %
                  (_print_prefix, config.fidel_space, config.domain))
            opt_val, opt_pt, history = call_to_optimise['single_mf'][
                options.max_or_min](obj_module.objective,
                                    domain=None,
                                    fidel_space=None,
                                    fidel_to_opt=config.fidel_to_opt,
                                    fidel_cost_func=obj_module.cost,
                                    max_capital=options.max_capital,
                                    config=config,
                                    options=options)
        else:
            print('%s function on domain %s.' % (_print_prefix, config.domain))
            opt_val, opt_pt, history = call_to_optimise['single'][
                options.max_or_min](obj_module.objective,
                                    domain=None,
                                    max_capital=options.max_capital,
                                    config=config,
                                    options=options)
        print('Optimum Value in %d evals: %0.4f' %
              (len(history.curr_opt_points), opt_val))
        print('Optimum Point: %s.' % (opt_pt))
    else:
        if is_mf:
            raise ValueError(
                'Multi-objective multi-fidelity optimisation has not been ' +
                'implemented yet.')
        else:
            print(
                '%s multiobjective functions on domain %s with %d functions.' %
                (_print_prefix, config.domain, len(obj_module.objectives)))
            pareto_values, pareto_points, history = \
                call_to_optimise['multi'][options.max_or_min](
                    obj_module.objectives, domain=None,
                    max_capital=options.max_capital,
                    config=config, options=options)
        num_pareto_points = len(pareto_points)
        print('Found %d Pareto Points: %s.' %
              (num_pareto_points, pareto_points))
        print('Corresponding Pareto Values: %s.' % (pareto_values))
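
For contrast with these command-line drivers, dragonfly also exposes a compact programmatic API; a toy sketch in the spirit of the dragonfly README (the 1-D objective here is illustrative):

from dragonfly import minimise_function

# Toy 1-D objective; the domain is a list of [lower, upper] bounds.
min_val, min_pt, history = minimise_function(
    lambda x: x[0] ** 4 - x[0] ** 2 + 0.1 * x[0], [[-10, 10]], max_capital=10)
print(min_val, min_pt)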