def _optimize_by_hyperopt_pkg(self, func_caller, max_capital, options):
   """ Optimizes the function using hyperopt package """
   try:
     from hyperopt import fmin, Trials
   except ImportError as e:
     raise ImportError('The hyperopt package is required for this optimiser but '
                       'is not installed.') from e
   space = options.space
   algo = options.algo
   param_space = self._get_space_params(space, func_caller.domain.bounds)
   func_to_min = _get_func_to_min_from_func_caller(func_caller)
   trials = Trials()
   best = fmin(func_to_min, space=param_space, algo=algo,
               max_evals=int(max_capital), trials=trials)
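   # Rebuild the optimisation history from hyperopt's trial records.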
   history = Namespace()
   trial_data = trials.trials
   total_num_queries = len(trial_data)
   history.query_step_idxs = list(range(total_num_queries))
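   # Each trial stores its parameters as {'label': [value], ...}; flatten the
   # per-label singleton lists into a single point.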
   pts_in_hypopt_format = [trial_data[i]['misc']['vals'].values()
                           for i in range(total_num_queries)]
   history.query_points = [flatten_list_of_lists(pt)
                           for pt in pts_in_hypopt_format]
   # book_time / refresh_time are datetimes recorded by hyperopt; report them as
   # seconds elapsed since the first query was issued.
   first_book_time = trial_data[0]['book_time']
   history.query_send_times = [
       (trial_data[i]['book_time'] - first_book_time).total_seconds()
       for i in range(total_num_queries)]
   history.query_receive_times = [
       (trial_data[i]['refresh_time'] - first_book_time).total_seconds()
       for i in range(total_num_queries)]
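   # hyperopt minimises, so negate the recorded losses to recover the
   # (maximised) function values.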
   losses = [-loss for loss in trials.losses()]
   history.query_vals = losses
   history = common_final_operations_for_all_external_packages(history, self.func_caller,
                                                               options)
   return best, history
 def _optimise_with_hyperopt(cls, func_caller, max_capital, meth_options):
     """ Optimise with hyperopt. """
     import hyperopt as hypopt
     hypopt_func_to_min, hypopt_space, hypopt_convert_pt_back = \
       get_hypopt_func_and_space(func_caller.eval_single, func_caller.domain, hypopt)
     hypopt_algo = hypopt.tpe.suggest
     hypopt_trials = hypopt.Trials()
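     # Convert the capital into a number of evaluations; for multi-fidelity
     # callers a single query at fidel_to_opt costs fidel_cost_func(fidel_to_opt).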
     if hasattr(func_caller, 'fidel_cost_func'):
         max_num_evals = int(
             max_capital /
             func_caller.fidel_cost_func(func_caller.fidel_to_opt))
     else:
         max_num_evals = int(max_capital)
     hypopt.fmin(hypopt_func_to_min,
                 space=hypopt_space,
                 algo=hypopt_algo,
                 max_evals=max_num_evals,
                 trials=hypopt_trials)
     trial_data = hypopt_trials.trials
     num_hypopt_queries = len(trial_data)
     history = Namespace()
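     # Sort the variable labels so each point's coordinates come back in a fixed order.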
     hypopt_labels = sorted(trial_data[0]['misc']['vals'].keys())
     pts_in_hypopt_format = [[
         trial_data[i]['misc']['vals'][key] for key in hypopt_labels
     ] for i in range(num_hypopt_queries)]
     history.query_points = [
         hypopt_convert_pt_back(flatten_list_of_lists(pt))
         for pt in pts_in_hypopt_format
     ]
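     # hyperopt evaluates queries sequentially; use the query index as a
     # stand-in for the send/receive times.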
     history.query_send_times = list(range(num_hypopt_queries))
     history.query_receive_times = list(range(1, num_hypopt_queries + 1))
     query_vals = [-loss for loss in hypopt_trials.losses()]
     history.query_vals = query_vals
     history = common_final_operations_for_all_external_packages(
         history, func_caller, meth_options)
     return history
  def _optimize_by_spearmint_pkg(self, func_caller, max_capital, options):
    """ Optimizes the function using spearmint package """
    exp_dir = options.exp_dir + '_' + time.strftime("%Y%m%d-%H%M%S")
    out_dir = exp_dir + '/output'
    cur_dir = os.getcwd()
    out_file = cur_dir + '/output-' + time.strftime("%Y%m%d-%H%M%S") + '.txt'
    history = Namespace()
    total_num_queries = int(max_capital)

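    # Work on a timestamped copy of the experiment directory so the template
    # config.json stays untouched.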
    os.system('cp -rf ' + options.exp_dir + ' ' + exp_dir)
    config_file = exp_dir + '/config.json'
    with open(config_file, 'r') as _file:
      config = json.load(_file, object_pairs_hook=OrderedDict)
    config['experiment-name'] = config['experiment-name'] + '-' + \
                                time.strftime("%Y%m%d-%H%M%S")
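    # Infer the target dimension (from a trailing '-<dim>' in the study name if
    # present) and tile the template variables to match it.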
    if '-' in self.study_name:
      dim = int(self.study_name.split('-')[-1])
    else:
      dim = len(config['variables'])
    variables = OrderedDict()
    for i in range(dim // len(config['variables'])):
      for var in config['variables']:
        variables[var + '_' + str(i)] = config['variables'][var]
    config.pop('variables')
    config['variables'] = variables
    with open(config_file, 'w') as _file:
      json.dump(config, _file, indent=4)

    # Create output directory
    if not os.path.exists(out_dir):
      os.makedirs(out_dir)

    # Start a subprocess to run the spearmint package
    os.chdir(options.pkg_dir)
    cmd = 'python main.py ' + exp_dir + ' > ' + out_file + ' 2>&1'
    start_time = timeit.default_timer()
    proc = subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid)
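    # Poll spearmint's output directory: one output file appears per completed
    # query. With realtime capital, max_capital is treated as a time budget in
    # seconds before the run is wound down.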
    if options.capital_type == 'realtime':
      count = total_num_queries
      timeout = False
      while True:
        curr_time = timeit.default_timer()
        out_files = os.listdir(out_dir)
        if curr_time - start_time >= count and timeout is False:
          count = len(out_files) + 1
          timeout = True
        if len(out_files) > count:
          os.killpg(os.getpgid(proc.pid), signal.SIGINT)
          break
    else:
      while True:
        out_files = os.listdir(out_dir)
        if len(out_files) > total_num_queries:
          os.killpg(os.getpgid(proc.pid), signal.SIGINT)
          break
    os.chdir(cur_dir)

    # Extract info for plotting
    out_files = sorted(os.listdir(out_dir))[0:total_num_queries]
    query_vals = []
    query_pts = []
    for fname in out_files:
      file_path = out_dir + '/' + fname
      if os.path.getsize(file_path) == 0:
        continue
      point_dict, value = _read_spearmint_query_file(file_path)
      point = [point_dict[k] for k in config['variables'].keys()]
      query_vals.append(value)
      query_pts.append(point)
    query_pts = [flatten_list_of_lists(x) for x in query_pts]
    query_pts = [x for x in query_pts if func_caller.raw_domain.is_a_member(x)]
    history.query_step_idxs = list(range(total_num_queries))
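    # Map the raw spearmint points into the caller's normalised domain coordinates.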
    history.query_points = [func_caller.get_normalised_domain_coords(x)
                            for x in query_pts]
    history.query_send_times = [0] * total_num_queries
    history.query_receive_times = list(range(1, total_num_queries+1))
    history.query_vals = query_vals
    history = common_final_operations_for_all_external_packages(history, self.func_caller,
                                                                options)
    # Delete temporary files and directories
    os.system('rm -rf ' + out_file)
    os.system('rm -rf ' + exp_dir)
    return history
    def _optimise_with_spearmint(cls, func_caller, max_capital, options,
                                 study_name):
        """ Optimise with spearmint. """
        exp_dir = options.exp_dir + '_' + time.strftime("%Y%m%d-%H%M%S")
        out_dir = exp_dir + '/output'
        cur_dir = os.getcwd()
        out_file = cur_dir + '/output-' + time.strftime(
            "%Y%m%d-%H%M%S") + '.txt'
        history = Namespace()
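        # As in the hyperopt wrapper, convert the capital into a query budget,
        # dividing by the cost of querying at fidel_to_opt for multi-fidelity callers.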
        if hasattr(func_caller, 'fidel_cost_func'):
            total_num_queries = int(
                max_capital /
                func_caller.fidel_cost_func(func_caller.fidel_to_opt))
        else:
            total_num_queries = int(max_capital)
        os.system('cp -rf ' + options.exp_dir + ' ' + exp_dir)
        config_file = exp_dir + '/config.json'
        with open(config_file, 'r') as _file:
            config = json.load(_file, object_pairs_hook=OrderedDict)
        config['experiment-name'] = config['experiment-name'] + '-' + \
                                    time.strftime("%Y%m%d-%H%M%S")
        if '-' in study_name:
            dim = int(study_name.split('-')[-1])
        else:
            dim = len(config['variables'])
        variables = OrderedDict()
        for i in range(dim // len(config['variables'])):
            for var in config['variables']:
                variables[var + '_' + str(i)] = config['variables'][var]
        config.pop('variables')
        config['variables'] = variables
        with open(config_file, 'w') as _file:
            json.dump(config, _file, indent=4)

        # Create output directory
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)

        # Start a subprocess to run the spearmint package
        os.chdir(options.pkg_dir)
        cmd = 'python main.py ' + exp_dir + ' > ' + out_file + ' 2>&1'
        start_time = timeit.default_timer()
        proc = subprocess.Popen(cmd, shell=True, preexec_fn=os.setsid)
        if options.capital_type == 'realtime':
            count = total_num_queries
            timeout = False
            while True:
                curr_time = timeit.default_timer()
                out_files = os.listdir(out_dir)
                if curr_time - start_time >= count and timeout is False:
                    count = len(out_files) + 1
                    timeout = True
                if len(out_files) > count:
                    os.killpg(os.getpgid(proc.pid), signal.SIGINT)
                    break
        else:
            while True:
                out_files = os.listdir(out_dir)
                if len(out_files) > total_num_queries:
                    os.killpg(os.getpgid(proc.pid), signal.SIGINT)
                    break
        os.chdir(cur_dir)

        # Extract info for plotting
        out_files = sorted(os.listdir(out_dir))[0:total_num_queries]
        query_vals = []
        query_true_vals = []
        query_pts = []
        for fname in out_files:
            file_path = out_dir + '/' + fname
            if os.path.getsize(file_path) == 0:
                continue
            point_dict, value, true_value = _read_spearmint_query_file(
                file_path)
            point = [point_dict[k] for k in config['variables'].keys()]
            query_vals.append(value)
            query_pts.append(point)
            if true_value is None:
                query_true_vals.append(value)
            else:
                query_true_vals.append(true_value)

        query_pts = [flatten_list_of_lists(x) for x in query_pts]
        #query_pts = [x for x in query_pts if func_caller.raw_domain.is_a_member(x)]
        history.query_step_idxs = list(range(total_num_queries))
        #history.query_points = [func_caller.get_normalised_domain_coords(x)
        #                        for x in query_pts]
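        # Domain normalisation is skipped here; store empty placeholder points so
        # the history fields stay aligned.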
        history.query_points = [[]] * total_num_queries
        history.query_send_times = [0] * total_num_queries
        history.query_receive_times = list(range(1, total_num_queries + 1))
        history.query_vals = query_vals
        history.query_true_vals = query_true_vals

        # Query eval times
        history.query_eval_times = [
            history.query_receive_times[i] - history.query_send_times[i]
            for i in range(len(history.query_receive_times))]

        # Current optimum values and points
        history.curr_opt_vals = []
        history.curr_opt_points = []
        curr_max = -np.inf
        curr_opt_point = None
        for idx, qv in enumerate(history.query_vals):
            if qv >= curr_max:
                curr_max = qv
                curr_opt_point = history.query_points[idx]
            history.curr_opt_vals.append(curr_max)
            history.curr_opt_points.append(curr_opt_point)

        # Current True optimum values and points
        history.curr_true_opt_vals = []
        history.curr_true_opt_points = []
        curr_max = -np.inf
        curr_true_opt_point = None
        for idx, qtv in enumerate(history.query_true_vals):
            if qtv >= curr_max:
                curr_max = qtv
                curr_true_opt_point = history.query_points[idx]
            history.curr_true_opt_vals.append(curr_max)
            history.curr_true_opt_points.append(curr_true_opt_point)

        # Other bookkeeping fields (spearmint is modelled as a single external worker)
        history.query_worker_ids = [0] * total_num_queries
        history.query_qinfos = [''] * total_num_queries
        history.job_idxs_of_workers = [None] * total_num_queries
        history.num_jobs_per_worker = [total_num_queries]

        # Delete temporary files and directories
        os.system('rm -rf ' + out_file)
        os.system('rm -rf ' + exp_dir)
        return history