def gen_sample_from_qei(gp, search_domain, sgd_params, num_samples, num_mc=1e4, lhc_iter=2e4):
    """Optimize q-EI over the search domain and return the proposed batch along with its expected improvement."""
    qEI = ExpectedImprovement(gaussian_process=gp, num_mc_iterations=int(num_mc))
    optimizer = cGDOpt(search_domain, qEI, sgd_params, int(lhc_iter))
    # Multistart EI optimization (``meio``) on the CPU; ``None`` means no user-supplied starting points.
    points_to_sample = meio(optimizer, None, num_samples, use_gpu=False, which_gpu=0, max_num_threads=8)
    # Evaluate EI at the chosen point(s) before returning.
    qEI.set_current_point(points_to_sample[0])
    return points_to_sample, qEI.compute_expected_improvement()
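
# A minimal usage sketch for ``gen_sample_from_qei``. It assumes ``gp``,
# ``search_domain``, and ``sgd_params`` were constructed elsewhere with the
# cpp_wrappers types the helper expects (a ``GaussianProcess``, a C++ search
# domain, and ``GradientDescentParameters``); the batch size of 4 is an
# illustrative placeholder, not a recommendation.
def _demo_gen_sample_from_qei(gp, search_domain, sgd_params):
    """Illustrative only: propose a batch of 4 points by q-EI and report their joint EI."""
    points, qei_value = gen_sample_from_qei(gp, search_domain, sgd_params, num_samples=4)
    print('proposed batch:\n{0}\nq-EI: {1}'.format(points, qei_value))
    return points, qei_value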
def gp_ei_view(self):
    """Endpoint for gp_ei POST requests.

    .. http:post:: /gp/ei

       Calculates the Expected Improvement (EI) of a set of points, given historical data.

       :input: :class:`moe.views.schemas.rest.GpEiRequest`
       :output: :class:`moe.views.schemas.rest.GpEiResponse`

       :status 200: returns a response
       :status 500: server error

    """
    params = self.get_params_from_request()

    # TODO(GH-99): Change REST interface to give points_to_evaluate with shape
    # (num_to_evaluate, num_to_sample, dim).
    # Here we assume the shape is (num_to_evaluate, dim) so we insert an axis, making num_to_sample = 1.
    points_to_evaluate = numpy.array(params.get('points_to_evaluate'))[:, numpy.newaxis, :]
    points_being_sampled = numpy.array(params.get('points_being_sampled'))
    num_mc_iterations = params.get('mc_iterations')
    max_num_threads = params.get('max_num_threads')

    gaussian_process = _make_gp_from_params(params)

    expected_improvement_evaluator = ExpectedImprovement(
        gaussian_process,
        points_being_sampled=points_being_sampled,
        num_mc_iterations=num_mc_iterations,
    )

    with timing_context(EI_COMPUTATION_TIMING_LABEL):
        expected_improvement = expected_improvement_evaluator.evaluate_at_point_list(
            points_to_evaluate,
            max_num_threads=max_num_threads,
        )

    return self.form_response({
        'endpoint': self._route_name,
        'expected_improvement': expected_improvement.tolist(),
    })
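
# A hedged sketch of exercising the endpoint above over HTTP. The payload layout
# follows :class:`moe.views.schemas.rest.GpEiRequest`; the host/port and the exact
# set of required fields are assumptions to adapt to your deployment (optional
# fields such as ``covariance_info`` fall back to server defaults when omitted).
def _demo_gp_ei_request():
    import json
    import requests  # any HTTP client works; requests is assumed installed

    payload = {
        'points_to_evaluate': [[0.1], [0.5], [0.9]],  # shape (num_to_evaluate, dim)
        'gp_historical_info': {
            'points_sampled': [
                {'point': [0.0], 'value': 1.0, 'value_var': 0.01},
                {'point': [1.0], 'value': -1.0, 'value_var': 0.01},
            ],
        },
        'domain_info': {'dim': 1},
    }
    resp = requests.post('http://localhost:6543/gp/ei', json.dumps(payload))
    return resp.json()['expected_improvement']  # one EI value per point evaluated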
def test_interface_returns_same_as_cpp(self):
    """Test that the /gp/ei endpoint does the same thing as the C++ interface."""
    tolerance = 1.0e-11
    for test_case in self.gp_test_environments:
        python_domain, python_gp = test_case
        python_cov, historical_data = python_gp.get_core_data_copy()

        cpp_cov = SquareExponential(python_cov.hyperparameters)
        cpp_gp = GaussianProcess(cpp_cov, historical_data)

        points_to_evaluate = python_domain.generate_uniform_random_points_in_domain(10)

        # EI from C++
        expected_improvement_evaluator = ExpectedImprovement(
            cpp_gp,
            None,
        )
        # TODO(GH-99): Change test case to have the right shape:
        # (num_to_evaluate, num_to_sample, dim).
        # Here we assume the shape is (num_to_evaluate, dim) so we insert an axis, making num_to_sample = 1.
        # Also might be worth testing more num_to_sample values (will require manipulating C++ RNG state).
        cpp_expected_improvement = expected_improvement_evaluator.evaluate_at_point_list(
            points_to_evaluate[:, numpy.newaxis, :],
        )

        # EI from REST
        json_payload = self._build_json_payload(
            python_domain,
            python_cov,
            historical_data,
            points_to_evaluate.tolist(),
        )
        resp = self.testapp.post(self.endpoint, json_payload)
        resp_schema = GpEiResponse()
        resp_dict = resp_schema.deserialize(json.loads(resp.body))
        rest_expected_improvement = numpy.asarray(resp_dict.get('expected_improvement'))

        self.assert_vector_within_relative(
            rest_expected_improvement,
            cpp_expected_improvement,
            tolerance,
        )
def compute_next_points_to_sample_response(self, params, optimizer_method_name, route_name, *args, **kwargs):
    """Compute the next points to sample (and their expected improvement) using optimizer_method_name from params in the request.

    .. Warning:: Attempting to find ``num_to_sample`` optimal points with
       ``num_sampled < num_to_sample`` historical points sampled can cause matrix issues
       under some conditions. Try requesting ``num_to_sample < num_sampled`` points for
       better performance. To bootstrap more points try sampling at random, or from a grid.

    :param params: the deserialized REST request, containing ei_optimizer_parameters and gp_historical_info
    :type params: a deserialized self.request_schema object as a dict
    :param optimizer_method_name: the optimization method to use
    :type optimizer_method_name: string in :const:`moe.views.constant.NEXT_POINTS_OPTIMIZER_METHOD_NAMES`
    :param route_name: name of the route being called
    :type route_name: string in :const:`moe.views.constant.ALL_REST_ROUTES_ROUTE_NAME_TO_ENDPOINT`
    :param ``*args``: extra args to be passed to optimization method
    :param ``**kwargs``: extra kwargs to be passed to optimization method

    """
    points_being_sampled = numpy.array(params.get('points_being_sampled'))
    num_to_sample = params.get('num_to_sample')
    num_mc_iterations = params.get('mc_iterations')
    max_num_threads = params.get('max_num_threads')

    gaussian_process = _make_gp_from_params(params)

    ei_opt_status = {}
    # TODO(GH-89): Make the optimal_learning library handle this case 'organically' with
    # reasonable default behavior and remove hacks like this one.
    if gaussian_process.num_sampled == 0:
        # If there is no initial data we bootstrap with random points.
        py_domain = _make_domain_from_params(params, python_version=True)
        next_points = py_domain.generate_uniform_random_points_in_domain(num_to_sample)
        ei_opt_status['found_update'] = True
        expected_improvement_evaluator = PythonExpectedImprovement(
            gaussian_process,
            points_being_sampled=points_being_sampled,
            num_mc_iterations=num_mc_iterations,
        )
    else:
        # Calculate the next best points to sample given the historical data.
        optimizer_class, optimizer_parameters, num_random_samples = _make_optimizer_parameters_from_params(params)

        if optimizer_class == python_optimization.LBFGSBOptimizer:
            domain = RepeatedDomain(num_to_sample, _make_domain_from_params(params, python_version=True))
            expected_improvement_evaluator = PythonExpectedImprovement(
                gaussian_process,
                points_being_sampled=points_being_sampled,
                num_mc_iterations=num_mc_iterations,
                mvndst_parameters=_make_mvndst_parameters_from_params(params),
            )
            opt_method = getattr(moe.optimal_learning.python.python_version.expected_improvement, optimizer_method_name)
        else:
            domain = _make_domain_from_params(params, python_version=False)
            expected_improvement_evaluator = ExpectedImprovement(
                gaussian_process,
                points_being_sampled=points_being_sampled,
                num_mc_iterations=num_mc_iterations,
            )
            opt_method = getattr(moe.optimal_learning.python.cpp_wrappers.expected_improvement, optimizer_method_name)

        expected_improvement_optimizer = optimizer_class(
            domain,
            expected_improvement_evaluator,
            optimizer_parameters,
            num_random_samples=num_random_samples,
        )

        with timing_context(EPI_OPTIMIZATION_TIMING_LABEL):
            next_points = opt_method(
                expected_improvement_optimizer,
                params.get('optimizer_info')['num_multistarts'],  # optimizer_parameters.num_multistarts,
                num_to_sample,
                max_num_threads=max_num_threads,
                status=ei_opt_status,
                *args,
                **kwargs
            )

    # TODO(GH-285): Use analytic q-EI here.
    # TODO(GH-314): Need to resolve potential issue with NaNs before using q-EI here.
    # It may be sufficient to check found_update == False in ei_opt_status
    # and then use q-EI, else set EI = 0.
    expected_improvement_evaluator.current_point = next_points
    # The C++ may fail to compute EI with some ``next_points`` inputs (e.g.,
    # ``points_to_sample`` and ``points_being_sampled`` are too close together or too
    # close to ``points_sampled``). We catch the exception when this happens and
    # attempt a more numerically robust option.
    try:
        expected_improvement = expected_improvement_evaluator.compute_expected_improvement()
    except Exception as exception:
        self.log.info('EI computation failed, probably b/c GP-variance matrix is singular. Error: {0!s}'.format(exception))

        # ``_compute_expected_improvement_monte_carlo`` in
        # :class:`moe.optimal_learning.python.python_version.expected_improvement.ExpectedImprovement`
        # has a more reliable (but very expensive) way to deal with singular variance matrices.
        python_ei_eval = PythonExpectedImprovement(
            expected_improvement_evaluator._gaussian_process,
            points_to_sample=next_points,
            points_being_sampled=points_being_sampled,
            num_mc_iterations=num_mc_iterations,
        )
        expected_improvement = python_ei_eval.compute_expected_improvement(force_monte_carlo=True)

    return self.form_response({
        'endpoint': route_name,
        'points_to_sample': next_points.tolist(),
        'status': {
            'expected_improvement': expected_improvement,
            'optimizer_success': ei_opt_status,
        },
    })
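
# A minimal sketch of how a concrete view wires into the helper above, modeled on
# MOE's ``/gp/next_points/epi`` route. The optimizer method is named by string and
# resolved with ``getattr`` inside ``compute_next_points_to_sample_response``; treat
# this method body as an assumption about the surrounding view class, not verbatim
# library code.
def _demo_gp_next_points_epi_view(self):
    """Illustrative only: deserialize the request and delegate to the shared helper."""
    params = self.get_params_from_request()
    return self.compute_next_points_to_sample_response(
        params,
        'multistart_expected_improvement_optimization',
        self._route_name,
    )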