Example no. 1
    def test_mean_var_interface_returns_same_as_cpp(self):
        """Test that the /gp/mean_var endpoint does the same thing as the C++ interface.

        For each GP test environment, evaluate posterior mean/variance at 10
        random domain points via the C++ bindings and via the REST endpoint,
        and require the two answers to agree within a tight relative tolerance.
        """
        tolerance = 1.0e-11
        for domain, gp in self.gp_test_environments:
            covariance, historical_data = gp.get_core_data_copy()

            # Build the C++ GP from the same covariance/data as the Python GP.
            gaussian_process = GaussianProcess(
                SquareExponential(covariance.hyperparameters),
                historical_data,
            )

            eval_points = domain.generate_uniform_random_points_in_domain(10)

            # Reference values straight from the C++ interface.
            truth_mean = gaussian_process.compute_mean_of_points(eval_points)
            truth_var = gaussian_process.compute_variance_of_points(eval_points)

            # The same query routed through the REST endpoint.
            payload = self._build_json_payload(
                domain, covariance, historical_data, eval_points.tolist())
            response = self.testapp.post(self.endpoint, payload)
            parsed = GpMeanVarResponse().deserialize(json.loads(response.body))

            self.assert_vector_within_relative(
                numpy.asarray(parsed.get('mean')), truth_mean, tolerance)
            self.assert_vector_within_relative(
                numpy.asarray(parsed.get('var')), truth_var, tolerance)
Example no. 2
    def test_mean_var_interface_returns_same_as_cpp(self):
        """Check that /gp/mean_var agrees with the C++ GP mean/variance computation."""
        tolerance = 1.0e-11
        for python_domain, python_gp in self.gp_test_environments:
            python_cov, historical_data = python_gp.get_core_data_copy()

            # C++ GP constructed from the Python GP's core data.
            cpp_gp = GaussianProcess(
                SquareExponential(python_cov.hyperparameters), historical_data)

            points_to_evaluate = python_domain.generate_uniform_random_points_in_domain(10)

            # Ground truth from the C++ bindings.
            cpp_mean = cpp_gp.compute_mean_of_points(points_to_evaluate)
            cpp_var = cpp_gp.compute_variance_of_points(points_to_evaluate)

            # Identical request through the REST layer.
            resp = self.testapp.post(
                self.endpoint,
                self._build_json_payload(
                    python_domain, python_cov, historical_data,
                    points_to_evaluate.tolist()),
            )
            resp_dict = GpMeanVarResponse().deserialize(json.loads(resp.body))

            # Both fields must match the C++ reference within tolerance.
            for key, expected in (('mean', cpp_mean), ('var', cpp_var)):
                self.assert_vector_within_relative(
                    numpy.asarray(resp_dict.get(key)), expected, tolerance)
Example no. 3
    def test_python_and_cpp_return_same_mu_and_gradient(self):
        """Compare mu/grad mu results from Python & C++, checking several random points per test case.

        For each GP test environment and each ``num_to_sample`` size, draw
        ``num_tests_per_case`` random point sets and require the Python and
        C++ implementations of the posterior mean (and its gradient) to
        agree within tight relative tolerances.
        """
        num_tests_per_case = 4
        mu_tolerance = 3.0e-13
        grad_mu_tolerance = 3.0e-12

        for test_case in self.gp_test_environments:
            domain, python_gp = test_case
            python_cov, historical_data = python_gp.get_core_data_copy()

            # Build the C++ GP from copies of the Python GP's core data so
            # both implementations see identical inputs.
            cpp_cov = SquareExponential(python_cov.hyperparameters)
            cpp_gp = GaussianProcess(cpp_cov, historical_data)

            for num_to_sample in self.num_to_sample_list:
                for _ in xrange(num_tests_per_case):
                    points_to_sample = domain.generate_uniform_random_points_in_domain(
                        num_to_sample)

                    cpp_mu = cpp_gp.compute_mean_of_points(points_to_sample)
                    python_mu = python_gp.compute_mean_of_points(points_to_sample)
                    self.assert_vector_within_relative(python_mu, cpp_mu, mu_tolerance)

                    cpp_grad_mu = cpp_gp.compute_grad_mean_of_points(points_to_sample)
                    python_grad_mu = python_gp.compute_grad_mean_of_points(points_to_sample)
                    self.assert_vector_within_relative(
                        python_grad_mu, cpp_grad_mu, grad_mu_tolerance)
def moe_compute_best_pt_info(moe_exp, covariance_info, confidence=None,
                             mean_fxn_info=None, sample_pts=None, debug=False):
    """Find the best candidate point under the GP posterior.

    Builds a GP from ``moe_exp``'s historical data with the supplied
    square-exponential hyperparameters, evaluates posterior mean and
    variance at ``sample_pts`` (defaulting to the already-sampled points),
    and selects the point minimizing either the posterior mean (when
    ``confidence is None``) or the upper bound of the two-sided normal
    confidence interval at the given confidence level.

    Returns ``[best_point, mean_at_best, variance_at_best]``, or
    ``(None, None)`` when nothing has been sampled yet.
    NOTE(review): the empty-data return has a different arity/shape than
    the normal return — callers must handle both; left as-is to avoid
    breaking existing callers.
    """
    if moe_exp.historical_data.num_sampled <= 0:
        return None, None
    covar = SquareExponential(covariance_info['hyperparameters'])
    cpp_gp = GaussianProcess(covar, moe_exp.historical_data,
                             mean_fxn_info=mean_fxn_info)
    # Use `is None`: `ndarray == None` compares elementwise and raises
    # "truth value is ambiguous" when an array is passed by the caller.
    if sample_pts is None:
        sample_pts = np.array(moe_exp.historical_data.points_sampled)
    cpp_mu = cpp_gp.compute_mean_of_points(sample_pts, debug)
    # Pointwise posterior variances are the diagonal of the covariance matrix.
    cpp_var = np.diag(cpp_gp.compute_variance_of_points(sample_pts))
    if confidence is None:
        minidx = np.argmin(cpp_mu)
    else:
        # Minimize the upper endpoint of the normal confidence interval
        # (index [1] of scipy's (lower, upper) pair).
        upper_conf = scipy.stats.norm.interval(confidence, loc=cpp_mu,
                                               scale=np.sqrt(cpp_var))[1]
        minidx = np.argmin(upper_conf)
    return [sample_pts[minidx], cpp_mu[minidx], cpp_var[minidx]]
Example no. 5
    def test_python_and_cpp_return_same_mu_and_gradient(self):
        """Compare mu/grad mu results from Python & C++, checking several random points per test case.

        Draws ``num_tests_per_case`` random point sets per test environment
        and per ``num_to_sample`` size, and requires the Python and C++
        posterior mean (and gradient-of-mean) computations to agree within
        tight relative tolerances.
        """
        num_tests_per_case = 4
        mu_tolerance = 3.0e-13
        grad_mu_tolerance = 3.0e-12

        for test_case in self.gp_test_environments:
            domain, python_gp = test_case
            python_cov, historical_data = python_gp.get_core_data_copy()

            # C++ GP built from copies of the Python GP's core data so both
            # implementations operate on identical inputs.
            cpp_cov = SquareExponential(python_cov.hyperparameters)
            cpp_gp = GaussianProcess(cpp_cov, historical_data)

            for num_to_sample in self.num_to_sample_list:
                for _ in xrange(num_tests_per_case):
                    points_to_sample = domain.generate_uniform_random_points_in_domain(num_to_sample)

                    cpp_mu = cpp_gp.compute_mean_of_points(points_to_sample)
                    python_mu = python_gp.compute_mean_of_points(points_to_sample)
                    self.assert_vector_within_relative(python_mu, cpp_mu, mu_tolerance)

                    cpp_grad_mu = cpp_gp.compute_grad_mean_of_points(points_to_sample)
                    python_grad_mu = python_gp.compute_grad_mean_of_points(points_to_sample)
                    self.assert_vector_within_relative(python_grad_mu, cpp_grad_mu, grad_mu_tolerance)
Example no. 6
            current_hist_data.points_sampled_value,
            current_hist_data.points_sampled_noise_variance)

        ### Use new hyperparameters -- this requires instantiating a new GP object
        kg_cov_cpp = cppCovariance(hyperparameters=hypers_GP)
        kg_gp_cpp = GaussianProcess(kg_cov_cpp, new_historical_data)

    # ================================================================================================================ #
    #                                    Find s and point that maximize KG/cost                                        #
    # ================================================================================================================ #
    discrete_pts_x = problem.obj_func_min.get_moe_domain(
    ).generate_uniform_x_points_in_domain(num_discretization)
    discrete_pts = np.hstack(
        (problem.obj_func_min.getSearchDomain()[0, -1] * np.ones(
            (num_discretization, 1)), discrete_pts_x))
    all_mu = kg_gp_cpp.compute_mean_of_points(discrete_pts)
    sorted_idx = np.argsort(all_mu)
    all_S_xprime = discrete_pts[
        sorted_idx[-num_x_prime:], :]  # select the last num_x_prime samples

    # ================================================================================================================ #
    #                  For every s, compute a point of maximum KG/cost (here: minimum -1.0 * KG/cost)                  #
    # ================================================================================================================ #
    #>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
    def min_kg_unit(start_pt_x, s, repQL):
        func_to_min = negative_kg_given_x_prime(
            s, all_S_xprime, problem.obj_func_min.noise_and_cost_func,
            kg_gp_cpp, hyperparameters_noise * repQL_noise_rate[repQL], repQL)
        return bfgs_optimization(
            start_pt_x, func_to_min,
            problem.obj_func_min._search_domain)  # approximate gradient