Example #1
    def test_branin_2D_mixed(self):
        n_iter = 20
        fun = Branin(ndim=2)
        xtypes = [INT, FLOAT]
        xlimits = fun.xlimits
        criterion = "EI"  #'EI' or 'SBO' or 'UCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        s = KRG(print_global=False)
        ego = EGO(
            xdoe=xdoe,
            n_iter=n_iter,
            criterion=criterion,
            xtypes=xtypes,
            xlimits=xlimits,
            surrogate=s,
        )

        x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
        # 3 optimal points possible: [-pi, 12.275], [pi, 2.275], [9.42478, 2.475]
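        # the first variable is typed INT, so the mixed-integer EGO returns integer abscissae (-3, 3, 9) near the continuous optima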
        self.assertTrue(
            np.allclose([[-3, 12.275]], x_opt, rtol=0.2)
            or np.allclose([[3, 2.275]], x_opt, rtol=0.2)
            or np.allclose([[9, 2.475]], x_opt, rtol=0.2))
        self.assertAlmostEqual(0.494, float(y_opt), delta=1)
Example #2
    def test_mfk(self):
        self.problems = ["exp", "tanh", "cos"]

        for fname in self.problems:
            prob = TensorProduct(ndim=self.ndim, func=fname)
            sampling = FullFactorial(xlimits=prob.xlimits, clip=True)

            np.random.seed(0)
            xt = sampling(self.nt)
            yt = prob(xt)
            for i in range(self.ndim):
                yt = np.concatenate((yt, prob(xt, kx=i)), axis=1)

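            # build low-fidelity data as an affine transform (2*y + 2) of the high-fidelity response at the same points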
            y_lf = 2 * prob(xt) + 2
            x_lf = deepcopy(xt)
            np.random.seed(1)
            xe = sampling(self.ne)
            ye = prob(xe)

            sm = MFK(theta0=[1e-2] * self.ndim)
            if sm.options.is_declared("xlimits"):
                sm.options["xlimits"] = prob.xlimits
            sm.options["print_global"] = False

            sm.set_training_values(xt, yt[:, 0])
            sm.set_training_values(x_lf, y_lf[:, 0], name=0)

            with Silence():
                sm.train()

            t_error = compute_rms_error(sm)
            e_error = compute_rms_error(sm, xe, ye)

            self.assert_error(t_error, 0.0, 1)
            self.assert_error(e_error, 0.0, 1)
Example #3
    def test_ff_rectify(self):
        xlimits = np.array([[0.0, 4.0], [0.0, 3.0]])
        sampling = FullFactorial(xlimits=xlimits, clip=True)

        num = 50
        x = sampling(num)
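        # clip=True pads the 50 requested samples up to the nearest full grid, hence 56 points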
        self.assertEqual((56, 2), x.shape)
Example #4
    def test_branin_2D_parallel(self):
        n_iter = 10
        fun = Branin(ndim=2)
        n_parallel = 5
        xlimits = fun.xlimits
        criterion = "EI"  #'EI' or 'SBO' or 'UCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        ego = EGO(
            xdoe=xdoe,
            n_iter=n_iter,
            criterion=criterion,
            xlimits=xlimits,
            n_parallel=n_parallel,
        )

        x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)

        # 3 optimal points possible: [-pi, 12.275], [pi, 2.275], [9.42478, 2.475]
        print(x_opt)
        self.assertTrue(
            np.allclose([[-3.14, 12.275]], x_opt, rtol=0.5)
            or np.allclose([[3.14, 2.275]], x_opt, rtol=0.5)
            or np.allclose([[9.42, 2.475]], x_opt, rtol=0.5))
        print("Branin=", x_opt)
        self.assertAlmostEqual(0.39, float(y_opt), delta=1)
Example #5
    def run_moe_example_1d():
        import numpy as np
        from smt.applications import MOE
        from smt.sampling_methods import FullFactorial
        import matplotlib.pyplot as plt

        ndim = 1
        nt = 35

        def function_test_1d(x):
            import numpy as np  # Note: only required by SMT doc testing toolchain

            x = np.reshape(x, (-1,))
            y = np.zeros(x.shape)
            y[x < 0.4] = x[x < 0.4] ** 2
            y[(x >= 0.4) & (x < 0.8)] = 3 * x[(x >= 0.4) & (x < 0.8)] + 1
            y[x >= 0.8] = np.sin(10 * x[x >= 0.8])
            return y.reshape((-1, 1))

        x = np.linspace(0, 1, 100)
        ytrue = function_test_1d(x)

        # Training data
        sampling = FullFactorial(xlimits=np.array([[0, 1]]), clip=True)
        np.random.seed(0)
        xt = sampling(nt)
        yt = function_test_1d(xt)

        # Mixture of experts
        print("MOE Experts: ", MOE.AVAILABLE_EXPERTS)

        # MOE1: Find the best surrogate model on the whole domain
        moe1 = MOE(n_clusters=1)
        print("MOE1 enabled experts: ", moe1.enabled_experts)
        moe1.set_training_values(xt, yt)
        moe1.train()
        y_moe1 = moe1.predict_values(x)

        # MOE2: Set nb of cluster with just KRG, LS and IDW surrogate models
        moe2 = MOE(smooth_recombination=False, n_clusters=3, allow=["KRG", "LS", "IDW"])
        print("MOE2 enabled experts: ", moe2.enabled_experts)
        moe2.set_training_values(xt, yt)
        moe2.train()
        y_moe2 = moe2.predict_values(x)

        fig, axs = plt.subplots(1)
        axs.plot(x, ytrue, ".", color="black")
        axs.plot(x, y_moe1)
        axs.plot(x, y_moe2)
        axs.set_xlabel("x")
        axs.set_ylabel("y")
        axs.legend(["Training data", "MOE 1 Prediction", "MOE 2 Prediction"])

        plt.show()
Example #6
    def test_rosenbrock_2D(self):
        n_iter = 30
        fun = Rosenbrock(ndim=2)
        xlimits = fun.xlimits
        criterion = "UCB"  #'EI' or 'SBO' or 'UCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        ego = EGO(xdoe=xdoe, n_iter=n_iter, criterion=criterion, xlimits=xlimits)

        x_opt, y_opt, _, _, _, _, _ = ego.optimize(fun=fun)

        self.assertTrue(np.allclose([[1, 1]], x_opt, rtol=0.5))
        self.assertAlmostEqual(0.0, float(y_opt), delta=1)
Example #7
    def run_test(self):
        method_name = inspect.stack()[1][3]
        pname = method_name.split("_")[1]
        sname = method_name.split("_")[2]

        prob = self.problems[pname]
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)

        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)
        print(prob(xt, kx=0).shape)
        for i in range(self.ndim):
            yt = np.concatenate((yt, prob(xt, kx=i)), axis=1)

        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)

        sm0 = self.sms[sname]

        sm = sm0.__class__()
        sm.options = sm0.options.clone()
        if sm.options.is_declared("xlimits"):
            sm.options["xlimits"] = prob.xlimits
        sm.options["print_global"] = False

        if sname in ["KPLS", "KRG", "KPLSK", "GEKPLS"]:
            optname = method_name.split("_")[3]
            sm.options["hyper_opt"] = optname

        sm.set_training_values(xt, yt[:, 0])
        if sm.supports["training_derivatives"]:
            for i in range(self.ndim):
                sm.set_training_derivatives(xt, yt[:, i + 1], i)

        with Silence():
            sm.train()

        t_error = compute_rms_error(sm)
        e_error = compute_rms_error(sm, xe, ye)

        if sm.supports["variances"]:
            sm.predict_variances(xe)

        if pname == "cos":
            self.assertLessEqual(e_error, self.e_errors[sname] + 1.5)
        else:
            self.assertLessEqual(e_error, self.e_errors[sname] + 1e-4)
        self.assertLessEqual(t_error, self.t_errors[sname] + 1e-4)
Example #8
    def run_test(self):
        method_name = inspect.stack()[1][3]
        sname = method_name.split("_")[1]

        prob = self.problem
        sampling = FullFactorial(xlimits=prob.xlimits, clip=False)

        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)
        # dyt = {}
        # for kx in range(prob.xlimits.shape[0]):
        #     dyt[kx] = prob(xt, kx=kx)

        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)

        sm0 = self.sms[sname]

        sm = sm0.__class__()
        sm.options = sm0.options.clone()
        if sm.options.is_declared("xlimits"):
            sm.options["xlimits"] = prob.xlimits
        sm.options["print_global"] = False

        sm.set_training_values(xt, yt)

        sm.update_training_values(yt)
        with Silence():
            sm.train()
        ye0 = sm.predict_values(xe)

        h = 1e-3
        jac_fd = np.zeros((self.ne, self.nt))
        for ind in range(self.nt):
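            # np.eye(nt, M=1, k=-ind) is an (nt, 1) column with a 1 in row ind: perturb a single training output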
            sm.update_training_values(yt + h * np.eye(self.nt, M=1, k=-ind))
            with Silence():
                sm.train()
            ye = sm.predict_values(xe)

            jac_fd[:, ind] = (ye - ye0)[:, 0] / h

        jac_fd = jac_fd.reshape((self.ne, self.nt, 1))
        jac_an = sm.predict_output_derivatives(xe)[None]

        if print_output:
            print(np.linalg.norm(jac_fd - jac_an))

        self.assert_error(jac_fd, jac_an, rtol=5e-2)
Example #9
 def test_find_best_point(self):
     fun = TestEGO.function_test_1d
     xlimits = np.array([[0.0, 25.0]])
     xdoe = FullFactorial(xlimits=xlimits)(3)
     ydoe = fun(xdoe)
     ego = EGO(xdoe=xdoe,
               ydoe=ydoe,
               n_iter=1,
               criterion="UCB",
               xlimits=xlimits,
               n_start=30)
     _, _, _, _, _ = ego.optimize(fun=fun)
     x, _ = ego._find_best_point(xdoe, ydoe)
     self.assertAlmostEqual(6.5, float(x), delta=1)
Example #10
    def test_ydoe_option(self):
        n_iter = 10
        fun = Branin(ndim=2)
        xlimits = fun.xlimits
        criterion = "UCB"  #'EI' or 'SBO' or 'UCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        ydoe = fun(xdoe)

        ego = EGO(
            xdoe=xdoe, ydoe=ydoe, n_iter=n_iter, criterion=criterion, xlimits=xlimits
        )
        _, y_opt, _, _, _, _, y_doe = ego.optimize(fun=fun)

        self.assertAlmostEqual(0.39, float(y_opt), delta=1)
Example #11
    def test_rosenbrock_2D(self):
        n_iter = 40
        fun = Rosenbrock(ndim=2)
        xlimits = fun.xlimits
        criterion = "UCB"  #'EI' or 'SBO' or 'UCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        ego = EGO(
            xdoe=xdoe,
            n_iter=n_iter,
            criterion=criterion,
            xlimits=xlimits,
            random_state=0,  # seed fixed because the test fails on Travis macOS py3.7 otherwise
        )

        x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
        self.assertTrue(np.allclose([[1, 1]], x_opt, rtol=0.5))
        self.assertAlmostEqual(0.0, float(y_opt), delta=1)
Example #12
    def test_full_factorial(self):
        import numpy as np
        import matplotlib.pyplot as plt

        from smt.sampling_methods import FullFactorial

        xlimits = np.array([[0.0, 4.0], [0.0, 3.0]])
        sampling = FullFactorial(xlimits=xlimits)

        num = 50
        x = sampling(num)

        print(x.shape)

        plt.plot(x[:, 0], x[:, 1], "o")
        plt.xlabel("x")
        plt.ylabel("y")
        plt.show()
Example #13
    def test_branin_2d_200(self):
        self.ndim = 2
        self.nt = 200
        self.ne = 200

        prob = Branin(ndim=self.ndim)

        # training data
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)
        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)

        # mixture of experts
        moe = MOE(n_clusters=5)
        moe.set_training_values(xt, yt)
        moe.options["heaviside_optimization"] = True
        moe.train()

        # validation data
        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)

        rms_error = compute_rms_error(moe, xe, ye)
        self.assert_error(rms_error, 0.0, 1e-1)

        if TestMOE.plot:
            import matplotlib.pyplot as plt
            from mpl_toolkits.mplot3d import Axes3D

            y = moe.analyse_results(x=xe, operation="predict_values")
            plt.figure(1)
            plt.plot(ye, ye, "-.")
            plt.plot(ye, y, ".")
            plt.xlabel(r"$y$ actual")
            plt.ylabel(r"$y$ prediction")

            fig = plt.figure(2)
            ax = fig.add_subplot(111, projection="3d")
            ax.scatter(xt[:, 0], xt[:, 1], yt)
            plt.title("Branin function")
            plt.show()
Example #14
    def test_norm1_2d_200(self):
        self.ndim = 2
        self.nt = 200
        self.ne = 200

        prob = LpNorm(ndim=self.ndim)

        # training data
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)
        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)

        # mixture of experts
        moe = MOE(smooth_recombination=False, n_clusters=5)
        moe.set_training_values(xt, yt)
        moe.train()

        # validation data
        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)

        rms_error = compute_rms_error(moe, xe, ye)
        self.assert_error(rms_error, 0., 1e-1)

        if TestMOE.plot:
            import matplotlib.pyplot as plt
            from mpl_toolkits.mplot3d import Axes3D

            y = moe.predict_values(xe)
            plt.figure(1)
            plt.plot(ye, ye, '-.')
            plt.plot(ye, y, '.')
            plt.xlabel(r'$y$ actual')
            plt.ylabel(r'$y$ prediction')

            fig = plt.figure(2)
            ax = fig.add_subplot(111, projection='3d')
            ax.scatter(xt[:, 0], xt[:, 1], yt)
            plt.title('L1 Norm')
            plt.show()
Example #15
 def test_qei_criterion_default(self):
     fun = TestEGO.function_test_1d
     xlimits = np.array([[0.0, 25.0]])
     xdoe = FullFactorial(xlimits=xlimits)(3)
     ydoe = fun(xdoe)
     ego = EGO(xdoe=xdoe,
               ydoe=ydoe,
               n_iter=1,
               criterion="SBO",
               xlimits=xlimits,
               n_start=30)
     ego._setup_optimizer(fun)
     ego.gpr.set_training_values(xdoe, ydoe)
     ego.gpr.train()
     xtest = np.array([[10.0]])
      # the default virtual point should equal the 3-sigma lower bound of the kriging prediction interval
     expected = float(
         ego.gpr.predict_values(xtest) -
         3 * np.sqrt(ego.gpr.predict_variances(xtest)))
     actual = float(ego._get_virtual_point(xtest, fun(xtest))[0])
     self.assertAlmostEqual(expected, actual)
Example #16
    def test_branin_2d_200(self):
        self.ndim = 2
        self.nt = 200
        self.ne = 200

        prob = Branin(ndim=self.ndim)

        # training data
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)
        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)

        # mixture of experts
        moe = MOE(n_clusters=5)
        moe.set_training_values(xt, yt)
        moe.options['heaviside_optimization'] = True
        moe.train()

        # validation data
        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)

        rms_error = compute_rms_error(moe, xe, ye)
        self.assert_error(rms_error, 0., 1e-1)

        if TestMOE.plot:
            y = moe.analyse_results(x=xe, operation='predict_values')
            plt.figure(1)
            plt.plot(ye, ye, '-.')
            plt.plot(ye, y, '.')
            plt.xlabel(r'$y$ actual')
            plt.ylabel(r'$y$ prediction')

            fig = plt.figure(2)
            ax = fig.add_subplot(111, projection='3d')
            ax.scatter(xt[:, 0], xt[:, 1], yt)
            plt.title('Branin function')
            plt.show()
Example #17
    def run_MF_test(self):
        method_name = inspect.stack()[1][3]
        pname = method_name.split("_")[1]
        sname = method_name.split("_")[2]

        prob = self.problems[pname]
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)

        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)
        print(prob(xt, kx=0).shape)
        for i in range(self.ndim):
            yt = np.concatenate((yt, prob(xt, kx=i)), axis=1)

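        # build low-fidelity data as an affine transform (2*y + 2) of the high-fidelity response at the same points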
        y_lf = 2 * prob(xt) + 2
        x_lf = deepcopy(xt)
        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)

        sm0 = self.sms[sname]

        sm = sm0.__class__()
        sm.options = sm0.options.clone()
        if sm.options.is_declared("xlimits"):
            sm.options["xlimits"] = prob.xlimits
        sm.options["print_global"] = False

        sm.set_training_values(xt, yt[:, 0])
        sm.set_training_values(x_lf, y_lf[:, 0], name=0)
        if sm.supports["training_derivatives"]:
            for i in range(self.ndim):
                sm.set_training_derivatives(xt, yt[:, i + 1], i)

        with Silence():
            sm.train()

        t_error = compute_rms_error(sm)
        e_error = compute_rms_error(sm, xe, ye)
Example #18
    def test_branin_2D(self):
        n_iter = 20
        fun = Branin(ndim=2)
        xlimits = fun.xlimits
        criterion = "LCB"  #'EI' or 'SBO' or 'LCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        ego = EGO(
            xdoe=xdoe,
            n_iter=n_iter,
            criterion=criterion,
            xlimits=xlimits,
            random_state=42,
        )

        x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
        # 3 optimal points possible: [-pi, 12.275], [pi, 2.275], [9.42478, 2.475]
        self.assertTrue(
            np.allclose([[-3.14, 12.275]], x_opt, rtol=0.2)
            or np.allclose([[3.14, 2.275]], x_opt, rtol=0.2)
            or np.allclose([[9.42, 2.475]], x_opt, rtol=0.2))
        self.assertAlmostEqual(0.39, float(y_opt), delta=1)
Example #19
    def compute_grid(self,
                     isp_lim,
                     twr_lim,
                     nb_samp,
                     samp_method='full',
                     criterion='m'):
        """Compute the sampling grid fro given `Isp` and `twr` limits and sampling scheme.

        Parameters
        ----------
        isp_lim : iterable
            Specific impulse lower and upper bounds [s]
        twr_lim : iterable
            Thrust/weight ratio lower and upper bounds [-]
        nb_samp : int
            Total number of samples. Must be a perfect square if ``full`` is chosen as `samp_method`
        samp_method : str, optional
            Sampling scheme, ``lhs`` for Latin Hypercube Sampling or ``full`` for Full-Factorial Sampling.
            Default is ``full``
        criterion : str, optional
            Criterion used to construct the LHS design among ``center``, ``maximin``, ``centermaximin``,
            ``correlation``, ``c``, ``m``, ``cm``, ``corr``, ``ese``. ``c``, ``m``, ``cm`` and ``corr`` are
            abbreviations of ``center``, ``maximin``, ``centermaximin`` and ``correlation``, respectively.
            Default is ``m``

        """

        self.limits = np.vstack((np.asarray(isp_lim), np.asarray(twr_lim)))

        if samp_method == 'lhs':
            samp = LHS(xlimits=self.limits, criterion=criterion)
        elif samp_method == 'full':
            samp = FullFactorial(xlimits=self.limits)
        else:
            raise ValueError('samp_method must be either lhs or full')

        self.x_samp = samp(nb_samp)
        self.m_prop = np.zeros((nb_samp, 1))
        self.failures = np.zeros((nb_samp, 1))
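
For reference, a minimal standalone sketch of the two sampling schemes compute_grid selects between; the bounds and sample count here are illustrative assumptions, not values from the original:

    import numpy as np
    from smt.sampling_methods import LHS, FullFactorial

    limits = np.array([[250.0, 350.0], [1.1, 4.0]])  # hypothetical Isp [s] and twr [-] bounds

    # LHS: space-filling design, works for any number of samples
    x_lhs = LHS(xlimits=limits, criterion="m")(100)

    # full factorial: structured grid, so the count must be a perfect square here (100 = 10 x 10)
    x_ff = FullFactorial(xlimits=limits)(100)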
Example #20
    def test_derivatives(self):
        # Construction of the DOE
        ndim = 4
        fun = Sphere(ndim=ndim)
        sampling = FullFactorial(xlimits=fun.xlimits)
        xt = sampling(100)
        yt = fun(xt)

        # Compute the training derivatives
        for i in range(ndim):
            yd = fun(xt, kx=i)
            yt = np.concatenate((yt, yd), axis=1)

        # check KRG models
        sm_krg_c = KRG(poly="constant", print_global=False)
        sm_krg_c.set_training_values(xt, yt[:, 0])
        sm_krg_c.train()
        TestKRG._check_derivatives(sm_krg_c, xt, yt, ndim)

        sm_krg_l = KRG(poly="linear", print_global=False)
        sm_krg_l.set_training_values(xt, yt[:, 0])
        sm_krg_l.train()
        TestKRG._check_derivatives(sm_krg_l, xt, yt, ndim)
Example #21
    def test_rosenbrock_2D_parallel(self):
        n_iter = 15
        n_parallel = 5
        fun = Rosenbrock(ndim=2)
        xlimits = fun.xlimits
        criterion = "UCB"  #'EI' or 'SBO' or 'UCB'

        xdoe = FullFactorial(xlimits=xlimits)(10)
        qEI = "KB"
        ego = EGO(
            xdoe=xdoe,
            n_iter=n_iter,
            criterion=criterion,
            xlimits=xlimits,
            n_parallel=n_parallel,
            qEI=qEI,
            evaluator=ParallelEvaluator(),
        )

        x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
        print("Rosenbrock: ", x_opt)
        self.assertTrue(np.allclose([[1, 1]], x_opt, rtol=0.5))
        self.assertAlmostEqual(0.0, float(y_opt), delta=1)
Example #22
    def compute_matrix(self, nb_eval=None):
        """Compute structured matrices for `Isp`, `twr` and `m_prop` to display the training data on a response surface.

        Parameters
        ----------
        nb_eval : int or ``None``
            Number of evaluation points for the matrices when Latin Hypercube Sampling was used for
            training, or ``None`` to reuse the full-factorial training samples. Default is ``None``

        Returns
        -------
        isp : ndarray
            Matrix of specific impulses [s]
        twr : ndarray
            Matrix of thrust/weight ratios [-]
        m_mat : ndarray
            Matrix of propellant fractions [-]

        """

        if nb_eval is not None:  # LHS training: evaluate the surrogate on a structured full-factorial grid
            samp_eval = FullFactorial(xlimits=self.limits)
            x_eval = samp_eval(nb_eval)
            m_prop_eval = self.trained.predict_values(x_eval)

        else:  # full-factorial training: reuse the training samples directly
            nb_eval = np.size(self.m_prop)
            x_eval = deepcopy(self.x_samp)
            m_prop_eval = deepcopy(self.m_prop)

        isp = np.unique(x_eval[:, 0])
        twr = np.unique(x_eval[:, 1])
        n = int(np.sqrt(nb_eval))
        m_mat = np.reshape(m_prop_eval, (n, n))

        return isp, twr, m_mat
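
The returned matrices map directly onto a response-surface plot; a hypothetical follow-up assuming matplotlib, where sp stands in for a trained instance of this class:

    import matplotlib.pyplot as plt

    isp, twr, m_mat = sp.compute_matrix()
    plt.contourf(isp, twr, m_mat)  # orientation may need a transpose depending on sample ordering
    plt.xlabel("Isp [s]")
    plt.ylabel("twr [-]")
    plt.colorbar(label="propellant fraction [-]")
    plt.show()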
Example #23
 def test_ff_weights(self):
     xlimits = np.array([[0.0, 1.0], [0.0, 1.0]])
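     # weights set the relative share of grid levels per dimension (here biased toward the second)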
     sampling = FullFactorial(xlimits=xlimits, weights=[0.25, 0.75])
     num = 10
     x = sampling(num)
     self.assertEqual((10, 2), x.shape)
Example #24
    def run_test(self):
        method_name = inspect.stack()[1][3]
        pname = method_name.split("_")[1]
        sname = method_name.split("_")[2]

        prob = self.problems[pname]
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)

        np.random.seed(0)
        xt = sampling(self.nt)
        yt = prob(xt)
        dyt = {}
        for kx in range(prob.xlimits.shape[0]):
            dyt[kx] = prob(xt, kx=kx)

        np.random.seed(1)
        xe = sampling(self.ne)
        ye = prob(xe)
        dye = {}
        for kx in range(prob.xlimits.shape[0]):
            dye[kx] = prob(xe, kx=kx)

        sm0 = self.sms[sname]

        sm = sm0.__class__()
        sm.options = sm0.options.clone()
        if sm.options.is_declared("xlimits"):
            sm.options["xlimits"] = prob.xlimits
        sm.options["print_global"] = False

        sm.set_training_values(xt, yt)

        with Silence():
            sm.train()

        t_error = compute_rms_error(sm)
        e_error = compute_rms_error(sm, xe, ye)

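        # second pass: retrain the same surrogate with training derivatives to measure gradient-enhanced (GE) errors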
        sm = sm0.__class__()
        sm.options = sm0.options.clone()
        if sm.options.is_declared("xlimits"):
            sm.options["xlimits"] = prob.xlimits
        sm.options["print_global"] = False

        sm.set_training_values(xt, yt)
        for kx in range(prob.xlimits.shape[0]):
            sm.set_training_derivatives(xt, dyt[kx], kx)

        with Silence():
            sm.train()

        ge_t_error = compute_rms_error(sm)
        ge_e_error = compute_rms_error(sm, xe, ye)

        if print_output:
            print("%8s %6s %18.9e %18.9e %18.9e %18.9e" %
                  (pname[:6], sname, t_error, e_error, ge_t_error, ge_e_error))

        self.assert_error(t_error, 0.0, self.t_errors[sname])
        self.assert_error(e_error, 0.0, self.e_errors[sname])
        self.assert_error(ge_t_error, 0.0, self.ge_t_errors[sname])
        self.assert_error(ge_e_error, 0.0, self.ge_e_errors[sname])
"""
Created on Fri Sep 25 22:23:03 2020
Programa creado para el Aspecto 2 del Proyecto Final de IA - Doctorado
@author: Marco Ortiz
"""
#Import the modules we will use
import numpy as np
import matplotlib.pyplot as plt
import time
from smt.sampling_methods import FullFactorial

#Record the time at the start of execution
t0 = time.perf_counter()  # time.clock() was removed in Python 3.8
#Define an array (initial population) to perform the sampling
xlimits = np.array([[0.0, 4.0], [0.0, 3.0]])
sampling = FullFactorial(xlimits=xlimits)

num = 50
x = sampling(num)
#Record the time once the algorithm has finished
t1 = time.perf_counter()

print("El vector de salida tiene forma:", x.shape)
#Imprimimos los puntos seleccionados por el sampling
plt.plot(x[:, 0], x[:, 1], "o")
plt.xlabel("x")
plt.ylabel("y")
plt.show()

print("El tiempo de ejecución del programa fue:", t1 - t0)
print("Si se realizan 30K iteraciones:", 30000 * (t1 - t0))
Example #26
    def run_moe_example():
        import numpy as np
        from smt.applications import MOE
        from smt.problems import LpNorm
        from smt.sampling_methods import FullFactorial

        import sklearn
        import matplotlib.pyplot as plt
        from matplotlib import colors
        from mpl_toolkits.mplot3d import Axes3D

        ndim = 2
        nt = 200
        ne = 200

        # Problem: L1 norm (dimension 2)
        prob = LpNorm(ndim=ndim)

        # Training data
        sampling = FullFactorial(xlimits=prob.xlimits, clip=True)
        np.random.seed(0)
        xt = sampling(nt)
        yt = prob(xt)

        # Mixture of experts
        moe = MOE(smooth_recombination=True, n_clusters=5)
        moe.set_training_values(xt, yt)
        moe.train()

        # Validation data
        np.random.seed(1)
        xe = sampling(ne)
        ye = prob(xe)

        # Prediction
        y = moe.predict_values(xe)
        fig = plt.figure(1)
        fig.set_size_inches(12, 11)

        # Cluster display
        colors_ = list(colors.cnames.items())
        GMM = moe.cluster
        weight = GMM.weights_
        mean = GMM.means_
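        # older scikit-learn mixture models expose covars_; 0.20+ uses covariances_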
        if sklearn.__version__ < "0.20.0":
            cov = GMM.covars_
        else:
            cov = GMM.covariances_
        prob_ = moe._proba_cluster(xt)
        sort = np.apply_along_axis(np.argmax, 1, prob_)

        xlim = prob.xlimits
        x0 = np.linspace(xlim[0, 0], xlim[0, 1], 20)
        x1 = np.linspace(xlim[1, 0], xlim[1, 1], 20)
        xv, yv = np.meshgrid(x0, x1)
        x = np.array(list(zip(xv.reshape((-1,)), yv.reshape((-1,)))))
        prob = moe._proba_cluster(x)

        plt.subplot(221, projection="3d")
        ax = plt.gca()
        for i in range(len(sort)):
            color = colors_[int(((len(colors_) - 1) / sort.max()) * sort[i])][0]
            ax.scatter(xt[i][0], xt[i][1], yt[i], c=color)
        plt.title("Clustered Samples")

        plt.subplot(222, projection="3d")
        ax = plt.gca()
        for i in range(len(weight)):
            color = colors_[int(((len(colors_) - 1) / len(weight)) * i)][0]
            ax.plot_trisurf(
                x[:, 0], x[:, 1], prob[:, i], alpha=0.4, linewidth=0, color=color
            )
        plt.title("Membership Probabilities")

        plt.subplot(223)
        for i in range(len(weight)):
            color = colors_[int(((len(colors_) - 1) / len(weight)) * i)][0]
            plt.tricontour(x[:, 0], x[:, 1], prob[:, i], 1, colors=color, linewidths=3)
        plt.title("Cluster Map")

        plt.subplot(224)
        plt.plot(ye, ye, "-.")
        plt.plot(ye, y, ".")
        plt.xlabel("actual")
        plt.ylabel("prediction")
        plt.title("Predicted vs Actual")

        plt.show()