Example #1: EGO on a mixed-integer problem with the FULL_GAUSSIAN categorical kernel
    def test_ego_mixed_integer_full_gaussian(self):
        n_iter = 15
        xtypes = [FLOAT, (ENUM, 3), (ENUM, 2), ORD]
        xlimits = np.array([[-5, 5], ["blue", "red", "green"],
                            ["large", "small"], [0, 2]])
        n_doe = 2
        sampling = MixedIntegerSamplingMethod(
            xtypes,
            xlimits,
            LHS,
            criterion="ese",
            random_state=42,
            output_in_folded_space=True,
        )
        xdoe = sampling(n_doe)
        criterion = "EI"  #'EI' or 'SBO' or 'LCB'
        sm = KRG(print_global=False)
        mixint = MixedIntegerContext(xtypes, xlimits)

        ego = EGO(
            n_iter=n_iter,
            criterion=criterion,
            xdoe=xdoe,
            xtypes=xtypes,
            xlimits=xlimits,
            surrogate=sm,
            enable_tunneling=False,
            random_state=42,
            categorical_kernel=FULL_GAUSSIAN,
        )
        _, y_opt, _, _, _ = ego.optimize(
            fun=TestEGO.function_test_mixed_integer)
Example #2: EGO on a mixed-integer problem (one float, two enums, one ordered variable)
    def test_ego_mixed_integer(self):
        n_iter = 15
        xtypes = [FLOAT, (ENUM, 3), (ENUM, 2), ORD]
        xlimits = np.array([[-5, 5], ["blue", "red", "green"],
                            ["large", "small"], ["0", "2", "3"]])
        n_doe = 2
        sampling = MixedIntegerSamplingMethod(xtypes,
                                              xlimits,
                                              LHS,
                                              criterion="ese",
                                              random_state=42)
        xdoe = sampling(n_doe)
        criterion = "EI"  #'EI' or 'SBO' or 'LCB'
        sm = KRG(print_global=False)
        mixint = MixedIntegerContext(xtypes, xlimits)

        ego = EGO(
            n_iter=n_iter,
            criterion=criterion,
            xdoe=xdoe,
            xtypes=xtypes,
            xlimits=xlimits,
            surrogate=sm,
            enable_tunneling=False,
            random_state=42,
        )
        _, y_opt, _, _, _ = ego.optimize(
            fun=TestEGO.function_test_mixed_integer)

        self.assertAlmostEqual(-15, float(y_opt), delta=5)
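
The optimizer works on encoded values: continuous variables stay floats, enumerated variables are passed as category indices, and ordered variables as level indices. A minimal sketch (not part of the test, with a made-up encoded point) of mapping such a point back to typed values via MixedIntegerContext.cast_to_mixed_integer, the same call used in Example #7:

    mixint = MixedIntegerContext(xtypes, xlimits)
    # hypothetical encoded point: float 1.5, enum index 2, enum index 1, ordinal index 0
    x_encoded = np.array([1.5, 2, 1, 0])
    # cast_to_mixed_integer maps each index back to its label in xlimits
    print(mixint.cast_to_mixed_integer(x_encoded))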
Example #3: EGO on the 2D Branin function with one ordered and one continuous variable
    def test_branin_2D_mixed(self):
        n_iter = 20
        fun = Branin(ndim=2)
        xtypes = [ORD, FLOAT]
        xlimits = fun.xlimits
        criterion = "EI"  #'EI' or 'SBO' or 'LCB'

        sm = KRG(print_global=False)
        mixint = MixedIntegerContext(xtypes, xlimits)
        sampling = MixedIntegerSamplingMethod(xtypes, xlimits, FullFactorial)
        xdoe = sampling(10)

        ego = EGO(
            xdoe=xdoe,
            n_iter=n_iter,
            criterion=criterion,
            xtypes=xtypes,
            xlimits=xlimits,
            surrogate=sm,
            random_state=42,
        )

        x_opt, y_opt, _, _, _ = ego.optimize(fun=fun)
        # 3 optimal points possible: [-pi, 12.275], [pi, 2.275], [9.42478, 2.475]
        self.assertTrue(
            np.allclose([[-3, 12.275]], x_opt, rtol=0.2)
            or np.allclose([[3, 2.275]], x_opt, rtol=0.2)
            or np.allclose([[9, 2.475]], x_opt, rtol=0.2))
        self.assertAlmostEqual(0.494, float(y_opt), delta=1)
Example #4: mixed-integer LHS sampling plotted with a categorical colormap
    def run_mixed_integer_lhs_example(self):
        import numpy as np
        import matplotlib.pyplot as plt
        from matplotlib import colors

        from smt.sampling_methods import LHS
        from smt.applications.mixed_integer import (
            FLOAT,
            INT,
            ENUM,
            MixedIntegerSamplingMethod,
        )

        xtypes = [FLOAT, (ENUM, 2)]
        xlimits = [[0.0, 4.0], ["blue", "red"]]
        sampling = MixedIntegerSamplingMethod(xtypes,
                                              xlimits,
                                              LHS,
                                              criterion="ese")

        num = 40
        x = sampling(num)

        print(x.shape)

        cmap = colors.ListedColormap(xlimits[1])
        plt.scatter(x[:, 0], np.zeros(num), c=x[:, 1], cmap=cmap)
        plt.show()
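
In the folded output above, x has one column per variable: column 0 is the float and column 1 holds the enum as an index (0 for "blue", 1 for "red"), which is why it can be fed directly to the ListedColormap. A minimal sketch (reusing x and xlimits from the example) of recovering the labels for inspection:

    # map each enum index back to its label, e.g. 0 -> "blue", 1 -> "red"
    labels = [xlimits[1][int(v)] for v in x[:, 1]]
    print(labels[:5])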
Example #5: mixed-integer LHS sampling visualized in 3D over the unfolded (one-hot) columns
    def test_mixed_integer_lhs(self):
        import numpy as np
        from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 unused import
        import matplotlib.pyplot as plt

        from smt.sampling_methods import LHS
        from smt.applications.mixed_integer import (
            FLOAT,
            INT,
            ENUM,
            MixedIntegerSamplingMethod,
        )

        xtypes = [(ENUM, 2), FLOAT]
        xlimits = [["blue", "red"], [0.0, 4.0]]
        # keep the one-hot (unfolded) columns so the 3D scatter below has
        # three coordinates: two 0/1 enum columns plus the float
        sampling = MixedIntegerSamplingMethod(xtypes,
                                              xlimits,
                                              LHS,
                                              criterion="ese",
                                              output_in_folded_space=False)

        num = 40
        x = sampling(num)

        print(x.shape)

        fig = plt.figure()
        ax = fig.add_subplot(111, projection="3d")
        ax.scatter(x[:, 0], x[:, 1], x[:, 2], marker="o")
        ax.set_xlabel("x0 blue (1) or not (0)")
        ax.set_ylabel("x1 red (1) or not (0)")
        ax.set_zlabel("x2 float")
        plt.show()
Example #6: shape of a DOE returned in folded space
    def test_unfolded_xlimits_type(self):
        xtypes = [FLOAT, (ENUM, 2), (ENUM, 2), INT]
        xlimits = np.array([[-5, 5], ["2", "3"], ["4", "5"], [0, 2]])
        sampling = MixedIntegerSamplingMethod(xtypes,
                                              xlimits,
                                              LHS,
                                              criterion="ese")
        doe = sampling(10)
        self.assertEqual((10, 4), doe.shape)
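
For contrast, a minimal sketch of the unfolded output (assumption: output_in_folded_space=False, the keyword seen in Example #1, keeps the one-hot columns): each (ENUM, 2) variable expands to two 0/1 columns, so the same DOE has 1 + 2 + 2 + 1 = 6 columns instead of 4.

    # same variables, but keep the unfolded (one-hot) representation
    sampling_unfolded = MixedIntegerSamplingMethod(
        xtypes, xlimits, LHS, criterion="ese", output_in_folded_space=False
    )
    doe_unfolded = sampling_unfolded(10)
    print(doe_unfolded.shape)  # expected (10, 6): float + 2 one-hot + 2 one-hot + int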
Example #7: full EGO mixed-integer workflow with a convergence plot
    def run_ego_mixed_integer_example():
        import numpy as np
        from smt.applications import EGO
        from smt.sampling_methods import FullFactorial
        from smt.applications.mixed_integer import (
            FLOAT,
            INT,
            ENUM,
            MixedIntegerSamplingMethod,
        )

        import sklearn
        import matplotlib.pyplot as plt
        from matplotlib import colors
        from mpl_toolkits.mplot3d import Axes3D
        from scipy.stats import norm
        from smt.surrogate_models import KRG
        from smt.sampling_methods import LHS

        # Regarding the interface, the function to be optimized receives
        # categorical values as their index in the enumeration type specification.
        # For instance, here "blue" is passed to the function as the index value 2.
        # This keeps the numpy ndarray X purely numerical.
        def function_test_mixed_integer(X):
            # float
            x1 = X[:, 0]
            #  enum 1
            c1 = X[:, 1]
            x2 = c1 == 0
            x3 = c1 == 1
            x4 = c1 == 2
            #  enum 2
            c2 = X[:, 2]
            x5 = c2 == 0
            x6 = c2 == 1
            # int
            i = X[:, 3]

            y = ((x2 + 2 * x3 + 3 * x4) * x5 * x1 +
                 (x2 + 2 * x3 + 3 * x4) * x6 * 0.95 * x1 + i)
            return y

        n_iter = 15
        xtypes = [FLOAT, (ENUM, 3), (ENUM, 2), INT]
        xlimits = np.array([[-5, 5], ["red", "green", "blue"],
                            ["square", "circle"], [0, 2]])
        criterion = "EI"  #'EI' or 'SBO' or 'UCB'
        qEI = "KB"
        sm = KRG(print_global=False)

        n_doe = 2
        sampling = MixedIntegerSamplingMethod(xtypes,
                                              xlimits,
                                              LHS,
                                              criterion="ese")
        xdoe = sampling(n_doe)
        ydoe = function_test_mixed_integer(xdoe)

        ego = EGO(
            n_iter=n_iter,
            criterion=criterion,
            xdoe=xdoe,
            ydoe=ydoe,
            xtypes=xtypes,
            xlimits=xlimits,
            surrogate=sm,
            qEI=qEI,
        )

        x_opt, y_opt, _, _, y_data = ego.optimize(
            fun=function_test_mixed_integer)
        print("Minimum in x={} with f(x)={:.1f}".format(x_opt, float(y_opt)))
        print("Minimum in typed x={}".format(
            ego.mixint.cast_to_mixed_integer(x_opt)))

        min_ref = -15
        mini = np.zeros(n_iter)
        for k in range(n_iter):
            mini[k] = np.log(np.abs(np.min(y_data[0:k + n_doe - 1]) - min_ref))
        x_plot = np.linspace(1, n_iter + 0.5, n_iter)
        u = max(np.floor(max(mini)) + 1, -100)
        l = max(np.floor(min(mini)) - 0.2, -10)
        fig = plt.figure()
        axes = fig.add_axes([0.1, 0.1, 0.8, 0.8])
        axes.plot(x_plot, mini, color="r")
        axes.set_ylim([l, u])
        plt.title("minimum convergence plot", loc="center")
        plt.xlabel("number of iterations")
        plt.ylabel("log of the difference w.r.t the best")
        plt.show()