def test_mixed_int_krg(self):
    import numpy as np
    import matplotlib.pyplot as plt

    from smt.surrogate_models import KRG
    from smt.applications.mixed_integer import MixedIntegerSurrogateModel, INT

    xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
    yt = np.array([0.0, 1.0, 1.5, 0.5, 1.0])

    # xtypes = [FLOAT, INT, (ENUM, 3), (ENUM, 2)]
    # FLOAT means x1 continuous
    # INT means x2 integer
    # (ENUM, 3) means x3, x4 & x5 are 3 levels of the same categorical variable
    # (ENUM, 2) means x6 & x7 are 2 levels of the same categorical variable

    sm = MixedIntegerSurrogateModel(
        xtypes=[INT], xlimits=[[0, 4]], surrogate=KRG(theta0=[1e-2])
    )
    sm.set_training_values(xt, yt)
    sm.train()

    num = 100
    x = np.linspace(0.0, 4.0, num)
    y = sm.predict_values(x)

    plt.plot(xt, yt, "o")
    plt.plot(x, y)
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend(["Training data", "Prediction"])
    plt.show()

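# The xtypes comment in test_mixed_int_krg describes a fully mixed design
# space, while the test itself only exercises a single INT variable. The
# sketch below is not taken from the SMT documentation; the training data is
# illustrative only, and it assumes the same MixedIntegerSurrogateModel API
# used throughout this file. Categorical variables are passed as level
# indices, as in the 2D/3D tests further down.
def run_mixed_types_sketch(self):
    import numpy as np

    from smt.surrogate_models import KRG
    from smt.applications.mixed_integer import (
        MixedIntegerSurrogateModel,
        FLOAT,
        INT,
        ENUM,
    )

    xtypes = [FLOAT, INT, (ENUM, 3), (ENUM, 2)]
    xlimits = [
        [-2.0, 2.0],  # x1: continuous bounds
        [0, 4],  # x2: integer bounds
        ["a", "b", "c"],  # x3-x5: 3 levels of one categorical variable
        ["low", "high"],  # x6-x7: 2 levels of another categorical variable
    ]
    # Illustrative training points: [x1, x2, enum level index, enum level index]
    xt = np.array([[0.0, 0, 0, 0], [1.0, 2, 1, 1], [-1.5, 4, 2, 0]])
    yt = np.array([0.0, 1.0, 1.5])

    sm = MixedIntegerSurrogateModel(
        xtypes=xtypes, xlimits=xlimits, surrogate=KRG(theta0=[1e-2])
    )
    sm.set_training_values(xt, yt)
    sm.train()
    print(sm.predict_values(xt))
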
def test_mixed_gower(self):
    from smt.applications.mixed_integer import MixedIntegerSurrogateModel, ENUM
    from smt.surrogate_models import KRG
    import matplotlib.pyplot as plt
    import numpy as np

    xt = np.linspace(1.0, 5.0, 5)
    x_train = np.array(["%.2f" % i for i in xt], dtype=object)
    yt = np.array([0.0, 1.0, 1.5, 0.5, 1.0])

    xlimits = [["0.0", "1.0", " 2.0", "3.0", "4.0"]]

    # Surrogate
    sm = MixedIntegerSurrogateModel(
        use_gower_distance=True,
        xtypes=[(ENUM, 5)],
        xlimits=xlimits,
        surrogate=KRG(theta0=[1e-2]),
    )
    sm.set_training_values(x_train, yt)
    sm.train()

    # DOE for validation
    num = 101
    x = np.linspace(0, 5, num)
    x_pred = np.array(["%.2f" % i for i in x], dtype=object)
    y = sm.predict_values(x_pred)

    plt.plot(xt, yt, "o")
    plt.plot(x, y)
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend(["Training data", "Prediction"])
    plt.show()

def test_mixed_gower_krg(self):
    from smt.applications.mixed_integer import (
        MixedIntegerSurrogateModel,
        ENUM,
        GOWER,
    )
    from smt.surrogate_models import KRG
    import matplotlib.pyplot as plt
    import numpy as np

    xt = np.array([0, 3, 4])
    yt = np.array([0.0, 1.0, 1.5])

    xlimits = [["0.0", "1.0", " 2.0", "3.0", "4.0"]]

    # Surrogate
    sm = MixedIntegerSurrogateModel(
        categorical_kernel=GOWER,
        xtypes=[(ENUM, 5)],
        xlimits=xlimits,
        surrogate=KRG(theta0=[1e-2]),
    )
    sm.set_training_values(xt, yt)
    sm.train()

    # DOE for validation: one prediction point per categorical level (0 to 4)
    x = np.linspace(0, 4, 5)
    y = sm.predict_values(x)

    plt.plot(xt, yt, "o", label="data")
    plt.plot(x, y, "d", color="red", markersize=3, label="pred")
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend()
    plt.show()

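# A minimal sanity-check sketch (not one of the SMT examples): the 2D/3D tests
# below verify that a Kriging interpolant reproduces its training outputs with
# (near-)zero predicted variance at the training points. The same check,
# written out for the 1D GOWER example above, could look like this.
def run_gower_training_point_check(self):
    import numpy as np

    from smt.surrogate_models import KRG
    from smt.applications.mixed_integer import (
        MixedIntegerSurrogateModel,
        ENUM,
        GOWER,
    )

    xt = np.array([0, 3, 4])
    yt = np.array([0.0, 1.0, 1.5])
    xlimits = [["0.0", "1.0", " 2.0", "3.0", "4.0"]]

    sm = MixedIntegerSurrogateModel(
        categorical_kernel=GOWER,
        xtypes=[(ENUM, 5)],
        xlimits=xlimits,
        surrogate=KRG(theta0=[1e-2]),
    )
    sm.set_training_values(xt, yt)
    sm.train()

    # Predictions at the training points should match yt, with ~0 variance.
    self.assertTrue(np.abs(np.sum(sm.predict_values(xt).ravel() - yt)) < 1e-6)
    self.assertTrue(np.abs(np.sum(sm.predict_variances(xt))) < 1e-6)
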
def run_mixed_integer_qp_example(self):
    import numpy as np
    import matplotlib.pyplot as plt

    from smt.surrogate_models import QP
    from smt.applications.mixed_integer import MixedIntegerSurrogateModel, ORD

    xt = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
    yt = np.array([0.0, 1.0, 1.5, 0.5, 1.0])

    # xtypes = [FLOAT, ORD, (ENUM, 3), (ENUM, 2)]
    # FLOAT means x1 continuous
    # ORD means x2 ordered
    # (ENUM, 3) means x3, x4 & x5 are 3 levels of the same categorical variable
    # (ENUM, 2) means x6 & x7 are 2 levels of the same categorical variable

    sm = MixedIntegerSurrogateModel(xtypes=[ORD], xlimits=[[0, 4]], surrogate=QP())
    sm.set_training_values(xt, yt)
    sm.train()

    num = 100
    x = np.linspace(0.0, 4.0, num)
    y = sm.predict_values(x)

    plt.plot(xt, yt, "o")
    plt.plot(x, y)
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend(["Training data", "Prediction"])
    plt.show()

def test_mixed_full_gaussian_3D(self):
    from smt.applications.mixed_integer import (
        MixedIntegerSurrogateModel,
        ENUM,
        FLOAT,
        ORD,
        FULL_GAUSSIAN,
    )
    from smt.surrogate_models import KRG
    import matplotlib.pyplot as plt
    import numpy as np
    import itertools

    xt = np.array([[0, 5, 0], [2, -1, 2], [4, 0.5, 1]])
    yt = np.array([[0.0], [1.0], [1.5]])

    xlimits = [
        ["0.0", "1.0", " 2.0", "3.0", "4.0"],
        [-5, 5],
        ["0.0", "1.0", " 2.0", "3.0"],
    ]

    # Surrogate
    sm = MixedIntegerSurrogateModel(
        categorical_kernel=FULL_GAUSSIAN,
        xtypes=[(ENUM, 5), ORD, (ENUM, 4)],
        xlimits=xlimits,
        surrogate=KRG(theta0=[1e-2]),
    )
    sm.set_training_values(xt, yt)
    sm.train()

    # DOE for validation: full 5 x 21 x 4 grid (420 points)
    x = np.linspace(0, 4, 5)
    x2 = np.linspace(-5, 5, 21)
    x3 = np.linspace(0, 3, 4)
    x1 = []
    for element in itertools.product(x, x2, x3):
        x1.append(np.array(element))
    x_pred = np.array(x1)

    for i, x_i in enumerate(x_pred):
        print(i, x_i)

    y = sm.predict_values(x_pred)
    yvar = sm.predict_variances(x_pred)

    # Predictions are correct on known points: the training points [0, 5, 0],
    # [2, -1, 2] and [4, 0.5, 1] land at flat grid indices 80, 202 and 381.
    self.assertTrue(np.abs(np.sum(np.array([y[80], y[202], y[381]]) - yt)) < 1e-6)
    self.assertTrue(
        np.abs(np.sum(np.array([yvar[80], yvar[202], yvar[381]]))) < 1e-6
    )

def test_mixed_int_krg(self):
    import numpy as np
    import matplotlib.pyplot as plt

    from smt.surrogate_models import KRG
    from smt.applications.mixed_integer import MixedIntegerSurrogateModel, INT

    xt = np.array([0.0, 2.0, 3.0])
    yt = np.array([0.0, 1.5, 0.9])

    # xtypes = [FLOAT, INT, (ENUM, 3), (ENUM, 2)]
    # FLOAT means x1 continuous
    # INT means x2 integer
    # (ENUM, 3) means x3, x4 & x5 are 3 levels of the same categorical variable
    # (ENUM, 2) means x6 & x7 are 2 levels of the same categorical variable

    sm = MixedIntegerSurrogateModel(
        xtypes=[INT], xlimits=[[0, 4]], surrogate=KRG(theta0=[1e-2])
    )
    sm.set_training_values(xt, yt)
    sm.train()

    num = 500
    x = np.linspace(0.0, 4.0, num)
    y = sm.predict_values(x)
    # estimated variance
    s2 = sm.predict_variances(x)

    fig, axs = plt.subplots(2)
    axs[0].plot(xt, yt, "o")
    axs[0].plot(x, y)
    axs[0].set_xlabel("x")
    axs[0].set_ylabel("y")
    axs[0].legend(["Training data", "Prediction"])

    # add a plot with variance
    axs[1].plot(xt, yt, "o")
    axs[1].plot(x, y)
    axs[1].fill_between(
        np.ravel(x),
        np.ravel(y - 3 * np.sqrt(s2)),
        np.ravel(y + 3 * np.sqrt(s2)),
        color="lightgrey",
    )
    axs[1].set_xlabel("x")
    axs[1].set_ylabel("y")
    axs[1].legend(["Training data", "Prediction", "Confidence Interval 99%"])
    plt.show()

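# Side note (not from the SMT examples): the shaded band in the plot above is
# the mean prediction +/- 3 standard deviations. For a Gaussian predictive
# distribution this covers about 99.7% of the probability mass, slightly more
# than the "Confidence Interval 99%" legend label suggests.
def run_three_sigma_coverage_note(self):
    from scipy.stats import norm

    # Probability mass of a standard normal within +/- 3 sigma.
    print(norm.cdf(3) - norm.cdf(-3))  # ~0.9973
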
def test_mixed_gower_2D(self):
    from smt.applications.mixed_integer import (
        MixedIntegerSurrogateModel,
        ENUM,
        FLOAT,
        GOWER,
    )
    from smt.surrogate_models import KRG
    import matplotlib.pyplot as plt
    import numpy as np
    import itertools

    xt = np.array([[0, 5], [2, -1], [4, 0.5]])
    yt = np.array([[0.0], [1.0], [1.5]])

    xlimits = [["0.0", "1.0", " 2.0", "3.0", "4.0"], [-5, 5]]

    # Surrogate
    sm = MixedIntegerSurrogateModel(
        categorical_kernel=GOWER,
        xtypes=[(ENUM, 5), FLOAT],
        xlimits=xlimits,
        surrogate=KRG(theta0=[1e-2], corr="abs_exp"),
    )
    sm.set_training_values(xt, yt)
    sm.train()

    # DOE for validation: full 5 x 21 grid (105 points)
    x = np.linspace(0, 4, 5)
    x2 = np.linspace(-5, 5, 21)
    x1 = []
    for element in itertools.product(x, x2):
        x1.append(np.array(element))
    x_pred = np.array(x1)

    for i, x_i in enumerate(x_pred):
        print(i, x_i)

    y = sm.predict_values(x_pred)
    yvar = sm.predict_variances(x_pred)

    # Predictions are correct on known points: the training points [0, 5],
    # [2, -1] and [4, 0.5] land at flat grid indices 20, 50 and 95.
    self.assertTrue(np.abs(np.sum(np.array([y[20], y[50], y[95]]) - yt)) < 1e-6)
    self.assertTrue(np.abs(np.sum(np.array([yvar[20], yvar[50], yvar[95]]))) < 1e-6)
    self.assertEqual(np.shape(y), (105, 1))

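# A small helper sketch (not part of the SMT examples) making the index
# arithmetic behind the grid assertions above explicit: with
# itertools.product, the flat index of a grid point is its mixed-radix
# position, i.e. the per-axis indices combined with the sizes of the later
# axes.
def run_product_grid_index_check(self):
    def flat_index(indices, sizes):
        # indices: per-axis index of the point; sizes: per-axis grid length
        idx = 0
        for i, n in zip(indices, sizes):
            idx = idx * n + i
        return idx

    # 2D grid of test_mixed_gower_2D: 5 categorical levels x 21 values of x2
    # (step 0.5). Training point [2, -1] has axis indices (2, 8), hence
    # prediction row 2 * 21 + 8 = 50.
    self.assertEqual(flat_index([2, 8], [5, 21]), 50)
    # 3D grid of test_mixed_full_gaussian_3D: 5 x 21 x 4 points. Training
    # point [2, -1, 2] has axis indices (2, 8, 2) -> (2 * 21 + 8) * 4 + 2 = 202.
    self.assertEqual(flat_index([2, 8, 2], [5, 21, 4]), 202)
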
def test_mixed_gower_krg(self):
    import numpy as np
    import matplotlib.pyplot as plt

    from smt.surrogate_models import KRG
    from smt.applications.mixed_integer import MixedIntegerSurrogateModel
    from smt.applications.mixed_integer import ENUM

    # xtypes = [FLOAT, INT, (ENUM, 3), (ENUM, 2)]
    # FLOAT means x1 continuous
    # INT means x2 integer
    # (ENUM, 3) means x3, x4 & x5 are 3 levels of the same categorical variable
    # (ENUM, 2) means x6 & x7 are 2 levels of the same categorical variable

    xt = np.linspace(1.0, 5.0, 5)
    x_train = np.array(["%.2f" % i for i in xt], dtype=object)
    yt = np.array([0.0, 1.0, 1.5, 0.5, 1.0])

    xlimits = [["0.0", "1.0", " 2.0", "3.0", "4.0"]]

    sm = MixedIntegerSurrogateModel(
        use_gower_distance=True,
        xtypes=[(ENUM, 5)],
        xlimits=xlimits,
        surrogate=KRG(theta0=[1e-2]),
    )
    sm.set_training_values(x_train, yt)
    sm.train()

    num = 101
    x = np.linspace(0, 5, num)
    x_pred = np.array(["%.2f" % i for i in x], dtype=object)
    y = sm.predict_values(x_pred)

    plt.plot(xt, yt, "o")
    plt.plot(x, y)
    plt.xlabel("x")
    plt.ylabel("y")
    plt.legend(["Training data", "Prediction"])
    plt.show()