Example 1
class MyEvComp(Component):
    doit = Event()
    doit2 = Event()
    doit_count = Int(0, iotype='out')
    doit2_count = Int(0, iotype='out')
    some_int = Int(0, iotype='in')

    def _doit_fired(self):
        self.doit_count += 1
        
    def _doit2_fired(self):
        self.doit2_count += 1
        
    def execute(self):
        pass
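
For context, assigning any value to an Event trait invokes its _<name>_fired handler immediately (the MetaModel examples below rely on this when they set self.reset_training_data = True). A minimal, hedged usage sketch, assuming MyEvComp above was defined with the usual OpenMDAO imports (Component, Event, Int):

# Illustrative usage sketch (not part of the original example).
comp = MyEvComp()
comp.doit = True     # any assignment to an Event fires _doit_fired
comp.doit = True     # doit_count is now 2
comp.doit2 = True    # fires _doit2_fired once
comp.run()           # execute() itself does nothing here
print(comp.doit_count)   # -> 2
print(comp.doit2_count)  # -> 1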
Example 2
class SubCounterComp(Component):

    # --- Initialize counter variable ---
    config = Str('', iotype='out', desc='full ID string')
    case = Str('', iotype='in', desc='current DOE case number')
    power_level = Int(100, iotype='in', desc='% thrust setting')

    reset_iteration = Event()

    def _reset_iteration_fired(self):
        self._iteration = 0

    def __init__(self, *args, **kwargs):
        # ---------------------------------------------
        # --- Constructor for the counter component ---
        # ---------------------------------------------
        super(SubCounterComp, self).__init__(*args, **kwargs)

        # --- Edit this line to override default numbering
        # --- If restarting, should equal the subiteration number of the last successful case + 1
        # --- Needs to be 0 if starting a new case
        self._iteration = 7
        self.force_execute = True

    def execute(self):

        self.config = '%s_%s_%s' % (self.case, str(
            self._iteration), str(self.power_level))

        self._iteration += 1

        print '--------------------------------------------------'
        print 'Starting Simulation ', self.config
        print '--------------------------------------------------'
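
A hedged usage sketch of the counter above (the case label is illustrative): firing reset_iteration zeroes the sub-iteration counter before the next run.

# Illustrative usage sketch (not part of the original example).
counter = SubCounterComp()
counter.case = 'DOE_001'          # hypothetical DOE case number
counter.run()                     # config -> 'DOE_001_7_100', _iteration -> 8
counter.reset_iteration = True    # fires _reset_iteration_fired, _iteration -> 0
counter.run()                     # config -> 'DOE_001_0_100'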
Example 3
class DrivenComponent(Component):
    """ Just something to be driven and compute results. """

    x0 = Float(1., iotype='in')
    y0 = Float(1., iotype='in')  # used just to get ParameterGroup
    x1 = Float(1., iotype='in')
    x2 = Float(1., iotype='in')
    x3 = Float(1., iotype='in')
    err_event = Event()
    stop_exec = Bool(False, iotype='in')
    rosen_suzuki = Float(0., iotype='out')

    def __init__(self):
        super(DrivenComponent, self).__init__()
        self._raise_err = False

    def _err_event_fired(self):
        self._raise_err = True

    def execute(self):
        """ Compute results from input vector. """
        self.rosen_suzuki = rosen_suzuki(self.x0, self.x1, self.x2, self.x3)
        if self._raise_err:
            self.raise_exception('Forced error', RuntimeError)
        if self.stop_exec:
            self.parent.driver.stop()  # Only valid if sequential!
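
Firing err_event flags the next execution to raise, which is useful for exercising a driver's error handling. A hedged sketch (assumes the rosen_suzuki function used above is importable):

# Illustrative usage sketch (not part of the original example).
comp = DrivenComponent()
comp.run()               # computes rosen_suzuki normally
comp.err_event = True    # fires _err_event_fired -> _raise_err = True
comp.run(force=True)     # execute() now raises RuntimeError('Forced error')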
Example 4
class CycleComponent(Component): 

    design = Event(desc="flag to indicate that the calculations are design conditions")

    def __init__(self): 
        super(CycleComponent, self).__init__()

        self.run_design = False

    def _design_fired(self): 
        self.run_design = True


    def run(self, *args, **kwargs):
        super(CycleComponent, self).run(*args, **kwargs)
        self.run_design = False
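
Since run clears run_design after the superclass call, firing design makes exactly one subsequent run a design-point calculation. A hedged sketch with an illustrative subclass:

# Illustrative subclass and usage (not part of the original example).
class Compressor(CycleComponent):
    def execute(self):
        if self.run_design:
            pass  # size the component at the design point
        else:
            pass  # run off-design using the stored design sizing

comp = Compressor()
comp.design = True      # fires _design_fired -> run_design = True
comp.run(force=True)    # design calculation; run() then resets run_design
comp.run(force=True)    # off-design calculation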
Example 5
class MultiObjExpectedImprovement(Component):
    best_cases = Slot(CaseSet,
                      iotype="in",
                      desc="CaseIterator which contains only Pareto optimal "
                      "cases according to criteria.")

    criteria = Array(iotype="in",
                     desc="Names of responses to maximize expected improvement "
                     "around. Must be NormalDistribution type.")

    predicted_values = Array([0, 0],
                             iotype="in",
                             dtype=NormalDistribution,
                             desc="CaseIterator which contains NormalDistributions "
                             "for each response at a location where you wish to "
                             "calculate EI.")

    n = Int(1000,
            iotype="in",
            desc="Number of Monte Carlo Samples with which to calculate "
            "probability of improvement.")

    calc_switch = Enum("PI", ["PI", "EI"],
                       iotype="in",
                       desc="Switch to use either probability (PI) or "
                       "expected (EI) improvement.")

    PI = Float(0.0,
               iotype="out",
               desc="The probability of improvement of the next_case.")

    EI = Float(0.0,
               iotype="out",
               desc="The expected improvement of the next_case.")

    reset_y_star = Event(desc='Reset Y* on next execution')

    def __init__(self):
        super(MultiObjExpectedImprovement, self).__init__()
        self.y_star = None

    def _reset_y_star_fired(self):
        self.y_star = None

    def get_y_star(self):
        criteria_count = len(self.criteria)

        flat_crit = self.criteria.ravel()

        try:
            y_star = zip(*[self.best_cases[crit] for crit in self.criteria])
        except KeyError:
            self.raise_exception(
                'no cases in the provided case_set had output '
                'matching the provided criteria, %s' % self.criteria,
                ValueError)

        #sort list on first objective
        y_star = array(y_star)[array([i[0] for i in y_star]).argsort()]
        return y_star

    def _2obj_PI(self, mu, sigma):
        """Calculates the multi-objective probability of improvement
        for a new point with two responses. Takes as input a
        pareto frontier, mean and sigma of new point."""

        y_star = self.y_star

        PI1 = (0.5 + 0.5 * erf(
            (1 / (2**0.5)) * ((y_star[0][0] - mu[0]) / sigma[0])))
        PI3 = (1-(0.5+0.5*erf((1/(2**0.5))*((y_star[-1][0]-mu[0])/sigma[0]))))\
        *(0.5+0.5*erf((1/(2**0.5))*((y_star[-1][1]-mu[1])/sigma[1])))

        PI2 = 0
        if len(y_star) > 1:
            for i in range(len(y_star) - 1):
                PI2=PI2+((0.5+0.5*erf((1/(2**0.5))*((y_star[i+1][0]-mu[0])/sigma[0])))\
                -(0.5+0.5*erf((1/(2**0.5))*((y_star[i][0]-mu[0])/sigma[0]))))\
                *(0.5+0.5*erf((1/(2**0.5))*((y_star[i+1][1]-mu[1])/sigma[1])))
        mcpi = PI1 + PI2 + PI3
        return mcpi

    def _2obj_EI(self, mu, sigma):
        """Calculates the multi-criteria expected improvement
        for a new point with two responses. Takes as input a
        pareto frontier, mean and sigma of new point."""

        y_star = self.y_star
        ybar11 = mu[0]*(0.5+0.5*erf((1/(2**0.5))*((y_star[0][0]-mu[0])/sigma[0])))\
        -sigma[0]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[0][0]-mu[0])**2/sigma[0]**2))
        ybar13 = (mu[0]*(0.5+0.5*erf((1/(2**0.5))*((y_star[-1][0]-mu[0])/sigma[0])))\
        -sigma[0]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[-1][0]-mu[0])**2/sigma[0]**2)))\
        *(0.5+0.5*erf((1/(2**0.5))*((y_star[-1][1]-mu[1])/sigma[1])))

        ybar12 = 0
        if len(y_star) > 1:
            for i in range(len(y_star) - 1):
                ybar12 = ybar12+((mu[0]*(0.5+0.5*erf((1/(2**0.5))*((y_star[i+1][0]-mu[0])/sigma[0])))\
                -sigma[0]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[i+1][0]-mu[0])**2/sigma[0]**2)))\
                -(mu[0]*(0.5+0.5*erf((1/(2**0.5))*((y_star[i][0]-mu[0])/sigma[0])))\
                -sigma[0]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[i][0]-mu[0])**2/sigma[0]**2))))\
                *(0.5+0.5*erf((1/(2**0.5))*((y_star[i+1][1]-mu[1])/sigma[1])))

        ybar1 = (ybar11 + ybar12 + ybar13) / self.PI

        ybar21 = mu[1]*(0.5+0.5*erf((1/(2**0.5))*((y_star[0][1]-mu[1])/sigma[1])))\
        -sigma[1]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[0][1]-mu[1])**2/sigma[1]**2))
        ybar23 = (mu[1]*(0.5+0.5*erf((1/(2**0.5))*((y_star[-1][1]-mu[1])/sigma[1])))\
        -sigma[1]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[-1][1]-mu[1])**2/sigma[1]**2)))\
        *(0.5+0.5*erf((1/(2**0.5))*((y_star[-1][0]-mu[0])/sigma[0])))

        ybar22 = 0
        if len(y_star) > 1:
            for i in range(len(y_star) - 1):
                ybar22 = ybar22+((mu[1]*(0.5+0.5*erf((1/(2**0.5))*((y_star[i+1][1]-mu[1])/sigma[1])))\
                -sigma[1]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[i+1][1]-mu[1])**2/sigma[1]**2)))\
                -(mu[1]*(0.5+0.5*erf((1/(2**0.5))*((y_star[i][1]-mu[1])/sigma[1])))\
                -sigma[1]*(1/((2*pi)**0.5))*exp(-0.5*((y_star[i][1]-mu[1])**2/sigma[1]**2))))\
                *(0.5+0.5*erf((1/(2**0.5))*((y_star[i+1][0]-mu[0])/sigma[0])))

        ybar2 = (ybar21 + ybar22 + ybar23) / self.PI
        dists = [((ybar1 - point[0])**2 + (ybar2 - point[1])**2)**0.5
                 for point in y_star]
        mcei = self.PI * min(dists)
        if isnan(mcei):
            mcei = 0
        return mcei

    def _dom(self, a, b):
        """Determines whether `a` completely dominates `b`.
        Returns True if it does.
        """
        comp = [c1 < c2 for c1, c2 in zip(a, b)]
        if sum(comp) == len(self.criteria):
            return True
        return False

    def _nobj_PI(self, mu, sigma):
        cov = diag(array(sigma)**2)
        rands = random.multivariate_normal(mu, cov, self.n)
        num = 0  # number of cases that dominate the current Pareto set

        for random_sample in rands:
            for par_point in self.y_star:
                #par_point = [p[2] for p in par_point.outputs]
                if self._dom(par_point, random_sample):
                    num = num + 1
                    break
        pi = (self.n - num) / float(self.n)
        return pi

    def execute(self):
        """ Calculates the expected improvement or
        probability of improvement of a candidate
        point given by a normal distribution.
        """
        mu = [objective.mu for objective in self.predicted_values]
        sig = [objective.sigma for objective in self.predicted_values]

        if self.y_star is None:
            self.y_star = self.get_y_star()

        n_objs = len(self.criteria)

        if n_objs == 2:
            # bi-objective optimization
            self.PI = self._2obj_PI(mu, sig)
            if self.calc_switch == 'EI':
                # execute EI calculations
                self.EI = self._2obj_EI(mu, sig)
        elif n_objs > 2:
            # n-objective optimization
            self.PI = self._nobj_PI(mu, sig)
            if self.calc_switch == 'EI':
                # EI calculations are not supported for more than 2 objectives
                self.raise_exception(
                    "EI calculations not supported"
                    " for more than 2 objectives", ValueError)
Example 6
class Pareto_Min_Dist(Component):
    """Computes the probability that any given point from the primary concept
    will intersect the Pareto frontiers of some other concepts.
    """
    pareto = List(
        [],
        iotype="in",
        desc="List of CaseIterators containing competing local Pareto points")

    criteria = ListStr(
        iotype="in",
        dtype="str",
        desc="Names of responses to maximize expected improvement around. "
        "Must be NormalDistribution type.")

    predicted_values = Array(
        iotype="in",
        dtype=NormalDistribution,
        desc="CaseIterator which contains a NormalDistribution "
        "for each response at a location where you wish to "
        "calculate EI.")

    dist = Float(0.0,
                 iotype="out",
                 desc="Minimum distance from a point to the other Pareto set.")

    reset_pareto = Event()

    def __init__(self, *args, **kwargs):
        super(Pareto_Min_Dist, self).__init__(*args, **kwargs)
        self.y_star_other = None

    def _reset_pareto_fired(self):
        self.y_star_other = None

    def get_pareto(self):
        y_star_other = []

        c = []

        for single_case_list in self.pareto:
            for case in single_case_list:
                for objective in case.outputs:
                    for crit in self.criteria:
                        if crit in objective[0]:
                            #TODO: criteria needs at least two things matching
                            #objective names in CaseIterator outputs, error otherwise
                            c.append(objective[2])
                if c != []:
                    y_star_other.append(c)
                c = []

        return y_star_other

    def _calc_min_dist(self, p, y_star_other):
        """Computes the minimum distance from a candidate point 
        to other_pareto.
        """

        dists = []

        for y in y_star_other:
            d = sqrt(sum([(A - B)**2 for A, B in zip(p, y)]))
            dists.append(d)

        return min(dists)

    def execute(self):
        mu = [objective.mu for objective in self.predicted_values]

        if self.y_star_other is None:
            self.y_star_other = self.get_pareto()

        self.dist = self._calc_min_dist(mu, self.y_star_other)
Example 7
class MetaModel(Component):

    # pylint: disable-msg=E1101
    model = Slot(IComponent,
                 allow_none=True,
                 desc='Slot for the Component or Assembly being '
                 'encapsulated.')
    includes = ListStr(iotype='in',
                       desc='A list of names of variables to be included '
                       'in the public interface.')
    excludes = ListStr(iotype='in',
                       desc='A list of names of variables to be excluded '
                       'from the public interface.')

    warm_start_data = Slot(ICaseIterator,
                           iotype="in",
                           desc="CaseIterator containing cases to use as "
                           "initial training data. When this is set, all "
                           "previous training data is cleared, and replaced "
                           "with data from this CaseIterator")

    surrogate = Dict(
        key_trait=Str,
        value_trait=Slot(ISurrogate),
        allow_none=True,
        desc='Dictionary that provides a mapping between variables and '
        'surrogate models for each output. The "default" '
        'key must be given. It is the default surrogate model for all '
        'outputs. Any specific surrogate models can be '
        'specified by a key with the desired variable name.')
    surrogate_args = Dict(
        key_trait=Str,
        allow_none=True,
        desc='Dictionary that provides mapping between variables and '
        'arguments that should be passed to the surrogate model. Keys should '
        'match those in the surrogate dictionary. Values can be a list of ordered '
        'arguments, a dictionary of named arguments, or a two-tuple of a list and a dictionary.'
    )

    recorder = Slot(ICaseRecorder, desc='Records training cases')

    # when fired, the next execution will train the metamodel
    train_next = Event()
    #when fired, the next execution will reset all training data
    reset_training_data = Event()

    def __init__(self, *args, **kwargs):
        super(MetaModel, self).__init__(*args, **kwargs)
        self._current_model_traitnames = set()
        self._surrogate_info = {}
        self._surrogate_input_names = []
        self._training_input_history = []
        self._const_inputs = {}  # dict of constant training input indices and their values
        self._train = False
        self._new_train_data = False
        self._failed_training_msgs = []

        # the following line will work for classes that inherit from MetaModel
        # as long as they declare their traits in the class body and not in
        # the __init__ function.  If they need to create traits dynamically
        # during initialization they'll have to provide the value of
        # _mm_class_traitnames
        self._mm_class_traitnames = set(self.traits(iotype=not_none).keys())

    def _train_next_fired(self):
        self._train = True
        self._new_train_data = True

    def _reset_training_data_fired(self):
        self._training_input_history = []
        self._const_inputs = {}
        self._failed_training_msgs = []

        # remove output history from surrogate_info
        for name, tup in self._surrogate_info.items():
            surrogate, output_history = tup
            self._surrogate_info[name] = (surrogate, [])

    def _warm_start_data_changed(self, oldval, newval):
        self.reset_training_data = True

        #build list of inputs
        for case in newval:
            if self.recorder:
                self.recorder.record(case)
            inputs = []
            for inp_name in self._surrogate_input_names:
                var_name = '.'.join([self.name, inp_name])
                inp_val = case[var_name]
                if inp_val is not None:
                    inputs.append(inp_val)
                else:
                    self.raise_exception(
                        'The variable "%s" was not '
                        'found as an input in one of the cases provided '
                        'for warm_start_data.' % var_name, ValueError)
            #print "inputs", inputs
            self._training_input_history.append(inputs)

            for output_name in self.list_outputs_from_model():
                #grab value from case data
                var_name = '.'.join([self.name, output_name])
                try:
                    val = case.get_output(var_name)
                except KeyError:
                    self.raise_exception(
                        'The output "%s" was not found '
                        'in one of the cases provided for '
                        'warm_start_data' % var_name, ValueError)
                else:  # save to training output history
                    self._surrogate_info[output_name][1].append(val)

        self._new_train_data = True

    def execute(self):
        """If the training flag is set, train the metamodel. Otherwise, 
        predict outputs.
        """

        if self._train:
            if self.model is None:
                self.raise_exception("MetaModel object must have a model!",
                                     RuntimeError)
            try:
                inputs = self.update_model_inputs()

                #print '%s training with inputs: %s' % (self.get_pathname(), inputs)
                self.model.run(force=True)

            except Exception as err:
                self._failed_training_msgs.append(str(err))
            else:  #if no exceptions are generated, save the data
                self._training_input_history.append(inputs)
                self.update_outputs_from_model()
                case_outputs = []

                for name, tup in self._surrogate_info.items():
                    surrogate, output_history = tup
                    case_outputs.append(('.'.join([self.name,
                                                   name]), output_history[-1]))
                # save the case, making sure to add our name to the local input name
                # since this Case is scoped to our parent Assembly
                case_inputs = [
                    ('.'.join([self.name, name]), val)
                    for name, val in zip(self._surrogate_input_names, inputs)
                ]
                if self.recorder:
                    self.recorder.record(
                        Case(inputs=case_inputs, outputs=case_outputs))

            self._train = False
        else:
            #print '%s predicting' % self.get_pathname()
            if self._new_train_data:
                if len(self._training_input_history) < 2:
                    self.raise_exception(
                        "ERROR: need at least 2 training points!",
                        RuntimeError)

                # figure out if we have any constant training inputs
                tcases = self._training_input_history
                in_hist = tcases[0][:]
                # start off assuming every input is constant
                idxlist = range(len(in_hist))
                self._const_inputs = dict(zip(idxlist, in_hist))
                for i in idxlist:
                    val = in_hist[i]
                    for case in range(1, len(tcases)):
                        if val != tcases[case][i]:
                            del self._const_inputs[i]
                            break

                if len(self._const_inputs) == len(in_hist):
                    self.raise_exception(
                        "ERROR: all training inputs are constant.")
                elif len(self._const_inputs) > 0:
                    # some inputs are constant, so we have to remove them from the training set
                    training_input_history = []
                    for inputs in self._training_input_history:
                        training_input_history.append([
                            val for i, val in enumerate(inputs)
                            if i not in self._const_inputs
                        ])
                else:
                    training_input_history = self._training_input_history
                for name, tup in self._surrogate_info.items():
                    surrogate, output_history = tup
                    surrogate.train(training_input_history, output_history)

                self._new_train_data = False

            inputs = []
            for i, name in enumerate(self._surrogate_input_names):
                val = getattr(self, name)
                cval = self._const_inputs.get(i, _missing)
                if cval is _missing:
                    inputs.append(val)
                elif val != cval:
                    self.raise_exception(
                        "ERROR: training input '%s' was a constant value of (%s) but the value has changed to (%s)."
                        % (name, cval, val), ValueError)
            for name, tup in self._surrogate_info.items():
                surrogate = tup[0]
                # copy output to boundary
                setattr(self, name, surrogate.predict(inputs))
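
A hedged sketch of the train-then-predict pattern this class supports; the wrapped component, surrogate class, and variable names below are illustrative, and the 'default' key requirement comes from the surrogate trait's description above:

# Illustrative usage sketch (component, surrogate, and variable names are hypothetical).
mm = MetaModel()
mm.model = ExpensiveComp()                 # hypothetical Component to wrap
mm.surrogate = {'default': MySurrogate()}  # hypothetical ISurrogate; 'default' key is required

for x in (0.0, 1.0, 2.0):                  # at least 2 training points are needed
    mm.x = x                               # hypothetical input promoted from the wrapped model
    mm.train_next = True                   # next run trains instead of predicting
    mm.run(force=True)

mm.x = 1.5
mm.run(force=True)                         # no event fired -> outputs come from the surrogates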
Example 8
class MetaModel(Component):

    # pylint: disable-msg=E1101
    model = Slot(IComponent,
                 allow_none=True,
                 desc='Slot for the Component or Assembly being '
                 'encapsulated.')
    includes = List(Str,
                    iotype='in',
                    desc='A list of names of variables to be included '
                    'in the public interface.')
    excludes = List(Str,
                    iotype='in',
                    desc='A list of names of variables to be excluded '
                    'from the public interface.')

    warm_start_data = Slot(ICaseIterator,
                           iotype="in",
                           desc="CaseIterator containing cases to use as "
                           "initial training data. When this is set, all "
                           "previous training data is cleared and replaced "
                           "with data from this CaseIterator.")

    default_surrogate = Slot(
        ISurrogate,
        allow_none=True,
        desc="This surrogate will be used for all "
        "outputs that don't have a specific surrogate assigned "
        "to them in their sur_<name> slot.")

    report_errors = Bool(
        True,
        iotype="in",
        desc=
        "If True, metamodel will report errors reported from the component. "
        "If False, metamodel will swallow the errors but log that they happened and "
        "exclude the case from the training set.")

    recorder = Slot(ICaseRecorder, desc='Records training cases')

    # when fired, the next execution will train the metamodel
    train_next = Event()
    #when fired, the next execution will reset all training data
    reset_training_data = Event()

    def __init__(self, *args, **kwargs):
        super(MetaModel, self).__init__(*args, **kwargs)
        self._surrogate_input_names = None
        self._surrogate_output_names = None
        self._surrogate_overrides = set()  # keeps track of which sur_<name> slots are full
        self._training_data = {}
        self._training_input_history = []
        self._const_inputs = {}  # dict of constant training input indices and their values
        self._train = False
        self._new_train_data = False
        self._failed_training_msgs = []
        # need to maintain a separate copy of the default surrogate for each
        # sur_* slot that doesn't have a surrogate defined
        self._default_surrogate_copies = {}

        # the following line will work for classes that inherit from MetaModel
        # as long as they declare their traits in the class body and not in
        # the __init__ function.  If they need to create traits dynamically
        # during initialization they'll have to provide the value of
        # _mm_class_traitnames
        self._mm_class_traitnames = set(self.traits(iotype=not_none).keys())

    def _train_next_fired(self):
        self._train = True
        self._new_train_data = True

    def _reset_training_data_fired(self):
        self._training_input_history = []
        self._const_inputs = {}
        self._failed_training_msgs = []

        # remove output history from training_data
        for name in self._training_data:
            self._training_data[name] = []

    def _warm_start_data_changed(self, oldval, newval):
        self.reset_training_data = True

        # build list of inputs
        for case in newval:
            if self.recorder:
                self.recorder.record(case)
            inputs = []
            for inp_name in self.surrogate_input_names():
                var_name = '.'.join([self.name, inp_name])
                try:
                    inp_val = case[var_name]
                except KeyError:
                    pass
                    #self.raise_exception('The variable "%s" was not '
                    #'found as an input in one of the cases provided '
                    #'for warm_start_data.' % var_name, ValueError)
                else:
                    if inp_val is not None:
                        inputs.append(inp_val)
            #print "inputs", inputs
            self._training_input_history.append(inputs)

            for output_name in self.surrogate_output_names():
                #grab value from case data
                var_name = '.'.join([self.name, output_name])
                try:
                    val = case.get_output(var_name)
                except KeyError:
                    self.raise_exception(
                        'The output "%s" was not found '
                        'in one of the cases provided for '
                        'warm_start_data' % var_name, ValueError)
                else:  # save to training output history
                    self._training_data[output_name].append(val)

        self._new_train_data = True

    def execute(self):
        """If the training flag is set, train the metamodel. Otherwise,
        predict outputs.
        """

        if self._train:
            if self.model is None:
                self.raise_exception("MetaModel object must have a model!",
                                     RuntimeError)
            try:
                inputs = self.update_model_inputs()

                #print '%s training with inputs: %s' % (self.get_pathname(), inputs)
                self.model.run(force=True)

            except Exception as err:
                if self.report_errors:
                    raise err
                else:
                    self._failed_training_msgs.append(str(err))
            else:  # if no exceptions are generated, save the data
                self._training_input_history.append(inputs)
                self.update_outputs_from_model()
                case_outputs = []

                for name, output_history in self._training_data.items():
                    case_outputs.append(('.'.join([self.name,
                                                   name]), output_history[-1]))
                # save the case, making sure to add our name to the local input name
                # since this Case is scoped to our parent Assembly
                case_inputs = [
                    ('.'.join([self.name, name]), val)
                    for name, val in zip(self.surrogate_input_names(), inputs)
                ]
                if self.recorder:
                    self.recorder.record(
                        Case(inputs=case_inputs, outputs=case_outputs))

            self._train = False
        else:
            if self.default_surrogate is None and not self._surrogate_overrides:  # NO surrogates defined. just run model and get outputs
                inputs = self.update_model_inputs()
                self.model.run()
                self.update_outputs_from_model()
                return

            #print '%s predicting' % self.get_pathname()
            if self._new_train_data:
                if len(self._training_input_history) < 2:
                    self.raise_exception(
                        "ERROR: need at least 2 training points!",
                        RuntimeError)

                # figure out if we have any constant training inputs
                tcases = self._training_input_history
                in_hist = tcases[0][:]
                # start off assuming every input is constant
                idxlist = range(len(in_hist))
                self._const_inputs = dict(zip(idxlist, in_hist))
                for i in idxlist:
                    val = in_hist[i]
                    for case in range(1, len(tcases)):
                        if val != tcases[case][i]:
                            del self._const_inputs[i]
                            break

                if len(self._const_inputs) == len(in_hist):
                    self.raise_exception(
                        "ERROR: all training inputs are constant.")
                elif len(self._const_inputs) > 0:
                    # some inputs are constant, so we have to remove them from the training set
                    training_input_history = []
                    for inputs in self._training_input_history:
                        training_input_history.append([
                            val for i, val in enumerate(inputs)
                            if i not in self._const_inputs
                        ])
                else:
                    training_input_history = self._training_input_history
                for name, output_history in self._training_data.items():
                    surrogate = self._get_surrogate(name)
                    if surrogate is not None:
                        surrogate.train(training_input_history, output_history)

                self._new_train_data = False

            inputs = []
            for i, name in enumerate(self.surrogate_input_names()):
                val = getattr(self, name)
                cval = self._const_inputs.get(i, _missing)
                if cval is _missing:
                    inputs.append(val)

                elif val != cval:
                    self.raise_exception(
                        "ERROR: training input '%s' was a constant value of (%s) but the value has changed to (%s)."
                        % (name, cval, val), ValueError)

            for name in self._training_data:
                surrogate = self._get_surrogate(name)
                # copy output to boundary
                if surrogate is None:
                    setattr(self, name, getattr(
                        self.model,
                        name))  # no surrogate. use outputs from model
                else:
                    setattr(self, name, surrogate.predict(inputs))
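
This variant drops the surrogate dictionary of Example 7 in favor of a single default_surrogate slot plus optional per-output sur_<name> overrides (tracked in _surrogate_overrides); if neither is set, execute simply runs the wrapped model. A hedged configuration sketch with hypothetical names:

# Illustrative configuration sketch (component and surrogate names are hypothetical).
mm = MetaModel()
mm.model = ExpensiveComp()             # hypothetical Component to wrap
mm.default_surrogate = MySurrogate()   # hypothetical ISurrogate used for every output
# mm.sur_f = OtherSurrogate()          # hypothetical per-output override via a sur_<name> slot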