Exemple #1
0
    def replace(self, target_name, newobj):
        """Replace one object with another, attempting to mimic the inputs and connections
        of the replaced object as much as possible.

        target_name: str
            Name of the child object to be replaced.
        newobj: object
            Replacement object; if it is a Component, existing connections
            and workflow membership of the old object are transferred to it.
        """
        tobj = getattr(self, target_name)

        # Save existing driver references.
        # NOTE(review): assumes every Driver of interest lives directly in
        # self.__dict__ -- confirm against how drivers are registered.
        refs = {}
        if has_interface(tobj, IComponent):
            for obj in self.__dict__.values():
                if obj is not tobj and is_instance(obj, Driver):
                    refs[obj] = obj.get_references(target_name)

        if has_interface(
                newobj, IComponent
        ):  # remove any existing connections to replacement object
            self.disconnect(newobj.name)
        if hasattr(newobj, 'mimic'):
            try:
                newobj.mimic(
                    tobj)  # this should copy inputs, delegates and set name
            except Exception:
                self.reraise_exception(
                    "Couldn't replace '%s' of type %s with type %s" %
                    (target_name, type(tobj).__name__, type(newobj).__name__))
        # Record connections and workflow positions that reference the target
        # so they can be recreated after the swap.
        conns = self.find_referring_connections(target_name)
        wflows = self.find_in_workflows(target_name)
        # NOTE(review): the trailing '.' in this pattern is an unescaped regex
        # wildcard -- presumably meant to match a literal dot; confirm before
        # changing.
        target_rgx = re.compile(r'(\W?)%s.' % target_name)
        conns.extend([(u, v)
                      for u, v in self._depgraph.list_autopassthroughs()
                      if re.search(target_rgx, u) is not None
                      or re.search(target_rgx, v) is not None])

        self.add(
            target_name, newobj
        )  # this will remove the old object (and any connections to it)

        # recreate old connections
        for u, v in conns:
            self.connect(u, v)

        # add new object (if it's a Component) to any workflows where target was
        if has_interface(newobj, IComponent):
            for wflow, idx in wflows:
                wflow.add(target_name, idx)

        # Restore driver references.
        if refs:
            for obj in self.__dict__.values():
                if obj is not newobj and is_instance(obj, Driver):
                    obj.restore_references(refs[obj], target_name)

        # Workflows need a reference to their new parent driver
        if is_instance(newobj, Driver):
            newobj.workflow._parent = newobj
    def replace(self, target_name, newobj):
        """Replace one object with another, attempting to mimic the inputs and connections
        of the replaced object as much as possible.

        target_name: str
            Name of the child object being replaced.
        newobj: object
            Replacement object; if it is a Component, existing connections
            and workflow membership of the old object are transferred to it.
        """
        tobj = getattr(self, target_name)

        # Save existing driver references.
        # NOTE(review): assumes every Driver of interest lives directly in
        # self.__dict__ -- confirm against how drivers are registered.
        refs = {}
        if has_interface(tobj, IComponent):
            for obj in self.__dict__.values():
                if obj is not tobj and is_instance(obj, Driver):
                    refs[obj] = obj.get_references(target_name)

        if has_interface(newobj, IComponent):  # remove any existing connections to replacement object
            self.disconnect(newobj.name)
        if hasattr(newobj, "mimic"):
            try:
                newobj.mimic(tobj)  # this should copy inputs, delegates and set name
            except Exception:
                self.reraise_exception(
                    "Couldn't replace '%s' of type %s with type %s"
                    % (target_name, type(tobj).__name__, type(newobj).__name__)
                )
        # Record connections and workflow positions that reference the target
        # so they can be recreated after the swap.
        conns = self.find_referring_connections(target_name)
        wflows = self.find_in_workflows(target_name)
        # NOTE(review): the trailing '.' in this pattern is an unescaped regex
        # wildcard -- presumably meant to match a literal dot; confirm before
        # changing.
        target_rgx = re.compile(r"(\W?)%s." % target_name)
        conns.extend(
            [
                (u, v)
                for u, v in self._depgraph.list_autopassthroughs()
                if re.search(target_rgx, u) is not None or re.search(target_rgx, v) is not None
            ]
        )

        self.add(target_name, newobj)  # this will remove the old object (and any connections to it)

        # recreate old connections
        for u, v in conns:
            self.connect(u, v)

        # add new object (if it's a Component) to any workflows where target was
        if has_interface(newobj, IComponent):
            for wflow, idx in wflows:
                wflow.add(target_name, idx)

        # Restore driver references.
        if refs:
            for obj in self.__dict__.values():
                if obj is not newobj and is_instance(obj, Driver):
                    obj.restore_references(refs[obj], target_name)
def _find_common_interface(obj1, obj2):
    """Return the first interface, in precedence order, that both *obj1*
    and *obj2* implement, or None if they share none of the known ones.
    """
    candidates = (
        IAssembly,
        IComponent,
        IDriver,
        IArchitecture,
        IContainer,
        ICaseIterator,
        ICaseRecorder,
        IDOEgenerator,
    )
    shared = (iface for iface in candidates
              if has_interface(obj1, iface) and has_interface(obj2, iface))
    return next(shared, None)
    def _model_changed(self, oldmodel, newmodel):
        """called whenever the model variable is set or when includes/excludes change."""
        # TODO: check for pre-connected traits on the new model
        # TODO: disconnect traits corresponding to old model (or leave them if the new model has the same ones?)
        # TODO: check for nested MMs?  Is this a problem?
        # TODO: check for name collisions between MetaModel class traits and traits from model

        # Reject replacements that are not Components (None is allowed and
        # simply clears the model).
        if newmodel is not None and not has_interface(newmodel, IComponent):
            self.raise_exception('model of type %s does not implement the'
                                 ' IComponent interface' % type(newmodel).__name__,
                                 TypeError)

        # Previously collected training data no longer matches the new model.
        self.reset_training_data = True

        if newmodel:
            # Only single-level vartrees are supported; reject deeper nesting
            # up front.
            if not check_model_only_one_level_vartree(newmodel):
                self.raise_exception('metamodels currently do not support multi'
                                     ' level vartrees', TypeError)

        self._update_surrogate_list()

        if newmodel:
            # Adopt the model as a child component named 'model'.
            newmodel.parent = self
            newmodel.name = 'model'

        self.config_changed()
    def compute_ordering(self, cgraph):
        """Given a component graph, each driver can determine its iteration
        set and the ordering of its workflow.

        cgraph: graph
            Component graph of the containing scope; reduced here to the
            subgraph covering this driver's full iteration set.
        """
        # Restrict the graph to the components this driver iterates over.
        cgraph = cgraph.subgraph(self._full_iter_set)

        # call compute_ordering on all subdrivers
        for name in self._iter_set:
            obj = getattr(self.parent, name)
            if has_interface(obj, IDriver):
                obj.compute_ordering(cgraph)

        # Fold each subdriver's iteration set into a single graph node.
        self._collapse_subdrivers(cgraph)

        # now figure out the order of our iter_set: explicitly ordered names
        # first, then the remaining members of the iteration set
        self._ordering = self.workflow._explicit_names + \
                         [n for n in self._iter_set
                           if n not in self.workflow._explicit_names]

        # remove any nodes that got collapsed into subdrivers
        self._ordering = [n for n in self._ordering if n in cgraph]

        # Sort consistently with graph dependencies while keeping the
        # requested relative ordering where possible.
        self._ordering = gsort(cgraph, self._ordering)

        self.workflow._ordering = self._ordering
    def _get_collapsed_graph(self):
        """Get a dependency graph with only our workflow components
        in it, with additional edges added to it from sub-workflows
        of any Driver components in our workflow, and from any ExprEvaluators
        in any components in our workflow.

        The result is computed once and cached in self._collapsed_graph.
        """
        # Return the cached graph if it was already built.
        if self._collapsed_graph:
            return self._collapsed_graph

        scope = self.scope
        graph = scope._depgraph

        # find all of the incoming and outgoing edges to/from all of the
        # components in each driver's iteration set so we can add edges to/from
        # the driver in our collapsed graph
        comps = self.get_components(full=True)
        cnames = set([c.name for c in comps])
        graph_with_subs = graph.component_graph()
        collapsed_graph = graph_with_subs.subgraph(cnames)
        for comp in comps:
            if has_interface(comp, IDriver):
                comp._collapse_subdrivers(collapsed_graph)

        # Keep only the names still present after collapsing, ordered
        # consistently with the collapsed graph.
        fnames = [n for n in self._fullnames if n in collapsed_graph]
        self._fullnames = gsort(collapsed_graph, fnames)

        self._collapsed_graph = collapsed_graph

        return self._collapsed_graph
    def compute_ordering(self, cgraph):
        """Given a component graph, each driver can determine its iteration
        set and the ordering of its workflow.

        cgraph: graph
            Component graph of the containing scope; reduced to the subgraph
            covering this driver's full iteration set before processing.
        """
        # Work only on the portion of the graph this driver iterates over.
        cgraph = cgraph.subgraph(self._full_iter_set)

        # call compute_ordering on all subdrivers
        for name in self._iter_set:
            obj = getattr(self.parent, name)
            if has_interface(obj, IDriver):
                obj.compute_ordering(cgraph)

        # Fold each subdriver's iteration set into a single node.
        self._collapse_subdrivers(cgraph)

        # now figure out the order of our iter_set: explicit names first,
        # then everything else in the iteration set
        self._ordering = self.workflow._explicit_names + \
                         [n for n in self._iter_set
                           if n not in self.workflow._explicit_names]

        # remove any nodes that got collapsed into subdrivers
        self._ordering = [n for n in self._ordering if n in cgraph]

        # Sort against graph dependencies, preserving relative order.
        self._ordering = gsort(cgraph, self._ordering)

        self.workflow._ordering = self._ordering
    def _add_var_for_surrogate(self, surrogate, varname):
        """Different surrogates have different types of output values, so create
        the appropriate type of output Variable based on the return value
        of get_uncertain_value on the surrogate.

        surrogate:
            Surrogate object whose get_uncertain_value() return type decides
            which trait class is used.
        varname: str
            Name of the model output; may be a one-level vartree path
            ('tree.subvar').
        """

        # Probe the surrogate with the model's current value to learn what
        # kind of value it produces.
        val = surrogate.get_uncertain_value(self.model.get(varname))
        if has_interface(val, IUncertainVariable):
            ttype = UncertainDistVar
        elif isinstance(val, int_types):
            ttype = Int
        elif isinstance(val, real_types):
            ttype = Float
        else:
            self.raise_exception("value type of '%s' is not a supported"
                                 " surrogate return value" %
                                 val.__class__.__name__)

        if "." not in varname:  # non vartree variable
            # Mirror the model trait's description and units on the new trait.
            self.add(varname, ttype(default_value=val, iotype='out',
                                    desc=self.model.trait(varname).desc,
                                    units=self.model.trait(varname).units))
            setattr(self, varname, val)

        else:  # vartree sub variable
            # Only a single level of vartree nesting is handled here.
            vartreename, subvarname = varname.split(".")

            metamodel_vartree = self.get(vartreename)
            model_vartree_node = self.model.get(vartreename)
            metamodel_vartree.add(subvarname, ttype(default_value=val, iotype='out',
                                  desc=model_vartree_node.trait(subvarname).desc,
                                  units=model_vartree_node.trait(subvarname).units))
            setattr(metamodel_vartree, subvarname, val)

        return
    def __init__(self, klass = object, allow_none = True, factory = None, 
                 args = None, kw = None, **metadata):
        """Initialize the slot trait.

        klass: class or zope Interface
            Type (or interface) that assigned values must satisfy.
        allow_none: bool
            Whether None is an acceptable value.
        factory, args, kw:
            Forwarded to the underlying Enthought Instance trait; unused when
            klass is a zope Interface (interfaces cannot be instantiated).
        """
        # A zope Interface cannot be handled by the Enthought Instance trait,
        # so detect that case first.  issubclass raises TypeError when klass
        # is not a class at all.
        try:
            iszopeiface = issubclass(klass, zope.interface.Interface)
        except TypeError:
            iszopeiface = False
        
        metadata.setdefault( 'copy', 'deep' )

        self._allow_none = allow_none
        self.klass = klass
        default_value = None
        
        # Flag container types (by interface or by class declaration) so that
        # container-specific handling can occur elsewhere.
        if has_interface(klass, IContainer) or (isclass(klass) and IContainer.implementedBy(klass)):
            self._is_container = True
        else:
            self._is_container = False

        if iszopeiface:
            # No default instance for an interface; remember the factory info
            # for later validation/instantiation.
            self._instance = None
            self.factory = factory
            self.args = args
            self.kw = kw
        else:
            # Delegate default-value construction to an Instance trait.
            self._instance = Instance(klass=klass, allow_none=allow_none, 
                                      factory=factory, args=args, kw=kw,
                                      **metadata)
            default_value = self._instance.default_value
        super(Slot, self).__init__(default_value, **metadata)
Exemple #10
0
    def add_subscriber(self, pathname, publish):
        ''' Publish the specified topic.

        pathname: str
            Topic name; may be a dotted variable path, 'log_msgs', or one of
            the automatically-published topics.
        publish: bool
            True to subscribe to the topic, False to unsubscribe.
        '''
        auto_topics = ('', 'components', 'files', 'types',
                       'console_errors', 'file_errors')
        if pathname in auto_topics:
            # these topics are published automatically
            return

        if pathname == 'log_msgs':
            if publish:
                self._start_log_msgs(pathname)
            else:
                self._stop_log_msgs()
            return

        parts = pathname.split('.')
        if len(parts) > 1:
            root = self.proj.get(parts[0])
            if root:
                root.register_published_vars('.'.join(parts[1:]), publish)

        cont, root = self.get_container(pathname)
        if has_interface(cont, IComponent):
            if publish:
                # Reference-count subscriptions per component path.
                self._publish_comps[pathname] = \
                    self._publish_comps.get(pathname, 0) + 1
            elif pathname in self._publish_comps:
                self._publish_comps[pathname] -= 1
                if self._publish_comps[pathname] < 1:
                    del self._publish_comps[pathname]
Exemple #11
0
    def set_state(self, X):
        """Take the given state vector and set its values into the
        correct state variables.

        X: 1-D array-like
            Flattened vector holding the new values of all states, in the
            order reported by list_states().

        Raises ValueError if the size of X does not match the combined
        flattened size of the state variables.
        """
        unused = len(X)
        idx = 0

        for name in self.list_states():
            val = getattr(self, name)
            flatval = flattened_value(name, val)
            size = len(flatval)
            newval = X[idx:idx + size]
            unused -= size
            idx += size
            try:
                iter(val)
            except TypeError:
                # Scalar state: exactly one flattened entry must map to it.
                if has_interface(val, IVariableTree):
                    msg = "VariableTree states are not supported yet."
                    raise RuntimeError(msg)
                else:
                    if len(newval) != 1:
                        # Bug fix: the original message was truncated
                        # ("... with a ") and never filled its '%s'
                        # placeholder; also pass an explicit exception type.
                        self.raise_exception(
                            "Trying to set a scalar value '%s' with a "
                            "vector of size %d" % (name, len(newval)),
                            ValueError)
                    setattr(self, name, newval[0])
            else:
                # Array-like state: store a copy of its slice of X.
                setattr(self, name, newval.copy())

        if unused != 0:
            msg = "State vector size does not match flattened size of state variables."
            self.raise_exception(msg, ValueError)
    def _update_var_for_surrogate(self, surrogate, varname):
        """Different surrogates have different types of output values, so create
        the appropriate type of output Variable based on the return value
        of get_uncertain_value on the surrogate.

        Presently, this just adds the UncertainVariable for Kriging
        """

        # TODO - ISurrogate should have a get_datatype or get_uncertainty_type
        # Probe with a plain float; only the *type* of the returned value
        # matters for selecting the trait class.
        val = surrogate.get_uncertain_value(1.0)

        if has_interface(val, IUncertainVariable):
            ttype = UncertainDistVar
        #elif isinstance(val, int_types):
        #    ttype = Int
        elif isinstance(val, real_types):
            ttype = Float
        else:
            self.raise_exception("value type of '%s' is not a supported"
                                 " surrogate return value" %
                                 val.__class__.__name__)

        # Replace the trait with one matching the surrogate's value type,
        # preserving the existing description.
        self.add(
            varname,
            ttype(default_value=val,
                  iotype='out',
                  desc=self.trait(varname).desc))
        setattr(self, varname, val)

        return
    def _add_var_for_surrogate(self, surrogate, varname):
        """Different surrogates have different types of output values, so create
        the appropriate type of output Variable based on the return value
        of get_uncertain_value on the surrogate.

        surrogate:
            Surrogate whose get_uncertain_value() return type decides which
            trait class is used.
        varname: str
            Name of the model output; may be a one-level vartree path
            ('tree.subvar').
        """

        # Probe the surrogate with the model's current value to learn what
        # kind of value it produces.
        val = surrogate.get_uncertain_value(self.model.get(varname))
        if has_interface(val, IUncertainVariable):
            ttype = UncertainDistVar
        elif isinstance(val, int_types):
            ttype = Int
        elif isinstance(val, real_types):
            ttype = Float
        else:
            self.raise_exception("value type of '%s' is not a supported"
                                 " surrogate return value" %
                                 val.__class__.__name__)

        if "." not in varname:  # non vartree variable
            # Mirror the model trait's description and units on the new trait.
            self.add(varname, ttype(default_value=val, iotype='out',
                                    desc=self.model.trait(varname).desc,
                                    units=self.model.trait(varname).units))
            setattr(self, varname, val)

        else:  # vartree sub variable
            # Only a single level of vartree nesting is handled here.
            vartreename, subvarname = varname.split(".")

            metamodel_vartree = self.get(vartreename)
            model_vartree_node = self.model.get(vartreename)
            metamodel_vartree.add(subvarname, ttype(default_value=val, iotype='out',
                                  desc=model_vartree_node.trait(subvarname).desc,
                                  units=model_vartree_node.trait(subvarname).units))
            setattr(metamodel_vartree, subvarname, val)

        return
 def validate(self, obj, name, value):
     """Validate *value*: it must implement IUncertainVariable.

     Returns the value unchanged when valid; raises ValueError otherwise.
     """
     if has_interface(value, IUncertainVariable):
         return value
     raise ValueError("'%s' does not implement the IUncertainVariable interface" %
                      name)
    def get_names(self, full=False):
        """Return a list of component names in this workflow.
        If full is True, hidden pseudo-components are included as well.
        """
        if self._names is None:
            # Build and cache both the plain and the 'full' name lists.
            explicit = self._explicit_names
            comps = [getattr(self.scope, n) for n in explicit]
            drivers = [c for c in comps if has_interface(c, IDriver)]
            self._names = explicit[:]

            if len(drivers) == len(comps):  # every comp is a driver
                covered = set()
                for drv in drivers:
                    covered.update(c.name for c in drv.iteration_set())
                required = self._parent._get_required_compnames()
                extra = set(n for n in required if n not in covered)
                extra -= set(self._names)
                self._names.extend(extra)

            self._fullnames = self._names[:]
            pseudos = set(self._parent.list_pseudocomps())
            pseudos.update(
                find_related_pseudos(self.scope._depgraph.component_graph(),
                                     self._names))
            self._fullnames.extend(pseudos - set(self._names))

        return self._fullnames[:] if full else self._names[:]
Exemple #16
0
 def get_dataflow(self, pathname):
     ''' Get the structure of the specified assembly or of the global
         namespace if no pathname is specified; consists of the list of
         components and the connections between them (i.e., the dataflow).

         Returns a JSON string (serialized with json_default for
         project-specific objects).
     '''
     dataflow = {}
     if pathname and len(pathname) > 0:
         try:
             asm, root = self.get_object(pathname)
             if asm has_interface(asm, IAssembly) else None:
                 dataflow = asm.get_dataflow()
         except Exception as err:
             self._error(err, sys.exc_info())
     else:
         components = []
         for k, v in self.proj.items():
             if is_instance(v, Component):
                 inames = [
                     cls.__name__
                     for cls in list(implementedBy(v.__class__))
                 ]
                 components.append({
                     'name': k,
                     'pathname': k,
                     'type': type(v).__name__,
                     'interfaces': inames,
                     'python_id': id(v)
                 })
         dataflow['components'] = components
         dataflow['connections'] = []
         dataflow['parameters'] = []
         dataflow['constraints'] = []
         dataflow['objectives'] = []
         dataflow['responses'] = []
     return json.dumps(dataflow, default=json_default)
    def _model_changed(self, oldmodel, newmodel):
        """called whenever the model variable is set or when includes/excludes change."""
        # TODO: check for pre-connected traits on the new model
        # TODO: disconnect traits corresponding to old model (or leave them if the new model has the same ones?)
        # TODO: check for nested MMs?  Is this a problem?
        # TODO: check for name collisions between MetaModel class traits and traits from model

        # Reject replacements that are not Components (None is allowed and
        # simply clears the model).
        if newmodel is not None and not has_interface(newmodel, IComponent):
            self.raise_exception('model of type %s does not implement the'
                                 ' IComponent interface' % type(newmodel).__name__,
                                 TypeError)

        # Previously collected training data no longer matches the new model.
        self.reset_training_data = True

        if newmodel:

            # We can metamodel assemblies too.
            # Run the model's deferred configure() exactly once.
            if newmodel._call_configure:
                newmodel.configure()
                newmodel._call_configure = False

            # Only single-level vartrees are supported; reject deeper nesting.
            if not check_model_only_one_level_vartree(newmodel):
                self.raise_exception('metamodels currently do not support multi'
                                     ' level vartrees', TypeError)

        self._update_surrogate_list()

        if newmodel:
            # Adopt the model as a child component named 'model'.
            newmodel.parent = self
            newmodel.name = 'model'

        self.config_changed()
    def add_subscriber(self, pathname, publish):
        ''' Publish the specified topic.

        pathname: str
            Topic name; a dotted variable path, 'log_msgs', or one of the
            automatically-published topics.
        publish: bool
            True to subscribe, False to unsubscribe.
        '''
        if pathname in ('', 'components', 'files', 'types',
                        'console_errors', 'file_errors'):
            return  # these topics are published automatically

        if pathname == 'log_msgs':
            if publish:
                self._start_log_msgs(pathname)
            else:
                self._stop_log_msgs()
            return

        head, dot, tail = pathname.partition('.')
        if dot:
            root = self.proj.get(head)
            if root:
                root.register_published_vars(tail, publish)

        cont, root = self.get_container(pathname)
        if has_interface(cont, IComponent):
            counts = self._publish_comps
            if publish:
                # Reference-count subscriptions per component path.
                counts[pathname] = counts.get(pathname, 0) + 1
            elif pathname in counts:
                counts[pathname] -= 1
                if counts[pathname] < 1:
                    del counts[pathname]
    def _update_var_for_surrogate(self, surrogate, varname):
        """Different surrogates have different types of output values, so create
        the appropriate type of output Variable based on the return value
        of get_uncertain_value on the surrogate.

        Presently, this just adds the UncertainVariable for Kriging
        """

        # TODO - ISurrogate should have a get_datatype or get_uncertainty_type
        # Probe with a plain float; only the *type* of the returned value
        # matters for selecting the trait class.
        val = surrogate.get_uncertain_value(1.0)

        if has_interface(val, IUncertainVariable):
            ttype = UncertainDistVar
        #elif isinstance(val, int_types):
        #    ttype = Int
        elif isinstance(val, real_types):
            ttype = Float
        else:
            self.raise_exception("value type of '%s' is not a supported"
                                 " surrogate return value" %
                                 val.__class__.__name__)

        # Replace the trait with one matching the surrogate's value type,
        # preserving the existing description.
        self.add(varname, ttype(default_value=val, iotype='out',
                                desc=self.trait(varname).desc))
        setattr(self, varname, val)

        return
 def get_dataflow(self, pathname):
     ''' Get the structure of the specified assembly or of the global
         namespace if no pathname is specified; consists of the list of
         components and the connections between them (i.e., the dataflow).

         Returns a JSON string (serialized with json_default for
         project-specific objects).
     '''
     dataflow = {}
     if pathname and len(pathname) > 0:
         try:
             asm, root = self.get_object(pathname)
             if has_interface(asm, IAssembly):
                 dataflow = asm.get_dataflow()
         except Exception as err:
             # Report through the server channel; an empty dataflow dict is
             # returned in that case.
             self._error(err, sys.exc_info())
     else:
         # Global namespace: list every Component in the project along with
         # the interfaces its class implements.
         components = []
         for k, v in self.proj.items():
             if is_instance(v, Component):
                 inames = [cls.__name__
                           for cls in list(implementedBy(v.__class__))]
                 components.append({
                     'name': k,
                     'pathname': k,
                     'type': type(v).__name__,
                     'interfaces': inames,
                     'python_id': id(v)
                 })
         dataflow['components']  = components
         dataflow['connections'] = []
         dataflow['parameters']  = []
         dataflow['constraints'] = []
         dataflow['objectives']  = []
         dataflow['responses']   = []
     return json.dumps(dataflow, default=json_default)
    def get_names(self, full=False):
        """Return the component names in this workflow; with full=True the
        hidden pseudo-components are included as well.
        """
        if self._names is None:
            objs = [getattr(self.scope, n) for n in self._explicit_names]
            names = self._explicit_names[:]

            # When every explicitly-named component is a driver, pull in any
            # required components not already covered by a driver's
            # iteration set.
            if all(has_interface(o, IDriver) for o in objs):
                covered = set()
                for drv in objs:
                    covered.update(c.name for c in drv.iteration_set())
                needed = set(self._parent._get_required_compnames())
                names.extend((needed - covered) - set(names))

            self._names = names
            self._fullnames = names[:]
            pseudos = set(self._parent.list_pseudocomps())
            pseudos.update(
                find_related_pseudos(self.scope._depgraph.component_graph(),
                                     names))
            self._fullnames.extend(pseudos - set(names))

        return self._fullnames[:] if full else self._names[:]
    def calc_derivatives(self, first=False, second=False, savebase=True, required_inputs=None, required_outputs=None):
        """Calculate the derivatives for this non-differentiable block using
        Finite Difference.

        first: bool
            When True (and ffd_order > 0), linearize each non-driver
            component about the operating point first.
        second: bool
            Passed through to the components' calc_derivatives.
        savebase, required_inputs, required_outputs:
            Accepted for API compatibility with other derivative providers;
            not used directly here.

        Returns the finite-difference Jacobian, which is also cached in
        self.J.
        """
        # We don't do this in __init__ because some inputs and outputs
        # are added after creation (for nested driver support).
        if self.fd is None:
            from openmdao.main.derivatives import FiniteDifference

            self.fd = FiniteDifference(self)

        # Temporarily sever edges if the workflow requested it; always
        # restored in the finally clause below.
        if hasattr(self.wflow, "_severed_edges"):
            self.wflow.sever_edges(self.wflow._severed_edges)

        try:
            # First, linearize about operating point.
            # Note: Only needed for differentiable islands, which are handled
            # with Fake Finite Difference.
            # Don't do this for full-model finite difference.
            if first and self.ffd_order > 0:
                for name in self.comps:
                    comp = self.wflow.scope.get(name)

                    # Comp list contains full graph, so don't double up on
                    # the subdrivers.
                    if not has_interface(comp, IDriver) and name not in self.wflow._J_cache:
                        comp.calc_derivatives(first, second, True)

            self.J = self.fd.calculate()
        finally:
            if hasattr(self.wflow, "_severed_edges"):
                self.wflow.unsever_edges()

        return self.J
    def _model_changed(self, oldmodel, newmodel):
        """called whenever the model variable is set or when includes/excludes change."""
        # TODO: check for pre-connected traits on the new model
        # TODO: disconnect traits corresponding to old model (or leave them if the new model has the same ones?)
        # TODO: check for nested MMs?  Is this a problem?
        # TODO: check for name collisions between MetaModel class traits and traits from model
        # Reject replacements that are not Components (None is allowed).
        if newmodel is not None and not has_interface(newmodel, IComponent):
            self.raise_exception('model of type %s does not implement the IComponent interface' % type(newmodel).__name__,
                                 TypeError)

        # Previously collected training data no longer matches the new model.
        self.reset_training_data = True

        self._update_surrogate_list()

        # Without a default surrogate, either every output must have its own
        # surrogate or none of them may; a partial assignment is an error.
        if self.default_surrogate is None:
            no_sur = []
            for name in self.surrogate_output_names():
                if getattr(self, __surrogate_prefix__+name, None) is None:
                    no_sur.append(name)
            if len(no_sur) > 0 and len(no_sur) != len(self._surrogate_output_names):
                self.raise_exception("No default surrogate model is defined and the following outputs do not have a surrogate model: %s. Either specify default_surrogate, or specify a surrogate model for all outputs." %
                                     no_sur, RuntimeError)

        if newmodel:
            # Adopt the new model as a child named 'model'.
            newmodel.parent = self
            newmodel.name = 'model'
Exemple #24
0
    def _model_changed(self, oldmodel, newmodel):
        """Called whenever the model variable is set or when includes/excludes change."""
        # TODO: check for pre-connected traits on the new model
        # TODO: disconnect traits corresponding to old model (or leave them if the new model has the same ones?)
        # TODO: check for nested MMs?  Is this a problem?
        # TODO: check for name collisions between MetaModel class traits and traits from model
        # Reject replacements that are not Components (None is allowed and
        # simply clears the model).
        if newmodel is not None and not has_interface(newmodel, IComponent):
            self.raise_exception(
                'model of type %s does not implement the IComponent interface'
                % type(newmodel).__name__, TypeError)

        # (Removed unused local 'new_model_traitnames' -- it was assigned an
        # empty set and never read.)

        # Previously collected training data no longer matches the new model.
        self.reset_training_data = True

        self._update_surrogate_list()

        # Without a default surrogate, either every output must have its own
        # surrogate or none of them may; a partial assignment is an error.
        if self.default_surrogate is None:
            no_sur = []
            for name in self.surrogate_output_names():
                if getattr(self, __surrogate_prefix__ + name, None) is None:
                    no_sur.append(name)
            if len(no_sur) > 0 and len(no_sur) != len(
                    self._surrogate_output_names):
                self.raise_exception(
                    "No default surrogate model is defined and the following outputs do not have a surrogate model: %s. Either specify default_surrogate, or specify a surrogate model for all outputs."
                    % no_sur, RuntimeError)

        if newmodel:
            # Adopt the new model as a child named 'model'.
            newmodel.parent = self
            newmodel.name = 'model'
    def set_state(self, X):
        """Take the given state vector and set its values into the
        correct state variables.

        X: 1-D array-like
            Flattened vector holding the new values of all states, in the
            order reported by list_states().

        Raises ValueError if the size of X does not match the combined
        flattened size of the state variables.
        """
        unused = len(X)
        idx = 0

        for name in self.list_states():
            val = getattr(self, name)
            flatval = flattened_value(name, val)
            size = len(flatval)
            newval = X[idx:idx+size]
            unused -= size
            idx += size
            try:
                iter(val)
            except TypeError:
                # Scalar state: exactly one flattened entry must map to it.
                if has_interface(val, IVariableTree):
                    raise RuntimeError("VariableTree states are not supported yet.")
                else:
                    if len(newval) != 1:
                        # Bug fix: the original message was truncated
                        # ("... with a ") and never filled its '%s'
                        # placeholder; also pass an explicit exception type.
                        self.raise_exception(
                            "Trying to set a scalar value '%s' with a "
                            "vector of size %d" % (name, len(newval)),
                            ValueError)
                    setattr(self, name, newval[0])
            else:
                # Bug fix: the original stored 'flatval' (the flattened OLD
                # value), silently discarding the new state.  Store a copy of
                # this state's slice of X instead, matching the sibling
                # implementation of set_state.
                setattr(self, name, newval.copy())

        if unused != 0:
            self.raise_exception("State vector size does not match flattened size of state variables.",
                                 ValueError)
    def iteration_set(self, solver_only=False):
        """Return the set of all Components in our workflow, plus those found
        recursively in the workflow of every Driver it contains.

        solver_only: Bool
            Only recurse into solver drivers. These are the only kinds
            of drivers whose derivatives get absorbed into the parent
            driver's graph.
        """
        result = set()
        for member in self.workflow.get_components(full=True):
            result.add(member)
            if not has_interface(member, IDriver):
                continue
            if solver_only and not has_interface(member, ISolver):
                continue
            result.update(member.iteration_set())
        return result
    def iteration_set(self, solver_only=False):
        """Return a set of all Components in our workflow and
        recursively in any workflow in any Driver in our workflow.

        solver_only: Bool
            Only recurse into solver drivers. These are the only kinds
            of drivers whose derivatives get absorbed into the parent
            driver's graph.
        """
        comps = set()
        for node in self.workflow:
            comps.add(node)
            is_driver = has_interface(node, IDriver)
            # Recurse into a subdriver unless it is filtered out by
            # the solver_only restriction.
            if is_driver and (not solver_only or has_interface(node, ISolver)):
                comps.update(node.iteration_set())
        return comps
    def update_model(self, oldmodel, newmodel):
        """Called whenever the model variable is set.  Validates the new
        model, removes traits that were promoted from the old model, and
        promotes the new model's eligible inputs/outputs -- wrapping each
        eligible output in a surrogate model.

        oldmodel: Component or None
            The model being replaced (not used directly here).
        newmodel: Component or None
            The incoming model; must implement IComponent.
        """
        # TODO: check for pre-connected traits on the new model
        # TODO: disconnect traits corresponding to old model (or leave them if the new model has the same ones?)
        # TODO: check for nested MMs?  Is this a problem?
        # TODO: check for name collisions between MetaModel class traits and traits from model
        if newmodel is not None and not has_interface(newmodel, IComponent):
            self.raise_exception('model of type %s does not implement the IComponent interface' % type(newmodel).__name__,
                                 TypeError)

        # Surrogates must be configured before any traits are promoted below.
        if not self.surrogate:
            self.raise_exception("surrogate must be set before the model or any includes/excludes of variables", RuntimeError)

        new_model_traitnames = set()
        self._surrogate_input_names = []
        # NOTE(review): '_taining_input_history' looks like a typo for
        # '_training_input_history' -- confirm against other references
        # before renaming.
        self._taining_input_history = []
        self._surrogate_info = {}

        # remove traits promoted from the old model
        for name in self._current_model_traitnames:
            if self.parent:
                self.parent.disconnect('.'.join([self.name,name]))
            self.remove_trait(name)

        if newmodel:
            # query for inputs
            traitdict = newmodel._alltraits(iotype='in')
            for name,trait in traitdict.items():
                if self._eligible(name):
                    self._surrogate_input_names.append(name)
                if name not in self._mm_class_traitnames:
                    self.add_trait(name, _clone_trait(trait))
                    new_model_traitnames.add(name)
                    setattr(self, name, getattr(newmodel, name))

            # now outputs
            traitdict = newmodel._alltraits(iotype='out')

            for name,trait in traitdict.items():
                if self._eligible(name):
                    # per-output surrogate first, then the 'default' entry
                    try:
                        surrogate = self.surrogate[name]
                    except KeyError:
                        try:
                            surrogate = self.surrogate['default']
                        except KeyError:
                            self.raise_exception("No default surrogate model was"
                            " specified. Either specify a default, or specify a "
                            "surrogate model for all outputs",ValueError)
                    # output slot type mirrors the surrogate's uncertain-value type
                    trait_type = surrogate.get_uncertain_value(1.0).__class__()
                    self.add(name, Slot(trait_type, iotype='out', desc=trait.desc))
                    self._surrogate_info[name] = (surrogate.__class__(), []) # (surrogate,output_history)
                    new_model_traitnames.add(name)
                    setattr(self, name, surrogate.get_uncertain_value(getattr(newmodel,name)))
            newmodel.parent = self
            newmodel.name = 'model'

        self._current_model_traitnames = new_model_traitnames
Example #29
0
 def check_config(self):
     """Any checks we need. For now, drivers are not allowed. You can get
     around this by placing them in an assembly."""
     msg = ('Subdriver not supported in a cyclicflow. Please '
            'place it in a subassembly.')
     for comp in self.get_components():
         # Raise on the first driver encountered in the flow.
         if has_interface(comp, IDriver):
             self.scope.raise_exception(msg, RuntimeError)
 def _update_workflows(self):
     ''' Call :meth:`_update_workflow` on drivers to capture any workflow
         updates now rather than waiting until they are run.
     '''
     for root in self.proj.values():
         if not has_interface(root, IContainer):
             continue
         # Visit every Driver nested anywhere under this root container.
         for name, obj in root.items(recurse=True):
             if is_instance(obj, Driver):
                 obj._update_workflow()
Example #31
0
    def check_config(self):
        """Any checks we need. For now, drivers are not allowed. You can get
        around this by placing them in an assembly."""
        for comp in self.get_components():
            if not has_interface(comp, IDriver):
                continue
            # A driver inside a cyclic flow is unsupported -- bail out.
            msg = ('Subdriver not supported in a cyclicflow. Please '
                   'place it in a subassembly.')
            self.scope.raise_exception(msg, RuntimeError)
 def _update_roots(self):
     ''' Ensure that all root containers in the project dictionary know
         their own name and are set as top.
     '''
     for name, container in self.proj.items():
         if not has_interface(container, IContainer):
             continue
         if container.name != name:
             container.name = name
         if container._call_cpath_updated:
             set_as_top(container)
Example #33
0
 def _update_roots(self):
     ''' Ensure that all root containers in the project dictionary know
         their own name and are set as top.
     '''
     # Only container entries in the project dict are roots we manage.
     roots = [(key, obj) for key, obj in self.proj.items()
              if has_interface(obj, IContainer)]
     for key, root in roots:
         if root.name != key:
             root.name = key
         if root._call_cpath_updated:
             set_as_top(root)
 def _update_roots(self):
     ''' Ensure that all root containers in the project dictionary know
         their own name and that all root assemblies are set as top
     '''
     for key, obj in self.proj.__dict__.items():
         # Containers get renamed to match their dictionary key.
         if has_interface(obj, IContainer) and obj.name != key:
             obj.name = key
         # Assemblies additionally become top-level.
         if is_instance(obj, Assembly):
             set_as_top(obj)
Example #35
0
 def iteration_set(self):
     """Return a set of all Components in our workflow(s) and
     recursively in any workflow in any Driver in our workflow(s).
     """
     # An empty workflow is first populated with the required components.
     if not len(self.workflow):
         for cname in self._get_required_compnames():
             self.workflow.add(cname)
     comps = set()
     for comp in self.workflow.get_components():
         comps.add(comp)
         if has_interface(comp, IDriver):
             comps |= comp.iteration_set()
     return comps
Example #36
0
    def remove(self, name):
        """Remove the named container object from this assembly and remove
        it from its workflow(s) if it's a Component."""
        target = getattr(self, name)
        self.disconnect(name)
        self._depgraph.remove(name)
        self._exprmapper.remove(name)
        if has_interface(target, IComponent):
            # Drop the component from every sibling driver's workflow.
            drivers = [obj for obj in self.__dict__.values()
                       if obj is not target and is_instance(obj, Driver)]
            for drv in drivers:
                drv.workflow.remove(name)

        return super(Assembly, self).remove(name)
    def subdrivers(self, recurse=False):
        """Returns a generator of all subdrivers
        contained in this driver's workflow.  If recurse is True,
        include all subdrivers in our entire iteration set.
        """
        if recurse:
            pool = self.iteration_set()
        else:
            pool = list(self.workflow)

        for candidate in pool:
            # Only yield components that are themselves drivers.
            if has_interface(candidate, IDriver):
                yield candidate
    def subdrivers(self, recurse=False):
        """Returns a generator of all subdrivers
        contained in this driver's workflow.  If recurse is True,
        include all subdrivers in our entire iteration set.
        """
        if recurse:
            candidates = self.iteration_set()
        else:
            # Resolve the stored component names against our parent.
            candidates = [getattr(self.parent, cname)
                          for cname in self._iter_set]

        for comp in candidates:
            if has_interface(comp, IDriver):
                yield comp
 def iteration_set(self):
     """Return a set of all Components in our workflow(s) and
     recursively in any workflow in any Driver in our workflow(s).
     """
     allset = set()
     # Lazily populate an empty workflow with required components.
     if len(self.workflow) == 0:
         for required in self._get_required_compnames():
             self.workflow.add(required)
     for member in self.workflow.get_components():
         allset.add(member)
         if has_interface(member, IDriver):
             allset.update(member.iteration_set())
     return allset
Example #40
0
    def replace(self, target_name, newobj):
        """Replace one object with another, attempting to mimic the inputs and connections
        of the replaced object as much as possible.

        target_name: str
            Name of the child object being replaced.
        newobj: object
            The replacement; if it's a Component its connections and
            workflow positions are recreated from the old object's.
        """
        tobj = getattr(self, target_name)
        if has_interface(
                newobj, IComponent
        ):  # remove any existing connections to replacement object
            self.disconnect(newobj.name)
        if hasattr(newobj, 'mimic'):
            try:
                newobj.mimic(
                    tobj)  # this should copy inputs, delegates and set name
            except Exception as err:
                self.raise_exception(
                    "Couldn't replace '%s' of type %s with type %s: %s" %
                    (target_name, type(tobj).__name__, type(newobj).__name__,
                     str(err)), TypeError)
        # collect connections and workflow slots referring to the target
        conns = self.find_referring_connections(target_name)
        wflows = self.find_in_workflows(target_name)
        # NOTE(review): the trailing '.' is an unescaped regex metachar and
        # target_name is not re.escape()'d, so this can match more strings
        # than intended -- confirm before tightening.
        target_rgx = re.compile(r'(\W?)%s.' % target_name)
        conns.extend([(u, v)
                      for u, v in self._depgraph.list_autopassthroughs()
                      if re.search(target_rgx, u) is not None
                      or re.search(target_rgx, v) is not None])

        self.add(
            target_name, newobj
        )  # this will remove the old object (and any connections to it)

        # recreate old connections
        for u, v in conns:
            self.connect(u, v)

        # add new object (if it's a Component) to any workflows where target was
        if has_interface(newobj, IComponent):
            for wflow, idx in wflows:
                wflow.add(target_name, idx)
Example #41
0
    def _edge_counter(self, scope, dscope, index, head=''):
        """Helper function to figure out which edges in the edge dicts are
        our unknowns. Called recursively for assy or driver scopes.

        scope: object
            Scope whose workflow is examined.
        dscope: Driver
            Driver scope used to key into self.edge_dicts (may differ
            from scope for nested drivers).
        index: int
            Which element of each edge tuple holds the unknowns (0 or 1);
            the other element is treated as the input side.
        head: str
            Dotted pathname prefix accumulated during recursion.
        """

        # Traverse the workflow
        self._find_edges(scope, dscope)
        scope_name = dscope.get_pathname()

        if head:
            head = '%s.' % head

        # Complement of index: selects the "input" side of each edge tuple.
        index_bar = 0 if index == 1 else 1

        # Number of unknowns = number of edges in our edge_dict
        # (.iteritems() -- this is Python 2 code)
        for name, edges in self.edge_dicts[scope_name].iteritems():

            # For assemblies, we need to traverse their workflows too
            node = scope.parent.get(name)

            if isinstance(node, Assembly):

                # Assembly inputs are also counted as unknowns. This makes
                # recursion easier so that you don't have to query out of
                # scope.
                for input_name in edges[index_bar]:
                    input_full = "%s%s.%s" % (head, name, input_name)
                    self.var_list.append(input_full)

                node_scope_name = node.get_pathname()
                self._edge_counter(node.driver, node.driver, index,
                                   node_scope_name)

            elif isinstance(node, Driver):

                if not has_interface(node, ISolver):
                    msg = "Only nested solvers are supported"
                    raise NotImplementedError(msg)

                node_scope_name = node.get_pathname()
                self._edge_counter(scope, node, index, head)

            # Save the names for all our unknowns
            for item in edges[index]:
                full_name = "%s%s.%s" % (head, name, item)

                # Parameter edges in adjoint mode are not added to the
                # unknowns
                if full_name not in self.param_names and \
                   full_name not in self.grouped_param_names:
                    self.var_list.append(full_name)
Example #42
0
    def remove(self, name):
        """Remove the named container object from this assembly and remove
        it from its workflow(s) if it's a Component."""
        removed = getattr(self, name)
        self.disconnect(name)
        self._exprmapper.remove(name)
        if has_interface(removed, IComponent):
            # Components also leave the dependency graph and every
            # sibling driver's workflow/references.
            self._depgraph.remove(name)
            for other in self.__dict__.values():
                if other is removed or not is_instance(other, Driver):
                    continue
                other.workflow.remove(name)
                other.remove_references(name)

        return super(Assembly, self).remove(name)
    def _edge_counter(self, scope, dscope, index, head=''):
        """Helper function to figure out which edges in the edge dicts are
        our unknowns. Called recursively for assy or driver scopes.

        scope: object
            Scope whose workflow is examined.
        dscope: Driver
            Driver scope used to key into self.edge_dicts (may differ
            from scope for nested drivers).
        index: int
            Which element of each edge tuple holds the unknowns (0 or 1).
        head: str
            Dotted pathname prefix accumulated during recursion.
        """

        # Traverse the workflow
        self._find_edges(scope, dscope)
        scope_name = dscope.get_pathname()

        if head:
            head = '%s.' % head

        # index_bar selects the opposite (input) side of each edge tuple.
        index_bar = 0 if index == 1 else 1

        # Number of unknowns = number of edges in our edge_dict
        for name, edges in self.edge_dicts[scope_name].iteritems():

            # For assemblies, we need to traverse their workflows too
            node = scope.parent.get(name)

            if isinstance(node, Assembly):

                # Assembly inputs are also counted as unknowns. This makes
                # recursion easier so that you don't have to query out of
                # scope.
                for input_name in edges[index_bar]:
                    input_full = "%s%s.%s" % (head, name, input_name)
                    self.var_list.append(input_full)

                node_scope_name = node.get_pathname()
                self._edge_counter(node.driver, node.driver, index,
                                   node_scope_name)

            elif isinstance(node, Driver):

                if not has_interface(node, ISolver):
                    msg = "Only nested solvers are supported"
                    raise NotImplementedError(msg)

                node_scope_name = node.get_pathname()
                self._edge_counter(scope, node, index, head)

            # Save the names for all our unknowns
            for item in edges[index]:
                full_name = "%s%s.%s" % (head, name, item)

                # Parameter edges in adjoint mode are not added to the
                # unknowns
                if full_name not in self.param_names and \
                   full_name not in self.grouped_param_names:
                    self.var_list.append(full_name)
Example #44
0
    def __init__(self,
                 klass=object,
                 allow_none=True,
                 factory=None,
                 args=None,
                 kw=None,
                 **metadata):
        """Set up the slot trait; *klass* may be a class, a zope Interface,
        or an instance (which then becomes the default value)."""
        default_value = None
        try:
            iszopeiface = issubclass(klass, zope.interface.Interface)
        except TypeError:
            # klass wasn't a class at all -- treat it as a default instance.
            iszopeiface = False
            if not isclass(klass):
                default_value = klass
                klass = klass.__class__

        metadata.setdefault('copy', 'deep')

        self._allow_none = allow_none
        self.klass = klass

        # Container detection: direct interface check first, then the
        # class-level zope declaration (short-circuit order preserved).
        is_container = has_interface(klass, IContainer)
        if not is_container:
            is_container = isclass(klass) and IContainer.implementedBy(klass)
        self._is_container = is_container

        if iszopeiface:
            # Pure interface slot: instantiation deferred to the factory.
            self._instance = None
            self.factory = factory
            self.args = args
            self.kw = kw
        else:
            self._instance = traits.api.Instance(klass=klass,
                                                 allow_none=allow_none,
                                                 factory=factory,
                                                 args=args,
                                                 kw=kw,
                                                 **metadata)
            if default_value:
                self._instance.default_value = default_value
            else:
                default_value = self._instance.default_value

            if klass.__name__ == 'VariableTree':
                raise TypeError('Slotting of VariableTrees is not supported,'
                                ' please use VarTree instead')

        super(Base, self).__init__(default_value, **metadata)
    def ensure_init(self):
        """Make sure our inputs and outputs have been
        initialized.
        """
        # Pull the current value of each connected variable into the
        # corresponding input.
        for ref, in_name in self._inmap.items():
            value = ExprEvaluator(ref).evaluate(self.parent)
            setattr(self, in_name, value)
            stored = getattr(self, in_name)
            if has_interface(stored, IContainer):
                stored.name = in_name

        # Seed the output with the source expression's current value.
        setattr(self, 'out0', self._srcexpr.evaluate())
    def ensure_init(self):
        """Make sure our inputs and outputs have been
        initialized.
        """
        parent = self.parent
        for ref, in_name in self._inmap.items():
            # Copy the connected variable's value into our input.
            setattr(self, in_name, ExprEvaluator(ref).evaluate(parent))
            val = getattr(self, in_name)
            # Containers must carry the input's name.
            if has_interface(val, IContainer):
                val.name = in_name

        # Initialize the output from the source expression.
        outval = self._srcexpr.evaluate()
        setattr(self, 'out0', outval)
Example #47
0
    def test_brent_converge(self):
        """Brent driver should converge f = a*x**n + b*x - c to zero."""
        top = set_as_top(Assembly())
        comp = top.add('comp', ExecComp(exprs=["f=a * x**n + b * x - c"]))
        comp.a, comp.b, comp.c = 1.0, 1.0, 10.0
        comp.n = 77.0 / 27.0

        drv = top.add('driver', Brent())
        drv.add_parameter('comp.x', 0, 100)
        drv.add_constraint('comp.f=0')

        top.run()

        # Known root of the expression for these coefficients.
        assert_rel_error(self, top.comp.x, 2.06720359226, .0001)
        assert_rel_error(self, top.comp.f, 0, .0001)
        self.assertTrue(has_interface(drv, ISolver))
    def __init__(self, klass=object, allow_none=True, factory=None,
                 args=None, kw=None, **metadata):
        """Set up the slot trait.

        klass: class, zope Interface, or instance
            Type the slot accepts.  If an instance is passed, it becomes
            the default value and its class is used as *klass*.
        allow_none: bool
            Whether None is a legal value.
        factory, args, kw:
            Forwarded to the underlying traits Instance.
        """
        default_value = None
        try:
            iszopeiface = issubclass(klass, zope.interface.Interface)
        except TypeError:
            # klass wasn't a class; treat it as a default instance
            iszopeiface = False
            if not isclass(klass):
                default_value = klass
                klass = klass.__class__

        metadata.setdefault('copy', 'deep')

        self._allow_none = allow_none
        self.klass = klass

        if has_interface(klass, IContainer) or \
           (isclass(klass) and IContainer.implementedBy(klass)):
            self._is_container = True
        else:
            self._is_container = False

        if iszopeiface:
            # pure interface slot: instantiation deferred to the factory
            self._instance = None
            self.factory = factory
            self.args = args
            self.kw = kw
        else:
            self._instance = traits.api.Instance(klass=klass, allow_none=allow_none,
                                      factory=factory, args=args, kw=kw,
                                      **metadata)
            # NOTE(review): truthiness test means a falsy default instance
            # (e.g. 0 or '') would be ignored -- confirm intent.
            if default_value:
                self._instance.default_value = default_value
            else:
                default_value = self._instance.default_value

            if klass.__name__ == 'VariableTree':
                raise TypeError('Slotting of VariableTrees is not supported,'
                                ' please use VarTree instead')

        super(Base, self).__init__(default_value, **metadata)
    def __init__(self, default_value=NoDefaultSpecified, **metadata):
        """Initialize the variable trait, installing a unique default
        marker for required variables so the change callback always
        fires on first assignment."""
        metadata.setdefault('vartypename', self.__class__.__name__)

        is_vt = False
        required = (metadata.get('required') == True)
        if required and metadata['vartypename'] != 'Slot':
            if default_value is not NoDefaultSpecified:
                is_vt = has_interface(default_value, IVariableTree)
                if not is_vt:
                    # Flag the illegal default; the variable name isn't
                    # known yet, so a good error message is raised later.
                    metadata['_illegal_default_'] = True
            if not is_vt:
                # Marker value differs from any real assignment, so the
                # first set always triggers the callback.
                default_value = _missing
        super(Variable, self).__init__(default_value=default_value, **metadata)
    def __init__(self, default_value=NoDefaultSpecified, **metadata):
        """Initialize the variable trait.  For required non-Slot variables
        the default is replaced with a unique marker so the first real
        assignment always fires the change callback."""
        if 'vartypename' not in metadata:
            metadata['vartypename'] = self.__class__.__name__

        if metadata['vartypename'] != 'Slot' and metadata.get('required') == True:
            has_default = default_value is not NoDefaultSpecified
            is_vt = (has_interface(default_value, IVariableTree)
                     if has_default else False)
            if has_default and not is_vt:
                # Record the illegal default; the variable name is unknown
                # here, so a useful error message is produced later.
                metadata['_illegal_default_'] = True
            if not is_vt:
                default_value = _missing
        super(Variable, self).__init__(default_value=default_value, **metadata)
    def get_names(self, full=False):
        """Return a list of component names in this workflow.
        If full is True, include hidden pseudo-components in the list.

        Results are computed lazily and cached until self._names is
        reset to None.
        """
        if self._names is None:
            comps = [getattr(self.scope, n) for n in self._explicit_names]
            drivers = [c for c in comps if has_interface(c, IDriver)]
            self._names = self._explicit_names[:]
            self._iternames = self.parent._get_required_compnames()

            if len(drivers) == len(
                    comps):  # all comps are drivers or explicit set is empty
                # add required comps not already covered by some driver's
                # iteration set (pseudo-comps excluded)
                iterset = set()
                for driver in drivers:
                    iterset.update([c.name for c in driver.iteration_set()])
                added = set([
                    n for n in self._iternames
                    if not n.startswith('_pseudo_') and n not in iterset
                ]) - set(self._names)
                self._names.extend(added)

            # the full list additionally carries related pseudo-components
            self._fullnames = self._names[:]
            fullset = set(self.parent.list_pseudocomps())
            fullset.update(
                find_related_pseudos(self.scope._depgraph, self._names))
            self._fullnames.extend(fullset - set(self._names))

            self._initnames = set(self._fullnames) - self._iternames

            # drivers are always manually placed in the workflow, so
            # assume that they're supposed to be there and don't
            # warn the user
            self._initnames -= set([d.name for d in drivers])

        if full:
            return self._fullnames[:]
        else:
            return self._names[:]
Example #52
0
    def _find_edges(self, scope, dscope):
        """ Finds the minimum set of edges for which we need derivatives.
        These edges contain the inputs and outputs that are in the workflow
        of the driver, and whose source or target component has derivatives
        defined. Note that we only need one set of edges for all params.

        For each scope, we store a dictionary keyed by the components in the
        workflow. Each component has a tuple that contains the input and
        output varpaths that are active in the derivative calculation.

        A separate driver scope (dscope) can also be specified for drivers
        that are not the top driver ('driver') in the assembly's workflow.

        Results are stored in self.edge_dicts[dscope pathname]; nothing is
        returned.  (Uses .iteritems() -- this is Python 2 code.)
        """

        scope_name = dscope.get_pathname()

        self.edge_dicts[scope_name] = OrderedDict()
        edge_dict = self.edge_dicts[scope_name]

        # Find our minimum set of edges part 1
        # Inputs and Outputs from interior edges
        driver_set = dscope.workflow.get_names()
        interior_edges = scope._parent._depgraph.get_interior_edges(driver_set)

        needed_in_edges = set([b for a, b in interior_edges])
        needed_edge_exprs = set([a for a, b in interior_edges])

        # Output edges are expressions, so we need to find the referenced
        # variables

        needed_edges = []
        for edge in needed_edge_exprs:
            expr = scope._parent._exprmapper.get_expr(edge)
            needed_edges.append(expr.refs().pop())

        needed_edges = set(needed_edges)

        # Find our minimum set of edges part 2
        # Inputs connected to parameters
        needed_in_edges = needed_in_edges.union(set(self.param_names))
        needed_in_edges = needed_in_edges.union(set(self.grouped_param_names))

        # Find our minimum set of edges part 3
        # Outputs connected to objectives
        for _, expr in self._parent.get_objectives().iteritems():
            varpaths = expr.get_referenced_varpaths()
            needed_edges = needed_edges.union(varpaths)

        # Find our minimum set of edges part 4
        # Outputs connected to constraints
        # Note: constraints have a left and right hand side expression.
        for _, expr in self._parent.get_constraints().iteritems():
            for item in [expr.lhs, expr.rhs]:
                varpaths = item.get_referenced_varpaths()
                needed_edges = needed_edges.union(varpaths)

        # Find our minimum set of edges part 5
        # If we are collecting edges for a solver-type driver, then we need
        # to add in the independents and dependents
        if has_interface(dscope, ISolver):

            params = dscope.get_parameters().keys()

            deps = []
            indeps = []
            for expr, constr in dscope.get_eq_constraints().iteritems():

                # Each equality constraint pairs one parameter
                # (independent) with one dependent variable.
                item1 = constr.lhs.get_referenced_varpaths()
                item2 = constr.rhs.get_referenced_varpaths()
                comps = list(item1.union(item2))

                if comps[0] in params:
                    indep = comps[0]
                    dep = comps[1]
                elif comps[1] in params:
                    indep = comps[1]
                    dep = comps[0]
                else:
                    msg = "No independent in solver equation."
                    raise NotImplementedError(msg)

                deps.append(dep)
                indeps.append(indep)

            needed_edges = needed_edges.union(set(deps))
            needed_in_edges = needed_in_edges.union(set(indeps))

        # If we are at a deeper recursion level than the driver's assembly,
        # then we need to add the upscoped connected edges on the assembly
        # boundary
        if scope != self._parent:
            in_edges, out_edges = self._assembly_edges(scope)
            needed_edges = needed_edges.union(set(out_edges))
            needed_in_edges = needed_in_edges.union(set(in_edges))

        # Figure out any workflows for subblocks that need finite-differenced
        if scope_name not in self.dworkflow:
            self._divide_workflow(dscope)

        # Loop through each comp in the workflow and assemble our data
        # structures
        for node_name in self.dworkflow[scope_name]:

            # Finite-differenced blocks come in as lists.
            if isinstance(node_name, list):
                node_list = node_name
            else:
                node_list = [node_name]

                # Driver/Assembly special-casing only applies to single
                # nodes, not finite-difference blocks.
                node = dscope.parent.get(node_name)

                # We don't handle nested drivers yet...
                # ... though the Analytic differentiator can handle solvers
                if isinstance(node, Driver):

                    # There are no connections on an ISolver
                    edge_dict[node_name] = ([], [])

                    continue

                # TODO: Still need to work this out for a subassembly with a
                # non-default driver.
                elif isinstance(node, Assembly):
                    if not isinstance(node.driver, Run_Once):
                        raise NotImplementedError("Nested drivers")

            # Components with derivatives, assemblies, and nested finite
            # difference blocks must provide all derivatives that are
            # needed.
            # NOTE - Derivatives in Functional form

            for item in node_list:

                needed_inputs = []
                for in_name in needed_in_edges:

                    parts = in_name.split(".")
                    if parts[0] == item:
                        var = ".".join(parts[1:])
                        needed_inputs.append(var)

                needed_outputs = []
                for out_name in needed_edges:

                    parts = out_name.split(".")
                    # Note: sometimes constraints and objectives are
                    # functions of component inputs. These should not
                    # be included in the edge dictionaries.
                    if parts[0] == item and dscope.parent.get_metadata(out_name, "iotype") == "out":

                        var = ".".join(parts[1:])
                        needed_outputs.append(var)

                edge_dict[item] = (needed_inputs, needed_outputs)

        # Cache our finite differentiator helper objects.
        if scope_name not in self.fdhelpers:
            self._cache_fd(dscope)
    def test_5_misc(self):
        """Exercise factory error paths: using a released server (fresh and
        previously-used), double release, a false has_interface() result,
        connecting to a bad port, unpicklable arguments, and server
        startup failure."""
        logging.debug('')
        logging.debug('test_misc')

        factory = self.start_factory()

        # Try using a server after being released, server never used before.
        # This usually results in a "Can't connect" error, but sometimes gets a
        # "Can't send" error, based on timing/proxying.
        server = factory.create('')
        factory.release(server)
        msg1 = "Can't connect to server at"
        msg2 = "Can't send to server at"
        try:
            reply = server.echo('hello')
        except RuntimeError as exc:
            if str(exc)[:len(msg1)] != msg1 and str(exc)[:len(msg2)] != msg2:
                self.fail('Expected connect/send error, got %r' % exc)
        else:
            self.fail('Expected RuntimeError')

        # Try using a server after being released, server has been used before.
        # This usually results in a "Can't send" error, but sometimes gets a
        # "Can't connect" error, based on timing/proxying.
        server = factory.create('')
        reply = server.echo('hello')
        factory.release(server)
        msg1 = "Can't send to server at"
        msg2 = "Can't connect to server at"
        try:
            reply = server.echo('hello')
        except RuntimeError as exc:
            if str(exc)[:len(msg1)] != msg1 and str(exc)[:len(msg2)] != msg2:
                self.fail('Expected send/connect error, got %r' % exc)
        else:
            self.fail('Expected RuntimeError')

        # Try releasing a server twice. Depending on timing, this could
        # result in a ValueError trying to identify the server to release or
        # a RemoteError where the request can't be unpacked. The timing seems
        # to be sensitive to AF_INET/AF_UNIX connection type.
        server = factory.create('')
        factory.release(server)
        msg1 = "can't identify server "
        msg2 = "RuntimeError: Can't decrypt/unpack request." \
               " This could be the result of referring to a dead server."
        try:
            factory.release(server)
        except ValueError as exc:
            self.assertEqual(str(exc)[:len(msg1)], msg1)
        except RemoteError as exc:
            self.assertTrue(msg2 in str(exc))
        else:
            self.fail('Expected ValueError or RemoteError')

        # Check false return of has_interface().
        self.assertFalse(has_interface(factory, HasObjectives))

        # Try to connect to wrong port (assuming junk_port isn't being used!)
        address = socket.gethostname()
        junk_port = 12345
        assert_raises(self, 'connect(address, junk_port, pubkey=self.key)',
                      globals(), locals(), RuntimeError, "Can't connect to ")

        # Unpickleable argument.
        code = compile('3 + 4', '<string>', 'eval')
        assert_raises(self, 'factory.echo(code)', globals(), locals(),
                      cPickle.PicklingError, "Can't pickle <type 'code'>")

        # Server startup failure.
        assert_raises(self, 'self.start_factory(port=0, allowed_users={})',
                      globals(), locals(), RuntimeError,
                      'Server startup failed')
class TestCase(unittest.TestCase):
    """ Test distributed simulation. """
    def run(self, result=None):
        """
        Record the :class:`TestResult` used so we can conditionally cleanup
        directories in :meth:`tearDown`.
        """
        self.test_result = result or unittest.TestResult()
        return super(TestCase, self).run(self.test_result)

    def setUp(self):
        """ Called before each test. """
        self.n_errors = len(self.test_result.errors)
        self.n_failures = len(self.test_result.failures)

        self.factories = []
        self.servers = []
        self.server_dirs = []

        # Ensure we control directory cleanup.
        self.keepdirs = os.environ.get('OPENMDAO_KEEPDIRS', '0')
        os.environ['OPENMDAO_KEEPDIRS'] = '1'

    def start_factory(self, port=None, allowed_users=None):
        """
        Start a server factory process in a unique directory and return
        a factory proxy connected to it.

        port: int or None
            Port for the server.  If None, alternates between -1 and 0
            based on the server id so both AF_INET and AF_UNIX/AF_PIPE
            connection types get exercised.

        allowed_users: dict or None
            Maps user name to public key.  Defaults to just the current
            credentials' user.
        """
        global _SERVER_ID
        _SERVER_ID += 1

        # Each server gets its own scratch directory; remove any leftover
        # from a previous run.
        server_dir = 'Factory_%d' % _SERVER_ID
        if os.path.exists(server_dir):
            shutil.rmtree(server_dir)
        os.mkdir(server_dir)
        os.chdir(server_dir)
        self.server_dirs.append(server_dir)
        try:
            logging.debug('')
            logging.debug('tester pid: %s', os.getpid())
            logging.debug('starting server...')

            if port is None:
                # Exercise both AF_INET and AF_UNIX/AF_PIPE.
                port = -1 if _SERVER_ID & 1 else 0

            if allowed_users is None:
                # Default to allowing only the current user.
                credentials = get_credentials()
                allowed_users = {credentials.user: credentials.public_key}

            allowed_types = [
                'openmdao.main.test.test_distsim.HollowSphere',
                'openmdao.main.test.test_distsim.Box',
                'openmdao.main.test.test_distsim.ProtectedBox'
            ]

            server, server_cfg = start_server(port=port,
                                              allowed_users=allowed_users,
                                              allowed_types=allowed_types)
            self.servers.append(server)
            # Connection details are published via the server config file.
            cfg = read_server_config(server_cfg)
            self.address = cfg['address']
            self.port = cfg['port']
            self.tunnel = cfg['tunnel']
            self.key = cfg['key']
            logging.debug('server pid: %s', server.pid)
            logging.debug('server address: %s', self.address)
            logging.debug('server port: %s', self.port)
            logging.debug('server key: %s', self.key)
        finally:
            # Always restore the working directory, even on startup failure.
            os.chdir('..')

        factory = connect(self.address,
                          self.port,
                          self.tunnel,
                          pubkey=self.key)
        self.factories.append(factory)
        logging.debug('factory: %r', factory)
        return factory

    def tearDown(self):
        """ Shut down server process. """
        try:
            # Release client-side factory resources before killing servers.
            for factory in self.factories:
                factory.cleanup()
            for server in self.servers:
                logging.debug('terminating server pid %s', server.pid)
                server.terminate(timeout=10)

            # Cleanup only if there weren't any new errors or failures
            # (and the user hasn't asked to keep directories).
            if len(self.test_result.errors) == self.n_errors and \
               len(self.test_result.failures) == self.n_failures and \
               not int(self.keepdirs):
                for server_dir in self.server_dirs:
                    shutil.rmtree(server_dir)
        finally:
            # Restore the user's original OPENMDAO_KEEPDIRS setting.
            os.environ['OPENMDAO_KEEPDIRS'] = self.keepdirs

    def test_1_client(self):
        """
        Create remote HollowSphere and Box objects through a factory and
        exercise get()/set(), direct attribute access, run(), and RBAC
        error reporting.
        """
        logging.debug('')
        logging.debug('test_client')

        factory = self.start_factory()

        # List available types.
        types = factory.get_available_types()
        logging.debug('Available types:')
        for typname, version in types:
            logging.debug('   %s %s', typname, version)

        # First a HollowSphere, accessed via get()/set().
        obj = factory.create(_MODULE + '.HollowSphere')
        # Remote object must live in a different process than the tester.
        sphere_pid = obj.get('pid')
        self.assertNotEqual(sphere_pid, os.getpid())

        radius = obj.get('radius')
        self.assertEqual(radius, 1.)
        radius += 1
        obj.set('radius', radius)
        new_radius = obj.get('radius')
        self.assertEqual(new_radius, 2.)
        # Outputs are zero until the component has been run.
        self.assertEqual(obj.get('inner_volume'), 0.)
        self.assertEqual(obj.get('volume'), 0.)
        self.assertEqual(obj.get('solid_volume'), 0.)
        self.assertEqual(obj.get('surface_area'), 0.)
        obj.run()
        assert_rel_error(self, obj.get('inner_volume'), 33.510321638, 0.000001)
        assert_rel_error(self, obj.get('volume'), 36.086951213, 0.000001)
        assert_rel_error(self, obj.get('solid_volume'), 2.5766295747, 0.000001)
        assert_rel_error(self, obj.get('surface_area'), 50.265482457, 0.000001)

        # Variable validation should be enforced on the remote side too.
        msg = ": Variable 'radius' must be a float in the range (0.0, "
        assert_raises(self, "obj.set('radius', -1)", globals(), locals(),
                      ValueError, msg)

        # Now a Box, accessed via attribute methods.
        obj = factory.create(_MODULE + '.Box')
        box_pid = obj.get('pid')
        self.assertNotEqual(box_pid, os.getpid())
        # Each create() should yield a separate server process.
        self.assertNotEqual(box_pid, sphere_pid)

        obj.width += 2
        obj.height += 2
        obj.depth += 2
        self.assertEqual(obj.width, 2.)
        self.assertEqual(obj.height, 2.)
        self.assertEqual(obj.depth, 2.)
        self.assertEqual(obj.volume, 0.)
        self.assertEqual(obj.surface_area, 0.)
        obj.run()
        self.assertEqual(obj.volume, 8.0)
        self.assertEqual(obj.surface_area, 24.0)

        # Methods not granted RBAC access must be invisible remotely.
        try:
            obj.no_rbac()
        except RemoteError as exc:
            msg = "AttributeError: method 'no_rbac' of"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')

    def test_2_model(self):
        """
        Embed a remote Box in a local model, run it, and verify recorded
        cases, proxy resolution, and server-side error propagation.
        """
        logging.debug('')
        logging.debug('test_model')

        factory = self.start_factory()

        # Create model and run it.
        box = factory.create(_MODULE + '.Box')
        model = set_as_top(Model(box))
        model.run()

        # Check results.
        # One case per (width, height, depth) combination, recorded in order.
        for width in range(1, 2):
            for height in range(1, 3):
                for depth in range(1, 4):
                    case = model.driver.recorders[0].cases.pop(0)
                    self.assertEqual(case.outputs[0][2],
                                     width * height * depth)

        # The remote box's parent should look like a local Assembly.
        self.assertTrue(is_instance(model.box.parent, Assembly))
        self.assertTrue(has_interface(model.box.parent, IComponent))

        # Upcall to use parent to resolve sibling.
        # At one time this caused proxy problems.
        source = model.box.parent.source
        self.assertEqual(source.width_in, 1.)

        # Proxy resolution.
        obj, path = get_closest_proxy(model, 'box.subcontainer.subvar')
        self.assertEqual(obj, model.box)
        self.assertEqual(path, 'subcontainer.subvar')

        obj, path = get_closest_proxy(model, 'source.subcontainer.subvar')
        self.assertEqual(obj, model.source.subcontainer)
        self.assertEqual(path, 'subvar')

        obj, path = get_closest_proxy(model.source.subcontainer, 'subvar')
        self.assertEqual(obj, model.source.subcontainer)
        self.assertEqual(path, 'subvar')

        # Observable proxied type.
        tmp = model.box.open_in_parent('tmp', 'w')
        tmp.close()
        os.remove('tmp')

        # Cause server-side errors we can see.

        try:
            box.cause_parent_error1()
        except RemoteError as exc:
            msg = "AttributeError: attribute 'no_such_variable' of"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')

        try:
            box.cause_parent_error2()
        except RemoteError as exc:
            msg = "AttributeError: method 'get_trait' of"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')

        try:
            box.cause_parent_error3()
        except RemoteError as exc:
            msg = "RoleError: xyzzy(): No access for role 'owner'"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')
    def calculate(self):
        """
        Return Jacobian for all inputs and outputs.

        Takes a baseline snapshot of inputs/outputs, then perturbs each
        flattened input element with a forward, backward, or central
        finite-difference step (per-variable overrides allowed via
        form_custom/step_type_custom) and fills the corresponding column
        of self.J.  Finally restores all outputs to their baseline values.
        """
        # Baseline point: record current inputs and outputs; all
        # differences below are taken relative to y_base.
        self.get_inputs(self.x)
        self.get_outputs(self.y_base)

        for j, src, in enumerate(self.inputs):

            # Users can customize the FD per variable
            if j in self.form_custom:
                form = self.form_custom[j]
            else:
                form = self.form
            if j in self.step_type_custom:
                step_type = self.step_type_custom[j]
            else:
                step_type = self.step_type

            # src may be a single name or a sequence of aliased names;
            # either way the flattened index range comes from in_bounds.
            if isinstance(src, basestring):
                i1, i2 = self.in_bounds[src]
            else:
                i1, i2 = self.in_bounds[src[0]]

            for i in range(i1, i2):

                # Relative stepsizing
                # NOTE(review): the step is scaled only when the current
                # value exceeds relative_threshold, so negative values
                # never trigger scaling -- confirm that is intended.
                fd_step = self.fd_step[j]
                if step_type == 'relative':
                    current_val = self.get_value(src, i1, i2, i)
                    if current_val > self.relative_threshold:
                        fd_step = fd_step*current_val

                #--------------------
                # Forward difference
                #--------------------
                if form == 'forward':

                    # Step (set_value applies an increment: the step is
                    # undone below by applying its negative)
                    self.set_value(src, fd_step, i1, i2, i)

                    self.pa.run(ffd_order=1)
                    self.get_outputs(self.y)

                    # Forward difference
                    self.J[:, i] = (self.y - self.y_base)/fd_step

                    # Undo step
                    self.set_value(src, -fd_step, i1, i2, i)

                #--------------------
                # Backward difference
                #--------------------
                elif form == 'backward':

                    # Step
                    self.set_value(src, -fd_step, i1, i2, i)

                    self.pa.run(ffd_order=1)
                    self.get_outputs(self.y)

                    # Backward difference
                    self.J[:, i] = (self.y_base - self.y)/fd_step

                    # Undo step
                    self.set_value(src, fd_step, i1, i2, i)

                #--------------------
                # Central difference
                #--------------------
                elif form == 'central':

                    # Forward Step
                    self.set_value(src, fd_step, i1, i2, i)

                    self.pa.run(ffd_order=1)
                    self.get_outputs(self.y)

                    # Backward Step (net offset from base is now -fd_step)
                    self.set_value(src, -2.0*fd_step, i1, i2, i)

                    self.pa.run(ffd_order=1)
                    self.get_outputs(self.y2)

                    # Central difference
                    self.J[:, i] = (self.y - self.y2)/(2.0*fd_step)

                    # Undo step (back to the base value)
                    self.set_value(src, fd_step, i1, i2, i)

        # Return outputs to a clean state.
        for src in self.outputs:
            i1, i2 = self.out_bounds[src]
            old_val = self.scope.get(src)

            if isinstance(old_val, float):
                new_val = float(self.y_base[i1:i2])
            elif isinstance(old_val, ndarray):
                shape = old_val.shape
                if len(shape) > 1:
                    # Reshape the flat baseline slice back to the
                    # original multi-dimensional shape.
                    new_val = self.y_base[i1:i2]
                    new_val = new_val.reshape(shape)
                else:
                    new_val = self.y_base[i1:i2]
            elif has_interface(old_val, IVariableTree):
                new_val = old_val.copy()
                self.pa.wflow._update(src, new_val, self.y_base[i1:i2])
            else:
                # Unsupported type: leave it untouched.
                continue

            # 'src' may carry an index expression, e.g. "arr[2]".  idx
            # retains the trailing ']', so the exec strings below are
            # syntactically complete despite the template's missing
            # bracket ('old_val[%s = ...' % '2]' -> 'old_val[2] = ...').
            # NOTE(review): exec on a string derived from a variable name
            # -- safe only while names are trusted.
            src, _, idx = src.partition('[')
            if idx:
                old_val = self.scope.get(src)
                if isinstance(new_val, ndarray):
                    exec('old_val[%s = new_val.copy()' % idx)
                else:
                    exec('old_val[%s = new_val' % idx)
                self.scope.set(src, old_val, force=True)
            else:
                if isinstance(new_val, ndarray):
                    self.scope.set(src, new_val.copy(), force=True)
                else:
                    self.scope.set(src, new_val, force=True)

        #print 'after FD', self.pa.name, self.J
        return self.J
 def subdrivers(self):
     """Generate the subdrivers found in this driver's iteration set."""
     for member in self.iteration_set():
         if not has_interface(member, IDriver):
             continue
         yield member