Пример #1
0
 def get_workflow(self, pathname):
     ''' Get the workflow for the specified driver or assembly.
         If no driver or assembly is specified, get the workflows for
         all of the top-level assemblies.

         pathname: str
             Dotted path of the driver or assembly; empty/None means
             "all top-level drivers".

         Returns a JSON string encoding a list of workflow dicts.
     '''
     flows = []
     if pathname:
         drvr, root = self.get_object(pathname)
         # allow for request on the parent assembly
         if is_instance(drvr, Assembly):
             drvr = drvr.get('driver')
             pathname = pathname + '.driver'
         if drvr:
             try:
                 flow = drvr.get_workflow()
             except Exception as err:
                 self._error(err, sys.exc_info())
             else:
                 # append only when get_workflow() succeeded; previously
                 # `flow` was unbound here after an exception (NameError)
                 flows.append(flow)
     else:
         for k, v in self.proj.items():
             if is_instance(v, Assembly):
                 v = v.get('driver')
             if is_instance(v, Driver):
                 flows.append(v.get_workflow())
     return json.dumps(flows, default=json_default)
Пример #2
0
 def get_workflow(self):
     """ Get the driver info and the list of components that make up the
         driver's workflow; recurse on nested drivers.
     """
     from openmdao.main.assembly import Assembly

     def _full_type(obj):
         # dotted "module.ClassName" label for an object
         return type(obj).__module__ + '.' + type(obj).__name__

     workflow = []
     for comp in self.workflow:
         comp_path = comp.get_pathname()
         if is_instance(comp, Assembly) and comp.driver:
             workflow.append({
                 'pathname': comp_path,
                 'type':     _full_type(comp),
                 'driver':   comp.driver.get_workflow(),
                 'valid':    comp.is_valid()
             })
         elif is_instance(comp, Driver):
             workflow.append(comp.get_workflow())
         else:
             workflow.append({
                 'pathname': comp_path,
                 'type':     _full_type(comp),
                 'valid':    comp.is_valid()
             })
     ret = {}
     ret['pathname'] = self.get_pathname()
     ret['type'] = _full_type(self)
     ret['workflow'] = workflow
     ret['valid'] = self.is_valid()
     return ret
Пример #3
0
 def get_workflow(self):
     """ Get the driver info and the list of components that make up the
         driver's workflow; recurse on nested drivers.
     """
     from openmdao.main.assembly import Assembly
     ret = {}
     ret['pathname'] = self.get_pathname()
     ret['type'] = type(self).__module__ + '.' + type(self).__name__
     ret['workflow'] = []
     ret['valid'] = self.is_valid()
     steps = ret['workflow']
     for comp in self.workflow:
         comp_path = comp.get_pathname()
         comp_type = type(comp).__module__ + '.' + type(comp).__name__
         if is_instance(comp, Assembly) and comp.driver:
             # sub-assembly: recurse via its own driver
             steps.append({'pathname': comp_path,
                           'type': comp_type,
                           'driver': comp.driver.get_workflow(),
                           'valid': comp.is_valid()})
         elif is_instance(comp, Driver):
             # nested driver: splice in its workflow dict directly
             steps.append(comp.get_workflow())
         else:
             # plain component
             steps.append({'pathname': comp_path,
                           'type': comp_type,
                           'valid': comp.is_valid()})
     return ret
 def get_workflow(self, pathname):
     ''' Get the workflow for the specified driver or assembly.
         If no driver or assembly is specified, get the workflows for
         all of the top-level assemblies.

         pathname: str
             Dotted path of the driver or assembly; empty/None means
             "all top-level drivers".

         Returns a JSON string encoding a list of workflow dicts.
     '''
     flows = []
     if pathname:
         drvr, root = self.get_object(pathname)
         # allow for request on the parent assembly
         if is_instance(drvr, Assembly):
             drvr = drvr.get('driver')
             pathname = pathname + '.driver'
         if drvr:
             try:
                 flow = drvr.get_workflow()
             except Exception as err:
                 self._error(err, sys.exc_info())
             else:
                 # bug fix: append only on success — `flow` was unbound
                 # here when get_workflow() raised, causing a NameError
                 flows.append(flow)
     else:
         for k, v in self.proj.items():
             if is_instance(v, Assembly):
                 v = v.get('driver')
             if is_instance(v, Driver):
                 flows.append(v.get_workflow())
     return json.dumps(flows, default=json_default)
Пример #5
0
 def get_dataflow(self):
     ''' get the list of components and connections between them
         that make up the data flow for the given assembly
     '''
     components = []
     connections = []
     if is_instance(self, Assembly):
         # walk components in dependency (topological) order,
         # skipping internal '@'-prefixed graph nodes
         graph = self._depgraph._graph
         for node in nx.algorithms.dag.topological_sort(graph):
             if node.startswith('@'):
                 continue
             comp = self.get(node)
             if not is_instance(comp, Component):
                 continue
             components.append({
                 'name': comp.name,
                 'pathname': comp.get_pathname(),
                 'type': type(comp).__name__,
                 'valid': comp.is_valid(),
                 'is_assembly': is_instance(comp, Assembly)
             })
         # list of connections (convert tuples to lists)
         connections = [list(pair) for pair in
                        self.list_connections(show_passthrough=True)]
     return {'components': components, 'connections': connections}
Пример #6
0
    def replace(self, target_name, newobj):
        """Replace one object with another, attempting to mimic the inputs and connections
        of the replaced object as much as possible.

        target_name: str
            Name of the child attribute to be replaced.
        newobj: object
            The replacement object; if it supports ``mimic`` it copies
            the target's inputs/delegates and takes over its name.
        """
        tobj = getattr(self, target_name)

        # Save existing driver references.
        refs = {}
        if has_interface(tobj, IComponent):
            for obj in self.__dict__.values():
                if obj is not tobj and is_instance(obj, Driver):
                    refs[obj] = obj.get_references(target_name)

        if has_interface(
                newobj, IComponent
        ):  # remove any existing connections to replacement object
            self.disconnect(newobj.name)
        if hasattr(newobj, 'mimic'):
            try:
                newobj.mimic(
                    tobj)  # this should copy inputs, delegates and set name
            except Exception:
                self.reraise_exception(
                    "Couldn't replace '%s' of type %s with type %s" %
                    (target_name, type(tobj).__name__, type(newobj).__name__))
        conns = self.find_referring_connections(target_name)
        wflows = self.find_in_workflows(target_name)
        # NOTE(review): the trailing '.' in the pattern is unescaped, so it
        # matches any character — presumably intended to match "name." prefixes;
        # confirm before changing.
        target_rgx = re.compile(r'(\W?)%s.' % target_name)
        conns.extend([(u, v)
                      for u, v in self._depgraph.list_autopassthroughs()
                      if re.search(target_rgx, u) is not None
                      or re.search(target_rgx, v) is not None])

        self.add(
            target_name, newobj
        )  # this will remove the old object (and any connections to it)

        # recreate old connections
        for u, v in conns:
            self.connect(u, v)

        # add new object (if it's a Component) to any workflows where target was
        if has_interface(newobj, IComponent):
            for wflow, idx in wflows:
                wflow.add(target_name, idx)

        # Restore driver references.
        # NOTE(review): assumes the set of Driver attributes is unchanged since
        # refs was built — a driver added during mimic/add would KeyError here.
        if refs:
            for obj in self.__dict__.values():
                if obj is not newobj and is_instance(obj, Driver):
                    obj.restore_references(refs[obj], target_name)

        # Workflows need a reference to their new parent driver
        if is_instance(newobj, Driver):
            newobj.workflow._parent = newobj
Пример #7
0
 def get_workflow(self, pathname):
     ''' Get the workflow for the specified driver or assembly; if no
         pathname is given, build workflow dicts for all top-level
         drivers.  Returns a jsonpickle-encoded list.
     '''
     flows = []
     if pathname:
         drvr, root = self.get_container(pathname)
         # allow for request on the parent assembly
         if is_instance(drvr, Assembly):
             drvr = drvr.get('driver')
             pathname = pathname + '.driver'
         if drvr:
             try:
                 flow = drvr.get_workflow()
             except Exception as err:
                 self._error(err, sys.exc_info())
             else:
                 # bug fix: append only on success — `flow` was unbound
                 # here when get_workflow() raised, causing a NameError
                 flows.append(flow)
     else:
         for k, v in self.proj.items():
             if is_instance(v, Assembly):
                 v = v.get('driver')
             if is_instance(v, Driver):
                 flow = {
                     'pathname': v.get_pathname(),
                     'type': type(v).__module__ + '.' + type(v).__name__,
                     'workflow': [],
                     'valid': v.is_valid(),
                 }
                 for comp in v.workflow:
                     # local names; avoid clobbering the `pathname` parameter
                     comp_path = comp.get_pathname()
                     comp_type = (type(comp).__module__ + '.' +
                                  type(comp).__name__)
                     if is_instance(comp, Assembly) and comp.driver:
                         flow['workflow'].append({
                             'pathname': comp_path,
                             'type': comp_type,
                             'driver': comp.driver.get_workflow(),
                             'valid': comp.is_valid(),
                         })
                     elif is_instance(comp, Driver):
                         flow['workflow'].append(comp.get_workflow())
                     else:
                         flow['workflow'].append({
                             'pathname': comp_path,
                             'type': comp_type,
                             'valid': comp.is_valid(),
                         })
                 flows.append(flow)
     return jsonpickle.encode(flows)
Пример #8
0
    def get_workflow(self):
        """ Get the driver info and the list of components that make up the
            driver's workflow; recurse on nested drivers.
        """
        from openmdao.main.assembly import Assembly
        ret = {}
        ret['pathname'] = self.get_pathname()
        ret['type'] = type(self).__module__ + '.' + type(self).__name__
        ret['workflow'] = []
        ret['valid'] = self.is_valid()
        # snapshot the workflow before iterating
        for comp in list(self.workflow):
            # Skip pseudo-comps
            if hasattr(comp, '_pseudo_type'):
                continue
            comp_path = comp.get_pathname()
            if is_instance(comp, Assembly) and comp.driver:
                inames = [cls.__name__
                          for cls in list(implementedBy(comp.__class__))]
                ret['workflow'].append({
                    'pathname': comp_path,
                    'type': type(comp).__module__ + '.' + type(comp).__name__,
                    'interfaces': inames,
                    'driver': comp.driver.get_workflow(),
                    'valid': comp.is_valid()
                })
            elif is_instance(comp, Driver):
                ret['workflow'].append(comp.get_workflow())
            else:
                inames = [cls.__name__
                          for cls in list(implementedBy(comp.__class__))]
                ret['workflow'].append({
                    'pathname': comp_path,
                    'type': type(comp).__module__ + '.' + type(comp).__name__,
                    'interfaces': inames,
                    'valid': comp.is_valid()
                })
        return ret
    def replace(self, target_name, newobj):
        """Replace one object with another, attempting to mimic the inputs and connections
        of the replaced object as much as possible.

        target_name: str
            Name of the child attribute being replaced.
        newobj: object
            The replacement; if it has a ``mimic`` method it copies the
            target's inputs/delegates and takes over its name.
        """
        tobj = getattr(self, target_name)

        # Save existing driver references.
        refs = {}
        if has_interface(tobj, IComponent):
            for obj in self.__dict__.values():
                if obj is not tobj and is_instance(obj, Driver):
                    refs[obj] = obj.get_references(target_name)

        if has_interface(newobj, IComponent):  # remove any existing connections to replacement object
            self.disconnect(newobj.name)
        if hasattr(newobj, "mimic"):
            try:
                newobj.mimic(tobj)  # this should copy inputs, delegates and set name
            except Exception:
                self.reraise_exception(
                    "Couldn't replace '%s' of type %s with type %s"
                    % (target_name, type(tobj).__name__, type(newobj).__name__)
                )
        conns = self.find_referring_connections(target_name)
        wflows = self.find_in_workflows(target_name)
        # NOTE(review): the trailing '.' in the pattern is unescaped, so it
        # matches any character — presumably intended to match "name." prefixes.
        target_rgx = re.compile(r"(\W?)%s." % target_name)
        conns.extend(
            [
                (u, v)
                for u, v in self._depgraph.list_autopassthroughs()
                if re.search(target_rgx, u) is not None or re.search(target_rgx, v) is not None
            ]
        )

        self.add(target_name, newobj)  # this will remove the old object (and any connections to it)

        # recreate old connections
        for u, v in conns:
            self.connect(u, v)

        # add new object (if it's a Component) to any workflows where target was
        if has_interface(newobj, IComponent):
            for wflow, idx in wflows:
                wflow.add(target_name, idx)

        # Restore driver references.
        # NOTE(review): assumes the set of Driver attributes is unchanged since
        # refs was built — a driver added during mimic/add would KeyError here.
        if refs:
            for obj in self.__dict__.values():
                if obj is not newobj and is_instance(obj, Driver):
                    obj.restore_references(refs[obj], target_name)
Пример #10
0
    def _items(self, visited, recurse=False, **metadata):
        """Return an iterator that returns a list of tuples of the form
        (rel_pathname, obj) for each trait of this VariableTree that matches
        the given metadata. If recurse is True, also iterate through all
        child Containers of each Container found.

        visited: set
            ids of objects already traversed; used to break reference cycles.
        """
        if id(self) not in visited:
            visited.add(id(self))

            if 'iotype' in metadata:
                meta_io = metadata['iotype']
                matches_io = False
                # 'iotype' may be supplied as a predicate function or a value
                if type(meta_io) is FunctionType:
                    if meta_io(self.iotype):
                        matches_io = True
                elif meta_io == self.iotype:
                    matches_io = True
                if matches_io:
                    # our own iotype matched; don't re-filter child traits on it
                    newdict = metadata.copy()
                    del newdict['iotype']
            else:
                matches_io = True
                newdict = metadata

            if matches_io:
                for name in self._alltraits(**newdict):
                    # private traits are not part of the public interface
                    if name.startswith('_'):
                        continue
                    obj = getattr(self, name)
                    yield (name, obj)
                    # recurse into nested VariableTrees not yet visited;
                    # children get dotted relative pathnames
                    if recurse and is_instance(obj, VariableTree) and \
                       id(obj) not in visited:
                        for chname, child in obj._items(visited, recurse,
                                                        **metadata):
                            yield ('.'.join([name, chname]), child)
 def get_dataflow(self, pathname):
     ''' Get the structure of the specified assembly or of the global
         namespace if no pathname is specified; consists of the list of
         components and the connections between them (i.e., the dataflow).
     '''
     dataflow = {}
     if pathname and len(pathname) > 0:
         try:
             asm, root = self.get_object(pathname)
             if has_interface(asm, IAssembly):
                 dataflow = asm.get_dataflow()
         except Exception as err:
             self._error(err, sys.exc_info())
     else:
         # global namespace: list top-level components, no connections
         components = []
         for name, obj in self.proj.items():
             if not is_instance(obj, Component):
                 continue
             components.append({
                 'name': name,
                 'pathname': name,
                 'type': type(obj).__name__,
                 'interfaces': [cls.__name__
                                for cls in implementedBy(obj.__class__)],
                 'python_id': id(obj)
             })
         dataflow['components']  = components
         dataflow['connections'] = []
         dataflow['parameters']  = []
         dataflow['constraints'] = []
         dataflow['objectives']  = []
         dataflow['responses']   = []
     return json.dumps(dataflow, default=json_default)
Пример #12
0
    def get_req_default(self, vt_required=None):
        """Returns a list of all inputs that are required but still have
        their default value.

        vt_required: bool or None
            If true, variables inside VariableTrees are reported even when
            their ``required`` metadata is False or None.
        """
        req = []
        if vt_required:
            req_test = [True, False, None]
        else:
            req_test = [True]

        for name, trait in self.traits(type=not_event).items():
            obj = getattr(self, name)
            if obj is self.parent:
                continue
            if is_instance(obj, VariableTree):
                # recurse; child names are prefixed with our own name
                req.extend([
                    '.'.join((self.name, n))
                    for n in obj.get_req_default(vt_required)
                ])
            elif trait.required in req_test:
                # Prefer the trait-type's default_value, falling back to the
                # trait's own default.  (Was a bare `except:`, which also
                # swallowed KeyboardInterrupt/SystemExit.)
                try:
                    trait = trait.trait_type
                except Exception:
                    unset = (obj == trait.default)
                else:
                    unset = (obj == trait.default_value)
                if not isinstance(unset, bool):
                    # array-like elementwise comparison; collapse with .all()
                    try:
                        unset = unset.all()
                    except Exception:
                        pass
                if unset:
                    req.append('.'.join((self.name, name)))

        return req
Пример #13
0
def _get_as_xml(container, xml):
    """ Recursive helper for :meth:`get_as_xml`.

    container: Container
        Object whose traits are serialized.
    xml: list of str
        Accumulator; XML fragments are appended in place.
    """
    xml.append('<members>')
    # Using ._alltraits().items() since .items() returns a generator.
    # Sorting by name gives a deterministic member order.
    for name, trait in sorted(container._alltraits().items(),
                              key=lambda item: item[0]):
        if name in _IGNORE_ATTR or name.startswith('_'):
            continue
        val = getattr(container, name)
        if is_instance(val, Container):
            # nested container: emit an object member and recurse into it
            xml.append("""\
<member name="%s" type="object" access="public" className="%s">\
""" % (name, _lookup(val)))
            _get_as_xml(val, xml)
            xml.append('</member>')
        else:
            # leaf trait: dispatch on the declared trait type.
            # NOTE(review): the isinstance order matters if these trait types
            # subclass one another — confirm before reordering.
            ttype = trait.trait_type
            if isinstance(ttype, Array):
                _get_array(name, val, trait, xml, True, container)
            elif isinstance(ttype, List):
                _get_array(name, val, trait, xml, False, container)
            elif isinstance(ttype, Bool):
                _get_bool(name, val, trait, xml)
            elif isinstance(ttype, Enum):
                _get_enum(name, val, trait, xml)
            elif isinstance(ttype, Float):
                _get_float(name, val, trait, xml)
            elif isinstance(ttype, Int):
                _get_int(name, val, trait, xml)
            elif isinstance(ttype, Str):
                _get_str(name, val, trait, xml)
            else:
                raise RuntimeError('%s.%s: unsupported type' %
                                   (container.get_pathname(), name))
    xml.append('</members>')
 def _get_components(self, cont, pathname=None):
     ''' Get a hierarchical list of all the components in the given
         container or dictionary.  The name of the root container, if
         specified, is prepended to all pathnames.
     '''
     comps = []
     for k, v in cont.items():
         if is_instance(v, Component):
             comp = {}
             # bug fix: identity check instead of `==` — equality on the
             # globals dict compares full contents (slow, and wrongly true
             # for an equal copy)
             if cont is self.proj.__dict__:
                 comp['pathname'] = k
                 children = self._get_components(v, k)
             else:
                 comp['pathname'] = pathname + '.' + k if pathname else k
                 children = self._get_components(v, comp['pathname'])
             if len(children) > 0:
                 comp['children'] = children
             comp['type'] = str(v.__class__.__name__)
             comp['interfaces'] = [klass.__name__
                                   for klass in implementedBy(v.__class__)]
             comps.append(comp)
     return comps
Пример #15
0
    def _items(self, visited, recurse=False, **metadata):
        """Return an iterator that returns a list of tuples of the form
        (rel_pathname, obj) for each trait of this VariableTree that matches
        the given metadata. If recurse is True, also iterate through all
        child Containers of each Container found.

        visited: set
            ids of objects already traversed; used to break reference cycles.
        """
        if id(self) not in visited:
            visited.add(id(self))

            if 'iotype' in metadata:
                meta_io = metadata['iotype']
                matches_io = False
                # 'iotype' may be supplied as a predicate function or a value
                if type(meta_io) is FunctionType:
                    if meta_io(self.iotype):
                        matches_io = True
                elif meta_io == self.iotype:
                    matches_io = True
                if matches_io:
                    # our own iotype matched; don't re-filter child traits on it
                    newdict = metadata.copy()
                    del newdict['iotype']
            else:
                matches_io = True
                newdict = metadata

            if matches_io:
                for name in self._alltraits(**newdict):
                    # private traits are not part of the public interface
                    if name.startswith('_'):
                        continue
                    obj = getattr(self, name)
                    yield (name, obj)
                    # recurse into nested VariableTrees not yet visited;
                    # children get dotted relative pathnames
                    if recurse and is_instance(obj, VariableTree) and \
                       id(obj) not in visited:
                        for chname, child in obj._items(
                                visited, recurse, **metadata):
                            yield ('.'.join((name, chname)), child)
Пример #16
0
 def get_dataflow(self, pathname):
     ''' Get the structure of the specified assembly or of the global
         namespace if no pathname is specified; consists of the list of
         components and the connections between them (i.e., the dataflow).
     '''
     dataflow = {}
     if pathname and len(pathname) > 0:
         try:
             asm, root = self.get_object(pathname)
             if has_interface(asm, IAssembly):
                 dataflow = asm.get_dataflow()
         except Exception as err:
             self._error(err, sys.exc_info())
     else:
         # global namespace: top-level components only, no connections
         dataflow['components'] = [
             {'name': k,
              'pathname': k,
              'type': type(v).__name__,
              'interfaces': [cls.__name__
                             for cls in implementedBy(v.__class__)],
              'python_id': id(v)}
             for k, v in self.proj.items() if is_instance(v, Component)
         ]
         for key in ('connections', 'parameters', 'constraints',
                     'objectives', 'responses'):
             dataflow[key] = []
     return json.dumps(dataflow, default=json_default)
Пример #17
0
 def _get_components(self, cont, pathname=None):
     ''' Build a hierarchical list of all components in the given
         container or dictionary; the root container's name, if given,
         is prepended to every pathname.
     '''
     comps = []
     for name, obj in cont.items():
         if not is_instance(obj, Component):
             continue
         # top-level names are used as-is; nested names get dotted paths
         if cont is self.proj._model_globals:
             full_path = name
         else:
             full_path = '.'.join([pathname, name]) if pathname else name
         comp = {'pathname': full_path}
         children = self._get_components(obj, full_path)
         if len(children) > 0:
             comp['children'] = children
         comp['type'] = str(obj.__class__.__name__)
         comp['interfaces'] = [klass.__name__
                               for klass in implementedBy(obj.__class__)]
         comps.append(comp)
     return comps
Пример #18
0
def _get_as_xml(container, xml):
    """ Recursive helper for :meth:`get_as_xml`.

    container: Container
        Object whose traits are serialized.
    xml: list of str
        Accumulator; XML fragments are appended in place.
    """
    xml.append('<members>')
    # Using ._alltraits().items() since .items() returns a generator.
    # Sorting by name gives a deterministic member order.
    for name, trait in sorted(container._alltraits().items(),
                              key=lambda item: item[0]):
        if name in _IGNORE_ATTR or name.startswith('_'):
            continue
        val = getattr(container, name)
        if is_instance(val, Container):
            # nested container: emit an object member and recurse into it
            xml.append("""\
<member name="%s" type="object" access="public" className="%s">\
""" % (name, _lookup(val)))
            _get_as_xml(val, xml)
            xml.append('</member>')
        else:
            # leaf trait: dispatch on the declared trait type.
            # NOTE(review): the isinstance order matters if these trait types
            # subclass one another — confirm before reordering.
            ttype = trait.trait_type
            if isinstance(ttype, Array):
                _get_array(name, val, trait, xml, True, container)
            elif isinstance(ttype, List):
                _get_array(name, val, trait, xml, False, container)
            elif isinstance(ttype, Bool):
                _get_bool(name, val, trait, xml)
            elif isinstance(ttype, Enum):
                _get_enum(name, val, trait, xml)
            elif isinstance(ttype, Float):
                _get_float(name, val, trait, xml)
            elif isinstance(ttype, Int):
                _get_int(name, val, trait, xml)
            elif isinstance(ttype, Str):
                _get_str(name, val, trait, xml)
            else:
                raise RuntimeError('%s.%s: unsupported type'
                                   % (container.get_pathname(), name))
    xml.append('</members>')
Пример #19
0
    def get_req_default(self, vt_required=None):
        """Returns a list of all inputs that are required but still have
        their default value.

        vt_required: bool or None
            If true, variables inside VariableTrees are reported even when
            their ``required`` metadata is False or None.
        """
        req = []
        if vt_required:
            req_test = [True, False, None]
        else:
            req_test = [True]

        for name, trait in self.traits(type=not_event).items():
            obj = getattr(self, name)
            if obj is self.parent:
                continue
            if is_instance(obj, VariableTree):
                # recurse; child names are prefixed with our own name
                req.extend(['.'.join((self.name, n))
                            for n in obj.get_req_default(vt_required)])
            elif trait.required in req_test:
                # Prefer the trait-type's default_value, falling back to the
                # trait's own default.  (Was a bare `except:`, which also
                # swallowed KeyboardInterrupt/SystemExit.)
                try:
                    trait = trait.trait_type
                except Exception:
                    unset = (obj == trait.default)
                else:
                    unset = (obj == trait.default_value)
                if not isinstance(unset, bool):
                    # array-like elementwise comparison; collapse with .all()
                    try:
                        unset = unset.all()
                    except Exception:
                        pass
                if unset:
                    req.append('.'.join((self.name, name)))

        return req
Пример #20
0
 def _dump_iteration_tree(obj, f, tablevel):
     """ Recursively write the driver/assembly iteration hierarchy rooted
         at `obj` to stream `f`, one pathname per line, indented three
         spaces per nesting level.
     """
     pad = ' ' * tablevel
     if is_instance(obj, Driver):
         f.write(pad + obj.get_pathname() + '\n')
         for comp in obj.workflow:
             if is_instance(comp, Driver) or is_instance(comp, Assembly):
                 _dump_iteration_tree(comp, f, tablevel + 3)
             else:
                 f.write(' ' * (tablevel + 3) + comp.get_pathname() + '\n')
     elif is_instance(obj, Assembly):
         f.write(pad + obj.get_pathname() + '\n')
         _dump_iteration_tree(obj.driver, f, tablevel + 3)
Пример #21
0
 def _dump_iteration_tree(obj, f, tablevel):
     """ Write the iteration hierarchy rooted at `obj` (drivers and
         assemblies) to stream `f`, indenting three spaces per level.
     """
     def _write_line(text, level):
         # one indented pathname per line
         f.write(' ' * level)
         f.write(text)
         f.write('\n')

     if is_instance(obj, Driver):
         _write_line(obj.get_pathname(), tablevel)
         for comp in obj.workflow:
             if is_instance(comp, Driver) or is_instance(comp, Assembly):
                 _dump_iteration_tree(comp, f, tablevel + 3)
             else:
                 _write_line(comp.get_pathname(), tablevel + 3)
     elif is_instance(obj, Assembly):
         _write_line(obj.get_pathname(), tablevel)
         _dump_iteration_tree(obj.driver, f, tablevel + 3)
Пример #22
0
 def _update_workflows(self):
     ''' Call :meth:`_update_workflow` on drivers to capture any workflow
         updates now rather than waiting until they are run.
     '''
     for key, cont in self.proj.items():
         if not has_interface(cont, IContainer):
             continue
         # collect the drivers first, then update them
         drivers = [obj for _name, obj in cont.items(recurse=True)
                    if is_instance(obj, Driver)]
         for driver in drivers:
             driver._update_workflow()
Пример #23
0
 def find_in_workflows(self, name):
     """Returns a list of tuples of the form (workflow, index) for all
     workflows in the scope of this Assembly that contain the given
     component name.
     """
     return [(obj.workflow, obj.workflow.index(name))
             for obj in self.__dict__.values()
             if is_instance(obj, Driver) and name in obj.workflow]
Пример #24
0
 def remove(self, name):
     """Remove the named container object from this assembly and remove
     it from its workflow (if any)."""
     cont = getattr(self, name)
     self._depgraph.remove(name)
     # drop the name from every other driver's workflow
     for obj in self.__dict__.values():
         if is_instance(obj, Driver) and obj is not cont:
             obj.workflow.remove(name)
     return super(Assembly, self).remove(name)
Пример #25
0
 def _update_roots(self):
     ''' Ensure that all root containers in the project dictionary know
         their own name and that all root assemblies are set as top.
     '''
     for key, obj in self.proj.items():
         # keep the container's name in sync with its project key
         if has_interface(obj, IContainer) and obj.name != key:
             obj.name = key
         if is_instance(obj, Assembly) and obj._call_cpath_updated:
             set_as_top(obj)
Пример #26
0
 def find_in_workflows(self, name):
     """Returns a list of tuples of the form (workflow, index) for all
     workflows in the scope of this Assembly that contain the given
     component name.
     """
     found = []
     for attr in self.__dict__.values():
         if not is_instance(attr, Driver):
             continue
         if name in attr.workflow:
             found.append((attr.workflow, attr.workflow.index(name)))
     return found
Пример #27
0
 def get_workflow(self, pathname):
     ''' Get the workflow for the specified driver or assembly; with no
         pathname, build workflow dicts for all top-level drivers.
         Returns a jsonpickle-encoded list.
     '''
     flows = []
     if pathname:
         drvr, root = self.get_container(pathname)
         # allow for request on the parent assembly
         if is_instance(drvr, Assembly):
             drvr = drvr.get('driver')
             pathname = pathname + '.driver'
         if drvr:
             try:
                 flow = drvr.get_workflow()
             except Exception as err:
                 self._error(err, sys.exc_info())
             else:
                 # bug fix: append only on success — `flow` was unbound
                 # here when get_workflow() raised, causing a NameError
                 flows.append(flow)
     else:
         for k, v in self.proj.items():
             if is_instance(v, Assembly):
                 v = v.get('driver')
             if is_instance(v, Driver):
                 flow = {}
                 flow['pathname'] = v.get_pathname()
                 flow['type'] = type(v).__module__ + '.' + type(v).__name__
                 flow['workflow'] = []
                 flow['valid'] = v.is_valid()
                 for comp in v.workflow:
                     # local names; avoid clobbering the `pathname` parameter
                     cpath = comp.get_pathname()
                     ctype = type(comp).__module__ + '.' + type(comp).__name__
                     if is_instance(comp, Assembly) and comp.driver:
                         flow['workflow'].append({
                             'pathname': cpath,
                             'type':     ctype,
                             'driver':   comp.driver.get_workflow(),
                             'valid':    comp.is_valid()
                           })
                     elif is_instance(comp, Driver):
                         flow['workflow'].append(comp.get_workflow())
                     else:
                         flow['workflow'].append({
                             'pathname': cpath,
                             'type':     ctype,
                             'valid':    comp.is_valid()
                           })
                 flows.append(flow)
     return jsonpickle.encode(flows)
Пример #28
0
 def _update_roots(self):
     """Synchronize root container names with their project keys and
     flag root assemblies as top-level.
     """
     entries = self.proj.items()
     for key, obj in entries:
         if has_interface(obj, IContainer):
             # The container's own name must match its project key.
             if obj.name != key:
                 obj.name = key
         if is_instance(obj, Assembly) and obj._call_cpath_updated:
             set_as_top(obj)
Пример #29
0
    def remove(self, name):
        """Remove the named container object from this assembly and remove
        it from its workflow (if any)."""
        target = getattr(self, name)
        self._depgraph.remove(name)
        # Scrub the component out of every driver workflow (except the
        # object being removed, in case it is itself a Driver).
        drivers = [o for o in self.__dict__.values()
                   if o is not target and is_instance(o, Driver)]
        for drv in drivers:
            drv.workflow.remove(name)

        return super(Assembly, self).remove(name)
Пример #30
0
 def add(self, name, obj):
     """Delegate to the base class *add*, then register Components in
     the dependency graph.

     Returns the added object.
     """
     added = super(Assembly, self).add(name, obj)
     if is_instance(added, Component):
         self._depgraph.add(added.name)
     return added
Пример #31
0
 def add(self, name, obj):
     """Call the base class *add*; Component instances are additionally
     recorded as nodes of the dependency graph.

     Returns the added object.
     """
     obj = super(Assembly, self).add(name, obj)
     needs_graph_node = is_instance(obj, Component)
     if needs_graph_node:
         self._depgraph.add(obj.name)
     return obj
Пример #32
0
 def _update_roots(self):
     """Ensure root containers know their own names and that every root
     Assembly is set as top.
     """
     for key, val in self.proj.__dict__.items():
         # Rename containers whose name drifted from their project key.
         if has_interface(val, IContainer) and val.name != key:
             val.name = key
         if is_instance(val, Assembly):
             set_as_top(val)
Пример #33
0
    def _restore(self, container):
        """ Restore remote state (variables don't have a post_load()). """
        # _alltraits() is required here: at this point items() still
        # reports the ProxyMixin traits as 'Missing'.
        all_traits = container._alltraits()
        for trait in all_traits.values():
            handler = trait.trait_type
            if isinstance(handler, ProxyMixin):
                handler.restore(self._client)

        # Recurse into every child Container.
        for _, child in container.items():
            if is_instance(child, Container):
                self._restore(child)
 def get_workflow(self,pathname):
     ''' Build the workflow description for the driver (or assembly)
         located at `pathname`.  Python 2 syntax (`except ..., err`).
     '''
     flow = {}
     drvr, root = self.get_container(pathname)
     # allow for request on the parent assembly
     if is_instance(drvr,Assembly):
         drvr = drvr.get('driver')
         pathname = pathname + '.driver'
     if drvr:
         try:
             flow = self._get_workflow(drvr,pathname,root)
         except Exception, err:
             self.error(err,sys.exc_info())
     # NOTE(review): 'flow' is built but never returned in this snippet —
     # looks truncated; confirm against the full source.
 def _get_structure(self,asm,pathname):
     """Build the data-flow description (component list plus connection
     list) for the given assembly.
     """
     comp_list = []
     conn_list = []
     if is_instance(asm,Assembly):
         # Components in topological order; '@'-prefixed nodes are
         # internal graph entries, not real components.
         graph = asm._depgraph._graph
         for node in nx.algorithms.dag.topological_sort(graph):
             if node.startswith('@'):
                 continue
             child = asm.get(node)
             if is_instance(child,Component):
                 comp_list.append({ 'name': child.name,
                                    'pathname': pathname + '.' + node,
                                    'type': type(child).__name__ })
         # Connection tuples become JSON-friendly lists.
         conn_list = [list(conn)
                      for conn in asm.list_connections(show_passthrough=False)]
     return { 'components': comp_list, 'connections': conn_list }
Пример #36
0
    def get_workflow(self):
        """ Get the driver info and the list of components that make up the
            driver's workflow; recurse on nested drivers.

            Returns a dict with 'pathname', 'type', 'workflow' and 'valid'
            keys; workflow entries for non-driver components also carry
            'interfaces'.
        """
        from openmdao.main.assembly import Assembly

        def _interface_names(obj):
            # Names of the interface classes implemented by obj's class;
            # previously duplicated inline in two branches below.
            return [cls.__name__ for cls in list(implementedBy(obj.__class__))]

        ret = {}
        ret['pathname'] = self.get_pathname()
        ret['type'] = type(self).__module__ + '.' + type(self).__name__
        ret['workflow'] = []
        ret['valid'] = self.is_valid()
        # Snapshot the workflow (recursion below may touch it); list() is
        # the idiomatic form of the old identity comprehension.
        comps = list(self.workflow)
        for comp in comps:

            # Skip pseudo-comps
            if hasattr(comp, '_pseudo_type'):
                continue

            pathname = comp.get_pathname()
            if is_instance(comp, Assembly) and comp.driver:
                # Nested assembly: describe it and recurse into its driver.
                ret['workflow'].append({
                    'pathname':   pathname,
                    'type':       type(comp).__module__ + '.' + type(comp).__name__,
                    'interfaces': _interface_names(comp),
                    'driver':     comp.driver.get_workflow(),
                    'valid':      comp.is_valid()
                })
            elif is_instance(comp, Driver):
                # Nested driver: recurse.
                ret['workflow'].append(comp.get_workflow())
            else:
                ret['workflow'].append({
                    'pathname':   pathname,
                    'type':       type(comp).__module__ + '.' + type(comp).__name__,
                    'interfaces': _interface_names(comp),
                    'valid':      comp.is_valid()
                })
        return ret
Пример #37
0
 def save(self):
     """ Save the project state to its project directory.  Only openmdao
         Container entries are persisted, which excludes names starting
         with double underscores (e.g. __builtins__).
     """
     fname = os.path.join(self.path, '_project_state')
     # Collect just the Container objects into a fresh dict.
     save_state = dict((key, val) for key, val in self.__dict__.items()
                       if is_instance(val, Container))
     with open(fname, 'wb') as f:
         pickle.dump(save_state, f)
 def get_workingtypes(self):
     ''' Return this server's user defined types. 
     '''
     types = []
     try:
         g = self._globals.items()
         for k,v in g:
             # Heuristic class detection: old-style classes report
             # 'classobj'; new-style classes repr as '<class ...>'.
             if (type(v).__name__ == 'classobj') or str(v).startswith('<class'):
                 # Instantiate to check the HasTraits interface; note this
                 # runs the class's __init__ (side effects possible).
                 obj = self._globals[k]()
                 if is_instance(obj, HasTraits):
                     types.append( ( k , 'n/a') )
     except Exception, err:
         self.error(err,sys.exc_info())
     # NOTE(review): 'types' is built but never returned in this snippet —
     # despite the docstring; confirm against the full source.
Пример #39
0
    def remove(self, name):
        """Remove the named container object from this assembly and remove
        it from its workflow(s) if it's a Component."""
        target = getattr(self, name)
        self.disconnect(name)
        self._depgraph.remove(name)
        self._exprmapper.remove(name)
        if has_interface(target, IComponent):
            # Purge the component from every other driver's workflow.
            for drv in self.__dict__.values():
                if drv is target or not is_instance(drv, Driver):
                    continue
                drv.workflow.remove(name)

        return super(Assembly, self).remove(name)
Пример #40
0
 def save(self):
     """ Save the state of the project to its project directory.
         Entries in the project dictionary that start with double
         underscores (e.g. __builtins__) are excluded.
     """
     fname = os.path.join(self.path, '_project_state')
     # copy all openmdao containers to a new dict for saving
     save_state = {}
     for k in self.__dict__:
         if is_instance(self.__dict__[k],Container):
             save_state[k] = self.__dict__[k]
     # Pickle streams are binary: open in 'wb' (text mode 'w' corrupts
     # the stream via newline translation and fails under Python 3).
     # Also matches the sibling save() implementation in this codebase.
     with open(fname, 'wb') as f:
         pickle.dump(save_state, f)
 def _get_workflow(self,drvr):
     """Describe a driver and the components of its workflow, recursing
     into nested drivers and into the drivers of nested assemblies.
     """
     info = {}
     info['pathname'] = drvr.get_pathname()
     info['type'] = type(drvr).__module__+'.'+type(drvr).__name__
     info['workflow'] = []
     steps = info['workflow']
     for comp in drvr.workflow:
         if is_instance(comp,Assembly) and comp.driver:
             # Nested assembly: recurse via its driver.
             steps.append({
                 'pathname': comp.get_pathname(),
                 'type':     type(comp).__module__+'.'+type(comp).__name__,
                 'driver':   self._get_workflow(comp.driver)
             })
         elif is_instance(comp,Driver):
             # Nested driver: recurse directly.
             steps.append(self._get_workflow(comp))
         else:
             steps.append({
                 'pathname': comp.get_pathname(),
                 'type':     type(comp).__module__+'.'+type(comp).__name__,
             })
     return info
Пример #42
0
    def remove(self, name):
        """Remove the named container object from this assembly and remove
        it from its workflow(s) if it's a Component."""
        removed_obj = getattr(self, name)
        self.disconnect(name)
        self._exprmapper.remove(name)
        if has_interface(removed_obj, IComponent):
            self._depgraph.remove(name)
            # Drop the component (and references to it) from every other
            # driver's workflow.
            drivers = (o for o in self.__dict__.values()
                       if o is not removed_obj and is_instance(o, Driver))
            for drv in drivers:
                drv.workflow.remove(name)
                drv.remove_references(name)

        return super(Assembly, self).remove(name)
Пример #43
0
    def test_allocator(self):
        """Exercise GridEngineAllocator: normal allocation, deployment,
        resource-limit rejections, and configuration failures."""
        logging.debug('')
        logging.debug('test_allocator')

        # Normal, successful allocation.
        allocator = GridEngineAllocator()
        nhosts = allocator.max_servers({})
        # 19 hosts * 48 CPUs — presumably from a canned 'qhost' fixture;
        # TODO confirm against the test's fake qhost output.
        self.assertEqual(nhosts, 19 * 48)
        estimate, criteria = allocator.time_estimate({})
        self.assertEqual(estimate, 0)

        # Unused deployment.
        server = allocator.deploy('GridEngineTestServer', {}, {})
        self.assertTrue(is_instance(server, GridEngineServer))

        # Too many CPUs.
        estimate, criteria = allocator.time_estimate({'n_cpus': 1000})
        self.assertEqual(estimate, -2)

        # Not remote.
        nhosts = allocator.max_servers({'localhost': True})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({'localhost': True})
        self.assertEqual(estimate, -2)

        # Configure bad pattern.
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('GridEngine')
        cfg.set('GridEngine', 'pattern', 'xyzzy')
        allocator.configure(cfg)
        nhosts = allocator.max_servers({})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({})
        self.assertEqual(estimate, -2)

        # Incompatible Python version.
        estimate, criteria = allocator.time_estimate({'python_version': '9.9'})
        self.assertEqual(estimate, -2)

        # Unrecognized key.
        estimate, criteria = allocator.time_estimate({'no-such-key': 0})
        self.assertEqual(estimate, -2)

        # 'qhost' failure.
        GridEngineAllocator._QHOST = os.path.join('bogus-qhost')
        cfg.set('GridEngine', 'pattern', '*')
        allocator.configure(cfg)
        nhosts = allocator.max_servers({})
        self.assertEqual(nhosts, 0)
Пример #44
0
    def test_allocator(self):
        """GridEngineAllocator behavior test: successful allocation path
        followed by each rejection/failure mode (estimate == -2)."""
        logging.debug('')
        logging.debug('test_allocator')

        # Normal, successful allocation.
        allocator = GridEngineAllocator()
        nhosts = allocator.max_servers({})
        # Expected capacity 19*48 — presumably fixed by the test fixture's
        # fake 'qhost' output; TODO confirm.
        self.assertEqual(nhosts, 19*48)
        estimate, criteria = allocator.time_estimate({})
        self.assertEqual(estimate, 0)

        # Unused deployment.
        server = allocator.deploy('GridEngineTestServer', {}, {})
        self.assertTrue(is_instance(server, GridEngineServer))

        # Too many CPUs.
        estimate, criteria = allocator.time_estimate({'n_cpus': 1000})
        self.assertEqual(estimate, -2)

        # Not remote.
        nhosts = allocator.max_servers({'localhost': True})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({'localhost': True})
        self.assertEqual(estimate, -2)

        # Configure bad pattern.
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('GridEngine')
        cfg.set('GridEngine', 'pattern', 'xyzzy')
        allocator.configure(cfg)
        nhosts = allocator.max_servers({})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({})
        self.assertEqual(estimate, -2)

        # Incompatible Python version.
        estimate, criteria = allocator.time_estimate({'python_version': '9.9'})
        self.assertEqual(estimate, -2)

        # Unrecognized key.
        estimate, criteria = allocator.time_estimate({'no-such-key': 0})
        self.assertEqual(estimate, -2)

        # 'qhost' failure.
        GridEngineAllocator._QHOST = os.path.join('bogus-qhost')
        cfg.set('GridEngine', 'pattern', '*')
        allocator.configure(cfg)
        nhosts = allocator.max_servers({})
        self.assertEqual(nhosts, 0)
Пример #45
0
    def _restore(self, container):
        """ Restore remote state (variables don't have a post_load()). """
        # _alltraits() is used because, at this point, items() still
        # reports the ProxyMixin traits as 'Missing'.
        for trait in container._alltraits().values():
            handler = trait.trait_type
            if isinstance(handler, ProxyMixin):
                handler.restore(self._client)

        for _, child in container.items():
            if is_instance(child, Container):
                if isinstance(child, ObjProxy):
                    # Proxies restore themselves against the client.
                    child.restore(self._client)
                else:
                    # Plain containers are restored recursively.
                    self._restore(child)
 def get_workingtypes(self):
     ''' Return this server's user defined types. 
     '''
     g = self.proj.__dict__.items()
     for k,v in g:
         # Only consider classes not already known; 'classobj' catches
         # old-style classes, '<class' new-style ones.
         if not k in self.known_types and \
            ((type(v).__name__ == 'classobj') or str(v).startswith('<class')):
             try:
                 # Instantiate to probe for HasTraits; classes whose
                 # constructor fails are silently skipped (best-effort).
                 obj = self.proj.__dict__[k]()
                 if is_instance(obj, HasTraits):
                     self.known_types.append( ( k , 'n/a') )
             except Exception, err:
                 # print 'Class',k,'not included in working types'
                 # self.error(err,sys.exc_info())
                 pass
     # NOTE(review): nothing is returned despite the docstring — the
     # result accumulates in self.known_types; confirm callers.
Пример #47
0
    def test_allocator(self):
        """Exercise PBS_Allocator: capacity queries, CPU-count scaling,
        deployment/release, and each rejection mode (estimate == -2)."""
        logging.debug('')
        logging.debug('test_allocator')

        allocator = PBS_Allocator()
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('PBS')
        cfg.set('PBS', 'accounting_id', 'test-account')
        allocator.configure(cfg)

        # Normal, successful allocation.
        nhosts, criteria = allocator.max_servers({})
        self.assertEqual(nhosts, allocator.n_cpus)
        estimate, criteria = allocator.time_estimate({})
        self.assertEqual(estimate, 0)

        # Requesting 2 CPUs per server halves the server count.
        nhosts, criteria = allocator.max_servers({
            'min_cpus': 2,
            'max_cpus': 2
        })
        self.assertEqual(nhosts, allocator.n_cpus / 2)
        estimate, criteria = allocator.time_estimate({'min_cpus': 2})
        self.assertEqual(estimate, 0)

        # Unused deployment.
        server = allocator.deploy('PBS_TestServer', {}, {})
        self.assertTrue(is_instance(server, PBS_Server))
        allocator.release(server)

        # Too many CPUs.
        nhosts, criteria = allocator.max_servers({'min_cpus': 1000000})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({'min_cpus': 1000000})
        self.assertEqual(estimate, -2)

        # Not remote.
        nhosts, criteria = allocator.max_servers({'localhost': True})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({'localhost': True})
        self.assertEqual(estimate, -2)

        # Incompatible Python version.
        estimate, criteria = allocator.time_estimate({'python_version': '9.9'})
        self.assertEqual(estimate, -2)

        # Unrecognized key.
        estimate, criteria = allocator.time_estimate({'no-such-key': 0})
        self.assertEqual(estimate, -2)
 def _get_components(self,cont):
     """Recursively describe the Component children of `cont`
     (pathname, class name, interface names, nested children).
     """
     result = []
     for _, child in cont.items():
         if not is_instance(child,Component):
             continue
         entry = {}
         entry['pathname'] = child.get_pathname()
         entry['type'] = str(child.__class__.__name__)
         entry['interfaces'] = [klass.__name__
                                for klass in list(implementedBy(child.__class__))]
         # Only attach a 'children' key when there is something below.
         nested = self._get_components(child)
         if len(nested) > 0:
             entry['children'] = nested
         result.append(entry)
     return result
Пример #49
0
    def _items(self, visited, recurse=False, **metadata):
        """Return an iterator that returns a list of tuples of the form 
        (rel_pathname, obj) for each trait of this VariableTree that matches
        the given metadata. If recurse is True, also iterate through all
        child Containers of each Container found.
        """
        # 'visited' holds id()s of already-seen objects to break cycles.
        if id(self) not in visited:
            visited.add(id(self))
            
            if 'iotype' in metadata:
                meta_io = metadata['iotype']
                matches_io = False
                # The iotype filter may be a predicate or a literal value.
                if type( meta_io ) is FunctionType:
                    if meta_io(self._iotype):
                        matches_io = True
                elif meta_io == self._iotype:
                    matches_io = True
                if matches_io:
                    # Drop 'iotype' before passing the rest to _alltraits.
                    newdict = metadata.copy()
                    del newdict['iotype']
            else:
                matches_io = True
                newdict = metadata

            if matches_io:
                # Candidate traits, excluding private ('_'-prefixed) names.
                match_dict = dict([(k,v) for k,v in self._alltraits(**newdict).items() 
                                        if not k.startswith('_')])
            else:
                return  #our children have same iotype as we do, so won't match if we didn't
            
            if recurse:
                for name in self.list_containers():
                    obj = getattr(self, name)
                    if name in match_dict and id(obj) not in visited:
                        yield(name, obj)
                    if obj:
                        # Children are yielded with dotted relative paths.
                        for chname, child in obj._items(visited, recurse, 
                                                        **metadata):
                            yield ('.'.join([name, chname]), child)
                            
            for name, trait in match_dict.items():
                obj = getattr(self, name)
                if is_instance(obj, Container) and id(obj) not in visited:
                    # Unvisited Containers are yielded only when not
                    # recursing (recursion already yielded them above).
                    if not recurse:
                        yield (name, obj)
                else:
                    # NOTE(review): an already-visited Container also lands
                    # here and is re-yielded — confirm this is intentional.
                    yield (name, obj)
Пример #50
0
    def _items(self, visited, recurse=False, **metadata):
        """Return an iterator that returns a list of tuples of the form 
        (rel_pathname, obj) for each trait of this VariableTree that matches
        the given metadata. If recurse is True, also iterate through all
        child Containers of each Container found.
        """
        # Cycle guard: skip objects whose id() has been seen already.
        if id(self) not in visited:
            visited.add(id(self))
            
            if 'iotype' in metadata:
                meta_io = metadata['iotype']
                matches_io = False
                # iotype may be given as a predicate function or a value.
                if type( meta_io ) is FunctionType:
                    if meta_io(self._iotype):
                        matches_io = True
                elif meta_io == self._iotype:
                    matches_io = True
                if matches_io:
                    # Strip 'iotype' before forwarding to _alltraits().
                    newdict = metadata.copy()
                    del newdict['iotype']
            else:
                matches_io = True
                newdict = metadata

            if matches_io:
                # Candidate traits; private ('_'-prefixed) names excluded.
                match_dict = dict([(k,v) for k,v in self._alltraits(**newdict).items() 
                                        if not k.startswith('_')])
            else:
                return  #our children have same iotype as we do, so won't match if we didn't
            
            if recurse:
                for name in self.list_containers():
                    obj = getattr(self, name)
                    if name in match_dict and id(obj) not in visited:
                        yield(name, obj)
                    if obj:
                        # Yield children with dotted relative pathnames.
                        for chname, child in obj._items(visited, recurse, 
                                                        **metadata):
                            yield ('.'.join([name, chname]), child)
                            
            for name, trait in match_dict.items():
                obj = getattr(self, name)
                if is_instance(obj, Container) and id(obj) not in visited:
                    # When recursing, Containers were already yielded above.
                    if not recurse:
                        yield (name, obj)
                else:
                    # NOTE(review): already-visited Containers fall through
                    # to this yield as well — confirm that is intended.
                    yield (name, obj)
Пример #51
0
    def test_allocator(self):
        """PBS_Allocator behavior test: capacity, 2-CPU scaling, deploy and
        release, then each rejection mode (estimate == -2)."""
        logging.debug('')
        logging.debug('test_allocator')

        allocator = PBS_Allocator()
        cfg = ConfigParser.ConfigParser()
        cfg.add_section('PBS')
        cfg.set('PBS', 'accounting_id', 'test-account')
        allocator.configure(cfg)

        # Normal, successful allocation.
        nhosts, criteria = allocator.max_servers({})
        self.assertEqual(nhosts, allocator.n_cpus)
        estimate, criteria = allocator.time_estimate({})
        self.assertEqual(estimate, 0)

        # Two CPUs per server halves the available server count.
        nhosts, criteria = allocator.max_servers({'min_cpus': 2, 'max_cpus': 2})
        self.assertEqual(nhosts, allocator.n_cpus/2)
        estimate, criteria = allocator.time_estimate({'min_cpus': 2})
        self.assertEqual(estimate, 0)

        # Unused deployment.
        server = allocator.deploy('PBS_TestServer', {}, {})
        self.assertTrue(is_instance(server, PBS_Server))
        allocator.release(server)

        # Too many CPUs.
        nhosts, criteria = allocator.max_servers({'min_cpus': 1000000})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({'min_cpus': 1000000})
        self.assertEqual(estimate, -2)

        # Not remote.
        nhosts, criteria = allocator.max_servers({'localhost': True})
        self.assertEqual(nhosts, 0)
        estimate, criteria = allocator.time_estimate({'localhost': True})
        self.assertEqual(estimate, -2)

        # Incompatible Python version.
        estimate, criteria = allocator.time_estimate({'python_version': '9.9'})
        self.assertEqual(estimate, -2)

        # Unrecognized key.
        estimate, criteria = allocator.time_estimate({'no-such-key': 0})
        self.assertEqual(estimate, -2)
Пример #52
0
 def _check_req_traits(self, comp):
     """Raise an exception if any child traits with required=True have not
     been set to a non-default value.

     Recurses into nested VariableTrees; a variable with an incoming
     connection (a dependency-graph source) counts as set.
     """
     for name, trait in self.traits(type=not_event).items():
         obj = getattr(self, name)
         if obj is self.parent:
             continue
         trait = self.trait(name)
         if is_instance(obj, VariableTree):
             obj._check_req_traits(comp)
         elif trait.required is True:
             if comp._depgraph.get_sources(name):
                 unset = False
             else:
                 unset = (obj == trait.default)
                 # Array-valued traits compare element-wise; collapse the
                 # result with .all().  Narrowed from a bare 'except:' so
                 # that only the expected "plain bool has no .all()" case
                 # is ignored and real errors (or KeyboardInterrupt /
                 # SystemExit) are no longer swallowed.
                 try:
                     unset = unset.all()
                 except AttributeError:
                     pass
             if unset:
                 self.raise_exception("required variable '%s' was"
                                      " not set" % name, RuntimeError)
Пример #53
0
    def get_dataflow(self):
        ''' get a dictionary of components and the connections between them
            that make up the data flow for the assembly
            also includes parameter, constraint, and objective flows
        '''
        components = []
        connections = []
        parameters = []
        constraints = []
        objectives = []
        if is_instance(self, Assembly):
            # list of components (name & type) in the assembly
            g = self._depgraph._graph
            # Topological order, skipping internal '@'-prefixed graph nodes.
            names = [
                name for name in nx.algorithms.dag.topological_sort(g)
                if not name.startswith('@')
            ]

            # Bubble-up drivers ahead of their parameter targets.
            sorted_names = []
            for name in names:
                comp = self.get(name)
                if is_instance(comp, Driver) and hasattr(comp, '_delegates_'):
                    # Insert the driver at the position of its earliest
                    # referenced target component (if any precede it).
                    driver_index = len(sorted_names)
                    for dname, dclass in comp._delegates_.items():
                        inst = getattr(comp, dname)
                        if isinstance(inst, HasParameters):
                            refs = inst.get_referenced_compnames()
                            for ref in refs:
                                try:
                                    target_index = sorted_names.index(ref)
                                except ValueError:
                                    pass
                                else:
                                    driver_index = min(driver_index,
                                                       target_index)
                    sorted_names.insert(driver_index, name)
                else:
                    sorted_names.append(name)

            # Process names in new order.
            for name in sorted_names:
                comp = self.get(name)
                if is_instance(comp, Component):
                    inames = [
                        cls.__name__
                        for cls in list(implementedBy(comp.__class__))
                    ]
                    components.append({
                        'name': comp.name,
                        'pathname': comp.get_pathname(),
                        'type': type(comp).__name__,
                        'valid': comp.is_valid(),
                        'interfaces': inames,
                        'python_id': id(comp)
                    })

                if is_instance(comp, Driver):
                    if hasattr(comp, '_delegates_'):
                        # NOTE(review): the inner loops below rebind 'name'
                        # (the outer component name) — harmless here only
                        # because the outer loop reassigns it, but fragile.
                        for name, dclass in comp._delegates_.items():
                            inst = getattr(comp, name)
                            if isinstance(inst, HasParameters):
                                for name, param in inst.get_parameters().items(
                                ):
                                    if isinstance(param, ParameterGroup):
                                        # NOTE(review): zip(name, ...) pairs
                                        # the *characters* of the string
                                        # 'name' with the targets — looks
                                        # like a bug; confirm intent.
                                        for n, p in zip(
                                                name, tuple(param.targets)):
                                            parameters.append(
                                                [comp.name + '.' + n, p])
                                    else:
                                        parameters.append([
                                            comp.name + '.' + name,
                                            param.target
                                        ])
                            elif isinstance(inst,
                                            (HasConstraints, HasEqConstraints,
                                             HasIneqConstraints)):
                                for path in inst.get_referenced_varpaths():
                                    name, dot, rest = path.partition('.')
                                    constraints.append(
                                        [path, comp.name + '.' + rest])
                            elif isinstance(inst,
                                            (HasObjective, HasObjectives)):
                                for path in inst.get_referenced_varpaths():
                                    name, dot, rest = path.partition('.')
                                    objectives.append(
                                        [path, comp.name + '.' + name])

            # list of connections (convert tuples to lists)
            conntuples = self.list_connections(show_passthrough=True)
            for connection in conntuples:
                connections.append(list(connection))

        return {
            'components': components,
            'connections': connections,
            'parameters': parameters,
            'constraints': constraints,
            'objectives': objectives
        }
    def _get_attributes(self,comp):
        ''' get attributes of object 
        '''
        # Result maps section names ('Inputs', 'Outputs', 'Workflow', ...)
        # to lists/dicts describing the object, keyed by which interfaces
        # the object implements.
        attrs = {}
        
        if has_interface(comp,IComponent):
            inputs = []
            for vname in comp.list_inputs():
                v = comp.get(vname)
                attr = {}
                # Skip sub-Components; only plain variables are listed.
                if not is_instance(v,Component):
                    attr['name'] = vname
                    attr['type'] = type(v).__name__
                    attr['value'] = v
                    attr['valid'] = comp.get_valid([vname])[0]
                    meta = comp.get_metadata(vname);
                    if meta:
                        # Missing metadata fields become empty strings.
                        for field in ['units','high','low','desc']:
                            if field in meta:
                                attr[field] = meta[field]
                            else:
                                attr[field] = ''
                inputs.append(attr)
            attrs['Inputs'] = inputs
                
            outputs = []
            for vname in comp.list_outputs():
                v = comp.get(vname)
                attr = {}
                if not is_instance(v,Component):
                    attr['name'] = vname
                    attr['type'] = type(v).__name__
                    attr['value'] = v
                    attr['valid'] = comp.get_valid([vname])[0]
                    meta = comp.get_metadata(vname);
                    if meta:
                        for field in ['units','high','low','desc']:
                            if field in meta:
                                attr[field] = meta[field]
                            else:
                                attr[field] = ''
                outputs.append(attr)
            attrs['Outputs'] = outputs

        if is_instance(comp,Assembly):
            attrs['Structure'] = self._get_dataflow(comp)
        
        if has_interface(comp,IDriver):
            attrs['Workflow'] = self._get_workflow(comp)
        
        if has_interface(comp,IHasCouplingVars):
            couples = []
            objs = comp.list_coupling_vars()
            for indep,dep in objs:
                attr = {}
                attr['independent'] = indep
                attr['dependent'] = dep
                couples.append(attr)
            attrs['CouplingVars'] = couples
            
        if has_interface(comp,IHasObjectives):
            objectives = []
            objs = comp.get_objectives()
            for key in objs.keys():
                attr = {}
                attr['name'] = str(key)
                attr['expr'] = objs[key].text
                attr['scope'] = objs[key].scope.name
                objectives.append(attr)
            attrs['Objectives'] = objectives
            
        if has_interface(comp,IHasParameters):
            parameters = []
            parms = comp.get_parameters()
            for key,parm in parms.iteritems():
                attr = {}
                attr['name']    = str(key)
                attr['target']  = parm.target
                attr['low']     = parm.low
                attr['high']    = parm.high
                attr['scaler']  = parm.scaler
                attr['adder']   = parm.adder
                attr['fd_step'] = parm.fd_step
                #attr['scope']   = parm.scope.name
                parameters.append(attr)
            attrs['Parameters'] = parameters
        
        if has_interface(comp,IHasConstraints) or has_interface(comp,IHasEqConstraints):
            constraints = []
            cons = comp.get_eq_constraints()
            for key,con in cons.iteritems():
                attr = {}
                attr['name']    = str(key)
                attr['expr']    = str(con)
                attr['scaler']  = con.scaler
                attr['adder']   = con.adder
                constraints.append(attr)
            attrs['EqConstraints'] = constraints
            
        if has_interface(comp,IHasConstraints) or has_interface(comp,IHasIneqConstraints):
            constraints = []
            cons = comp.get_ineq_constraints()
            for key,con in cons.iteritems():
                attr = {}
                attr['name']    = str(key)
                attr['expr']    = str(con)
                attr['scaler']  = con.scaler
                attr['adder']   = con.adder
                constraints.append(attr)
            attrs['IneqConstraints'] = constraints
            
        # Slot traits are always reported, independent of interfaces.
        slots = []
        for name, value in comp.traits().items():
            if value.is_trait_type(Slot):
                attr = {}
                attr['name'] = name
                attr['klass'] = value.trait_type.klass.__name__
                # 'filled' marks whether the slot currently holds an object.
                if getattr(comp, name) is None:
                    attr['filled'] = False
                else:
                    attr['filled'] = True
                meta = comp.get_metadata(name);
                if meta:
                    for field in [ 'desc' ]:    # just desc?
                        if field in meta:
                            attr[field] = meta[field]
                        else:
                            attr[field] = ''
                    attr['type'] = meta['vartypename']
                slots.append(attr)            
        attrs['Slots'] = slots

        return attrs
Пример #55
0
class TestCase(unittest.TestCase):
    """ Test distributed simulation. """
    def run(self, result=None):
        """
        Record the :class:`TestResult` used so we can conditionally cleanup
        directories in :meth:`tearDown`.
        """
        self.test_result = result or unittest.TestResult()
        return super(TestCase, self).run(self.test_result)

    def setUp(self):
        """ Called before each test. """
        self.n_errors = len(self.test_result.errors)
        self.n_failures = len(self.test_result.failures)

        self.factories = []
        self.servers = []
        self.server_dirs = []

        # Ensure we control directory cleanup.
        self.keepdirs = os.environ.get('OPENMDAO_KEEPDIRS', '0')
        os.environ['OPENMDAO_KEEPDIRS'] = '1'

    def start_factory(self, port=None, allowed_users=None):
        """ Launch a factory server in its own scratch directory and return
            a factory connected to it.  When `port` is None the port mode
            alternates per server; when `allowed_users` is None only the
            current user's credentials are accepted.
        """
        global _SERVER_ID
        _SERVER_ID += 1

        # Each server gets a fresh, uniquely named working directory.
        directory = 'Factory_%d' % _SERVER_ID
        if os.path.exists(directory):
            shutil.rmtree(directory)
        os.mkdir(directory)
        os.chdir(directory)
        self.server_dirs.append(directory)
        try:
            logging.debug('')
            logging.debug('tester pid: %s', os.getpid())
            logging.debug('starting server...')

            if port is None:
                # Exercise both AF_INET and AF_UNIX/AF_PIPE.
                port = -1 if _SERVER_ID & 1 else 0

            if allowed_users is None:
                credentials = get_credentials()
                allowed_users = {credentials.user: credentials.public_key}

            allowed_types = [
                'openmdao.main.test.test_distsim.HollowSphere',
                'openmdao.main.test.test_distsim.Box',
                'openmdao.main.test.test_distsim.ProtectedBox'
            ]

            server, server_cfg = start_server(port=port,
                                              allowed_users=allowed_users,
                                              allowed_types=allowed_types)
            self.servers.append(server)

            # Pull the connection parameters out of the server's config file.
            cfg = read_server_config(server_cfg)
            self.address = cfg['address']
            self.port = cfg['port']
            self.tunnel = cfg['tunnel']
            self.key = cfg['key']
            logging.debug('server pid: %s', server.pid)
            logging.debug('server address: %s', self.address)
            logging.debug('server port: %s', self.port)
            logging.debug('server key: %s', self.key)
        finally:
            # Always return to the parent directory, even on startup failure.
            os.chdir('..')

        factory = connect(self.address, self.port, self.tunnel,
                          pubkey=self.key)
        self.factories.append(factory)
        logging.debug('factory: %r', factory)
        return factory

    def tearDown(self):
        """ Shut down server process. """
        try:
            for factory in self.factories:
                factory.cleanup()
            for server in self.servers:
                logging.debug('terminating server pid %s', server.pid)
                server.terminate(timeout=10)

            # Cleanup only if there weren't any new errors or failures.
            if len(self.test_result.errors) == self.n_errors and \
               len(self.test_result.failures) == self.n_failures and \
               not int(self.keepdirs):
                for server_dir in self.server_dirs:
                    shutil.rmtree(server_dir)
        finally:
            os.environ['OPENMDAO_KEEPDIRS'] = self.keepdirs

    def test_1_client(self):
        """ Exercise basic client/factory interaction: list available types,
            create objects in a remote server process, read/write them via
            both get()/set() and attribute access, run them, and verify that
            a non-RBAC method is rejected with a RemoteError.
        """
        logging.debug('')
        logging.debug('test_client')

        factory = self.start_factory()

        # List available types.
        types = factory.get_available_types()
        logging.debug('Available types:')
        for typname, version in types:
            logging.debug('   %s %s', typname, version)

        # First a HollowSphere, accessed via get()/set().
        obj = factory.create(_MODULE + '.HollowSphere')
        sphere_pid = obj.get('pid')
        # A different pid shows the object lives in the server process,
        # not in this tester process.
        self.assertNotEqual(sphere_pid, os.getpid())

        radius = obj.get('radius')
        self.assertEqual(radius, 1.)
        radius += 1
        obj.set('radius', radius)
        new_radius = obj.get('radius')
        self.assertEqual(new_radius, 2.)
        # Output variables stay at zero until run() executes.
        self.assertEqual(obj.get('inner_volume'), 0.)
        self.assertEqual(obj.get('volume'), 0.)
        self.assertEqual(obj.get('solid_volume'), 0.)
        self.assertEqual(obj.get('surface_area'), 0.)
        obj.run()
        # Expected values for a sphere with inner radius 2 -- TODO confirm
        # against HollowSphere's definition (outside this view).
        assert_rel_error(self, obj.get('inner_volume'), 33.510321638, 0.000001)
        assert_rel_error(self, obj.get('volume'), 36.086951213, 0.000001)
        assert_rel_error(self, obj.get('solid_volume'), 2.5766295747, 0.000001)
        assert_rel_error(self, obj.get('surface_area'), 50.265482457, 0.000001)

        # An out-of-range set() must be rejected with ValueError; only the
        # message prefix is checked.
        msg = ": Variable 'radius' must be a float in the range (0.0, "
        assert_raises(self, "obj.set('radius', -1)", globals(), locals(),
                      ValueError, msg)

        # Now a Box, accessed via attribute methods.
        obj = factory.create(_MODULE + '.Box')
        box_pid = obj.get('pid')
        self.assertNotEqual(box_pid, os.getpid())
        # The box and the sphere were created in different server processes.
        self.assertNotEqual(box_pid, sphere_pid)

        obj.width += 2
        obj.height += 2
        obj.depth += 2
        self.assertEqual(obj.width, 2.)
        self.assertEqual(obj.height, 2.)
        self.assertEqual(obj.depth, 2.)
        self.assertEqual(obj.volume, 0.)
        self.assertEqual(obj.surface_area, 0.)
        obj.run()
        self.assertEqual(obj.volume, 8.0)
        self.assertEqual(obj.surface_area, 24.0)

        # Calling a method not exposed under RBAC must raise a RemoteError
        # wrapping the server-side AttributeError.
        try:
            obj.no_rbac()
        except RemoteError as exc:
            msg = "AttributeError: method 'no_rbac' of"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')

    def test_2_model(self):
        """ Build a model around a remotely created Box, run it, and verify
            recorded case outputs, proxy type/interface checks, proxy path
            resolution, a proxied file object, and that server-side errors
            surface as RemoteError with the expected messages.
        """
        logging.debug('')
        logging.debug('test_model')

        factory = self.start_factory()

        # Create model and run it.
        box = factory.create(_MODULE + '.Box')
        model = set_as_top(Model(box))
        model.run()

        # Check results.
        # The driver appears to sweep width x height x depth over
        # 1 x (1,2) x (1,2,3); each recorded case's outputs[0][2] should be
        # the volume -- TODO confirm against Model's definition (not visible
        # here).
        for width in range(1, 2):
            for height in range(1, 3):
                for depth in range(1, 4):
                    case = model.driver.recorders[0].cases.pop(0)
                    self.assertEqual(case.outputs[0][2],
                                     width * height * depth)

        # Type/interface checks must work through the proxy.
        self.assertTrue(is_instance(model.box.parent, Assembly))
        self.assertTrue(has_interface(model.box.parent, IComponent))

        # Upcall to use parent to resolve sibling.
        # At one time this caused proxy problems.
        source = model.box.parent.source
        self.assertEqual(source.width_in, 1.)

        # Proxy resolution: get_closest_proxy should return the nearest
        # proxied object and the remaining attribute path.
        obj, path = get_closest_proxy(model, 'box.subcontainer.subvar')
        self.assertEqual(obj, model.box)
        self.assertEqual(path, 'subcontainer.subvar')

        obj, path = get_closest_proxy(model, 'source.subcontainer.subvar')
        self.assertEqual(obj, model.source.subcontainer)
        self.assertEqual(path, 'subvar')

        obj, path = get_closest_proxy(model.source.subcontainer, 'subvar')
        self.assertEqual(obj, model.source.subcontainer)
        self.assertEqual(path, 'subvar')

        # Observable proxied type: a file object opened in the parent is
        # itself proxied and must support close().
        tmp = model.box.open_in_parent('tmp', 'w')
        tmp.close()
        os.remove('tmp')

        # Cause server-side errors we can see.

        try:
            box.cause_parent_error1()
        except RemoteError as exc:
            msg = "AttributeError: attribute 'no_such_variable' of"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')

        try:
            box.cause_parent_error2()
        except RemoteError as exc:
            msg = "AttributeError: method 'get_trait' of"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')

        try:
            box.cause_parent_error3()
        except RemoteError as exc:
            msg = "RoleError: xyzzy(): No access for role 'owner'"
            logging.debug('msg: %s', msg)
            logging.debug('exc: %s', exc)
            self.assertTrue(msg in str(exc))
        else:
            self.fail('Expected RemoteError')