def register(topic, obj):
            """Associates a given topic with a binary publisher based on the
            corresponding object type. If no binpub type exists for that object
            type, nothing happens.
            """
            sender = None
            if _binpub_types is None:
                load_binpubs()

            with _lock:
                if topic in _binpubs:
                    logger.debug("found topic %s in _binpubs", topic)
                    _binpubs[topic][0] += 1
                else:
                    # see if a sender is registered for this object type
                    for sender_type in _binpub_types:
                        if sender_type.supports(obj):
                            logger.debug("creating a sender for topic: %s",
                                         topic)
                            try:
                                sender = sender_type(Pub_WV_Wrapper(topic))
                            except Exception:
                                logger.error(traceback.format_exc())
                            else:
                                # only register the sender if it was
                                # created successfully
                                _binpubs[topic] = [1, sender]
                            break

            if sender is not None:
                sender.send(obj, first=True)
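
The pattern above is a reference-counted registry: the first registration for a topic creates a sender, later ones only bump the count. A minimal self-contained sketch of that idea (FakeSender is illustrative, not part of the original API):

import threading

_lock = threading.RLock()
_registry = {}  # topic -> [refcount, sender]

class FakeSender(object):
    """Stand-in for a binary publisher; illustrative only."""
    def send(self, obj, first=False):
        print("sending %r (first=%s)" % (obj, first))

def register(topic, obj):
    sender = None
    with _lock:
        if topic in _registry:
            _registry[topic][0] += 1        # bump the reference count
        else:
            sender = FakeSender()
            _registry[topic] = [1, sender]  # first registration creates it
    if sender is not None:
        sender.send(obj, first=True)

register('geom', {'x': 1})   # creates a sender and sends the initial value
register('geom', {'x': 2})   # only increments the refcount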
Example #2
    def command(self, cmd, save=True):
        err = None
        result = None
        self._cmds_to_save = []

        try:
            code = compile(cmd, '<string>', 'eval')
        except SyntaxError:
            try:
                exec(cmd) in self._model_globals
            except Exception as err:
                pass
        else:
            try:
                result = eval(code, self._model_globals)
            except Exception as err:
                pass

        if err:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            logger.error("command '%s' generated an exception:\n %s",
                         cmd, ''.join(lines))
            raise
        else:
            if not self._cmds_to_save:
                self._cmds_to_save.append(cmd)
            self._save_command(save)

        return result
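
command() dispatches on whether the input compiles as an expression: expressions go through eval so their value can be returned, anything else is executed as a statement. A stripped-down, runnable sketch of that dispatch (run_statement_or_expression is a hypothetical name):

def run_statement_or_expression(cmd, env):
    """Evaluate cmd as an expression if possible, otherwise execute it
    as a statement. Returns the expression's value, or None."""
    try:
        code = compile(cmd, '<string>', 'eval')
    except SyntaxError:
        exec(cmd, env)   # not an expression, e.g. an assignment
        return None
    return eval(code, env)

env = {}
run_statement_or_expression('x = 3', env)         # statement branch
print(run_statement_or_expression('x + 1', env))  # expression branch -> 4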
 def publish(self, topic, value, lock=True, binary=False):
     if Publisher.__enabled:
         try:
             if lock:
                 _lock.acquire()
             if binary:
                 if not isinstance(value, bytes):
                     raise TypeError("published binary value must be of type 'bytes'")
                 logger.debug("sending binary value for topic %s", topic)
                 self._sender.send_multipart([topic.encode('utf-8'),
                                              value])
             elif topic in _binpubs:
                 # if a binary publisher exists for this topic, use that
                 # to publish the value. It will call publish again
                 # (possibly multiple times) with binary=True
                 logger.debug("sending value via binpub for topic %s",
                              topic)
                 try:
                     _binpubs[topic][1].send(value)
                 except Exception:
                     logger.error("ERROR: %s", traceback.format_exc())
             else:
                 msg = json.dumps([topic.encode('utf-8'), value],
                                  default=json_default)
                 self._sender.send_multipart([msg])
             if hasattr(self._sender, 'flush'):
                 self._sender.flush()
         except Exception:
             print('Publisher - Error publishing message %s: %s, %s' %
                   (topic, value, traceback.format_exc()))
         finally:
             if lock:
                 _lock.release()
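
publish() has three send paths: raw bytes, a per-topic binary publisher, or JSON. A runnable sketch of the bytes and JSON branches, with EchoSender standing in for the real multipart socket (illustrative only, not the original API):

import json
import threading

_lock = threading.RLock()

class EchoSender(object):
    """Illustrative stand-in for the sender's send_multipart interface."""
    def send_multipart(self, frames):
        print(frames)

def publish(sender, topic, value, binary=False):
    with _lock:
        if binary:
            if not isinstance(value, bytes):
                raise TypeError("published binary value must be of type 'bytes'")
            sender.send_multipart([topic.encode('utf-8'), value])
        else:
            sender.send_multipart([json.dumps([topic, value])])

s = EchoSender()
publish(s, 'status', {'done': True})           # JSON branch
publish(s, 'geom', b'\x00\x01', binary=True)   # binary branch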
Example #4
 def publish(self, topic, value, lock=True, binary=False):
     global _binpubs
     if Publisher.__enabled:
         try:
             if lock:
                 _lock.acquire()
             if binary:
                 if not isinstance(value, bytes):
                     raise TypeError(
                         "published binary value must be of type 'bytes'")
                 logger.debug("sending binary value for topic %s" % topic)
                 self._sender.send_multipart([topic.encode('utf-8'), value])
             elif topic in _binpubs:
                 # if a binary publisher exists for this topic, use that to
                 # publish the value. It will call publish again (possibly multiple times)
                 # with binary=True
                 logger.debug("sending value via binpub for topic %s" %
                              topic)
                 try:
                     _binpubs[topic][1].send(value)
                 except Exception:
                     logger.error("ERROR: %s" % traceback.format_exc())
             else:
                 msg = json.dumps([topic.encode('utf-8'), value],
                                  default=json_default)
                 self._sender.send_multipart([msg])
             if hasattr(self._sender, 'flush'):
                 self._sender.flush()
         except Exception:
             print('Publisher - Error publishing message %s: %s, %s' %
                   (topic, value, traceback.format_exc()))
         finally:
             if lock:
                 _lock.release()
Example #5
    def register(topic, obj):
        """Associates a given topic with a binary publisher based on the corresponding object type.
        If no binpub type exists for that object type, nothing happens.
        """
        global _binpubs, _binpub_types
        sender = None
        if _binpub_types is None:
            load_binpubs()

        with _lock:
            if topic in _binpubs:
                logger.debug("found topic %s in _binpubs" % topic)
                _binpubs[topic][0] += 1
            else:
                # see if a sender is registered for this object type
                for sender_type in _binpub_types:
                    if sender_type.supports(obj):
                        logger.debug("creating a sender for topic: %s" % topic)
                        try:
                            sender = sender_type(Pub_WV_Wrapper(topic))
                        except Exception:
                            logger.error(traceback.format_exc())
                        else:
                            # only register the sender if it was
                            # created successfully
                            _binpubs[topic] = [1, sender]
                        break

        if sender is not None:
            sender.send(obj, first=True)
Example #6
    def __init__(self, watchdir, use_observer=True, observer=None):
        super(ProjDirFactory, self).__init__()
        self._lock = threading.RLock()
        self.observer = None
        self.watchdir = watchdir
        self._files = {}    # mapping of file pathnames to _FileInfo objects
        self._classes = {}  # mapping of class names to _FileInfo objects
        try:
            added_set = set()
            changed_set = set()
            deleted_set = set()

            modeldir = watchdir + PROJ_DIR_EXT
            if modeldir not in sys.path:
                sys.path = [modeldir] + sys.path
                logger.info("added %s to sys.path" % modeldir)

            for pyfile in find_files(self.watchdir, "*.py"):
                self.on_modified(pyfile, added_set, changed_set, deleted_set)

            if use_observer:
                self._start_observer(observer)
                self.publish_updates(added_set, changed_set, deleted_set)
            else:
                # sometimes for debugging/testing it's easier to turn observer off
                self.observer = None
        except Exception as err:
            self._error(str(err))
            logger.error(str(err))
    def __init__(self, projpath):
        """Initializes a Project containing the project found in the
        specified directory or creates a new project if one doesn't exist.

        projpath: str
            Path to the project's directory.
        """
        self._recorded_cmds = []
        self._cmds_to_save = []
        self.path = expand_path(projpath)
        self._model_globals = {}

        self.macrodir = os.path.join(self.path, '_macros')
        self.macro = 'default'
        
        if not os.path.isdir(self.macrodir):
            os.makedirs(self.macrodir)

        settings = os.path.join(self.path, '_settings.cfg')
        if not os.path.isfile(settings):
            self._create_config()
            
        self.config = SafeConfigParser()
        self.config.optionxform = str  # Preserve case.
        files = self.config.read(settings)
        if not files:
            logger.error("Failed to read project config file")
Example #8
    def __init__(self, projpath):
        """Initializes a Project containing the project found in the
        specified directory or creates a new project if one doesn't exist.

        projpath: str
            Path to the project's directory.
        """
        self._recorded_cmds = []
        self._cmds_to_save = []
        self.path = expand_path(projpath)
        self._model_globals = {}

        self.macrodir = os.path.join(self.path, '_macros')
        self.macro = 'default'

        if not os.path.isdir(self.macrodir):
            os.makedirs(self.macrodir)

        settings = os.path.join(self.path, '_settings.cfg')
        if not os.path.isfile(settings):
            self._create_config()

        self.config = SafeConfigParser()
        self.config.optionxform = str  # Preserve case.
        files = self.config.read(settings)
        if not files:
            logger.error("Failed to read project config file")
    def load_macro(self, macro_name):
        fpath = os.path.join(self.macrodir, macro_name)
        self._recorded_cmds = []
        with open(fpath, 'r') as f:
            content = f.read()
            
        # fix missing newline at end of file to avoid issues later when
        # we append to it
        if not content.endswith('\n'): 
            with open(fpath, 'a') as f:
                f.write('\n')

        lines = content.split('\n')
            
        errors = []
        for i, line in enumerate(lines):
            try:
                self.command(line, save=False)
            except Exception as err:
                msg = str(err)
                logger.error("%s" % ''.join(traceback.format_tb(sys.exc_info()[2])))
                try:
                    publish('console_errors', msg)
                except:
                    logger.error("publishing of error failed")
Example #10
    def load_macro(self, macro_name):
        fpath = os.path.join(self.macrodir, macro_name)
        self._recorded_cmds = []
        with open(fpath, 'r') as f:
            content = f.read()

        # fix missing newline at end of file to avoid issues later when
        # we append to it
        if not content.endswith('\n'):
            with open(fpath, 'a') as f:
                f.write('\n')

        lines = content.split('\n')

        errors = []
        for i, line in enumerate(lines):
            try:
                self.command(line, save=False)
            except Exception as err:
                msg = str(err)
                logger.error("%s",
                             ''.join(traceback.format_tb(sys.exc_info()[2])))
                try:
                    publish('console_errors', msg)
                except:
                    logger.error("publishing of error failed")
Example #11
    def create(self,
               typ,
               version=None,
               server=None,
               res_desc=None,
               **ctor_args):
        """Tries to import the given named module and return a factory 
        function from it. The factory function or constructor must have the same
        name as the module. The module must be importable in the current Python
        environment.
        """
        if server is not None or version is not None:
            return None
        if res_desc is not None and len(res_desc) > 0:
            return None

        if typ not in self._ctors:
            parts = typ.split('.')
            cname = parts[-1]
            modname = '.'.join(parts[:-1])
            try:
                __import__(modname, globals(), locals(), [cname])
                mod = sys.modules[modname]
            except (ImportError, KeyError) as err:
                logger.error(str(err))
                return None
            try:
                self._ctors[typ] = getattr(mod, cname)
            except AttributeError as err:
                logger.error(str(err))
                return None
Example #12
    def __init__(self, watchdir, use_observer=True, observer=None):
        super(ProjDirFactory, self).__init__()
        self._lock = threading.RLock()
        self.observer = None
        self.watchdir = watchdir
        self._files = {}  # mapping of file pathnames to _FileInfo objects
        self._classes = {}  # mapping of class names to _FileInfo objects
        try:
            added_set = set()
            changed_set = set()
            deleted_set = set()

            modeldir = watchdir + PROJ_DIR_EXT
            if modeldir not in sys.path:
                sys.path = [modeldir] + sys.path
                logger.info("added %s to sys.path" % modeldir)

            for pyfile in find_files(self.watchdir, "*.py"):
                self.on_modified(pyfile, added_set, changed_set, deleted_set)

            if use_observer:
                self._start_observer(observer)
                self.publish_updates(added_set, changed_set, deleted_set)
            else:
                # sometimes for debugging/testing it's easier to turn observer off
                self.observer = None
        except Exception as err:
            self._error(str(err))
            logger.error(str(err))
    def create(self, typ, version=None, server=None, res_desc=None, **ctor_args):
        """Tries to import the given named module and return a factory 
        function from it. The factory function or constructor must have the same
        name as the module. The module must be importable in the current Python
        environment.
        """
        if server is not None or version is not None:
            return None
        if res_desc is not None and len(res_desc) > 0:
            return None

        if typ not in self._ctors:
            parts = typ.split(".")
            cname = parts[-1]
            modname = ".".join(parts[:-1])
            try:
                __import__(modname, globals(), locals(), [cname])
                mod = sys.modules[modname]
            except (ImportError, KeyError) as err:
                logger.error(str(err))
                return None
            try:
                self._ctors[typ] = getattr(mod, cname)
            except AttributeError as err:
                logger.error(str(err))
                return None
 def send_binary_data(self, wsi, buf, ibuf):
     """This is called multiple times during the sending of a
     set of graphics primitives.
     """
     try:
         publish(self.objname, buf, binary=True)
     except Exception:
         logger.error(traceback.format_exc())
         return -1
     return 0
Example #15
 def send_binary_data(self, wsi, buf, ibuf):
     """This is called multiple times during the sending of a
     set of graphics primitives.
     """
     try:
         publish(self.objname, buf, binary=True)
     except Exception:
         logger.error(traceback.format_exc())
         return -1
     return 0
 def execute(self):
     """Rebuild the geometry using the current set of parameters.
     """
     if self.parametric_geometry is not None:
         try:
             self.parametric_geometry.regen_model()
         except Exception as err:
             logger.error("ERROR:"+str(err))
             raise
         self._update_comp_outputs()
Example #17
 def execute(self):
     """Rebuild the geometry using the current set of parameters.
     """
     if self.parametric_geometry is not None:
         try:
             self.parametric_geometry.regen_model()
         except Exception as err:
             logger.error("ERROR:" + str(err))
             raise
         self._update_comp_outputs()
Example #18
 def publish_updates(self, added_set, changed_set, deleted_set):
     types = get_available_types()
     try:
         publish('types', [
             packagedict(types),
             list(added_set),
             list(changed_set),
             list(deleted_set),
         ])
     except:
         logger.error("publish of types failed")
Example #19
def project_from_archive(archive_name,
                         proj_name=None,
                         dest_dir=None,
                         create=True,
                         overwrite=False):
    """Expand the given project archive file in the specified destination
    directory and return a Project object that points to the newly
    expanded project.

    archive_name: str
        Path to the project archive to be expanded.

    proj_name: str (optional)
        Name of the new project. Defaults to the name of the project contained
        in the name of the archive.

    dest_dir: str (optional)
        Directory where the project directory for the expanded archive will
        reside. Defaults to the directory where the archive is located.

    create: bool (optional)
        If True, create and return a Project object. Otherwise just unpack the
        project directory.

    overwrite: bool (optional)
        If True, expand the archive even if the project directory
        already exists.
    """
    archive_name = expand_path(archive_name)

    if dest_dir is None:
        dest_dir = os.path.dirname(archive_name)
    else:
        dest_dir = expand_path(dest_dir)

    if proj_name is None:
        proj_name = parse_archive_name(archive_name)

    projpath = os.path.join(dest_dir, proj_name)

    if not overwrite and os.path.exists(projpath):
        raise RuntimeError("Directory '%s' already exists" % projpath)

    if not os.path.exists(projpath):
        os.mkdir(projpath)
    if os.path.getsize(archive_name) > 0:
        try:
            with open(archive_name, 'rb') as f:
                tf = tarfile.open(fileobj=f, mode='r')
                try:
                    tf.extractall(projpath)
                finally:
                    tf.close()
        except Exception as err:
            logger.error(str(err))
            print("Error expanding project archive: %s" % err)

    if create:
        return Project(projpath)
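
Since tarfile.TarFile is a context manager (Python 2.7+), the extraction step can also be written without the explicit close; a sketch of that alternative (extract_project is a hypothetical name):

import os
import tarfile

def extract_project(archive_name, projpath):
    """Context-manager variant of the extraction step above; both the
    file object and the tarfile are closed even on error."""
    if os.path.getsize(archive_name) > 0:
        with open(archive_name, 'rb') as f:
            with tarfile.open(fileobj=f, mode='r') as tf:
                tf.extractall(projpath)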
Example #20
 def publish_updates(self, added_set, changed_set, deleted_set):
     types = get_available_types()
     try:
         publish('types',
                 [
                     packagedict(types),
                     list(added_set),
                     list(changed_set),
                     list(deleted_set),
                 ])
     except:
         logger.error("publish of types failed")
Example #21
 def publish_updates(self, added_set, changed_set, deleted_set):
     publisher = Publisher.get_instance()
     if publisher:
         types = get_available_types()
         types.extend(self.get_available_types())
         publisher.publish('types', [
             packagedict(types),
             list(added_set),
             list(changed_set),
             list(deleted_set),
         ])
     else:
         logger.error("no Publisher found")
Example #22
def project_from_archive(archive_name, proj_name=None, dest_dir=None,
                         create=True, overwrite=False):
    """Expand the given project archive file in the specified destination
    directory and return a Project object that points to the newly
    expanded project.

    archive_name: str
        Path to the project archive to be expanded.

    proj_name: str (optional)
        Name of the new project. Defaults to the name of the project contained
        in the name of the archive.

    dest_dir: str (optional)
        Directory where the project directory for the expanded archive will
        reside. Defaults to the directory where the archive is located.

    create: bool (optional)
        If True, create and return a Project object. Otherwise, just unpack the
        project directory.

    overwrite: bool (optional)
        If True, expand the archive even if the project directory
        already exists.
    """
    archive_name = expand_path(archive_name)

    if dest_dir is None:
        dest_dir = os.path.dirname(archive_name)
    else:
        dest_dir = expand_path(dest_dir)

    if proj_name is None:
        proj_name = parse_archive_name(archive_name)

    projpath = os.path.join(dest_dir, proj_name)

    if not overwrite and os.path.exists(projpath):
        raise RuntimeError("Directory '%s' already exists" % projpath)

    if not os.path.exists(projpath):
        os.mkdir(projpath)
    if os.path.getsize(archive_name) > 0:
        try:
            with open(archive_name, 'rb') as f:
                tf = tarfile.open(fileobj=f, mode='r')
                try:
                    tf.extractall(projpath)
                finally:
                    tf.close()
        except Exception as err:
            logger.error(str(err))
            print("Error expanding project archive: %s" % err)

    if create:
        return Project(projpath)
Example #23
def _run_command(cmd, silent=False):
    fd, fname = tempfile.mkstemp()
    proc = subprocess.Popen(cmd, stdout=fd, stderr=subprocess.STDOUT, shell=True)
    proc.wait()
    os.close(fd)
    try:
        if proc.returncode != 0 and not silent:
            with open(fname, 'rb') as f:
                out = f.read()
            logger.error("out: %s" % out)
            raise RuntimeError(out)
    finally:
        os.remove(fname)
    return proc.returncode
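
Hypothetical usage on a POSIX shell (the command strings are illustrative):

rc = _run_command('echo hello')           # rc == 0; output is discarded
rc = _run_command('exit 3', silent=True)  # rc == 3; silent, so no RuntimeError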
Example #24
 def load_macro(self, macro_name):
     fpath = os.path.join(self.macrodir, macro_name)
     self._recorded_cmds = []
     with open(fpath, 'r') as f:
         lines = f.readlines()
         
     errors = []
     for i, line in enumerate(lines):
         try:
             self.command(line.rstrip('\n'), save=False)
         except Exception as err:
             msg = str(err)
             logger.error("%s" % ''.join(traceback.format_tb(sys.exc_info()[2])))
             try:
                 publish('console_errors', msg)
             except:
                 logger.error("publishing of error failed")
Example #25
def _run_command(cmd, silent=False):
    fd, fname = tempfile.mkstemp()
    proc = subprocess.Popen(cmd,
                            stdout=fd,
                            stderr=subprocess.STDOUT,
                            shell=True)
    proc.wait()
    os.close(fd)
    try:
        if proc.returncode != 0 and not silent:
            with open(fname, 'rb') as f:
                out = f.read()
            logger.error("out: %s" % out)
            raise RuntimeError(out)
    finally:
        os.remove(fname)
    return proc.returncode
Example #26
    def load_binpubs():
        """Loads all binpubs entry points."""
        global _binpub_types
        logger.debug("loading binpubs")

        if _binpub_types is None:
            _binpub_types = []

            # find all of the installed binpubs
            for ep in working_set.iter_entry_points('openmdao.binpub'):
                try:
                    klass = ep.load()
                except Exception as err:
                    logger.error("Entry point %s failed to load: %s" % (str(ep).split()[0], err))
                else:
                    logger.debug("adding binpub entry point: %s" % str(ep).split()[0])
                    with _lock:
                        _binpub_types.append(klass)
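
A minimal sketch of the same entry-point discovery, assuming working_set comes from pkg_resources as the call signature suggests (load_plugins is a hypothetical name; 'openmdao.binpub' is the group used above):

from pkg_resources import working_set

def load_plugins(group):
    """Collect every loadable class advertised under the given
    entry-point group, skipping ones that fail to import."""
    plugins = []
    for ep in working_set.iter_entry_points(group):
        try:
            plugins.append(ep.load())
        except Exception as err:
            print("entry point %s failed to load: %s" % (ep.name, err))
    return plugins

binpub_types = load_plugins('openmdao.binpub')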
Example #27
    def load_macro(self, macro_name):
        fpath = os.path.join(self.macrodir, macro_name)
        self._recorded_cmds = []
        with open(fpath, 'r') as f:
            lines = f.readlines()

        errors = []
        for i, line in enumerate(lines):
            try:
                self.command(line.rstrip('\n'), save=False)
            except Exception as err:
                msg = str(err)
                logger.error("%s" %
                             ''.join(traceback.format_tb(sys.exc_info()[2])))
                try:
                    publish('console_errors', msg)
                except:
                    logger.error("publishing of error failed")
Example #28
    def _import(self, typ):
        """Return class for *typ*."""
        if typ not in self._ctors:
            parts = typ.split(".")
            cname = parts[-1]
            modname = ".".join(parts[:-1])
            try:
                __import__(modname, globals(), locals(), [cname])
                mod = sys.modules[modname]
            except (ImportError, KeyError) as err:
                logger.error(str(err))
                return None
            try:
                self._ctors[typ] = getattr(mod, cname)
            except AttributeError as err:
                logger.error(str(err))
                return None

        return self._ctors[typ]
    def _import(self, typ):
        """Return class for *typ*."""
        if typ not in self._ctors:
            parts = typ.split('.')
            cname = parts[-1]
            modname = '.'.join(parts[:-1])
            try:
                __import__(modname, globals(), locals(), [cname])
                mod = sys.modules[modname]
            except (ImportError, KeyError) as err:
                logger.error(str(err))
                return None
            try:
                self._ctors[typ] = getattr(mod, cname)
            except AttributeError as err:
                logger.error(str(err))
                return None

        return self._ctors[typ]
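
The _import pattern above maps a dotted path to an object via __import__ plus getattr. A standalone, runnable version (import_named is a hypothetical name):

import sys

def import_named(typ):
    """Map a dotted path like 'collections.OrderedDict' to the named
    object, or None if the module or attribute can't be found."""
    modname, _, cname = typ.rpartition('.')
    try:
        __import__(modname, globals(), locals(), [cname])
        mod = sys.modules[modname]
    except (ImportError, KeyError, ValueError):
        return None
    try:
        return getattr(mod, cname)
    except AttributeError:
        return None

print(import_named('collections.OrderedDict'))  # <class 'collections.OrderedDict'>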
 def write_file(self, filename, contents):
     ''' Write contents to file in working directory.
     '''
     try:
         filename = str(filename)
         fpath = self._get_abs_path(filename)
         if filename.endswith('.py'):
             initpath = os.path.join(os.path.dirname(fpath), '__init__.py')
             files = os.listdir(os.path.dirname(fpath))
             # FIXME: This is a bit of a kludge, but for now we only create
             # an __init__.py file if it's the very first file in the
             # directory where a new file is being added.
             if not files and not os.path.isfile(initpath):
                 with open(initpath, 'w') as f:
                     f.write(' ')
         with open(fpath, 'wb') as fout:
             fout.write(contents)
         return True
     except Exception as err:
         logger.error(str(err))
         return err
 def write_file(self, filename, contents):
     ''' Write contents to file in working directory.
     '''
     try:
         filename = str(filename)
         fpath = self._get_abs_path(filename)
         if filename.endswith('.py'):
             files = os.listdir(os.path.dirname(fpath))
             # FIXME: This is a bit of a kludge, but for now we only create
             # an __init__.py file if it's the very first file in the
             # directory where a new file is being added.
             initpath = os.path.join(os.path.dirname(fpath), '__init__.py')
             if not files and not os.path.isfile(initpath):
                 with open(initpath, 'w') as f:
                     f.write(' ')
         with open(fpath, 'wb') as fout:
             fout.write(contents)
         return True
     except Exception as err:
         logger.error(str(err))
         return err
Example #32
    def __init__(self, watchdir, use_observer=True, observer=None):
        super(ProjDirFactory, self).__init__()
        self._lock = threading.RLock()
        self.watchdir = watchdir
        self.imported = {}  # imported files vs (module, ctor dict)
        try:
            self.analyzer = PythonSourceTreeAnalyser()

            added_set = set()
            changed_set = set()
            deleted_set = set()
            for pyfile in find_files(self.watchdir, "*.py"):
                self.on_modified(pyfile, added_set, changed_set, deleted_set)

            if use_observer:
                self._start_observer(observer)
                self.publish_updates(added_set, changed_set, deleted_set)
            else:
                self.observer = None  # sometimes for debugging/testing it's easier to turn observer off
        except Exception as err:
            logger.error(str(err))
    def solve(self, arg):
        """ Solve the coupled equations for a new state vector that nulls the
        residual. Used by the Newton solvers."""

        system = self._system
        options = self.options
        A = self.A

        #print system.name, 'Linear solution start vec', system.rhs_vec.array
        # Call GMRES to solve the linear system
        dx, info = gmres(A, arg, tol=options.atol, maxiter=options.maxiter)

        if info > 0:
            msg = "ERROR in calc_gradient in '%s': gmres failed to converge " \
                  "after %d iterations"
            logger.error(msg, system.name, info)
        elif info < 0:
            msg = "ERROR in calc_gradient in '%s': gmres failed"
            logger.error(msg, system.name)

        #print system.name, 'Linear solution vec', -dx
        return dx
    def solve(self, arg):
        """ Solve the coupled equations for a new state vector that nulls the
        residual. Used by the Newton solvers."""

        system = self._system
        options = self.options
        A = self.A

        #print system.name, 'Linear solution start vec', system.rhs_vec.array
        # Call GMRES to solve the linear system
        dx, info = gmres(A, arg,
                         tol=options.atol,
                         maxiter=options.maxiter)

        if info > 0:
            msg = "ERROR in calc_gradient in '%s': gmres failed to converge " \
                  "after %d iterations"
            logger.error(msg, system.name, info)
        elif info < 0:
            msg = "ERROR in calc_gradient in '%s': gmres failed"
            logger.error(msg, system.name)

        #print system.name, 'Linear solution vec', -dx
        return dx
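
A minimal runnable example of the same GMRES call, posing the system as a matrix-free LinearOperator the way solve() does; the tolerance keyword is left at its default since its name (tol vs. rtol) varies across SciPy versions:

import numpy as np
from scipy.sparse.linalg import LinearOperator, gmres

# The system matrix is only available as a matrix-vector product.
def matvec(v):
    return np.array([2.0 * v[0] + v[1], v[0] + 3.0 * v[1]])

A = LinearOperator((2, 2), matvec=matvec, dtype=float)
dx, info = gmres(A, np.array([3.0, 4.0]))  # info == 0 means converged
print(dx, info)                            # -> [1. 1.] 0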
Example #35
 def _error(self, msg):
     logger.error(msg)
     publish('console_errors', msg)
     publish('file_errors', msg)
 def _error(self, msg):
     logger.error(msg)
     print(msg)
     publish('console_errors', msg)
def calc_gradient(wflow, inputs, outputs, n_edge, shape):
    """Returns the gradient of the passed outputs with respect to
    all passed inputs.
    """

    # Size the problem
    A = LinearOperator((n_edge, n_edge),
                       matvec=wflow.matvecFWD,
                       dtype=float)

    J = zeros(shape)

    # Each comp calculates its own derivatives at the current
    # point. (i.e., linearizes)
    comps = wflow.calc_derivatives(first=True)

    if not comps:
        return J

    dgraph = wflow._derivative_graph
    options = wflow._parent.gradient_options
    bounds = wflow._bounds_cache

    # Forward mode, solve linear system for each parameter
    j = 0
    for param in inputs:

        if isinstance(param, tuple):

            # You can ask for derivatives of broadcast inputs in cases
            # where some of the inputs aren't in the relevance graph.
            # Find the one that is.
            for bcast_param in param:
                if bcast_param in dgraph and 'bounds' in dgraph.node[bcast_param]:
                    param = bcast_param
                    break
            else:
                param = param[0]
                #raise RuntimeError("didn't find any of '%s' in derivative graph for '%s'" %
                                   #(param, wflow._parent.get_pathname()))
        try:
            i1, i2 = bounds[param]
        except KeyError:

            # If you end up here, it is usually because you have a
            # tuple of broadcast inputs containing only non-relevant
            # variables. Derivative is zero, so take one and increment
            # by its width.
            j += wflow.get_width(param)
            continue

        if isinstance(i1, list):
            in_range = i1
        else:
            in_range = range(i1, i2)

        for irhs in in_range:

            RHS = zeros((n_edge, 1))
            RHS[irhs, 0] = 1.0

            # Call GMRES to solve the linear system
            dx, info = gmres(A, RHS,
                             tol=options.gmres_tolerance,
                             maxiter=options.gmres_maxiter)
            if info > 0:
                msg = "ERROR in calc_gradient in '%s': gmres failed to converge " \
                      "after %d iterations for parameter '%s' at index %d"
                logger.error(msg % (wflow._parent.get_pathname(), info, param, irhs))
            elif info < 0:
                msg = "ERROR in calc_gradient in '%s': gmres failed " \
                      "for parameter '%s' at index %d"
                logger.error(msg % (wflow._parent.get_pathname(), param, irhs))

            i = 0
            for item in outputs:
                try:
                    k1, k2 = bounds[item]
                except KeyError:
                    i += wflow.get_width(item)
                    continue

                if isinstance(k1, list):
                    J[i:i+(len(k1)), j] = dx[k1]
                    i += len(k1)
                else:
                    J[i:i+(k2-k1), j] = dx[k1:k2]
                    i += k2-k1

            j += 1

    #print inputs, '\n', outputs, '\n', J
    return J
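
The loop above assembles the Jacobian column by column: each unit right-hand side e_j yields one column of the solution of A dx = e_j. A small self-checking sketch of that idea on a known 3x3 system:

import numpy as np
from scipy.sparse.linalg import LinearOperator, gmres

# Solving A dx = e_j for each unit vector e_j recovers the columns of
# inv(A), which is how the loop above fills in J.
M = np.array([[4.0, 1.0, 0.0],
              [1.0, 3.0, 1.0],
              [0.0, 1.0, 2.0]])
A = LinearOperator((3, 3), matvec=M.dot, dtype=float)

J = np.zeros((3, 3))
for j in range(3):
    RHS = np.zeros(3)
    RHS[j] = 1.0
    dx, info = gmres(A, RHS)
    J[:, j] = dx

print(np.allclose(J, np.linalg.inv(M)))  # True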
Example #38
 def _error(self, msg):
     logger.error(msg)
     print(msg)
     publish('console_errors', msg)
Example #39
def calc_gradient_adjoint(wflow, inputs, outputs, n_edge, shape):
    """Returns the gradient of the passed outputs with respect to
    all passed inputs. Calculation is done in adjoint mode.
    """

    # Size the problem
    A = LinearOperator((n_edge, n_edge), matvec=wflow.matvecREV, dtype=float)
    J = zeros(shape)

    # Each comp calculates its own derivatives at the current
    # point. (i.e., linearizes)
    wflow.calc_derivatives(first=True)

    dgraph = wflow._derivative_graph
    options = wflow._parent.gradient_options

    # Adjoint mode, solve linear system for each output
    j = 0
    for output in outputs:

        if isinstance(output, tuple):
            output = output[0]

        try:
            i1, i2 = wflow.get_bounds(output)
        except KeyError:
            continue

        if isinstance(i1, list):
            out_range = i1
        else:
            out_range = range(i1, i2)

        for irhs in out_range:

            RHS = zeros((n_edge, 1))
            RHS[irhs, 0] = 1.0

            # Call GMRES to solve the linear system
            dx, info = gmres(A,
                             RHS,
                             tol=options.gmres_tolerance,
                             maxiter=options.gmres_maxiter)

            if info > 0:
                msg = "ERROR in calc_gradient_adjoint in '%s': gmres failed to converge " \
                      "after %d iterations for output '%s' at index %d"
                logger.error(
                    msg % (wflow._parent.get_pathname(), info, output, irhs))
            elif info < 0:
                msg = "ERROR in calc_gradient_adjoint in '%s': gmres failed " \
                      "for output '%s' at index %d"
                logger.error(msg %
                             (wflow._parent.get_pathname(), output, irhs))

            i = 0

            for param in inputs:

                # You can ask for derivatives of broadcast inputs in cases
                # where some of the inputs aren't in the relevance graph.
                # Find the one that is.
                if isinstance(param, tuple):
                    for bcast_param in param:
                        if bcast_param in dgraph and 'bounds' in dgraph.node[
                                bcast_param]:
                            param = bcast_param
                            break
                    else:
                        param = param[0]
                        #raise RuntimeError("didn't find any of '%s' in derivative graph for '%s'" %
                        #(param, wflow._parent.get_pathname()))

                try:
                    k1, k2 = wflow.get_bounds(param)
                except KeyError:

                    # If you end up here, it is usually because you have a
                    # tuple of broadcast inputs containing only non-relevant
                    # variables. Derivative is zero, so take one and increment
                    # by its width.

                    # TODO - We need to cache these when we remove
                    # boundcaching from the graph
                    val = wflow.scope.get(param)
                    i += flattened_size(param, val, wflow.scope)
                    continue

                if isinstance(k1, list):
                    J[j, i:i + len(k1)] = dx[k1]
                    i += len(k1)
                else:
                    J[j, i:i + (k2 - k1)] = dx[k1:k2]
                    i += k2 - k1

            j += 1

    #print inputs, '\n', outputs, '\n', J, dx
    return J
Example #40
    def visit_ClassDef(self, node):
        """This executes every time a class definition is parsed."""
        fullname = '.'.join([self.modpath, node.name])
        self.localnames[node.name] = fullname
        bases = [_to_str(b) for b in node.bases]

        bvisitor = _ClassBodyVisitor()
        bvisitor.visit(node)

        bases = [self.localnames.get(b, b) for b in bases]

        self.classes[fullname] = ClassInfo(fullname, self.fname, bases,
                                           bvisitor.metadata,
                                           node.decorator_list)
        self.tree_analyser.class_map[fullname] = self.classes[fullname]

        undef_bases = [
            b for b in bases
            if b not in self.classes and not hasattr(__builtin__, b)
        ]
        while undef_bases:
            base = undef_bases.pop()
            cinfo = self.tree_analyser.find_classinfo(base)
            if cinfo is None:
                parts = base.rsplit('.', 1)
                if len(parts) == 1:
                    # no dot, so maybe it came in with a '*' import
                    trymods = self.starimports[::-1]
                    basename = base
                else:
                    trymods = [parts[0]]
                    basename = parts[1]

                for modname in trymods:
                    excluded = False
                    for m in self.tree_analyser.mod_excludes:
                        if m == modname or modname.startswith(m + '.'):
                            excluded = True
                            break
                    if excluded:
                        continue
                    fpath = find_module(modname)
                    if fpath is not None:
                        fanalyzer = self.tree_analyser.analyze_file(fpath)
                        if '.' not in base:
                            trybase = '.'.join([modname, base])
                        else:
                            trybase = base
                        if trybase in fanalyzer.classes:
                            break
                        elif basename in fanalyzer.localnames:
                            newname = fanalyzer.localnames[basename]
                            self.tree_analyser.class_map[trybase] = newname
                            if newname not in self.tree_analyser.class_map and \
                               newname not in self.unresolved_classes:
                                undef_bases.append(newname)
                            break
                else:
                    logger.error("can't locate python source for class %s" %
                                 base)
                    self.unresolved_classes.add(base)
Example #41
 def _error(self, msg):
     logger.error(msg)
     publish('console_errors', msg)
     publish('file_errors', msg)
Example #42
def calc_gradient(wflow, inputs, outputs, n_edge, shape):
    """Returns the gradient of the passed outputs with respect to
    all passed inputs.
    """

    # Size the problem
    A = LinearOperator((n_edge, n_edge), matvec=wflow.matvecFWD, dtype=float)

    J = zeros(shape)

    # Each comp calculates its own derivatives at the current
    # point. (i.e., linearizes)
    comps = wflow.calc_derivatives(first=True)

    if not comps:
        return J

    dgraph = wflow._derivative_graph
    options = wflow.parent.gradient_options
    bounds = wflow._bounds_cache

    # Forward mode, solve linear system for each parameter
    j = 0
    for param in inputs:

        if isinstance(param, tuple):

            # You can ask for derivatives of broadcast inputs in cases
            # where some of the inputs aren't in the relevance graph.
            # Find the one that is.
            for bcast_param in param:
                if bcast_param in dgraph and 'bounds' in dgraph.node[
                        bcast_param]:
                    param = bcast_param
                    break
            else:
                param = param[0]
                # raise RuntimeError("didn't find any of '%s' in derivative graph for '%s'" %
                # (param, wflow.parent.get_pathname()))
        try:
            i1, i2 = bounds[param]
        except KeyError:

            # If you end up here, it is usually because you have a
            # tuple of broadcast inputs containing only non-relevant
            # variables. Derivative is zero, so take one and increment
            # by its width.
            j += wflow.get_width(param)
            continue

        if isinstance(i1, list):
            in_range = i1
        else:
            in_range = range(i1, i2)

        for irhs in in_range:

            RHS = zeros((n_edge, 1))
            RHS[irhs, 0] = 1.0

            # Call GMRES to solve the linear system
            dx, info = gmres(A,
                             RHS,
                             tol=options.gmres_tolerance,
                             maxiter=options.gmres_maxiter)
            if info > 0:
                msg = "ERROR in calc_gradient in '%s': gmres failed to converge " \
                      "after %d iterations for parameter '%s' at index %d"
                logger.error(msg, wflow.parent.get_pathname(), info, param,
                             irhs)
            elif info < 0:
                msg = "ERROR in calc_gradient in '%s': gmres failed " \
                      "for parameter '%s' at index %d"
                logger.error(msg, wflow.parent.get_pathname(), param, irhs)

            i = 0
            for item in outputs:
                try:
                    k1, k2 = bounds[item]
                except KeyError:
                    i += wflow.get_width(item)
                    continue

                if isinstance(k1, list):
                    J[i:i + (len(k1)), j] = dx[k1]
                    i += len(k1)
                else:
                    J[i:i + (k2 - k1), j] = dx[k1:k2]
                    i += k2 - k1

            j += 1

    # print inputs, '\n', outputs, '\n', J
    return J
Example #43
 def _error(self, msg, errclass=RuntimeError):
     if self._gui:
         logger.error(msg)
     else:
         raise errclass(msg)