class PkgResourcesFactory(Factory):
    """A Factory that loads plugins using the pkg_resources API, which means
    it searches through egg info of distributions in order to find any entry
    point groups corresponding to openmdao plugin types, e.g.,
    openmdao.component, openmdao.variable, etc.
    """
    def __init__(self, groups=plugin_groups.keys(), search_path=None):
        super(PkgResourcesFactory, self).__init__()
        self._have_new_types = True
        self._groups = copy.copy(groups)
        self._search_path = search_path
        self.env = Environment(search_path)
        self.tree_analyser = PythonSourceTreeAnalyser()

    def create(self, typ, version=None, server=None,
               res_desc=None, **ctor_args):
        """Create and return an object of the given type, optionally
        requiring a specific distribution version. This factory does not
        handle the server or res_desc arguments; if either is supplied,
        None is returned.
        """
        if server is not None or res_desc is not None:
            return None

        classes = self._get_type_dict()
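        # classes: typename -> (distribution, [entry point groups], set(module names))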

        try:
            lst = classes[typ]
            dist = lst[0]
            groups = lst[1]

            # check the version before paying the cost of loading the entry point
            if version is not None and dist.version != version:
                return None

            klass = dist.load_entry_point(groups[0], typ)
            return klass(**ctor_args)
        except KeyError:
            if self._search_path is None:
                return None
            # try to look in the whole environment
            for group in self._groups:
                for proj in self.env:
                    for dist in self.env[proj]:
                        if version is not None and version != dist.version:
                            continue
                        ep = dist.get_entry_info(group, typ)
                        if ep is not None:
                            dist.activate()
                            klass = ep.load(require=True, env=self.env)
                            self._have_new_types = True
                            return klass(**ctor_args)
                        if version is None:
                            # newest version didn't have entry point, so skip to next project
                            break
        return None

    def _entry_map_info(self, distiter):
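        # build a mapping of entry point name ->
        # (distribution, [entry point groups], set(module names))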
        dct = {}
        for group in plugin_groups.keys():
            for dist in distiter:
                for name, value in dist.get_entry_map(group).items():
                    lst = dct.setdefault(name, (dist, [], set()))
                    lst[1].append(group)
                    lst[2].add(value.module_name)
        return dct

    def _get_type_dict(self):
        if self._have_new_types:
            self._entry_pt_classes = self._entry_map_info(working_set)
        return self._entry_pt_classes

    def _get_meta_info(self, typ_list, groups, typ_dict):
        distset = set()
        for name, lst in typ_dict.items():
            dist = lst[0]
            modules = lst[2]
            distset.add(dist.project_name)
            ifaces = set()
            for g in lst[1]:
                ifaces.update(plugin_groups[g])

            meta = {
                'version': dist.version,
                'ifaces': set(ifaces),
            }

            for modname in modules:
                fpath = find_module(modname)
                if fpath is not None:
                    # scanning the source picks up interfaces declared in the
                    # code, in addition to those implied by the entry point groups
                    self.tree_analyser.analyze_file(fpath, use_cache=True)
                    meta['ifaces'].update(self.tree_analyser.get_interfaces(name))

            meta['ifaces'] = list(meta['ifaces'])
            if groups.intersection(lst[1]):
                typ_list.append((name, meta))
        self.tree_analyser.flush_cache()
        return distset

    def get_available_types(self, groups=None):
        """Return a set of tuples of the form (typename, dist_version), one
        for each available plugin type in the given entry point groups.
        If groups is None, return the set for all openmdao entry point groups.
        """
        ret = []

        if groups is None:
            groups = plugin_groups.keys()
        groups = set(groups)

        typ_dict = self._get_type_dict()
        distset = self._get_meta_info(ret, groups, typ_dict)

        if self._search_path is None:
            # self.env has the same contents as working_set,
            # so don't bother looking through it
            return ret

        # now look in the whole Environment; we want the newest dist
        # for each project in the Environment
        dists = []
        for proj in self.env:
            dist = self.env[proj][0]
            if dist.project_name not in distset:
                dists.append(dist)

        typ_dict = self._entry_map_info(dists)
        self._get_meta_info(ret, groups, typ_dict)

        return ret
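# A minimal usage sketch for PkgResourcesFactory (illustrative only: the search
# path and 'mypkg.MyComp' are hypothetical names; the entry point groups
# searched are the standard openmdao ones from plugin_groups):
#
#     factory = PkgResourcesFactory(search_path=['/path/to/plugins'])
#     comp = factory.create('mypkg.MyComp')    # instantiate by entry point name
#     for name, meta in factory.get_available_types():
#         print name, meta['version'], meta['ifaces']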
class ProjDirFactory(Factory):
    """A Factory that watches a Project directory and dynamically keeps
    the set of available types up-to-date as project files change.
    """
    def __init__(self, watchdir, use_observer=True, observer=None):
        super(ProjDirFactory, self).__init__()
        self._lock = threading.RLock()
        self.watchdir = watchdir
        self.observer = None  # set by _start_observer() when use_observer is True
        self.imported = {}  # maps file pathname to (module, {classname: constructor})
        try:
            self.analyzer = PythonSourceTreeAnalyser()

            added_set = set()
            changed_set = set()
            deleted_set = set()
            for pyfile in find_files(self.watchdir, "*.py"):
                self.on_modified(pyfile, added_set, changed_set, deleted_set)

            if use_observer:
                self._start_observer(observer)
                self.publish_updates(added_set, changed_set, deleted_set)
            else:
                # sometimes for debugging/testing it's easier to turn the observer off
                self.observer = None
        except Exception as err:
            logger.error(str(err))

    def _start_observer(self, observer):
        if observer is None:
            self.observer = Observer()
            self._ownsobserver = True
        else:
            self.observer = observer
            self._ownsobserver = False
        self.observer.schedule(PyWatcher(self),
                               path=self.watchdir,
                               recursive=True)
        if self._ownsobserver:
            self.observer.daemon = True
            self.observer.start()

    def _get_mod_ctors(self, mod, fpath, visitor):
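        # remember the imported module along with a mapping of class name to
        # constructor so that create() can instantiate classes from this file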
        self.imported[fpath] = (mod, {})
        for cname in visitor.classes.keys():
            self.imported[fpath][1][cname] = getattr(mod, cname.split('.')[-1])

    def create(self, typ, version=None, server=None,
               res_desc=None, **ctor_args):
        """Create and return an instance of the specified type, or None if
        this Factory can't satisfy the request.
        """
        if server is None and res_desc is None and typ in self.analyzer.class_map:
            with self._lock:
                fpath = self.analyzer.class_map[typ].fname
                modpath = self.analyzer.fileinfo[fpath][0].modpath
                if (os.path.getmtime(fpath) > self.analyzer.fileinfo[fpath][1]
                        and modpath in sys.modules):
                    reload(sys.modules[modpath])
                if fpath not in self.imported:
                    # temporarily prepend the package root dir so the module can be imported
                    sys.path = [get_ancestor_dir(fpath,
                                                 len(modpath.split('.')))] + sys.path
                    try:
                        __import__(modpath)
                    except ImportError as err:
                        return None
                    finally:
                        sys.path = sys.path[1:]
                    mod = sys.modules[modpath]
                    visitor = self.analyzer.fileinfo[fpath][0]
                    self._get_mod_ctors(mod, fpath, visitor)

                try:
                    ctor = self.imported[fpath][1][typ]
                except KeyError:
                    return None
                return ctor(**ctor_args)
        return None

    def get_available_types(self, groups=None):
        """Return a list of available types that cause predicate(classname, metadata) to
        return True.
        """
        with self._lock:
            graph = self.analyzer.graph
            typset = set(graph.nodes())
            types = []

            if groups is None:
                ifaces = set([v[0] for v in plugin_groups.values()])
            else:
                ifaces = set(
                    [v[0] for k, v in plugin_groups.items() if k in groups])

            for typ in typset:
                if typ.startswith('openmdao.'):
                    # skip standard openmdao types; they are provided by
                    # installed distributions, not by this project directory
                    continue
                if 'classinfo' in graph.node[typ]:
                    meta = graph.node[typ]['classinfo'].meta
                    if ifaces.intersection(self.analyzer.get_interfaces(typ)):
                        meta = meta.copy()
                        meta['_context'] = 'In Project'
                        types.append((typ, meta))
            return types

    def on_modified(self, fpath, added_set, changed_set, deleted_set):
        if os.path.isdir(fpath):
            return

        with self._lock:
            imported = False
            if fpath in self.analyzer.fileinfo:  # file has been previously scanned
                visitor = self.analyzer.fileinfo[fpath][0]
                pre_set = set(visitor.classes.keys())

                if fpath in self.imported:  # we imported it earlier
                    imported = True
                    # add fpath location to sys.path
                    sys.path = [os.path.dirname(fpath)] + sys.path
                    try:
                        reload(self.imported[fpath][0])
                    except ImportError as err:
                        return None
                    finally:
                        sys.path = sys.path[1:]  # restore original sys.path
                    #self.imported[fpath] = (m, self.imported[fpath][1])
                elif os.path.getmtime(fpath) > self.analyzer.fileinfo[fpath][1]:
                    modpath = get_module_path(fpath)
                    if modpath in sys.modules:
                        reload(sys.modules[modpath])
                self.on_deleted(fpath, set())  # clean up old refs
            else:  # it's a new file
                pre_set = set()

            visitor = self.analyzer.analyze_file(fpath)
            post_set = set(visitor.classes.keys())

            deleted_set.update(pre_set - post_set)
            added_set.update(post_set - pre_set)
            if imported:
                changed_set.update(pre_set.intersection(post_set))

    def on_deleted(self, fpath, deleted_set):
        with self._lock:
            if os.path.isdir(fpath):
                for pyfile in find_files(self.watchdir, "*.py"):
                    self.on_deleted(pyfile, deleted_set)
            else:
                try:
                    del self.imported[fpath]
                except KeyError:
                    pass

                visitor = self.analyzer.fileinfo[fpath][0]
                deleted_set.update(visitor.classes.keys())
                self.analyzer.remove_file(fpath)

    def publish_updates(self, added_set, changed_set, deleted_set):
        publisher = Publisher.get_instance()
        if publisher:
            types = get_available_types()
            types.extend(self.get_available_types())
            publisher.publish('types', [
                packagedict(types),
                list(added_set),
                list(changed_set),
                list(deleted_set),
            ])
        else:
            logger.error("no Publisher found")

    def cleanup(self):
        """If this factory is removed from the FactoryManager during execution, this function
        will stop the watchdog observer thread.
        """
        if self.observer and self._ownsobserver:
            self.observer.unschedule_all()
            self.observer.stop()
            self.observer.join()
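# A minimal usage sketch for ProjDirFactory (the project path and class name
# below are hypothetical):
#
#     factory = ProjDirFactory('/path/to/my_project', use_observer=True)
#     obj = factory.create('my_project.comps.MyComp')   # instantiate a project class
#     types = factory.get_available_types()             # [(typename, metadata), ...]
#     factory.cleanup()                                  # stop the watchdog observer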