    def load_directory(self, modpath):
        if not PM_DEVELOPMENT:
            log.error("This method should not be called in release.")
            return

        start_file = 'main'

        log.warning("You are loading a plugin without checking for needs, "
                    "provides or conflicts.")
        log.warning("* You have been warned! *")
        log.warning("Assuming `%s' as start file!" % start_file)

        # Load the plugin
        sys.path.insert(0, os.path.abspath(modpath))

        if start_file in sys.modules:
            sys.modules.pop(start_file)

        try:
            __builtin__.__import__ = hook_import
            module = hook_import(start_file, level=0)

            if hasattr(module, "__plugins__") and \
               isinstance(module.__plugins__, list):

                lst = module.__plugins__
                ret = []

                for plug in lst:
                    try:
                        if issubclass(plug, PassiveAudit):
                            is_audit = 1
                        elif issubclass(plug, ActiveAudit):
                            is_audit = 2
                        else:
                            is_audit = 0

                        inst = plug()
                        inst.start(None)

                        if is_audit:
                            inst.register_decoders()
                            inst.register_hooks()

                        ret.append(inst)
                    except Exception, err:
                        log.critical("Error while starting %s:" % plug)
                        log.critical(generate_traceback())
                        log.critical("Ignoring instance.")

                if not ret:
                    log.error("No startable plugin defined in main file")
                else:
                    # Assumed: keep track of the started instances,
                    # mirroring __load_hook below.
                    self.instances[modpath] = ret
        finally:
            # Assumed cleanup, mirroring __load_hook: drop the search-path
            # entry added above.
            sys.path.pop(0)
    def __unload_hook(self, pkg):
        """
        This is the real unload procedure for a plugin.

        Raise a PluginException on failure.
        @return None or raise a PluginException
        """

        if pkg not in self.instances:
            raise PluginException(pkg, "Already unloaded")

        for inst in self.instances[pkg]:
            try:
                inst.stop()
            except Exception, err:
                log.critical("Error while stopping %s from %s:" % (inst, pkg))
                log.critical(generate_traceback())
                log.critical("Ignoring instance.")
    def __load_hook(self, pkg):
        """
        This is the real load procedure for a plugin.

        We import the plugin from its zip package and expose a global
        function that gives the plugin a simple way to access files
        inside the zip archive.

        Raise a PluginException on failure.
        @return None or raise a PluginException
        """

        if pkg in self.instances:
            raise PluginException(pkg, "Already present")

        # We need to get the start-file field from pkg and then try
        # to import it.
        modpath = os.path.join(pkg.get_path(), 'lib')
        sys.path.insert(0, os.path.abspath(modpath))

        # Make the directory containing the start file importable too.
        fname = os.path.join(pkg.get_path(), 'bin', pkg.start_file)
        sys.path.insert(0, os.path.abspath(os.path.dirname(fname)))

        if pkg.start_file in sys.modules:
            sys.modules.pop(pkg.start_file)

        try:
            try:
                # Track the package in sys.plugins_path so the imported
                # plugin keeps working and is not deleted under our feet.
                sys.plugins_path.insert(0, pkg)
                module = self.__cache_import(pkg)
            except Exception, err:
                sys.plugins_path.pop(0)
                raise PluginException(pkg, str(err))
        finally:
            # Drop the start-file directory added to the search path above.
            sys.path.pop(0)

        if hasattr(module, "__plugins__") and \
           isinstance(module.__plugins__, list):

            lst = module.__plugins__
            ret = []

            for plug in lst:
                try:
                    if issubclass(plug, PassiveAudit):
                        is_audit = 1
                    elif issubclass(plug, ActiveAudit):
                        is_audit = 2
                    else:
                        is_audit = 0

                    inst = plug()
                    inst.start(pkg)

                    if is_audit:
                        inst.register_decoders()
                        inst.register_hooks()

                    ret.append(inst)
                except Exception, err:
                    log.critical("Error while starting %s from %s:" % (plug, pkg))
                    log.critical(generate_traceback())
                    log.critical("Ignoring instance.")

            if not ret:
                raise PluginException(pkg,
                    "No startable Plugin subclass in %s" % pkg)

            self.instances[pkg] = ret
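    # Both load paths above rely on the same contract from a plugin's start
    # file: it must export a ``__plugins__`` list of classes that can be
    # instantiated without arguments, implement start()/stop(), and, for
    # audit plugins, register_decoders()/register_hooks().  A minimal sketch
    # of such a start file follows; the import path and class names are
    # assumptions for illustration, not taken from this module:
    #
    #     from umit.pm.manager.auditmanager import PassiveAudit  # assumed path
    #
    #     class ExampleAudit(PassiveAudit):
    #         def start(self, pkg):
    #             # Receives the owning package, or None in development mode.
    #             self.pkg = pkg
    #
    #         def stop(self):
    #             pass
    #
    #         def register_decoders(self):
    #             pass
    #
    #         def register_hooks(self):
    #             pass
    #
    #     __plugins__ = [ExampleAudit]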
class ThreadPool(object):
    MIN_THREADS = 5
    MAX_THREADS = 20
    IS_DAEMON = True

    started = False
    joined = False
    workers = 0

    def __init__(self, minthreads=5, maxthreads=20):
        assert minthreads >= 0
        assert minthreads <= maxthreads

        self.queue = Queue.Queue(0)
        self.min = minthreads
        self.max = maxthreads

        self.waiters = []
        self.threads = []
        self.working = []

    def queue_work(self, callback, errback, func, *args, **kwargs):
        if self.joined:
            return

        obj = (callback, errback, func, args, kwargs)
        self.queue.put(obj)

        if self.started:
            self.resize()

    def start(self):
        self.joined = False
        self.started = True
        self.resize()

    def stop(self):
        self.joined = True
        threads = copy.copy(self.threads)

        while self.workers:
            self.queue.put(WorkerStop)
            self.workers -= 1

    def join_threads(self):
        # Watch out for exceptions when joining already-joined threads.
        threads = copy.copy(self.threads)

        for thread in threads:
            thread.join()

    def resize(self, minthreads=None, maxthreads=None):
        # None falls back to the class defaults (None compares below any
        # integer in Python 2).
        minthreads = max(minthreads, self.MIN_THREADS)
        maxthreads = max(maxthreads, self.MAX_THREADS)

        assert minthreads >= 0
        assert minthreads <= maxthreads

        self.min = minthreads
        self.max = maxthreads

        if not self.started:
            return

        while self.workers > self.max:
            self.stop_worker()

        while self.workers < self.min:
            self.start_worker()

        self.start_needed_workers()

    def start_needed_workers(self):
        size = self.queue.qsize() + len(self.working)

        while self.workers < min(self.max, size):
            self.start_worker()

    def start_worker(self):
        self.workers += 1

        thread = threading.Thread(target=self._worker)
        thread.setDaemon(self.IS_DAEMON)

        self.threads.append(thread)
        thread.start()

    def stop_worker(self):
        self.queue.put(WorkerStop)
        self.workers -= 1

    def _worker(self):
        ct = threading.currentThread()
        obj = self.queue.get()

        while obj is not WorkerStop:
            self.working.append(ct)

            (callback, errback, func, args, kw) = obj

            try:
                try:
                    result = func(*args, **kw)
                except Exception, exc:
                    log.error("Handling exception %s Traceback:" % exc)
                    log.error(generate_traceback())

                    if errback is not None:
                        errback(sys.exc_info()[1])
                else:
                    if callback is not None:
                        callback(result)
            except Exception, err:
                log.critical("Thread exceptions ignored. Traceback:")
                log.critical(generate_traceback())

            # Assumed loop tail: mark the job as done and wait for the
            # next one (WorkerStop ends the loop).
            self.working.remove(ct)
            obj = self.queue.get()

        # The worker received WorkerStop: drop its bookkeeping entry.
        self.threads.remove(ct)
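
# A minimal usage sketch of the pool above, kept under a __main__ guard so it
# never runs on import.  The job, callback and errback names are made up for
# illustration; callback receives the job's return value and errback receives
# the exception instance raised by the job.
if __name__ == '__main__':
    import time

    def double(x):
        time.sleep(0.1)
        return x * 2

    def on_result(result):
        print 'job finished with', result

    def on_error(exc):
        print 'job failed with', exc

    pool = ThreadPool(minthreads=2, maxthreads=4)
    pool.start()

    for i in range(10):
        pool.queue_work(on_result, on_error, double, i)

    # Give the daemon workers time to drain the queue, then shut down.
    time.sleep(2)
    pool.stop()
    pool.join_threads()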