def serve(config, num_workers, enable_listener, enable_collectors):
    """Spawn and run the event handler processes for TARGET_SYSTEM.

    Starts `num_workers` handler processes, an optional deferred
    handler, and (optionally) a listener and per-channel collectors,
    then blocks in the process handler's serve loop.
    """
    channels = [TARGET_SYSTEM, ]
    proc_handler = utils.ProcessHandler(manager=Manager)
    event_queue = proc_handler.mgr.queue()
    fan_out = []

    # Resolve the configured handler class dynamically.
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        proc_handler.add_process(
            handler_cls(daemon=True,
                        queue=event_queue,
                        log_queue=proc_handler.log_queue,
                        running=proc_handler.run_trigger,
                        config=config,
                        mock=config.client.mock))

    # Optional deferred handler gets its own queue, which the listener
    # and collectors fan events out to.
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = proc_handler.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        proc_handler.add_process(
            deferred_cls(daemon=True,
                         queue=deferred_queue,
                         log_queue=proc_handler.log_queue,
                         running=proc_handler.run_trigger,
                         config=config,
                         mock=config.client.mock))
        fan_out.append(deferred_queue)

    if enable_listener:
        proc_handler.add_process(
            evhandlers.EventLogListener(daemon=True,
                                        queue=event_queue,
                                        fan_out_queues=fan_out,
                                        log_queue=proc_handler.log_queue,
                                        running=proc_handler.run_trigger,
                                        channels=channels))

    if enable_collectors:
        for chan in channels:
            proc_handler.add_process(
                evhandlers.EventLogCollector(daemon=True,
                                             queue=event_queue,
                                             fan_out_queues=fan_out,
                                             log_queue=proc_handler.log_queue,
                                             running=proc_handler.run_trigger,
                                             channel=chan,
                                             config=config.eventcollector))
    proc_handler.serve()
def dyn_import_test():
    """ Utils.dyn_import puts modules in sys.modules. """
    names = ("Cerebrum.Utils", "Cerebrum.modules", "Cerebrum.modules.no")
    for name in names:
        Utils.dyn_import(name)
        assert name in sys.modules
    # Importing again must hand back the cached module object.
    target = "Cerebrum.modules.no"
    assert Utils.dyn_import(target) is sys.modules[target]
def serve(logger, config, num_workers, enable_listener, enable_collectors):
    """Start event daemon processes for TARGET_SYSTEM.

    Registers `num_workers` handler processes, an optional deferred
    handler, and (optionally) a DB event listener plus one collector per
    channel, then blocks in the process handler's serve loop.
    """
    logger.info('Starting {!r} event utils'.format(TARGET_SYSTEM))
    channels = [TARGET_SYSTEM, ]
    proc_handler = utils.ProcessHandler(logger=logger, manager=Manager)
    event_queue = proc_handler.mgr.queue()
    fan_out = []

    # Resolve the configured handler class dynamically.
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        proc_handler.add_process(handler_cls,
                                 queue=event_queue,
                                 log_queue=proc_handler.log_queue,
                                 running=proc_handler.run_trigger,
                                 config=config,
                                 mock=config.client.mock)

    # Optional deferred handler with its own fan-out queue.
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = proc_handler.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        proc_handler.add_process(deferred_cls,
                                 queue=deferred_queue,
                                 log_queue=proc_handler.log_queue,
                                 running=proc_handler.run_trigger,
                                 config=config,
                                 mock=config.client.mock)
        fan_out.append(deferred_queue)

    if enable_listener:
        proc_handler.add_process(evhandlers.DBEventListener,
                                 queue=event_queue,
                                 fan_out_queues=fan_out,
                                 log_queue=proc_handler.log_queue,
                                 running=proc_handler.run_trigger,
                                 channels=channels)

    if enable_collectors:
        for chan in channels:
            proc_handler.add_process(evhandlers.DBEventCollector,
                                     queue=event_queue,
                                     fan_out_queues=fan_out,
                                     log_queue=proc_handler.log_queue,
                                     running=proc_handler.run_trigger,
                                     channel=chan,
                                     config=config.eventcollector)
    proc_handler.serve()
def serve(config, num_workers, enable_listener, enable_collectors):
    """Spawn and run the event handler processes for TARGET_SYSTEM.

    The main event queue is bounded (maxsize=1000); handlers log over
    the process handler's log channel.
    """
    channels = [
        TARGET_SYSTEM,
    ]
    proc_handler = utils.ProcessHandler(manager=Manager)
    event_queue = proc_handler.mgr.queue(maxsize=1000)
    fan_out = []

    # Resolve the configured handler class dynamically.
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        proc_handler.add_process(
            handler_cls(daemon=True,
                        queue=event_queue,
                        log_channel=proc_handler.log_channel,
                        running=proc_handler.run_trigger,
                        config=config,
                        mock=config.client.mock))

    # Optional deferred handler gets its own fan-out queue.
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = proc_handler.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        proc_handler.add_process(
            deferred_cls(daemon=True,
                         queue=deferred_queue,
                         log_channel=proc_handler.log_channel,
                         running=proc_handler.run_trigger,
                         config=config,
                         mock=config.client.mock))
        fan_out.append(deferred_queue)

    if enable_listener:
        proc_handler.add_process(
            evhandlers.EventLogListener(daemon=True,
                                        queue=event_queue,
                                        fan_out_queues=fan_out,
                                        log_channel=proc_handler.log_channel,
                                        running=proc_handler.run_trigger,
                                        channels=channels))

    if enable_collectors:
        for chan in channels:
            proc_handler.add_process(
                evhandlers.EventLogCollector(
                    daemon=True,
                    queue=event_queue,
                    fan_out_queues=fan_out,
                    log_channel=proc_handler.log_channel,
                    running=proc_handler.run_trigger,
                    channel=chan,
                    config=config.eventcollector))
    proc_handler.serve()
def update_mappings(progname, config):
    """Update the system event-to-handler mappings for TARGET_SYSTEM.

    Collects the event names handled by the configured handler class
    (and the deferred handler class, if configured) and registers them
    with utils.update_system_mappings.
    """
    # Copy the class-level list: `event_map.events` is an attribute of
    # the handler class itself, so extending the raw reference below
    # would permanently mutate the class's shared event list.
    events = list(
        getattr(Utils.dyn_import(config.handler.handler_mod),
                config.handler.handler_class).event_map.events)
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        events.extend(
            getattr(Utils.dyn_import(config.deferred_handler.handler_mod),
                    config.deferred_handler.handler_class).event_map.events)
    utils.update_system_mappings(progname, TARGET_SYSTEM, events)
def serve(logger, config, num_workers, enable_listener, enable_collectors):
    """Start event daemon processes for TARGET_SYSTEM.

    Registers worker handler processes, an optional deferred handler,
    and (optionally) a DB event listener and per-channel collectors,
    then enters the process handler's serve loop.
    """
    logger.info('Starting {!r} event utils'.format(TARGET_SYSTEM))
    channels = [
        TARGET_SYSTEM,
    ]
    ph = utils.ProcessHandler(logger=logger, manager=Manager)
    event_queue = ph.mgr.queue()
    fan_out = []

    # Resolve the configured handler class dynamically.
    handler_cls = getattr(Utils.dyn_import(config.handler.handler_mod),
                          config.handler.handler_class)
    for _ in range(num_workers):
        ph.add_process(handler_cls,
                       queue=event_queue,
                       log_queue=ph.log_queue,
                       running=ph.run_trigger,
                       config=config,
                       mock=config.client.mock)

    # Optional deferred handler with its own fan-out queue.
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        deferred_queue = ph.mgr.queue()
        deferred_cls = getattr(
            Utils.dyn_import(config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class)
        ph.add_process(deferred_cls,
                       queue=deferred_queue,
                       log_queue=ph.log_queue,
                       running=ph.run_trigger,
                       config=config,
                       mock=config.client.mock)
        fan_out.append(deferred_queue)

    if enable_listener:
        ph.add_process(evhandlers.DBEventListener,
                       queue=event_queue,
                       fan_out_queues=fan_out,
                       log_queue=ph.log_queue,
                       running=ph.run_trigger,
                       channels=channels)

    if enable_collectors:
        for chan in channels:
            ph.add_process(evhandlers.DBEventCollector,
                           queue=event_queue,
                           fan_out_queues=fan_out,
                           log_queue=ph.log_queue,
                           running=ph.run_trigger,
                           channel=chan,
                           config=config.eventcollector)
    ph.serve()
def update_mappings(progname, config):
    """Update the system event-to-handler mappings for TARGET_SYSTEM.

    Gathers the events handled by the configured handler class (plus
    the deferred handler class, if one is configured) and hands them to
    utils.update_system_mappings.
    """
    # Copy the class-level list: `event_map.events` belongs to the
    # handler class, so calling extend() on the raw reference below
    # would permanently mutate that shared class attribute.
    events = list(
        getattr(Utils.dyn_import(config.handler.handler_mod),
                config.handler.handler_class).event_map.events)
    if (config.deferred_handler.handler_mod
            and config.deferred_handler.handler_class):
        events.extend(getattr(Utils.dyn_import(
            config.deferred_handler.handler_mod),
            config.deferred_handler.handler_class).event_map.events)
    utils.update_system_mappings(
        progname,
        TARGET_SYSTEM,
        events)
def fullsync(user_class_ref, url, user_spread=None, group_spread=None, dryrun=False, delete_objects=False, ad_ldap=None): # Get module and glass name, and use getattr to get a class object. modname, classname = user_class_ref.split("/") sync_class = getattr(Utils.dyn_import(modname), classname) # Group or user sync? sync_type = 'user' spread = user_spread if group_spread: sync_type = 'group' spread = group_spread # Different logger for different adsyncs logger_name = "ad_" + sync_type + "sync_" + str(spread).split('@ad_')[1] logger = Utils.Factory.get_logger(logger_name) # Catch protocolError to avoid that url containing password is # written to log try: # instantiate sync_class and call full_sync sync_class(db, co, logger, url=url, ad_ldap=ad_ldap).full_sync(sync_type, delete_objects, spread, dryrun, user_spread) except xmlrpclib.ProtocolError, xpe: logger.critical("Error connecting to AD service. Giving up!: %s %s" % (xpe.errcode, xpe.errmsg))
def load_extensions(self):
    """ Load BofhdExtensions (commands and help texts).

    This will load and initialize the BofhdExtensions specified by the
    configuration.
    """
    # Reload modules from a previous load so code changes take effect.
    self.extensions = getattr(self, 'extensions', set())
    for ext in self.extensions:
        reload(sys.modules[ext.__module__])

    self.extensions = set()
    self.classmap.clear()
    self.commands.clear()

    for module_name, class_name in self.__config.extensions():
        mod = Utils.dyn_import(module_name)
        # TODO: Make dyn_import support class name
        try:
            ext = getattr(mod, class_name)
        except AttributeError:
            raise ImportError("Module '{}' has no class '{}'"
                              .format(module_name, class_name))
        self.extensions.add(ext)
        # Map commands to BofhdExtensions; any duplicate command name is
        # overloaded by a later extension.
        for rpc in ext.list_commands('all_commands').keys():
            self.classmap[rpc] = ext
        for rpc in ext.list_commands('hidden_commands').keys():
            self.classmap[rpc] = ext

    # Warn about registered commands that are not actually implemented.
    for rpc in sorted(self.classmap.keys()):
        if not hasattr(self.classmap[rpc], rpc):
            self.logger.warn("Warning, command %r is not implemented", rpc)

    self.__help = Help(self.extensions, logger=self.logger)
    self._log_help_text_mismatch()
    self._log_command_mismatch()
def load_extensions(self):
    """ Load BofhdExtensions (commands and help texts).

    This will load and initialize the BofhdExtensions specified by the
    configuration.
    """
    # Reload modules from a previous load so code changes take effect.
    self.extensions = getattr(self, 'extensions', set())
    for ext in self.extensions:
        reload(sys.modules[ext.__module__])

    self.extensions = set()
    self.classmap.clear()
    self.commands.clear()

    for module_name, class_name in self.__config.extensions():
        mod = Utils.dyn_import(module_name)
        ext = getattr(mod, class_name)
        self.extensions.add(ext)
        # Map commands to BofhdExtensions; any duplicate command name is
        # overloaded by a later extension.
        for rpc in ext.list_commands('all_commands').keys():
            self.classmap[rpc] = ext
        for rpc in ext.list_commands('hidden_commands').keys():
            self.classmap[rpc] = ext

    # Warn about registered commands that are not actually implemented.
    for rpc in sorted(self.classmap.keys()):
        if not hasattr(self.classmap[rpc], rpc):
            self.logger.warn("Warning, command '%s' is not implemented",
                             rpc)

    self.__help = Help(self.extensions, logger=self.logger)
    self._log_help_text_mismatch()
    self._log_command_mismatch()
def fullsync(user_class_ref, url, user_spread=None, group_spread=None, dryrun=False, delete_objects=False, ad_ldap=None): # Get module and glass name, and use getattr to get a class object. modname, classname = user_class_ref.split("/") sync_class = getattr(Utils.dyn_import(modname), classname) # Group or user sync? sync_type='user' spread=user_spread if group_spread: sync_type = 'group' spread=group_spread # Different logger for different adsyncs logger_name = "ad_" + sync_type + "sync_" + str(spread).split('@ad_')[1] logger = Utils.Factory.get_logger(logger_name) # Catch protocolError to avoid that url containing password is # written to log try: # instantiate sync_class and call full_sync sync_class(db, co, logger, url=url, ad_ldap=ad_ldap).full_sync( sync_type, delete_objects, spread, dryrun, user_spread) except xmlrpclib.ProtocolError, xpe: logger.critical("Error connecting to AD service. Giving up!: %s %s" % (xpe.errcode, xpe.errmsg))
def _import_cls(spec):
    """Resolve a 'module/ClassName' spec string to the class object."""
    module_name, class_name = spec.split('/')
    module = Utils.dyn_import(module_name)
    return getattr(module, class_name)