def _send(self, line, log=True):
    """Send a raw IRC line with no safety checks.

    Performs no connected-state check and is *not* threadsafe.

    :type line: str
    :type log: bool
    """
    # Fire-and-forget: schedule the protocol send on our event loop.
    send_coro = self._protocol.send(line, log=log)
    async_util.wrap_future(send_coro, loop=self.loop)
def connection_lost(self, exc):
    """Handle loss of the transport: log the failure (if any) and schedule an automatic reconnect."""
    self._connected = False
    if exc:
        logger.error("[{}] Connection lost: {}".format(
            self.conn.name, exc))

    # Reconnection runs asynchronously; we don't wait for it here.
    reconnect_coro = self.conn.auto_reconnect()
    async_util.wrap_future(reconnect_coro, loop=self.loop)
def eof_received(self):
    """Handle EOF from the peer: mark disconnected, reset the connected future, and start a reconnect."""
    logger.info("[{}] EOF received.".format(self.conn.name))
    self._connected = False
    # Replace the connected future so waiters block until the next successful connect.
    self._connected_future = asyncio.Future(loop=self.loop)
    async_util.wrap_future(self.conn.connect(), loop=self.loop)
    # Returning True keeps the transport open so connection_lost handling is ours.
    return True
def _send(self, line):
    """Send a raw IRC line with no safety checks, logging it first.

    Doesn't do a connected check, and is *not* threadsafe.

    :type line: str
    """
    logger.info("[{}] >> {}".format(self.name, line))
    outgoing = self._protocol.send(line)
    async_util.wrap_future(outgoing, loop=self.loop)
def cmd(cmd, subcmd, *args):
    """Record the invocation in ``calls`` and feed a synthetic ``* ACK`` reply to the cap handler."""
    calls.append((cmd, subcmd) + args)
    params = ParamList.parse("* ACK :" + " ".join(args))
    ack_event = Event(
        irc_paramlist=params,
        bot=event.bot,
        conn=event.conn,
    )
    # Schedule the cap-ACK handler without awaiting it.
    async_util.wrap_future(cap.on_cap(params, ack_event), loop=event.loop)
def connection_lost(self, exc):
    """Handle loss of the transport.

    An ``exc`` of ``None`` means the connection was closed on purpose,
    so no reconnect is attempted in that case.
    """
    self._connected = False
    # Fresh future so callers can wait for the next successful connect.
    self._connected_future = asyncio.Future(loop=self.loop)
    if exc is not None:
        logger.error("[{}] Connection lost: {}".format(self.conn.name, exc))
        async_util.wrap_future(self.conn.connect(), loop=self.loop)
async def get_initial_data(bot, loop, db, event):
    """Kick off the initial data fetch for every currently-connected client.

    The per-connection coroutines are gathered and scheduled fire-and-forget;
    this function does not wait for them to complete.
    """
    pending = [
        get_initial_connection_data(conn, loop, db, event)
        for conn in bot.connections.values()
        if conn.connected
    ]
    wrap_future(asyncio.gather(*pending))
async def internal_launch(self, hook, event):
    """
    Launches a hook with the data from [event]
    :param hook: The hook to launch
    :param event: The event providing data for the hook
    :return: a tuple of (ok, result) where ok is a boolean that determines
        if the hook ran without error and result is the result from the hook
    """
    if hook.threaded:
        # Threaded hooks run in the default executor to avoid blocking the loop.
        coro = self.bot.loop.run_in_executor(None, self._execute_hook_threaded, hook, event)
    else:
        coro = self._execute_hook_sync(hook, event)

    task = async_util.wrap_future(coro)
    hook.plugin.tasks.append(task)
    try:
        out = await task
        ok = True
    except Exception:
        logger.exception("Error in hook %s", hook.description)
        ok = False
        out = sys.exc_info()
    finally:
        # Remove in a finally so the plugin's task list stays accurate even if
        # the await is cancelled (CancelledError is not an Exception in
        # modern Python, so the original unconditional remove was skipped).
        hook.plugin.tasks.remove(task)

    return ok, out
async def _sieve(self, sieve, event, hook):
    """Run a single sieve hook over [event] and fire any post hooks.

    :type sieve: cloudbot.plugin_hooks.Hook
    :type event: cloudbot.event.Event
    :type hook: cloudbot.plugin_hooks.Hook
    :rtype: cloudbot.event.Event
    """
    if sieve.threaded:
        # Threaded sieves run in the default executor off the event loop.
        coro = self.bot.loop.run_in_executor(None, sieve.function, self.bot, event, hook)
    else:
        coro = sieve.function(self.bot, event, hook)

    result, error = None, None
    task = async_util.wrap_future(coro)
    sieve.plugin.tasks.append(task)
    try:
        result = await task
    except Exception:
        logger.exception(
            "Error running sieve %s on %s:", sieve.description, hook.description)
        error = sys.exc_info()
    finally:
        # Remove in a finally so the bookkeeping list stays accurate even if
        # the await is cancelled (CancelledError bypasses `except Exception`,
        # which would have skipped the original unconditional remove).
        sieve.plugin.tasks.remove(task)

    post_event = partial(
        PostHookEvent, launched_hook=sieve, launched_event=event,
        bot=event.bot, conn=event.conn, result=result, error=error
    )
    # Post hooks may short-circuit further processing by returning False.
    for post_hook in self.hook_hooks["post"]:
        success, res = await self.internal_launch(
            post_hook, post_event(hook=post_hook))
        if success and res is False:
            break

    return result
def get_initial_connection_data(conn, loop, db, event):
    """
    - Update all user data

    Issues a ``WHO 0`` to the server, waits (up to 30 minutes) for the reply
    lines collected elsewhere via the shared future, then records mask/host/
    address data for every user seen.

    :type conn: cloudbot.client.Client
    :type loop: asyncio.AbstractEventLoop
    :type db: sqlalchemy.orm.Session
    :type event: cloudbot.event.Event
    """
    # Skip the (expensive) initial refresh on dev instances unless explicitly
    # triggered by a command.
    if conn.nick.endswith('-dev') and not hasattr(event, 'triggered_command'):
        # Ignore initial data update on development instances
        return

    fut = create_future(loop)
    try:
        # A previous run may have left its (lines, future) pair in conn.memory.
        lines, fut = conn.memory["sherlock"]["futures"]["who_0"][0]
    except LookupError:
        pass
    else:
        if not fut.done():
            # Another collection is still in flight; don't start a second one.
            return "getdata command already running"
    # NOTE(review): if a previous, already-done future was found above, it is
    # stored and awaited again here instead of a fresh one — confirm that the
    # WHO collector replaces it, otherwise wait_for would return stale data.
    conn.memory["sherlock"]["futures"]["who_0"][0] = ([], fut)

    # Brief delay before querying — presumably to let the connection settle;
    # TODO confirm why 10 seconds specifically.
    yield from asyncio.sleep(10)

    now = datetime.datetime.now()
    conn.send("WHO 0")
    try:
        # The WHO response handler elsewhere resolves this future with the
        # accumulated reply lines; allow up to 30 minutes for large networks.
        lines = yield from asyncio.wait_for(fut, 30 * 60)
    except asyncio.TimeoutError:
        return "Timeout reached"
    finally:
        # Always clear the shared slot so a later run can start cleanly.
        with suppress(LookupError):
            del conn.memory["sherlock"]["futures"]["who_0"][0]

    users = []
    for line in lines:
        # Each WHO reply line is expected to unpack into these seven fields.
        chan, ident, host, server, nick, status, realname = line
        # The realname field is prefixed with the hop count ("<hops> <realname>").
        num_hops, _, realname = realname.partition(' ')
        users.append((nick, host))

    # Record each user's mask asynchronously; gathered at the end.
    futs = [
        wrap_future(
            event.async_call(update_user_data, db, masks_table, 'mask', now, nick, mask))
        for nick, mask in users
    ]
    # Host and address lookups are done sequentially per user, in pairs.
    for nick, mask in users:
        yield from asyncio.gather(
            ignore_timeout(
                set_user_data(event, db, hosts_table, 'host', now, nick, get_user_host, conn)),
            ignore_timeout(
                set_user_data(event, db, address_table, 'addr', now, nick, get_user_ip, conn)))

    # Wait for all the mask updates scheduled above to finish.
    yield from asyncio.gather(*futs)
    return "Done."
def data_received(self, data):
    """Buffer incoming bytes and dispatch every complete CRLF-terminated IRC line."""
    self._input_buffer += data
    while True:
        raw_line, sep, remainder = self._input_buffer.partition(b"\r\n")
        if not sep:
            # No complete line buffered yet; wait for more data.
            break

        self._input_buffer = remainder
        line = decode(raw_line)
        try:
            event = self.parse_line(line)
        except Exception:
            logger.exception(
                "[%s] Error occurred while parsing IRC line '%s' from %s",
                self.conn.name,
                line,
                self.conn.describe_server(),
            )
        else:
            # handle the message, async
            async_util.wrap_future(self.bot.process(event), loop=self.loop)
def data_received(self, data):
    """Buffer incoming bytes, parse each complete IRC line, and dispatch an Event.

    Handles PING replies inline, classifies the event type, extracts the
    target/channel/prefix fields, and detects CTCP messages (including
    ACTION). Fix: the invalid-CTCP debug message said "mornal" instead of
    "normal".
    """
    self._input_buffer += data
    while b"\r\n" in self._input_buffer:
        line_data, self._input_buffer = self._input_buffer.split(b"\r\n", 1)
        line = decode(line_data)
        try:
            message = Message.parse(line)
        except Exception:
            logger.exception(
                "[%s] Error occurred while parsing IRC line '%s' from %s",
                self.conn.name, line, self.conn.describe_server()
            )
            continue

        command = message.command
        command_params = message.parameters

        # Reply to pings immediately
        if command == "PING":
            self.conn.send("PONG " + command_params[-1], log=False)

        # Parse the command and params

        # Content: the trailing parameter, cleaned of IRC formatting codes.
        if command_params.has_trail:
            content_raw = command_params[-1]
            content = irc_clean(content_raw)
        else:
            content_raw = None
            content = None

        # Event type
        event_type = irc_command_to_event_type.get(
            command, EventType.other
        )

        # Target (for KICK, INVITE)
        if event_type is EventType.kick:
            target = command_params[1]
        elif command in ("INVITE", "MODE"):
            target = command_params[0]
        else:
            # TODO: Find more commands which give a target
            target = None

        # Parse for CTCP: a \x01-delimited payload inside a PRIVMSG.
        if event_type is EventType.message and content_raw.startswith("\x01"):
            possible_ctcp = content_raw[1:]
            if content_raw.endswith('\x01'):
                possible_ctcp = possible_ctcp[:-1]

            if '\x01' in possible_ctcp:
                # An embedded delimiter means this isn't a well-formed CTCP.
                logger.debug(
                    "[%s] Invalid CTCP message received, "
                    "treating it as a normal message",
                    self.conn.name
                )
                ctcp_text = None
            else:
                ctcp_text = possible_ctcp
                ctcp_text_split = ctcp_text.split(None, 1)
                if ctcp_text_split[0] == "ACTION":
                    # this is a CTCP ACTION, set event_type and content accordingly
                    event_type = EventType.action
                    content = irc_clean(ctcp_text_split[1])
                else:
                    # this shouldn't be considered a regular message
                    event_type = EventType.other
        else:
            ctcp_text = None

        # Channel
        channel = None
        if command_params:
            if command in ["NOTICE", "PRIVMSG", "KICK", "JOIN", "PART", "MODE"]:
                channel = command_params[0]
            elif command == "INVITE":
                channel = command_params[1]
            elif len(command_params) > 2 or not (
                    command_params.has_trail and len(command_params) == 1):
                channel = command_params[0]

        prefix = message.prefix
        if prefix is None:
            nick = None
            user = None
            host = None
            mask = None
        else:
            nick = prefix.nick
            user = prefix.user
            host = prefix.host
            mask = prefix.mask

        if channel:
            # TODO Migrate plugins to accept the original case of the channel
            channel = channel.lower()
            channel = channel.split()[0]  # Just in case there is more data
            # Private messages are addressed to us; treat the sender as the channel.
            if channel == self.conn.nick.lower():
                channel = nick.lower()

        # Set up parsed message
        # TODO: Do we really want to send the raw `prefix` and `command_params` here?
        event = Event(
            bot=self.bot, conn=self.conn, event_type=event_type,
            content_raw=content_raw, content=content, target=target,
            channel=channel, nick=nick, user=user, host=host, mask=mask,
            irc_raw=line, irc_prefix=mask, irc_command=command,
            irc_paramlist=command_params, irc_ctcp_text=ctcp_text
        )

        # handle the message, async
        async_util.wrap_future(self.bot.process(event), loop=self.loop)
async def load_plugin(self, path):
    """
    Loads a plugin from the given path and plugin object,
    then registers all hooks from that plugin.

    The plugin module is imported, its database tables are created, its
    on_start hooks are run (aborting registration on failure), and every
    hook kind it declares is registered into the manager's lookup tables,
    which are then re-sorted by priority.

    :type path: str | Path
    """
    path = Path(path)
    file_path = self.safe_resolve(path)
    file_name = file_path.name
    # Resolve the path relative to the current directory
    plugin_path = file_path.relative_to(self.bot.base_dir)
    # Module title: path components (minus the leading dir) joined by dots,
    # with the file extension stripped.
    title = '.'.join(plugin_path.parts[1:]).rsplit('.', 1)[0]
    if not self.can_load(title):
        return

    # make sure to unload the previously loaded plugin from this path, if it was loaded.
    if self.get_plugin(file_path):
        await self.unload_plugin(file_path)

    module_name = "plugins.{}".format(title)
    try:
        plugin_module = self._load_mod(module_name)
    except Exception:
        logger.exception("Error loading %s:", title)
        return

    # create the plugin
    plugin = Plugin(str(file_path), file_name, title, plugin_module)

    # proceed to register hooks

    # create database tables
    await plugin.create_tables(self.bot)

    # run on_start hooks; a failing hook aborts the whole registration
    for on_start_hook in plugin.hooks["on_start"]:
        success = await self.launch(
            on_start_hook, Event(bot=self.bot, hook=on_start_hook))
        if not success:
            logger.warning(
                "Not registering hooks from plugin %s: on_start hook errored",
                plugin.title)

            # unregister databases
            plugin.unregister_tables(self.bot)
            return

    self._add_plugin(plugin)

    # register capability hooks (available / ack), keyed case-insensitively
    for on_cap_available_hook in plugin.hooks["on_cap_available"]:
        for cap in on_cap_available_hook.caps:
            self.cap_hooks["on_available"][cap.casefold()].append(
                on_cap_available_hook)
        self._log_hook(on_cap_available_hook)

    for on_cap_ack_hook in plugin.hooks["on_cap_ack"]:
        for cap in on_cap_ack_hook.caps:
            self.cap_hooks["on_ack"][cap.casefold()].append(
                on_cap_ack_hook)
        self._log_hook(on_cap_ack_hook)

    # periodic hooks start their own background tasks immediately
    for periodic_hook in plugin.hooks["periodic"]:
        task = async_util.wrap_future(self._start_periodic(periodic_hook))
        plugin.tasks.append(task)
        self._log_hook(periodic_hook)

    # register commands; first registration of an alias wins
    for command_hook in plugin.hooks["command"]:
        for alias in command_hook.aliases:
            if alias in self.commands:
                logger.warning(
                    "Plugin %s attempted to register command %s which was "
                    "already registered by %s. Ignoring new assignment.",
                    plugin.title, alias, self.commands[alias].plugin.title)
            else:
                self.commands[alias] = command_hook
        self._log_hook(command_hook)

    # register raw hooks
    for raw_hook in plugin.hooks["irc_raw"]:
        if raw_hook.is_catch_all():
            self.catch_all_triggers.append(raw_hook)
        else:
            for trigger in raw_hook.triggers:
                if trigger in self.raw_triggers:
                    self.raw_triggers[trigger].append(raw_hook)
                else:
                    self.raw_triggers[trigger] = [raw_hook]
        self._log_hook(raw_hook)

    # register events
    for event_hook in plugin.hooks["event"]:
        for event_type in event_hook.types:
            if event_type in self.event_type_hooks:
                self.event_type_hooks[event_type].append(event_hook)
            else:
                self.event_type_hooks[event_type] = [event_hook]
        self._log_hook(event_hook)

    # register regexps
    for regex_hook in plugin.hooks["regex"]:
        for regex_match in regex_hook.regexes:
            self.regex_hooks.append((regex_match, regex_hook))
        self._log_hook(regex_hook)

    # register sieves
    for sieve_hook in plugin.hooks["sieve"]:
        self.sieves.append(sieve_hook)
        self._log_hook(sieve_hook)

    # register connect hooks
    for connect_hook in plugin.hooks["on_connect"]:
        self.connect_hooks.append(connect_hook)
        self._log_hook(connect_hook)

    for out_hook in plugin.hooks["irc_out"]:
        self.out_sieves.append(out_hook)
        self._log_hook(out_hook)

    for post_hook in plugin.hooks["post_hook"]:
        self.hook_hooks["post"].append(post_hook)
        self._log_hook(post_hook)

    for perm_hook in plugin.hooks["perm_check"]:
        for perm in perm_hook.perms:
            self.perm_hooks[perm].append(perm_hook)
        self._log_hook(perm_hook)

    # Sort hooks
    self.regex_hooks.sort(key=lambda x: x[1].priority)
    dicts_of_lists_of_hooks = (self.event_type_hooks, self.raw_triggers,
                               self.perm_hooks, self.hook_hooks)
    lists_of_hooks = [
        self.catch_all_triggers, self.sieves, self.connect_hooks,
        self.out_sieves
    ]
    lists_of_hooks.extend(
        chain.from_iterable(d.values() for d in dicts_of_lists_of_hooks))

    # every hook list (flat or per-key) is kept sorted by hook priority
    for lst in lists_of_hooks:
        lst.sort(key=attrgetter("priority"))

    # we don't need this anymore
    del plugin.hooks["on_start"]
def load_geoip(loop):
    """Schedule the GeoIP database check on *loop* without waiting for it."""
    check_coro = check_db(loop)
    async_util.wrap_future(check_coro, loop=loop)
def load_plugin(self, path):
    """
    Loads a plugin from the given path and plugin object,
    then registers all hooks from that plugin.

    Won't load any plugins listed in "disabled_plugins".

    Legacy coroutine (``yield from``) variant: imports (or reloads) the
    module, runs on_start hooks (aborting registration on failure), and
    registers every declared hook kind into the manager's lookup tables,
    which are then re-sorted by priority.

    :type path: str | Path
    """
    path = Path(path)
    file_path = path.resolve()
    file_name = file_path.name
    # Resolve the path relative to the current directory
    plugin_path = file_path.relative_to(self.bot.base_dir)
    # Module title: path components (minus the leading dir) joined by dots,
    # with the file extension stripped.
    title = '.'.join(plugin_path.parts[1:]).rsplit('.', 1)[0]

    # Respect the configured whitelist/blacklist, if any.
    if "plugin_loading" in self.bot.config:
        pl = self.bot.config.get("plugin_loading")

        if pl.get("use_whitelist", False):
            if title not in pl.get("whitelist", []):
                logger.info(
                    'Not loading plugin module "{}": plugin not whitelisted'
                    .format(title))
                return
        else:
            if title in pl.get("blacklist", []):
                logger.info(
                    'Not loading plugin module "{}": plugin blacklisted'.
                    format(title))
                return

    # make sure to unload the previously loaded plugin from this path, if it was loaded.
    if str(file_path) in self.plugins:
        yield from self.unload_plugin(file_path)

    module_name = "plugins.{}".format(title)
    try:
        plugin_module = importlib.import_module(module_name)
        # if this plugin was loaded before, reload it
        if hasattr(plugin_module, "_cloudbot_loaded"):
            importlib.reload(plugin_module)
    except Exception:
        logger.exception("Error loading {}:".format(title))
        return

    # create the plugin
    plugin = Plugin(str(file_path), file_name, title, plugin_module)

    # proceed to register hooks

    # create database tables
    yield from plugin.create_tables(self.bot)

    # run on_start hooks; a failing hook aborts the whole registration
    for on_start_hook in plugin.hooks["on_start"]:
        success = yield from self.launch(
            on_start_hook, Event(bot=self.bot, hook=on_start_hook))
        if not success:
            logger.warning(
                "Not registering hooks from plugin {}: on_start hook errored"
                .format(plugin.title))

            # unregister databases
            plugin.unregister_tables(self.bot)
            return

    self.plugins[plugin.file_path] = plugin
    self._plugin_name_map[plugin.title] = plugin

    # register capability hooks (available / ack), keyed case-insensitively
    for on_cap_available_hook in plugin.hooks["on_cap_available"]:
        for cap in on_cap_available_hook.caps:
            self.cap_hooks["on_available"][cap.casefold()].append(
                on_cap_available_hook)
        self._log_hook(on_cap_available_hook)

    for on_cap_ack_hook in plugin.hooks["on_cap_ack"]:
        for cap in on_cap_ack_hook.caps:
            self.cap_hooks["on_ack"][cap.casefold()].append(
                on_cap_ack_hook)
        self._log_hook(on_cap_ack_hook)

    # periodic hooks start their own background tasks immediately
    for periodic_hook in plugin.hooks["periodic"]:
        task = async_util.wrap_future(self._start_periodic(periodic_hook))
        plugin.tasks.append(task)
        self._log_hook(periodic_hook)

    # register commands; first registration of an alias wins
    for command_hook in plugin.hooks["command"]:
        for alias in command_hook.aliases:
            if alias in self.commands:
                logger.warning(
                    "Plugin {} attempted to register command {} which was already registered by {}. "
                    "Ignoring new assignment.".format(
                        plugin.title, alias,
                        self.commands[alias].plugin.title))
            else:
                self.commands[alias] = command_hook
        self._log_hook(command_hook)

    # register raw hooks
    for raw_hook in plugin.hooks["irc_raw"]:
        if raw_hook.is_catch_all():
            self.catch_all_triggers.append(raw_hook)
        else:
            for trigger in raw_hook.triggers:
                if trigger in self.raw_triggers:
                    self.raw_triggers[trigger].append(raw_hook)
                else:
                    self.raw_triggers[trigger] = [raw_hook]
        self._log_hook(raw_hook)

    # register events
    for event_hook in plugin.hooks["event"]:
        for event_type in event_hook.types:
            if event_type in self.event_type_hooks:
                self.event_type_hooks[event_type].append(event_hook)
            else:
                self.event_type_hooks[event_type] = [event_hook]
        self._log_hook(event_hook)

    # register regexps
    for regex_hook in plugin.hooks["regex"]:
        for regex_match in regex_hook.regexes:
            self.regex_hooks.append((regex_match, regex_hook))
        self._log_hook(regex_hook)

    # register sieves
    for sieve_hook in plugin.hooks["sieve"]:
        self.sieves.append(sieve_hook)
        self._log_hook(sieve_hook)

    # register connect hooks
    for connect_hook in plugin.hooks["on_connect"]:
        self.connect_hooks.append(connect_hook)
        self._log_hook(connect_hook)

    for out_hook in plugin.hooks["irc_out"]:
        self.out_sieves.append(out_hook)
        self._log_hook(out_hook)

    for post_hook in plugin.hooks["post_hook"]:
        self.hook_hooks["post"].append(post_hook)
        self._log_hook(post_hook)

    for perm_hook in plugin.hooks["perm_check"]:
        for perm in perm_hook.perms:
            self.perm_hooks[perm].append(perm_hook)
        self._log_hook(perm_hook)

    # sort sieve hooks by priority
    # NOTE(review): sieves and connect_hooks are sorted here and again in the
    # generic loop below — the second sort is redundant but harmless.
    self.sieves.sort(key=lambda x: x.priority)

    self.connect_hooks.sort(key=attrgetter("priority"))

    # Sort hooks
    self.regex_hooks.sort(key=lambda x: x[1].priority)
    dicts_of_lists_of_hooks = (self.event_type_hooks, self.raw_triggers,
                               self.perm_hooks, self.hook_hooks)
    lists_of_hooks = [
        self.catch_all_triggers, self.sieves, self.connect_hooks,
        self.out_sieves
    ]
    lists_of_hooks.extend(
        chain.from_iterable(d.values() for d in dicts_of_lists_of_hooks))

    # every hook list (flat or per-key) is kept sorted by hook priority
    for lst in lists_of_hooks:
        lst.sort(key=attrgetter("priority"))

    # we don't need this anymore
    del plugin.hooks["on_start"]