def setup(bot):
    """Set up the url plugin: config section, exclusion regexes, and memory."""
    bot.config.define_section('url', UrlSection)

    # Compile the operator-configured exclusion patterns (empty when unset).
    patterns = [re.compile(s) for s in (bot.config.url.exclude or [])]

    # We're keeping these in their own list, rather than putting then in the
    # callbacks list because 1, it's easier to deal with modules that are still
    # using this list, and not the newer callbacks list and 2, having a lambda
    # just to pass is kinda ugly.
    if 'url_exclude' in bot.memory:
        existing = bot.memory['url_exclude']
        if patterns:
            existing.extend(patterns)
        bot.memory['url_exclude'] = existing
    else:
        bot.memory['url_exclude'] = patterns

    # Make sure the shared memory slots this plugin relies on exist.
    for key in ('last_seen_url', 'shortened_urls'):
        if key not in bot.memory:
            bot.memory[key] = tools.SopelMemory()
def setup(bot=None): global url_finder # TODO figure out why this is needed, and get rid of it, because really? if not bot: return bot.config.define_section('url', UrlSection) if bot.config.url.exclude: regexes = [re.compile(s) for s in bot.config.url.exclude] else: regexes = [] # We're keeping these in their own list, rather than putting then in the # callbacks list because 1, it's easier to deal with modules that are still # using this list, and not the newer callbacks list and 2, having a lambda # just to pass is kinda ugly. if not bot.memory.contains('url_exclude'): bot.memory['url_exclude'] = regexes else: exclude = bot.memory['url_exclude'] if regexes: exclude.extend(regexes) bot.memory['url_exclude'] = exclude # Ensure that url_callbacks and last_seen_url are in memory if not bot.memory.contains('url_callbacks'): bot.memory['url_callbacks'] = tools.SopelMemory() if not bot.memory.contains('last_seen_url'): bot.memory['last_seen_url'] = tools.SopelMemory() url_finder = re.compile( r'(?u)(%s?(?:http|https|ftp)(?:://\S+))' % (bot.config.url.exclusion_char), re.IGNORECASE)
def setup(bot):
    """Set up the url plugin and publish a ``find_urls`` helper closure."""
    global find_urls
    bot.config.define_section('url', UrlSection)
    if bot.config.url.exclude:
        regexes = [re.compile(s) for s in bot.config.url.exclude]
    else:
        regexes = []

    # We're keeping these in their own list, rather than putting then in the
    # callbacks list because 1, it's easier to deal with modules that are still
    # using this list, and not the newer callbacks list and 2, having a lambda
    # just to pass is kinda ugly.
    if not bot.memory.contains('url_exclude'):
        bot.memory['url_exclude'] = regexes
    else:
        exclude = bot.memory['url_exclude']
        if regexes:
            exclude.extend(regexes)
        bot.memory['url_exclude'] = exclude

    # Ensure that url_callbacks and last_seen_url are in memory
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    if not bot.memory.contains('last_seen_url'):
        bot.memory['last_seen_url'] = tools.SopelMemory()

    def find_func(text, clean=False):
        # Extract every http/https/ftp URL from ``text``; the negative
        # lookbehind skips URLs prefixed with the exclusion character.
        # With clean=True, trailing punctuation is trimmed from each match.
        def trim_url(url):
            # clean trailing sentence- or clause-ending punctuation
            while url[-1] in '.,?!\'":;':
                url = url[:-1]
            # clean unmatched parentheses/braces/brackets
            for (opener, closer) in [('(', ')'), ('[', ']'), ('{', '}'), ('<', '>')]:
                if (url[-1] == closer) and (url.count(opener) < url.count(closer)):
                    url = url[:-1]
            return url

        re_url = r'(?u)((?<!%s)(?:http|https|ftp)(?::\/\/\S+))'\
            % (bot.config.url.exclusion_char)
        r = re.compile(re_url, re.IGNORECASE)

        urls = re.findall(r, text)
        if clean:
            urls = [trim_url(url) for url in urls]
        return urls

    find_urls = find_func
def setup(bot):
    """Set up the help plugin: config section and per-section settings cache."""
    bot.config.define_section('help', HelpSection)

    # Initialize memory
    if SETTING_CACHE_NAMESPACE not in bot.memory:
        bot.memory[SETTING_CACHE_NAMESPACE] = tools.SopelMemory()

    # Initialize settings cache
    for section in TRACKED_SETTINGS:
        if section not in bot.memory[SETTING_CACHE_NAMESPACE]:
            bot.memory[SETTING_CACHE_NAMESPACE][section] = tools.SopelMemory()

    update_cache(bot)  # Populate cache
    # Bug fix: a second, redundant define_section('help', HelpSection) call
    # was removed here; defining the section once is sufficient.
def register(self, callables, jobs, shutdowns, urls):
    """Register a plugin's callables, jobs, shutdown hooks, and URL callbacks.

    :param callables: rule/command handler functions to index by priority
    :param jobs: periodic functions, one Job per declared interval
    :param shutdowns: functions to run when the bot shuts down
    :param urls: functions with a ``url_regex`` attribute to dispatch on URLs
    """
    # Append module's shutdown function to the bot's list of functions to
    # call on shutdown
    self.shutdown_methods += shutdowns
    for callbl in callables:
        if hasattr(callbl, 'rule'):
            for rule in callbl.rule:
                self._callables[callbl.priority][rule].append(callbl)
        else:
            # No explicit rule: fire the callable on every line.
            self._callables[callbl.priority][re.compile('.*')].append(
                callbl)
        if hasattr(callbl, 'commands'):
            module_name = callbl.__module__.rsplit('.', 1)[-1]
            # TODO doc and make decorator for this. Not sure if this is how
            # it should work yet, so not making it public for 6.0.
            category = getattr(callbl, 'category', module_name)
            self._command_groups[category].append(callbl.commands[0])
        # Expose each command's docs/examples to the help machinery.
        for command, docs in callbl._docs.items():
            self.doc[command] = docs
    # Schedule one Job per declared interval for each periodic function.
    for func in jobs:
        for interval in func.interval:
            job = sopel.tools.jobs.Job(interval, func)
            self.scheduler.add_job(job)

    if not self.memory.contains('url_callbacks'):
        self.memory['url_callbacks'] = tools.SopelMemory()
    for func in urls:
        self.memory['url_callbacks'][func.url_regex] = func
def setup(bot: Sopel):
    """Set up the safety plugin: defaults, caches, allow-list, local data."""
    bot.settings.define_section("safety", SafetySection)

    if bot.settings.safety.default_mode is None:
        bot.settings.safety.default_mode = "on"
    # migrate from enabled_by_default to default_mode. TODO: remove in v8.1 or v9
    if not bot.settings.safety.enabled_by_default:
        bot.settings.safety.default_mode = "off"
        LOGGER.warning(
            "config: enabled_by_default is deprecated, please use default_mode=off",
        )

    if SAFETY_CACHE_KEY not in bot.memory:
        bot.memory[SAFETY_CACHE_KEY] = tools.SopelMemory()
    if SAFETY_CACHE_LOCK_KEY not in bot.memory:
        bot.memory[SAFETY_CACHE_LOCK_KEY] = threading.Lock()
    # Precompile the operator's allow-list patterns, case-insensitively.
    for item in bot.settings.safety.known_good:
        known_good.append(re.compile(item, re.I))

    # clean up old files. TODO: remove in v8.1 or 9
    old_file = os.path.join(bot.settings.homedir, "malwaredomains.txt")
    if os.path.exists(old_file) and os.path.isfile(old_file):
        LOGGER.info('Removing old malwaredomains file from %s', old_file)
        try:
            os.remove(old_file)
        except Exception as err:
            # for lack of a more specific error type...
            # Python on Windows throws an exception if the file is in use
            LOGGER.info('Could not delete %s: %s', old_file, str(err))

    update_local_cache(bot, init=True)
def setup(bot):
    """Set up the wikipedia plugin and register its article-URL callback."""
    bot.config.define_section('wikipedia', WikipediaSection)

    # Bug fix: the dots are now escaped; the old pattern used bare '.' which
    # matched ANY character (e.g. "enXwikipediaXorg/wiki/..." would match).
    regex = re.compile(r'([a-z]+)\.(wikipedia\.org/wiki/)([^ ]+)')
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    bot.memory['url_callbacks'][regex] = mw_info
def register_url_callback(self, pattern, callback):
    """Register a ``callback`` for URLs matching the regex ``pattern``.

    :param pattern: compiled regex pattern to register
    :type pattern: :ref:`re.Pattern <python:re-objects>`
    :param callback: callable object to handle matching URLs
    :type callback: :term:`function`

    .. versionadded:: 7.0

        This method replaces manual management of ``url_callbacks`` in
        Sopel's plugins, so instead of doing this in ``setup()``::

            if 'url_callbacks' not in bot.memory:
                bot.memory['url_callbacks'] = tools.SopelMemory()

            regex = re.compile(r'http://example.com/path/.*')
            bot.memory['url_callbacks'][regex] = callback

        use this much more concise pattern::

            regex = re.compile(r'http://example.com/path/.*')
            bot.register_url_callback(regex, callback)

    """
    if 'url_callbacks' not in self.memory:
        self.memory['url_callbacks'] = tools.SopelMemory()

    # Accept plain strings as well and compile them on the fly.
    # NOTE(review): `basestring` is a Python 2 builtin; presumably a py2/py3
    # compat alias is imported elsewhere in this module — confirm.
    if isinstance(pattern, basestring):
        pattern = re.compile(pattern)

    self.memory['url_callbacks'][pattern] = callback
def cache_lines(bot, trigger):
    """Remember the most recent line said by each nick, per channel."""
    channel_cache = bot.memory['mock_lines']
    if trigger.sender not in channel_cache:
        channel_cache[trigger.sender] = tools.SopelMemory()

    text = trigger.group()
    # don't store /me commands, or obvious bot commands
    if text.startswith('\x01ACTION') or re.match(bot.config.core.prefix, text):
        return
    channel_cache[trigger.sender][trigger.nick] = text
def setup(bot):
    """Define youtube config, hook the URL callback, and build the API client."""
    global API
    bot.config.define_section('youtube', YoutubeSection)

    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    bot.memory['url_callbacks'][regex] = get_info

    # Build the Google API client once, using the configured key.
    API = apiclient.discovery.build(
        "youtube", "v3", developerKey=bot.config.youtube.api_key)
def setup(bot):
    """Build the redmine URL pattern from config and register its callback."""
    global pattern_url
    pattern_url = bot.config.redmine.base_url
    # Normalize the configured base URL so it always ends with '/'.
    if not pattern_url.endswith('/'):
        pattern_url = pattern_url + '/'

    # Bug fix: escape the base URL — it is a literal prefix, and characters
    # such as '.' in the hostname are regex metacharacters.  The raw string
    # also drops the invalid '\/' escapes the old non-raw pattern used.
    redmine = re.compile(re.escape(pattern_url) + r'(\S+)/(\w+)')
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    bot.memory['url_callbacks'][redmine] = redmine_url
def setup(bot):
    """Compile the URL finder and initialize shared memory and help entries."""
    global url_finder
    url_finder = re.compile(
        r'(?u)(https?:\/\/[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b'
        r'(?:[-a-zA-Z0-9@:%_\+.~#?&//=]*))',
        re.IGNORECASE)

    if not bot.memory.contains('last_seen_url'):
        bot.memory['last_seen_url'] = tools.SopelMemory()
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    if not bot.memory.contains('help'):
        bot.memory['help'] = tools.SopelMemory()
        bot.memory['help']['title'] = tools.SopelMemory()
        bot.memory['help']['title']['short'] = 'Privmsg or notices link titles'
        # Bug fix: use a list, not a set literal — the original '{...}' built
        # an unordered set, which scrambled the display order of the entries.
        bot.memory['help']['title']['long'] = [
            helptext('all', '!title [status]', 'Prints status of titles for you'),
            helptext('all', '!title on|notice', 'Sets titles to notice'),
            helptext('all', '!title privmsg', 'Sets titles to privmsg'),
            helptext('all', '!title off', 'Turns titles off'),
        ]
def setup(sopel):
    """Register github config, URL callbacks, and the optional webhook."""
    sopel.config.define_section('github', GitHubSection)
    if not sopel.memory.contains('url_callbacks'):
        sopel.memory['url_callbacks'] = tools.SopelMemory()

    # Map each precompiled pattern to its handler.
    callbacks = sopel.memory['url_callbacks']
    callbacks[regex] = issue_info
    callbacks[repoRegex] = data_url
    callbacks[commitRegex] = commit_info

    if sopel.config.github.webhook:
        setup_webhook(sopel)
def setup(bot: SopelWrapper) -> None:
    """Define the owm config section and cache an API client in bot memory."""
    bot.config.define_section('owm', OWMSection)

    # Build the OWM API client once and keep it in shared memory.
    if 'owm' not in bot.memory:
        owm_memory = tools.SopelMemory()
        owm_memory['api'] = get_api(bot.config.owm.api_key)
        bot.memory['owm'] = owm_memory
def setup(sopel):
    """Authenticate with Twitter and register the tweet URL callback.

    :raises ConfigurationError: if the Twitter API credentials are rejected
    """
    try:
        # Bug fix: two of the four credentials were read from a stale
        # `willie.config` reference; all four must come from this bot's config.
        auth = tweepy.OAuthHandler(sopel.config.twitter.consumer_key,
                                   sopel.config.twitter.consumer_secret)
        auth.set_access_token(sopel.config.twitter.access_token,
                              sopel.config.twitter.access_token_secret)
        api = tweepy.API(auth)
    except Exception:
        # Narrowed from a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit.
        raise ConfigurationError('Could not authenticate with Twitter. Are the'
                                 ' API keys configured properly?')

    regex = re.compile(r'twitter.com\/(\S*)\/status\/([\d]+)')
    if not sopel.memory.contains('url_callbacks'):
        sopel.memory['url_callbacks'] = tools.SopelMemory()
    sopel.memory['url_callbacks'][regex] = gettweet
def setup(bot):
    """Compile the bugzilla show_bug URL pattern and register its callback."""
    global regex
    bot.config.define_section('bugzilla', BugzillaSection)

    # Nothing to register unless at least one bugzilla domain is configured.
    if not bot.config.bugzilla.domains:
        return
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()

    domains = '|'.join(bot.config.bugzilla.domains)
    regex = re.compile(
        r'https?://(%s)(/show_bug.cgi\?\S*?)(id=\d+)' % domains)
    bot.memory['url_callbacks'][regex] = show_bug
def __init__(self, nick, admin=False, owner=False):
    """Build a mock bot with one channel, empty user maps, and a mock config."""
    self.nick = nick
    self.user = "******"

    default_channel = tools.Identifier("#Sopel")
    self.channels = tools.SopelIdentifierMemory()
    self.channels[default_channel] = tools.target.Channel(default_channel)
    self.users = tools.SopelIdentifierMemory()

    self.privileges = tools.SopelMemory()
    self.memory = tools.SopelMemory()
    self.memory['url_callbacks'] = tools.SopelMemory()

    self.config = MockConfig()
    self._init_config()
    self.output = []

    # Grant the mock's own nick admin/owner rights when requested.
    if admin:
        self.config.core.admins = [self.nick]
    if owner:
        self.config.core.owner = self.nick
def setup(bot):
    """Set up the url plugin and publish a ``find_urls`` helper closure."""
    global find_urls
    bot.config.define_section('url', UrlSection)
    if bot.config.url.exclude:
        regexes = [re.compile(s) for s in bot.config.url.exclude]
    else:
        regexes = []

    # We're keeping these in their own list, rather than putting then in the
    # callbacks list because 1, it's easier to deal with modules that are still
    # using this list, and not the newer callbacks list and 2, having a lambda
    # just to pass is kinda ugly.
    if not bot.memory.contains('url_exclude'):
        bot.memory['url_exclude'] = regexes
    else:
        exclude = bot.memory['url_exclude']
        if regexes:
            exclude.extend(regexes)
        bot.memory['url_exclude'] = exclude

    # Ensure that url_callbacks and last_seen_url are in memory
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    if not bot.memory.contains('last_seen_url'):
        bot.memory['last_seen_url'] = tools.SopelMemory()

    def find_func(text):
        # Extract every http/https/ftp URL from ``text``; the negative
        # lookbehind skips URLs prefixed with the exclusion character.
        re_url = r'(?u)((?<!%s)(?:http|https|ftp)(?::\/\/\S+))'\
            % (bot.config.url.exclusion_char)
        r = re.compile(re_url, re.IGNORECASE)

        urls = re.findall(r, text)
        return urls

    find_urls = find_func
def setup(bot=None):
    """Set up the url module: exclusion regexes, shared memory, URL finder.

    Does nothing when called without a bot.
    """
    global url_finder, exclusion_char
    if not bot:
        return

    if bot.config.has_option('url', 'exclude'):
        # NOTE(review): `get_list(bot.config.exclude)` looks suspicious —
        # presumably this should read the [url] section's own `exclude`
        # option; confirm against the config API before changing it.
        regexes = [
            re.compile(s) for s in bot.config.url.get_list(bot.config.exclude)
        ]
    else:
        regexes = []

    # We're keeping these in their own list, rather than putting then in the
    # callbacks list because 1, it's easier to deal with modules that are still
    # using this list, and not the newer callbacks list and 2, having a lambda
    # just to pass is kinda ugly.
    if not bot.memory.contains('url_exclude'):
        bot.memory['url_exclude'] = regexes
    else:
        exclude = bot.memory['url_exclude']
        if regexes:
            # Bug fix: was `exclude.append(regexes)`, which nested the new
            # list inside the old one instead of merging the two.
            exclude.extend(regexes)
        # Bug fix: was `bot.memory['url_exclude'] = regexes`, which threw
        # away every previously registered exclusion.
        bot.memory['url_exclude'] = exclude

    # Ensure that url_callbacks and last_seen_url are in memory
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    if not bot.memory.contains('last_seen_url'):
        bot.memory['last_seen_url'] = tools.SopelMemory()

    if bot.config.has_option('url', 'exclusion_char'):
        exclusion_char = bot.config.url.exclusion_char

    url_finder = re.compile(r'(?u)(%s?(?:http|https|ftp)(?:://\S+))' %
                            (exclusion_char))
def setup(bot):
    """
    Tests the validity of the client ID given in the configuration. If it is
    not, initializes sopel's memory callbacks for imgur URLs, and uses them
    as the trigger for the link parsing function.

    :raises ConfigurationError: if the Imgur client ID fails validation
    """
    try:
        client = ImgurClient(bot.config.imgur.client_id)
        client.request('gallery.json')
    except HTTPError:
        # Bug fix: the old message used a backslash line continuation inside
        # the string literal, which embedded a long run of spaces in the
        # output; implicit concatenation keeps the message clean.
        raise ConfigurationError(
            'Could not validate the client ID with Imgur. '
            'Are you sure you set it up correctly?')

    imgur_regex = re.compile(r'(?:https?://)?(?:i\.)?imgur\.com/(.*)$')
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.SopelMemory()
    bot.memory['url_callbacks'][imgur_regex] = imgur
def setup(sopel):
    """Set up github: config, URL callbacks, webhook, and config migration."""
    sopel.config.define_section('github', GitHubSection)

    if 'url_callbacks' not in sopel.memory:
        sopel.memory['url_callbacks'] = tools.SopelMemory()
    sopel.memory['url_callbacks'][repoRegex] = repo_info
    sopel.memory['url_callbacks'][issueRegex] = issue_info
    sopel.memory['url_callbacks'][commitRegex] = commit_info

    if sopel.config.github.webhook:
        setup_webhook(sopel)

    # One-time migration: the old `secret` option was renamed to
    # `client_secret`; copy it over, drop the old key, and persist.
    if not sopel.config.github.client_secret:
        if sopel.config.github.secret:
            sopel.config.github.client_secret = sopel.config.github.secret
            del sopel.config.github.secret
            sopel.config.save()
            tools.stderr(
                "[GitHub] Migrated `secret` to `client_secret` in config.")
def setup(bot):
    """Set up safety caches and (re)load the unsafe-domains blocklist."""
    bot.config.define_section('safety', SafetySection)

    if 'safety_cache' not in bot.memory:
        bot.memory['safety_cache'] = tools.SopelMemory()
    if 'safety_cache_lock' not in bot.memory:
        bot.memory['safety_cache_lock'] = threading.Lock()
    # Precompile the operator's allow-list patterns, case-insensitively.
    for item in bot.config.safety.known_good:
        known_good.append(re.compile(item, re.I))

    # Remove the obsolete malwaredomains data file, if present.
    old_file = os.path.join(bot.config.homedir, 'malwaredomains.txt')
    if os.path.exists(old_file) and os.path.isfile(old_file):
        LOGGER.info('Removing old malwaredomains file from %s', old_file)
        try:
            os.remove(old_file)
        except Exception as err:
            # for lack of a more specific error type...
            # Python on Windows throws an exception if the file is in use
            LOGGER.info('Could not delete %s: %s', old_file, str(err))

    loc = os.path.join(bot.config.homedir, 'unsafedomains.txt')
    if os.path.isfile(loc):
        if os.path.getmtime(loc) < time.time() - 24 * 60 * 60:
            # File exists but older than one day — update it
            _download_domain_list(loc)
    else:
        _download_domain_list(loc)

    # Parse the hosts-format file: "<ip> <domain>" per line, '#' comments.
    with open(loc, 'r') as f:
        for line in f:
            clean_line = str(line).strip().lower()
            if not clean_line or clean_line[0] == '#':
                # blank line or comment
                continue

            parts = clean_line.split(' ', 1)
            try:
                domain = parts[1]
            except IndexError:
                # line does not contain a hosts entry; skip it
                continue

            if '.' in domain:
                # only publicly routable domains matter; skip loopback/link-local stuff
                malware_domains.add(domain)
def setup(sopel):
    """Set up github: config, URL callbacks, webhook, and the repo table."""
    sopel.config.define_section('github', GithubSection)
    if not sopel.memory.contains('url_callbacks'):
        sopel.memory['url_callbacks'] = tools.SopelMemory()
    sopel.memory['url_callbacks'][regex] = issue_info
    sopel.memory['url_callbacks'][repoRegex] = data_url
    sopel.memory['url_callbacks'][commitRegex] = commit_info
    if sopel.config.github.webhook:
        setup_webhook(sopel)

    # Create the gh_repo table on first run: probe it with a SELECT and
    # build the table only if that query fails.
    conn = sopel.db.connect()
    c = conn.cursor()
    try:
        c.execute('SELECT * FROM gh_repo')
    except Exception:
        create_repo_table(sopel, c)
    conn.commit()
    conn.close()
def process_urls(bot, trigger, urls):
    """
    For each URL in the list, ensure that it isn't handled by another module.
    If not, find where it redirects to, if anywhere. If that redirected URL
    should be handled by another module, dispatch the callback for it.
    Return a list of (title, hostname, tinyurl) tuples for each URL which is
    not handled by another module; tinyurl is None when no shortening was
    needed.
    """
    results = []
    shorten_url_length = bot.config.url.shorten_url_length
    for url in urls:
        # URLs prefixed with the exclusion character are deliberately skipped.
        if not url.startswith(bot.config.url.exclusion_char):
            # Magic stuff to account for international domain names
            try:
                url = web.iri_to_uri(url)
            except Exception:  # TODO: Be specific
                pass
            # First, check that the URL we got doesn't match
            matched = check_callbacks(bot, trigger, url, False)
            if matched:
                continue
            # If the URL is over bot.config.url.shorten_url_length,
            # shorten the URL
            tinyurl = None
            if (shorten_url_length > 0) and (len(url) > shorten_url_length):
                # Check bot memory to see if the shortened URL is already in
                # memory
                if not bot.memory.contains('shortened_urls'):
                    # Initialize shortened_urls as a dict if it doesn't exist.
                    bot.memory['shortened_urls'] = tools.SopelMemory()
                if bot.memory['shortened_urls'].contains(url):
                    tinyurl = bot.memory['shortened_urls'][url]
                else:
                    tinyurl = get_tinyurl(url)
                    bot.memory['shortened_urls'][url] = tinyurl
            # Finally, actually show the URL
            title = find_title(url, verify=bot.config.core.verify_ssl)
            if title:
                results.append((title, get_hostname(url), tinyurl))
    return results
def setup(bot):
    """Set up safety caches and load the malwaredomains blocklist."""
    bot.config.define_section('safety', SafetySection)

    if 'safety_cache' not in bot.memory:
        bot.memory['safety_cache'] = tools.SopelMemory()
    if 'safety_cache_lock' not in bot.memory:
        bot.memory['safety_cache_lock'] = threading.Lock()
    # Precompile the operator's allow-list patterns, case-insensitively.
    for item in bot.config.safety.known_good:
        known_good.append(re.compile(item, re.I))

    loc = os.path.join(bot.config.homedir, 'malwaredomains.txt')
    if os.path.isfile(loc):
        if os.path.getmtime(loc) < time.time() - 24 * 60 * 60 * 7:
            # File exists but older than one week — update it
            _download_malwaredomains_db(loc)
    else:
        _download_malwaredomains_db(loc)

    # NOTE: the `unicode` builtin implies this module targets Python 2.
    with open(loc, 'r') as f:
        for line in f:
            clean_line = unicode(line).strip().lower()
            if clean_line != '':
                malware_domains.add(clean_line)
def setup(bot):
    """Parse the configured Bookie API URL into its parts and register hooks.

    :raises ConfigurationError: when api_url is missing or malformed
    """
    global url_finder, exclusion_char, api_url, api_key, api_user, api_private
    if bot.config.bookie.api_url:
        try:
            # say we have "https://example.com/prefix/api/v1/admin/account?api_key=XXXXXX"
            p = urlparse(bot.config.bookie.api_url)
            # "https://example.com"
            api_url = p.scheme + '://' + p.netloc
            # "/prefix"
            prefix = p.path.split(api_suffix)[0]
            if prefix:
                api_url += prefix
            # "/api/v1/"
            api_url += api_suffix
            # the path element after api_suffix
            # that is, "admin"
            api_user = p.path.split(api_suffix)[1].split('/')[0]
            # "XXXXXX"
            api_key = p.query.split('=')[1]
        except Exception as e:
            # Any slip in the expected URL shape surfaces as a config error.
            raise ConfigurationError('Bookie api_url badly formatted: %s' % str(e))
    else:
        raise ConfigurationError('Bookie module not configured')

    api_private = validate_private(bot.config.bookie.private)
    if bot.config.has_option('url', 'exclusion_char'):
        exclusion_char = bot.config.url.exclusion_char

    # NOTE(review): exclusion_char is interpolated without re.escape();
    # confirm the config restricts it to a plain character.
    url_finder = re.compile(
        r'(?u)(.*?)\s*(%s?(?:http|https|ftp)(?:://\S+)\s*(.*?))' %
        (exclusion_char))
    if bot.config.bookie.auto:
        if not bot.memory.contains('url_callbacks'):
            bot.memory['url_callbacks'] = tools.SopelMemory()
        # Auto-bookmark every URL: a catch-all pattern dispatches bmark.
        bot.memory['url_callbacks'][re.compile('.*')] = bmark
def setup(bot):
    """Hook the Instagram URL pattern into the bot's shared callback table."""
    if 'url_callbacks' not in bot.memory:
        bot.memory['url_callbacks'] = tools.SopelMemory()
    bot.memory['url_callbacks'][instagram_pattern] = instaparse
def setup(bot):
    """Ensure the shared nettix_token cache exists in bot memory."""
    if 'nettix_token' in bot.memory:
        return
    bot.memory['nettix_token'] = tools.SopelMemory()
def setup(bot):
    """Define the qotd config section and reset its shared memory slot."""
    bot.config.define_section("qotd", QotdSection)
    # Unconditionally replaces any previous state with a fresh memory store.
    bot.memory['qotd'] = tools.SopelMemory()
def __init__(self, config, daemon=False):
    """Initialize the bot: callable indexes, state maps, DB, and scheduler."""
    irc.Bot.__init__(self, config)
    self._daemon = daemon  # Used for iPython. TODO something saner here

    # `re.compile('.*') is re.compile('.*')` because of caching, so we need
    # to associate a list with each regex, since they are unexpectedly
    # indistinct.
    self._callables = {
        'high': collections.defaultdict(list),
        'medium': collections.defaultdict(list),
        'low': collections.defaultdict(list)
    }
    self._plugins = {}
    self.config = config
    """The :class:`sopel.config.Config` for the current Sopel instance."""
    self.doc = {}
    """A dictionary of command names to their documentation.

    Each command is mapped to its docstring and any available examples, if
    declared in the module's code.

    .. versionchanged:: 3.2
        Use the first item in each callable's commands list as the key,
        instead of the function name as declared in the source code.
    """
    self._command_groups = collections.defaultdict(list)
    """A mapping of module names to a list of commands in it."""
    self.stats = {}  # deprecated, remove in 7.0
    self._times = {}
    """
    A dictionary mapping lowercased nicks to dictionaries which map
    function names to the time which they were last used by that nick.
    """
    self.server_capabilities = {}
    """A dict mapping supported IRCv3 capabilities to their options.

    For example, if the server specifies the capability ``sasl=EXTERNAL``,
    it will be here as ``{"sasl": "EXTERNAL"}``. Capabilities specified
    without any options will have ``None`` as the value.

    For servers that do not support IRCv3, this will be an empty set.
    """
    self.enabled_capabilities = set()
    """A set containing the IRCv3 capabilities that the bot has enabled."""
    self._cap_reqs = dict()
    """A dictionary of capability names to a list of requests."""
    self.privileges = dict()
    """A dictionary of channels to their users and privilege levels.

    The value associated with each channel is a dictionary of
    :class:`sopel.tools.Identifier`\\s to a bitwise integer value,
    determined by combining the appropriate constants from
    :mod:`sopel.module`.

    .. deprecated:: 6.2.0
        Use :attr:`channels` instead. Will be removed in Sopel 8.
    """
    self.channels = tools.SopelMemory()  # name to chan obj
    """A map of the channels that Sopel is in.

    The keys are :class:`sopel.tools.Identifier`\\s of the channel names,
    and map to :class:`sopel.tools.target.Channel` objects which contain
    the users in the channel and their permissions.
    """
    self.users = tools.SopelMemory()  # name to user obj
    """A map of the users that Sopel is aware of.

    The keys are :class:`sopel.tools.Identifier`\\s of the nicknames, and
    map to :class:`sopel.tools.target.User` instances. In order for Sopel
    to be aware of a user, it must be in at least one channel which they
    are also in.
    """
    self.db = SopelDB(config)
    """The bot's database, as a :class:`sopel.db.SopelDB` instance."""
    self.memory = tools.SopelMemory()
    """
    A thread-safe dict for storage of runtime data to be shared between
    modules. See :class:`sopel.tools.SopelMemory`.
    """
    self.shutdown_methods = []
    """List of methods to call on shutdown."""
    # The job scheduler runs in its own thread from this point on.
    self.scheduler = sopel.tools.jobs.JobScheduler(self)
    self.scheduler.start()

    # Set up block lists
    # Default to empty
    if not self.config.core.nick_blocks:
        self.config.core.nick_blocks = []
    if not self.config.core.host_blocks:
        self.config.core.host_blocks = []
    self.setup()