Пример #1
0
    def check_blacklist(self, _url, context):
        """Return True if the URL matches any configured blacklist regex."""
        url_string = _url.to_string()
        flags = str_to_regex_flags("ui")

        for pattern in context["config"]["blacklist"]:
            if re.match(pattern, url_string, flags=flags):
                self.logger.debug("Matched blacklist regex: %s" % pattern)
                return True

        return False
Пример #2
0
    def check_blacklist(self, _url, context):
        """Check the URL string against every blacklist regex in the config."""
        blacklist = context["config"]["blacklist"]
        matched = False

        for entry in blacklist:
            if re.match(entry, _url.to_string(),
                        flags=str_to_regex_flags("ui")):
                self.logger.debug("Matched blacklist regex: %s" % entry)
                matched = True
                break

        return matched
Пример #3
0
    def get_proxy(self, _url=None, group=None):
        """
        Find the most specific proxy configured for a group or URL.

        Precedence: group-specific proxy, then the first domain-pattern
        proxy whose regex matches the URL's domain, then the global proxy
        (None if nothing is configured).

        :type _url: URL
        :type group: basestring
        """

        if group is not None:
            group_proxy = (
                self.config.get("proxies", {})
                .get("groups", {})
                .get(group, None)
            )

            if group_proxy:
                return group_proxy

        if _url is not None:
            domain_proxies = self.config.get("proxies", {}).get("domains", {})

            for pattern, domain_proxy in domain_proxies.iteritems():
                if re.match(pattern, _url.domain, str_to_regex_flags("iu")):
                    return domain_proxy

        return self.config.get("proxies", {}).get("global", None)
Пример #4
0
    def get_proxy(self, _url=None, group=None):
        """
        Resolve the proxy to use: per-group first, then per-domain regex,
        then the global default (None when nothing is configured).

        :type _url: URL
        :type group: basestring
        """
        proxies = self.config.get("proxies", {})

        if group is not None:
            proxy = proxies.get("groups", {}).get(group, None)

            if proxy:
                return proxy

        if _url is not None:
            domain_proxies = proxies.get("domains", {})

            for pattern in domain_proxies.iterkeys():
                if re.match(pattern, _url.domain, str_to_regex_flags("iu")):
                    return domain_proxies[pattern]

        return proxies.get("global", None)
Пример #5
0
 def url_can_resolve(self, url):
     """
     Return True unless the URL's domain is a Tor (.onion) or I2P (.i2p)
     hidden-service address, which cannot be resolved over normal DNS.
     """
     # Fix: anchor with $ so only domains actually *ending* in .onion/.i2p
     # are excluded — the old unanchored pattern also matched domains such
     # as "foo.onion.example". Raw string avoids the "\." escape warning.
     return not re.match(
         r".*\.(onion|i2p)$",
         url.domain,
         str_to_regex_flags("iu")
     )
Пример #6
0
    def get_session(self, url, context):
        """
        Pick the appropriate requests session for *url*.

        Selection order, driven by the "sessions" section of the config:

        * sessions disabled          -> fresh throwaway session
        * domain in "never" patterns -> fresh throwaway session
        * domain in a "group"        -> cached per-group session with its
                                        own cookie jar
        * otherwise                  -> the global session (or a proxied
                                        equivalent sharing the global jar)

        Throwaway sessions get session_type = None so save_session()
        skips persisting their cookies.
        """
        sessions = context.get("config", {}).get("sessions", {})

        if not sessions.get("enable", False):
            self.urls_plugin.logger.debug("Sessions are disabled.")

            proxy = self.urls_plugin.get_proxy(url)

            if not proxy:
                s = Session()
            else:
                s = ProxySession(proxy)

            # None marks this as a throwaway session (never persisted)
            s.session_type = None

            return s

        # NOTE(review): sessions["never"] / sessions["group"] are indexed
        # without defaults; presumably the config schema guarantees these
        # keys whenever sessions are enabled — worth confirming.
        for entry in sessions["never"]:
            if re.match(entry, url.domain, flags=str_to_regex_flags("ui")):
                self.urls_plugin.logger.debug(
                    "Domain {0} is blacklisted for sessions.".format(
                        url.domain
                    )
                )
                proxy = self.urls_plugin.get_proxy(url)

                if not proxy:
                    s = Session()
                else:
                    s = ProxySession(proxy)

                s.session_type = None

                return s

        for group, entries in sessions["group"].iteritems():
            for entry in entries:
                try:
                    if re.match(
                            entry, url.domain, flags=str_to_regex_flags("ui")
                    ):
                        self.urls_plugin.logger.debug(
                            "Domain {0} uses the '{1}' group sessions.".format(
                                url.domain, group
                            )
                        )

                        # Lazily create and cache one session per group,
                        # each with its own on-disk cookie jar
                        if group not in self.group_sessions:
                            proxy = self.urls_plugin.get_proxy(group=group)

                            if not proxy:
                                s = Session()
                            else:
                                s = ProxySession(proxy)

                            s.cookies = (
                                self.get_cookie_jar(
                                    "/groups/{0}.txt".format(
                                        group
                                    )
                                )
                            )

                            s.session_type = "group"
                            s.cookies.set_mode(
                                context.get("config")
                                .get("sessions")
                                .get("cookies")
                                .get("group")
                            )

                            self.group_sessions[group] = s

                        return self.group_sessions[group]
                except ValueError as e:
                    # Cookie jar creation failed; fall through to the next
                    # pattern rather than aborting session selection
                    self.urls_plugin.logger.error(
                        "Failed to create cookie jar: {0}".format(e)
                    )
                    continue

        self.urls_plugin.logger.debug(
            "Domain {0} uses the global session storage.".format(
                url.domain
            )
        )

        proxy = self.urls_plugin.get_proxy(url)

        if not proxy:
            return self.global_session
        else:
            # NOTE(review): this builds a new proxied session (sharing the
            # global cookie file) on every call instead of caching it.
            s = ProxySession(proxy)
            s.cookies = self.get_cookie_jar("/global.txt")
            s.session_type = "global"
            s.cookies.set_mode(
                self.plugin.config.get("sessions", {})
                    .get("cookies", {})
                    .get("global", "discard")
            )

            return s
Пример #7
0
class WebsiteHandler(URLHandler):
    """
    URL handler for generic http/https websites.

    Fetches the page (respecting proxies, sessions and cookie jars from
    the plugin config), extracts the <title> and responds with it.
    """

    name = "website"

    criteria = {
        "protocol": re.compile(r"http|https", str_to_regex_flags("iu"))
    }

    # Shared session used when no group/domain-specific session applies
    global_session = None
    # Twisted-based DNS resolver, (re)created in reload()
    resolver = None

    cookies_base_path = "data/plugins/urls/cookies"

    def __init__(self, plugin):
        self.group_sessions = {}

        super(WebsiteHandler, self).__init__(plugin)

        # Make sure the cookie storage directories exist
        if not os.path.exists(self.cookies_base_path):
            os.makedirs(self.cookies_base_path)

        if not os.path.exists(self.cookies_base_path + "/groups"):
            os.makedirs(self.cookies_base_path + "/groups")

        self.reload()

    def url_can_resolve(self, url):
        """
        Return True unless the URL points at a Tor (.onion) or I2P (.i2p)
        hidden service, which cannot be resolved over normal DNS.
        """
        # Fix: anchored with $ so only domains actually *ending* in
        # .onion/.i2p are excluded — the old unanchored, non-raw pattern
        # also matched e.g. "foo.onion.example".
        return not re.match(
            r".*\.(onion|i2p)$",
            url.domain,
            str_to_regex_flags("iu")
        )

    @inlineCallbacks
    def call(self, url, context):
        """
        Entry point: fetch *url* asynchronously and respond with its title.

        Resolves the domain first (when possible) and refuses to connect
        to loopback, private, link-local or multicast addresses.
        """
        if self.url_can_resolve(url):
            try:
                ip = yield self.resolver.get_host_by_name(url.domain)
                ip = IPAddress(ip)
            except Exception:
                self.plugin.logger.exception("Error while checking DNS")
                returnValue(STOP_HANDLING)
                return

            # SSRF guard: never fetch private/internal addresses
            if ip.is_loopback() or ip.is_private() or ip.is_link_local() \
                    or ip.is_multicast():
                self.plugin.logger.warn(
                    "Prevented connection to private/internal address"
                )

                returnValue(STOP_HANDLING)
                return

        headers = {}

        # Per-domain user-agent spoofing; an explicit falsy entry means
        # "send no User-Agent header at all"
        if url.domain in context["config"]["spoofing"]:
            user_agent = context["config"]["spoofing"][url.domain]

            if user_agent:
                headers["User-Agent"] = user_agent
        else:
            headers["User-Agent"] = context["config"].get(
                "default_user_agent",
                "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 "
                "Firefox/36.0"
            )

        domain_langs = context.get("config") \
            .get("accept_language", {}) \
            .get("domains", {})

        if url.domain in domain_langs:
            headers["Accept-Language"] = domain_langs.get(url.domain)
        else:
            headers["Accept-Language"] = context.get("config") \
                .get("accept_language", {}) \
                .get("default", "en")

        session = self.get_session(url, context)
        session.get(unicode(url), headers=headers, stream=True,
                    background_callback=self.background_callback) \
            .addCallback(self.callback, url, context, session) \
            .addErrback(self.errback, url, context, session)

        returnValue(STOP_HANDLING)

    def teardown(self):
        """Persist all cookie stores, then close sessions and the resolver."""
        # Save all our cookie stores
        if self.global_session is not None:
            self.global_session.cookies.save(ignore_discard=True)
            self.global_session.close()

        for session in self.group_sessions.itervalues():
            session.cookies.save(ignore_discard=True)
            session.close()

        if self.resolver is not None:
            self.resolver.close()

    def reload(self):
        """Tear everything down and rebuild sessions, resolver and cookies."""
        self.teardown()
        self.group_sessions = {}
        self.resolver = AddressResolver()

        proxy = self.plugin.get_proxy()

        if not proxy:
            self.global_session = Session()
        else:
            self.global_session = ProxySession(proxy)

        try:
            self.global_session.cookies = self.get_cookie_jar("/global.txt")
            self.global_session.session_type = "global"
            self.global_session.cookies.set_mode(
                self.plugin.config.get("sessions", {})
                    .get("cookies", {})
                    .get("global", "discard")
            )
        except ValueError as e:
            self.urls_plugin.logger.error(
                "Failed to create global cookie jar: {0}".format(e)
            )

    def save_session(self, session):
        """Persist a session's cookies, unless it's a throwaway session."""
        # session_type is None for throwaway (disabled/"never") sessions
        if session.session_type:
            session.cookies.save(ignore_discard=True)

    def background_callback(self, session, response):
        """
        Does basic processing of the response in the background, including
        reading the response's content. As such, response.content should not be
        used.
        :param session:
        :param response:
        :return: response, content
        """
        conns_conf = self.urls_plugin.config.get("connections", {})
        max_read = conns_conf.get("max_read_size", 1024 * 16)
        # iter_content seems to only use chunk_size as a suggestion, so a large
        # value (such as full value of max_read) can give a far large result
        # than expected. Using max_read / 16 seems to give us a decent level of
        # accuracy.
        chunk_size = conns_conf.get("chunk_read_size",
                                    max(1024, int(max_read / 16)))

        if "content-type" not in response.headers:
            response.headers["Content-Type"] = ""

        # Keep the full header value around: the charset parameter (after
        # the ";") is needed for the decode check below, while the bare
        # media type is what's matched against the supported content types.
        full_content_type = response.headers["content-type"].lower()
        content_type = full_content_type

        if ";" in content_type:
            parts = content_type.split(";")
            content_type = parts[0]

        if content_type not in self.urls_plugin.config["content_types"]:
            self.plugin.logger.debug(
                "Unsupported Content-Type: %s"
                % response.headers["content-type"]
            )
            return response, None  # Not a supported content-type

        # If the response specifies a charset in the header, let requests
        # attempt to decode the contents. We can't use response.encoding as
        # it falls back to ISO-8859-1 in cases such as Content-Type: text/html.
        # In those cases, we want BeautifulSoup to perform its satanic ritual
        # to magically figure out the encoding.
        decode_unicode = False
        # Fix: inspect the *full* header value here. content_type had
        # already been truncated at the ";" above, so the old check could
        # never succeed and charset detection was dead code.
        if ";" in full_content_type:
            for x in full_content_type.split(";"):
                split = x.strip().split("=")
                if len(split) > 1:
                    if split[0].lower() == "charset":
                        charset = split[1].lower()
                        if charset == "binary":
                            # Not a webpage, so return None content
                            self.urls_plugin.logger.debug(
                                "Unsupported charset: {0}", charset)
                            return response, None
                        # Contains charset header - let requests decode
                        decode_unicode = True
                        self.urls_plugin.logger.trace(
                            "Charset specified in header: {0}", charset)
                        break

        amount_read = 0
        chunks = []
        self.plugin.logger.trace("Starting read...")
        # We must close this when finished otherwise they'll hang if we don't
        # read everything
        with closing(response) as c_resp:
            for chunk in c_resp.iter_content(chunk_size=chunk_size,
                                             decode_unicode=decode_unicode):
                # See comment beside chunk_size def - it's not a fixed limit
                chunks.append(chunk)
                amount_read += len(chunk)
                self.plugin.logger.trace("Read a chunk of {0} bytes",
                                         len(chunk))
                if amount_read >= max_read:
                    self.plugin.logger.debug(
                        "Stopped reading response after {0} bytes",
                        amount_read
                    )
                    break
        self.plugin.logger.trace("Done reading")
        # chunks can be bytes or unicode
        if decode_unicode:
            joiner = u""
        else:
            joiner = b""
        content = joiner.join(chunks)
        self.plugin.logger.trace("background_callback done")
        return response, content

    @inlineCallbacks
    def callback(self, result, url, context, session):
        """
        Handle a successful fetch: re-check DNS for the final (possibly
        redirected) host, extract the page title and respond.
        """
        response = result[0]
        content = result[1]

        self.plugin.logger.trace(
            "Headers: {0}", list(response.headers)
        )

        self.plugin.logger.trace("HTTP code: {0}", response.status_code)

        # The request may have been redirected; validate the final host too
        new_url = urlparse.urlparse(response.url)

        if self.url_can_resolve(url):
            try:
                ip = yield self.resolver.get_host_by_name(new_url.hostname)
                ip = IPAddress(ip)
            except Exception:
                self.plugin.logger.exception("Error while checking DNS")
                returnValue(STOP_HANDLING)
                return

            if ip.is_loopback() or ip.is_private() or ip.is_link_local() \
                    or ip.is_multicast():
                self.plugin.logger.warn(
                    "Prevented connection to private/internal address"
                )

                returnValue(STOP_HANDLING)
                return

        if content is None:
            self.plugin.logger.debug("No content returned")
            return

        soup = BeautifulSoup(content)

        if soup.title and soup.title.text:
            title = soup.title.text.strip()
            # Collapse runs of whitespace/newlines into single spaces
            title = re.sub(r"[\n\s]+", " ", title)
            title = to_unicode(title)

            title_limit = self.urls_plugin.config.get("max_title_length", 150)

            if len(title) > title_limit:
                title = title[:title_limit - 15] + u"... (truncated)"

            if response.status_code == requests.codes.ok:
                context["event"].target.respond(
                    u'"{0}" at {1}'.format(
                        title, new_url.hostname
                    )
                )
            else:
                context["event"].target.respond(
                    u'[HTTP {0}] "{1}" at {2}'.format(
                        response.status_code,
                        title, new_url.hostname
                    )
                )

        else:
            if response.status_code != requests.codes.ok:
                context["event"].target.respond(
                    u'HTTP Error {0}: "{1}" at {2}'.format(
                        response.status_code,
                        STATUS_CODES.get(response.status_code, "Unknown"),
                        new_url.hostname
                    )
                )
            else:
                self.plugin.logger.debug("No title")

        self.save_session(session)

    def errback(self, error, url, context, session):
        """Log request failures, then persist the session's cookies."""
        if isinstance(error.value, ResponseNeverReceived):
            # Twisted wraps the real cause(s); log each of them
            for f in error.value.reasons:
                f.printDetailedTraceback()
                self.plugin.logger.error(f.getErrorMessage())
        else:
            self.plugin.logger.error(error.getErrorMessage())
            error.printDetailedTraceback()

        self.save_session(session)

    def get_cookie_jar(self, filename):
        """
        Load (or create) a ChocolateCookieJar at *filename*, relative to
        the handler's cookie directory.
        """
        cj = ChocolateCookieJar(self.cookies_base_path + filename)

        try:
            cj.load()
        except LoadError:
            self.plugin.logger.exception(
                "Failed to load cookie jar {0}".format(filename)
            )
        except IOError as e:
            # Most likely the jar simply doesn't exist yet; it will be
            # created on the first save
            self.plugin.logger.debug(
                "Failed to load cookie jar {0}: {1}".format(filename, e)
            )

        return cj

    def get_session(self, url, context):
        """
        Pick the appropriate requests session for *url*.

        Selection order, driven by the "sessions" config section:
        disabled -> throwaway session; "never" match -> throwaway session;
        "group" match -> cached per-group session with its own cookie jar;
        otherwise -> the global session (or a proxied equivalent).

        Throwaway sessions get session_type = None so save_session()
        skips persisting their cookies.
        """
        sessions = context.get("config", {}).get("sessions", {})

        if not sessions.get("enable", False):
            self.urls_plugin.logger.debug("Sessions are disabled.")

            proxy = self.urls_plugin.get_proxy(url)

            if not proxy:
                s = Session()
            else:
                s = ProxySession(proxy)

            s.session_type = None

            return s

        for entry in sessions["never"]:
            if re.match(entry, url.domain, flags=str_to_regex_flags("ui")):
                self.urls_plugin.logger.debug(
                    "Domain {0} is blacklisted for sessions.".format(
                        url.domain
                    )
                )
                proxy = self.urls_plugin.get_proxy(url)

                if not proxy:
                    s = Session()
                else:
                    s = ProxySession(proxy)

                s.session_type = None

                return s

        for group, entries in sessions["group"].iteritems():
            for entry in entries:
                try:
                    if re.match(
                            entry, url.domain, flags=str_to_regex_flags("ui")
                    ):
                        self.urls_plugin.logger.debug(
                            "Domain {0} uses the '{1}' group sessions.".format(
                                url.domain, group
                            )
                        )

                        # Lazily create and cache one session per group
                        if group not in self.group_sessions:
                            proxy = self.urls_plugin.get_proxy(group=group)

                            if not proxy:
                                s = Session()
                            else:
                                s = ProxySession(proxy)

                            s.cookies = (
                                self.get_cookie_jar(
                                    "/groups/{0}.txt".format(
                                        group
                                    )
                                )
                            )

                            s.session_type = "group"
                            s.cookies.set_mode(
                                context.get("config")
                                .get("sessions")
                                .get("cookies")
                                .get("group")
                            )

                            self.group_sessions[group] = s

                        return self.group_sessions[group]
                except ValueError as e:
                    # Cookie jar creation failed; try the next pattern
                    self.urls_plugin.logger.error(
                        "Failed to create cookie jar: {0}".format(e)
                    )
                    continue

        self.urls_plugin.logger.debug(
            "Domain {0} uses the global session storage.".format(
                url.domain
            )
        )

        proxy = self.urls_plugin.get_proxy(url)

        if not proxy:
            return self.global_session
        else:
            s = ProxySession(proxy)
            s.cookies = self.get_cookie_jar("/global.txt")
            s.session_type = "global"
            s.cookies.set_mode(
                self.plugin.config.get("sessions", {})
                    .get("cookies", {})
                    .get("global", "discard")
            )

            return s
Пример #8
0
from system.protocols.generic.channel import Channel
from system.storage.formats import Formats
from system.plugins.plugin import PluginObject
from plugins.urls.constants import PREFIX_TRANSLATIONS, STOP_HANDLING
from plugins.urls.events import URLsPluginLoaded
from plugins.urls.handlers.website import WebsiteHandler
from plugins.urls.matching import extract_urls
from plugins.urls.shorteners.tinyurl import TinyURLShortener
from plugins.urls.url import URL
from utils.misc import str_to_regex_flags

__author__ = 'Gareth Coles'
__all__ = ["URLsPlugin"]


HTTP_S_REGEX = re.compile("http|https", flags=str_to_regex_flags("iu"))


class URLsPlugin(PluginObject):
    channels = None
    config = None
    shortened = None

    shorteners = None
    handlers = None

    def setup(self):
        self.shorteners = {}
        self.handlers = defaultdict(list)

        # Load up the configuration
Пример #9
0
from plugins.urls.shorteners.exceptions import ShortenerDown
from system.protocols.generic.channel import Channel
from system.storage.formats import Formats
from system.plugins.plugin import PluginObject
from plugins.urls.constants import PREFIX_TRANSLATIONS, STOP_HANDLING
from plugins.urls.events import URLsPluginLoaded
from plugins.urls.handlers.website import WebsiteHandler
from plugins.urls.matching import extract_urls
from plugins.urls.shorteners.tinyurl import TinyURLShortener
from plugins.urls.url import URL
from utils.misc import str_to_regex_flags

__author__ = 'Gareth Coles'
__all__ = ["URLsPlugin"]

HTTP_S_REGEX = re.compile("http|https", flags=str_to_regex_flags("iu"))


class URLsPlugin(PluginObject):
    channels = None
    config = None
    shortened = None

    shorteners = None
    handlers = None

    def setup(self):
        self.shorteners = {}
        self.handlers = defaultdict(list)

        # Load up the configuration
Пример #10
0
class FListHandler(URLHandler):
    """
    URL handler for f-list.net character pages.

    Looks up character info and kinks through the F-List JSON API and
    responds with a formatted summary. Requires f-list credentials in
    the plugin config.
    """

    criteria = {
        "protocol":
        re.compile(r"http|https", str_to_regex_flags("iu")),
        "domain":
        re.compile(r"(www\.f-list\.net)|(f-list\.net)",
                   str_to_regex_flags("iu")),
        "path":
        re.compile(r"/c/.*", str_to_regex_flags("iu")),
        "permission":
        "urls.trigger.nsfw"
    }

    ticket = ""  # API auth ticket; needs manual renewing
    last_renewal = None  # So we know when we renewed last
    session = None

    name = "f-list"

    @property
    def username(self):
        """Configured f-list account name (empty string if unset)."""
        return self.plugin.config.get("f-list", {}).get("username", "")

    @property
    def password(self):
        """Configured f-list account password (empty string if unset)."""
        return self.plugin.config.get("f-list", {}).get("password", "")

    @property
    def kinks_limit(self):
        """How many kinks to sample per preference bucket."""
        return self.plugin.config.get("f-list", {}).get("kink-sample", 2)

    def __init__(self, plugin):
        super(FListHandler, self).__init__(plugin)

        # Credentials are mandatory for the API; refuse to load without them
        if not (self.username and self.password):
            raise ApiKeyMissing()

        self.reload()
        self.get_ticket()

    def reload(self):
        """Close any existing HTTP session and start a fresh one."""
        self.teardown()

        self.session = Session()

    def teardown(self):
        """Close the HTTP session, if one was created."""
        if self.session is not None:
            self.session.close()

    def get_string(self, string):
        """
        Return the format string named *string*, preferring config overrides.
        """
        # Fix: read overrides from the "f-list" config section — this
        # previously read "osu", a copy-paste error from the osu! handler
        # (every other f-list setting lives under "f-list").
        formatting = self.plugin.config.get(
            "f-list", {}
        ).get("formatting", {})

        if string not in formatting:
            return strings[string]
        return formatting[string]

    @inlineCallbacks
    def get(self, *args, **kwargs):
        """GET from the API; raise FListError if the API reports an error."""
        r = yield self.session.get(*args, **kwargs)
        data = r.json()

        if "error" in data and data["error"]:
            raise FListError(data["error"])

        returnValue(data)

    @inlineCallbacks
    def post(self, *args, **kwargs):
        """POST to the API; raise FListError if the API reports an error."""
        r = yield self.session.post(*args, **kwargs)
        data = r.json()

        if "error" in data and data["error"]:
            raise FListError(data["error"])

        returnValue(data)

    @inlineCallbacks
    def get_ticket(self):
        """
        Return a valid API ticket, renewing it when the cached one is
        older than roughly four minutes (f-list tickets are short-lived).
        """
        now = datetime.now()
        then = now - timedelta(minutes=4)

        if not self.last_renewal or then > self.last_renewal:
            data = yield self.post(URL_TICKET,
                                   params={
                                       "account": self.username,
                                       "password": self.password
                                   })

            self.ticket = data["ticket"]
            self.last_renewal = datetime.now()

        returnValue(self.ticket)

    def get_sample(self, items, count):
        """Return up to *count* random items, or ["Nothing"] when empty."""
        if not items:
            return ["Nothing"]
        if len(items) <= count:
            return items
        # random.sample already returns a new list; no need to copy it
        return random.sample(items, count)

    @inlineCallbacks
    def call(self, url, context):
        """
        Entry point: handle /c/<name> character pages; cascade to other
        handlers for anything we don't understand.
        """
        target = url.path

        while target.endswith("/"):
            target = target[:-1]

        target = target.split("/")

        if "" in target:
            target.remove("")
        if " " in target:
            target.remove(" ")

        message = ""

        try:
            if len(target) < 2:  # It's the front page or invalid, don't bother
                returnValue(CASCADE)
            elif target[0].lower() == "c":  # Character page
                message = yield self.character(target[1])

        except Exception:
            self.plugin.logger.exception("Error handling URL: {}".format(url))
            returnValue(CASCADE)

        # At this point, if `message` isn't set then we don't understand the
        # url, and so we'll just allow it to pass down to the other handlers

        if message:
            context["event"].target.respond(message)
            returnValue(STOP_HANDLING)
        else:
            returnValue(CASCADE)

    @inlineCallbacks
    def character(self, char_name):
        """
        Fetch and format character info plus a random kink sample for
        the named character.
        """
        char_name = urlparse.unquote(char_name)
        ticket = yield self.get_ticket()
        params = {
            "ticket": ticket,
            "name": char_name,
            "account": self.username
        }

        char_info = yield self.post(URL_CHAR_INFO, params=params)
        char_kinks = yield self.post(URL_CHAR_KINKS, params=params)

        char_info = flatten_character(char_info)
        char_kinks = flatten_kinks(char_kinks)

        data = char_info["info"]

        data["sample_kinks"] = {
            "fave":
            ", ".join(
                self.get_sample(char_kinks["preferences"]["fave"],
                                self.kinks_limit)),
            "yes":
            ", ".join(
                self.get_sample(char_kinks["preferences"]["yes"],
                                self.kinks_limit)),
            "maybe":
            ", ".join(
                self.get_sample(char_kinks["preferences"]["maybe"],
                                self.kinks_limit)),
            "no":
            ", ".join(
                self.get_sample(char_kinks["preferences"]["no"],
                                self.kinks_limit)),
        }

        data["given"] = {"name": char_name}

        returnValue(
            self.get_string("character").format(**data).replace(
                u"&amp;", u"&"))
Пример #11
0
__author__ = 'Gareth Coles'

import re

from utils.misc import str_to_regex_flags

# Want to play with the regex?
# See https://regex101.com/r/bD7xH7/2

# Named groups, in order of appearance:
#   prefix   - leading non-word, non-space punctuation (may be empty)
#   protocol - the scheme, e.g. "http"
#   basic    - optional "user:password" basic-auth pair (may be empty)
#   domain   - host part, up to a "/", ":" or whitespace (may be empty)
#   port     - optional ":<digits>" port (may be empty)
#   url      - the remainder of the URL, up to whitespace (may be empty)
_regex = r"""(?P<prefix>[^\w\s\n]+|)
(?P<protocol>[\w]+)
:/[/]+
(?P<basic>[\w]+:[\w]+|)(?:@|)
(?P<domain>[^/:\n\s]+|)
(?P<port>:[0-9]+|)
(?P<url>[^\s\n]+|)"""

# "x" (verbose) flag: the literal newlines in the pattern are ignored,
# so the pattern above is really one expression split for readability
_r = re.compile(_regex, str_to_regex_flags("iux"))


def extract_urls(text):
    # Return a list of tuples, one per match, each holding the six
    # groups above (empty strings for the parts that didn't match)
    return re.findall(_r, text)


def is_url(text):
    # Match object if *text* starts with something URL-shaped, else None
    return re.match(_r, text)


# Convenience alias for isinstance() checks against compiled patterns
REGEX_TYPE = type(_r)
Пример #12
0
class OsuHandler(URLHandler):
    """URL handler for osu! (osu.ppy.sh) links: users, beatmaps, mapsets."""

    # Only http(s) links to osu.ppy.sh are routed to this handler
    criteria = {
        "protocol": re.compile(r"http|https", str_to_regex_flags("iu")),
        "domain": re.compile(r"osu\.ppy\.sh", str_to_regex_flags("iu"))
    }

    # HTTP session; created in reload(), closed in teardown()
    session = None
    name = "osu"

    @property
    def api_key(self):
        """The configured osu! API key, or "" when unset."""
        osu_config = self.plugin.config.get("osu", {})
        return osu_config.get("api_key", "")

    def __init__(self, plugin):
        """Set up the handler.

        :param plugin: The owning urls plugin instance

        :raises ApiKeyMissing: when no osu! API key is configured
        """

        super(OsuHandler, self).__init__(plugin)

        # Every API request needs the key, so refuse to load without one
        if not self.api_key:
            raise ApiKeyMissing()

        self.reload()

    def reload(self):
        # Recreate the HTTP session from scratch, closing any old one first
        self.teardown()

        self.session = Session()

    def teardown(self):
        """Close the HTTP session, if one exists."""
        if self.session is None:
            return

        self.session.close()

    def get_string(self, string):
        """Return a format string, preferring any user-configured override.

        Falls back to the built-in `strings` table when the config has no
        "osu" formatting entry for *string*.
        """
        overrides = self.plugin.config.get("osu", {}).get("formatting", {})

        if string in overrides:
            return overrides[string]

        return strings[string]

    @inlineCallbacks
    def get(self, *args, **kwargs):
        """GET via the session, injecting the API key into the params."""
        kwargs["params"] = self.merge_params(kwargs.get("params", {}))

        response = yield self.session.get(*args, **kwargs)
        returnValue(response)

    def parse_fragment(self, url):
        """
        Sometimes osu pages have query-style fragments for some reason

        :param url: URL object to parse fragment from
        :type url: plugins.urls.url.URL

        :return: Parsed fragment as a dict
        :rtype: dict
        """

        parsed = {}

        if not url.fragment:
            return parsed

        # Treat the fragment like a query string: "a=1&b" -> {"a": "1",
        # "b": None}. Only the first "=" of each pair is significant.
        for pair in url.fragment.split("&"):
            key, sep, value = pair.partition("=")
            parsed[key] = value if sep else None

        return parsed

    def merge_params(self, params):
        """Add the API key to *params* (mutated in place) and return it."""
        params["k"] = self.api_key

        return params

    @inlineCallbacks
    def call(self, url, context):
        """Entry point: dispatch an osu.ppy.sh link to the right formatter.

        Routes /u/<user>, /s/<set>, /b/<map> and old-style
        /p/beatmap?b=<id> paths. Responds to the event and returns
        STOP_HANDLING when a message was produced; returns CASCADE (or
        True for ignored sections) to let other handlers try.

        :param url: URL object for the link being handled
        :param context: handler context; context["event"].target receives
                        the formatted message via respond()
        """

        target = url.path

        # Strip all trailing slashes before splitting the path
        while target.endswith("/"):
            target = target[:-1]

        target = target.split("/")

        # NOTE(review): list.remove() only removes the FIRST occurrence, so
        # paths with several empty segments (e.g. "//u//name") keep some —
        # confirm whether such paths actually occur in practice.
        if "" in target:
            target.remove("")
        if " " in target:
            target.remove(" ")

        message = ""

        try:
            if len(target) < 2:  # It's the front page or invalid, don't bother
                returnValue(CASCADE)
            elif target[0] in [  # Special cases we don't care about
                    "forum", "wiki", "news"
            ]:
                returnValue(True)
            elif target[0].lower() == "p":  # Old-style page URL
                if target[1].lower() == "beatmap":
                    if "b" in url.query:
                        message = yield self.beatmap(url, url.query["b"])
            elif target[0].lower() == "u":  # User page
                message = yield self.user(url, target[1])
            elif target[0].lower() == "s":  # Beatmap set
                message = yield self.mapset(url, target[1])
            elif target[0].lower() == "b":  # Specific beatmap
                message = yield self.beatmap(url, target[1])

        except Exception:
            # Log and cascade rather than break URL handling entirely
            self.plugin.logger.exception("Error handling URL: {}".format(url))
            returnValue(CASCADE)

        # At this point, if `message` isn't set then we don't understand the
        # url, and so we'll just allow it to pass down to the other handlers

        if message:
            context["event"].target.respond(message)
            returnValue(STOP_HANDLING)
        else:
            returnValue(CASCADE)

    @inlineCallbacks
    def beatmap(self, url, beatmap):
        """Fetch and format info (plus top scores) for one beatmap.

        :param url: URL object; its query and fragment are merged into the
                    API parameters (e.g. "m" to force a mode)
        :param beatmap: Beatmap ID taken from the URL

        :return: Formatted message string (via returnValue)
        """

        fragment = self.parse_fragment(url)

        params = {}

        # Merge query-string and fragment params into the API call; the
        # explicit beatmap ID below always wins
        if url.query:
            params.update(url.query)

        if fragment:
            params.update(fragment)

        params["b"] = beatmap

        r = yield self.get(URL_BEATMAPS, params=params)
        beatmap = r.json()[0]

        # Default the scores lookup to the beatmap's own mode
        if "m" not in params:
            params["m"] = beatmap["mode"]

        # Thousands-separated display formatting for the counters
        for key in ["favourite_count", "playcount", "passcount"]:
            beatmap[key] = locale.format("%d",
                                         int(beatmap[key]),
                                         grouping=True)

        # Star difficulty arrives as a float; show whole stars
        for key in ["difficultyrating"]:
            beatmap[key] = int(round(float(beatmap[key])))

        if "approved" in beatmap:
            beatmap["approved"] = OSU_APPROVALS.get(beatmap["approved"],
                                                    u"Unknown approval")

        beatmap["mode"] = OSU_MODES[beatmap["mode"]]

        # None means the scores lookup failed entirely; an empty list means
        # it succeeded but there are no scores — both handled below
        scores = None

        try:
            r = yield self.get(URL_SCORES, params=params)
            scores = r.json()

            for score in scores:
                for key in [
                        "score", "count50", "count100", "count300",
                        "countmiss", "countkatu", "countgeki"
                ]:
                    score[key] = locale.format("%d",
                                               int(score[key]),
                                               grouping=True)
                for key in ["pp"]:
                    score[key] = int(round(float(score[key])))

                score["enabled_mods"] = ", ".join(
                    get_mods(int(score["enabled_mods"])))
        except Exception:
            # Best-effort: scores may be unavailable (e.g. mode mismatch);
            # `scores` stays None and a fallback template is chosen below
            pass

        data = beatmap

        # Pick a message template based on approval state and score data
        if beatmap["approved"] in [
                u"Pending", u"WIP", u"Graveyard", u"Unknown approval"
        ]:
            message = self.get_string("beatmap-unapproved")
        elif scores is None:
            message = self.get_string("beatmap-mode-mismatch")
        elif not scores:
            message = self.get_string("beatmap-no-scores")
        else:
            data["scores"] = scores
            message = self.get_string("beatmap")

        returnValue(message.format(**data))

    @inlineCallbacks
    def mapset(self, url, mapset):
        """Fetch and format a summary of a whole beatmap set.

        :param url: URL object the link came from (unused here)
        :param mapset: Beatmap set ID taken from the URL

        :return: Formatted message string (via returnValue)
        """

        params = {"s": mapset}

        r = yield self.get(URL_BEATMAPS, params=params)
        data = r.json()

        modes = {}  # raw mode id -> number of maps in the set for that mode
        to_join = []

        for beatmap in data:
            # Count per-mode BEFORE translating the id to a display name
            modes[beatmap["mode"]] = modes.get(beatmap["mode"], 0) + 1
            beatmap["mode"] = OSU_MODES[beatmap["mode"]]

            # Thousands-separated display formatting for the counters
            for key in ["favourite_count", "playcount", "passcount"]:
                beatmap[key] = locale.format("%d",
                                             int(beatmap[key]),
                                             grouping=True)

            # Star difficulty arrives as a float; show whole stars
            for key in ["difficultyrating"]:
                beatmap[key] = int(round(float(beatmap[key])))

            if "approved" in beatmap:
                beatmap["approved"] = OSU_APPROVALS.get(
                    beatmap["approved"],
                    u"Unknown approval: {}".format(beatmap["approved"]))

        for k, v in modes.iteritems():
            if v:
                to_join.append("{} x{}".format(OSU_MODES[k], v))

        # NOTE(review): data[0] raises IndexError for an unknown/empty set;
        # call() catches the exception and cascades to other handlers
        first = data[0]

        # Expose the first map's fields at the top level of the format data
        data = {"beatmaps": data, "counts": ", ".join(to_join)}

        data.update(first)

        returnValue(self.get_string("mapset").format(**data))

    @inlineCallbacks
    def user(self, url, user):
        """Fetch and format an osu! user profile.

        :param url: URL object; its fragment may carry "m" (mode) and
                    "t"/"type" (lookup type) overrides
        :param user: Username or user ID taken from the URL path

        :return: Formatted message string (via returnValue)
        """

        fragment = self.parse_fragment(url)

        params = {
            "u": user,
        }

        if "m" in fragment:  # Focused mode
            m = fragment["m"].lower()

            if m in OSU_MODES:
                params["m"] = OSU_MODES[m]

            else:
                try:
                    params["m"] = int(m)
                except ValueError:
                    pass

        # This logic is down to being able to specify either a username or ID.
        # The osu backend has to deal with this and so the api lets us specify
        # either "string" or "id" for usernames and IDs respectively. This
        # may be useful for usernames that are numerical, so we allow users
        # to add this to the fragment if they wish.

        if "t" in fragment:  # This once was called "t"..
            params["type"] = fragment["t"]
        elif "type" in fragment:  # ..but now is "type" for some reason
            params["type"] = fragment["type"]

        r = yield self.get(URL_USER, params=params)
        data = r.json()[0]  # It's a list for some reason

        for key in ["level", "accuracy"]:  # Round floats
            data[key] = int(round(float(data[key])))

        for key in [
                "ranked_score", "pp_raw", "pp_rank", "count300", "count100",
                "count50", "playcount", "total_score", "pp_country_rank"
        ]:  # Localised number formatting
            data[key] = locale.format("%d", int(data[key]), grouping=True)

        epic_factors = [int(event["epicfactor"]) for event in data["events"]]

        # FIX: this was `reduce(sum, epic_factors, 0)`, which raised
        # TypeError on any non-empty list — reduce calls sum(0, x), and
        # sum()'s first argument must be an iterable. The error was then
        # swallowed by call(), silently breaking all user links with events.
        epic_total = sum(epic_factors)
        epic_avg = 0

        if epic_total:
            # Force float division; round to 2 decimal places
            epic_avg = round(epic_total / (1.0 * len(epic_factors)), 2)

        data["events"] = "{} events at an average of {}/32 epicness".format(
            len(epic_factors), epic_avg)

        returnValue(self.get_string("user").format(**data))