Пример #1
0
def test_find_unique_urls() -> None:
    """Exercise find_unique_urls: dedup, http:// normalization, and non-URL text."""
    from pajbot.modules.linkchecker import find_unique_urls

    # A bare domain and its explicit http:// form normalize to the same URL.
    assert find_unique_urls("pajlada.se test http://pajlada.se") == {"http://pajlada.se"}
    assert find_unique_urls("pajlada.se pajlada.com foobar.se") == {
        "http://pajlada.se",
        "http://pajlada.com",
        "http://foobar.se",
    }
    assert find_unique_urls("foobar.com foobar.com") == {"http://foobar.com"}
    # NOTE(review): the next three asserts previously read `assert f(...), {...}` —
    # the comma made the set the assertion *message* and the checks vacuously true.
    # Rewritten as real equality comparisons.
    assert find_unique_urls("foobar.com foobar.se") == {"http://foobar.com", "http://foobar.se"}
    assert find_unique_urls("www.foobar.com foobar.se") == {"http://www.foobar.com", "http://foobar.se"}

    # TODO: Edge case, this behaviour should probably be changed. These URLs should be considered the same.
    # Use is_same_url method?
    assert find_unique_urls("pajlada.se/ pajlada.se") == {"http://pajlada.se/", "http://pajlada.se"}

    # TODO: The protocol of a URL is entirely thrown away, this behaviour should probably be changed.
    assert find_unique_urls("https://pajlada.se/ https://pajlada.se") == {
        "https://pajlada.se/",
        "https://pajlada.se",
    }

    # Bare IPv4 addresses are treated as URLs.
    assert find_unique_urls("foo 192.168.0.1 bar") == {
        "http://192.168.0.1",
    }

    # Plain chat text and bare decimals must not be mistaken for URLs.
    assert find_unique_urls("omg this isn't chatting, this is meme-ing...my vanity") == set()
    assert find_unique_urls("foo 1.40 bar") == set()
Пример #2
0
    def test_find_unique_urls(self):
        """find_unique_urls should dedup URLs and prefix bare domains with http://."""
        import re

        from pajbot.bot import Bot
        from pajbot.modules.linkchecker import find_unique_urls

        url_re = re.compile(Bot.url_regex_str, re.IGNORECASE)

        cases = [
            ('pajlada.se test http://pajlada.se', {'http://pajlada.se'}),
            ('pajlada.se pajlada.com foobar.se',
             {'http://pajlada.se', 'http://pajlada.com', 'http://foobar.se'}),
            ('foobar.com foobar.com', {'http://foobar.com'}),
            ('foobar.com foobar.se', {'http://foobar.com', 'http://foobar.se'}),
            ('www.foobar.com foobar.se', {'http://www.foobar.com', 'http://foobar.se'}),
            # TODO: Edge case, this behaviour should probably be changed. These URLs should be considered the same.
            # Use is_same_url method?
            ('pajlada.se/ pajlada.se', {'http://pajlada.se/', 'http://pajlada.se'}),
            # TODO: The protocol of a URL is entirely thrown away, this behaviour should probably be changed.
            ('https://pajlada.se/ https://pajlada.se', {'http://pajlada.se/', 'http://pajlada.se'}),
        ]
        for message, expected in cases:
            self.assertEqual(find_unique_urls(url_re, message), expected)
Пример #3
0
def test_find_unique_urls():
    """Exercise find_unique_urls(regex, message): dedup and http:// normalization."""
    from pajbot.modules.linkchecker import find_unique_urls
    from pajbot.bot import URL_REGEX

    assert find_unique_urls(
        URL_REGEX,
        "pajlada.se test http://pajlada.se") == {"http://pajlada.se"}
    assert find_unique_urls(URL_REGEX, "pajlada.se pajlada.com foobar.se") == {
        "http://pajlada.se",
        "http://pajlada.com",
        "http://foobar.se",
    }
    assert find_unique_urls(URL_REGEX,
                            "foobar.com foobar.com") == {"http://foobar.com"}
    # NOTE(review): the following asserts previously read `assert f(...), {...}` —
    # the comma made the set the assertion *message* and put `==` inside the set
    # literal, so they could never fail. Rewritten as real equality checks.
    assert find_unique_urls(URL_REGEX, "foobar.com foobar.se") == {
        "http://foobar.com",
        "http://foobar.se",
    }
    assert find_unique_urls(URL_REGEX, "www.foobar.com foobar.se") == {
        "http://www.foobar.com",
        "http://foobar.se",
    }

    # TODO: Edge case, this behaviour should probably be changed. These URLs should be considered the same.
    # Use is_same_url method?
    assert find_unique_urls(URL_REGEX, "pajlada.se/ pajlada.se") == {
        "http://pajlada.se/",
        "http://pajlada.se",
    }

    # TODO: The protocol of a URL is entirely thrown away, this behaviour should probably be changed.
    assert find_unique_urls(URL_REGEX,
                            "https://pajlada.se/ https://pajlada.se") == {
                                "https://pajlada.se/",
                                "https://pajlada.se",
                            }
Пример #4
0
    def ab(bot, source, message, **rest):
        """The !ab command: weave the first word between the rest of the message."""
        if not message:
            return False

        # Refuse messages that contain any URL.
        if find_unique_urls(URL_REGEX, message):
            return False

        words = message.split(" ")
        if len(words) < 2:
            return

        filler = words[0]
        # With 3+ words, weave the filler between the remaining words; with
        # exactly 2, weave it between the characters of the second word
        # (joining a str iterates its characters) — consistent with siblings.
        pieces = words[1:] if len(words) >= 3 else words[1]
        woven = f" {filler} ".join(pieces)
        bot.say(f"{source}, {filler} {woven} {filler}")
Пример #5
0
    def ab(self, **options):
        """The !ab command: weave the first word between the rest of the message."""
        bot = options['bot']
        message = options['message']
        source = options['source']

        if not message:
            return

        # Skip messages that contain any link.
        if find_unique_urls(bot.url_regex, message) != set():
            return

        words = message.split(' ')
        if len(words) < 2:
            return

        filler = words[0]
        # 3+ words: weave between words; exactly 2: weave between the
        # characters of the second word (joining a str iterates characters).
        body = ' {} '.format(filler).join(words[1:] if len(words) >= 3 else words[1])
        bot.say('{0}, {1} {2} {1}'.format(source.username_raw, filler, body))
        return True
Пример #6
0
    def ab(**options):
        """The !ab command: weave the first word between the rest of the message."""
        bot = options["bot"]
        message = options["message"]
        source = options["source"]

        if not message:
            return

        # Skip messages that contain any link.
        if find_unique_urls(URL_REGEX, message) != set():
            return

        words = message.split(" ")
        if len(words) < 2:
            return

        filler = words[0]
        # 3+ words: weave between words; exactly 2: weave between the
        # characters of the second word (joining a str iterates characters).
        body = " {} ".format(filler).join(words[1:] if len(words) >= 3 else words[1])
        bot.say("{0}, {1} {2} {1}".format(source.username_raw, filler, body))
        return True
Пример #7
0
    def ab(self, **options):
        """The !ab command: weave the first word between the rest of the message."""
        bot = options['bot']
        message = options['message']
        source = options['source']

        if not message:
            return

        # Skip messages that contain any link.
        if find_unique_urls(bot.url_regex, message) != set():
            return

        parts = message.split(' ')
        if len(parts) < 2:
            return

        sep_word = parts[0]
        # 3+ words: weave between words; exactly 2: weave between the
        # characters of the second word (joining a str iterates characters).
        middle = ' {} '.format(sep_word).join(
            parts[1:] if len(parts) >= 3 else parts[1])
        bot.say('{0}, {1} {2} {1}'.format(source.username_raw,
                                          sep_word, middle))
        return True
Пример #8
0
    def test_find_unique_urls(self):
        """find_unique_urls should dedup URLs and prefix bare domains with http://."""
        import re

        from pajbot.bot import Bot
        from pajbot.modules.linkchecker import find_unique_urls

        url_re = re.compile(Bot.url_regex_str, re.IGNORECASE)

        cases = [
            ('pajlada.se test http://pajlada.se', {'http://pajlada.se'}),
            ('pajlada.se pajlada.com foobar.se',
             {'http://pajlada.se', 'http://pajlada.com', 'http://foobar.se'}),
            ('foobar.com foobar.com', {'http://foobar.com'}),
            ('foobar.com foobar.se', {'http://foobar.com', 'http://foobar.se'}),
            ('www.foobar.com foobar.se', {'http://www.foobar.com', 'http://foobar.se'}),
            # TODO: Edge case, this behaviour should probably be changed. These URLs should be considered the same.
            # Use is_same_url method?
            ('pajlada.se/ pajlada.se', {'http://pajlada.se/', 'http://pajlada.se'}),
            # TODO: The protocol of a URL is entirely thrown away, this behaviour should probably be changed.
            ('https://pajlada.se/ https://pajlada.se', {'https://pajlada.se/', 'https://pajlada.se'}),
        ]
        for message, expected in cases:
            self.assertEqual(find_unique_urls(url_re, message), expected)
Пример #9
0
 def find_unique_urls(self, message):
     """Return the set of unique URLs in *message*, matched with this bot's URL regex."""
     from pajbot.modules.linkchecker import find_unique_urls as _find_unique_urls

     return _find_unique_urls(self.url_regex, message)
Пример #10
0
    def find_unique_urls(self, message):
        """Return the set of unique URLs found in *message* using the module-level URL_REGEX."""
        from pajbot.modules.linkchecker import find_unique_urls as _impl

        return _impl(URL_REGEX, message)
Пример #11
0
 def find_unique_urls(self, message):
     """Return the set of unique URLs in *message*, matched with this bot's URL regex."""
     from pajbot.modules.linkchecker import find_unique_urls as _impl

     return _impl(self.url_regex, message)
Пример #12
0
    def find_unique_urls(self, message: str) -> Set[str]:
        """Return the set of unique URLs contained in *message*."""
        from pajbot.modules.linkchecker import find_unique_urls as _impl

        return _impl(message)