Code example #1
    def test_len(self):
        # The constructor argument is presumably a label for the cache; the
        # assertion below implies it does not count towards len(), and that
        # generate_string() returns distinct strings
        n = 3000
        cache = Cache(self.generate_string())
        for _ in range(n):
            cache.add(self.generate_string())

        self.assertEqual(len(cache), n)
Code example #2
import bs4


# NUM_SEARCH_CANDIDATES and Cache are assumed to be defined at module level
def parse_search_candidates(search_result: str, base_url: str,
                            cache: Cache) -> list:
    hyperlinks_list = []

    results_html = bs4.BeautifulSoup(search_result, "html.parser")
    # find_all is the current name for the deprecated findAll
    html_headings = results_html.find_all("div",
                                          class_="mw-search-result-heading")

    for heading in html_headings[:NUM_SEARCH_CANDIDATES]:
        heading_a = heading.find("a")
        heading_link_end = heading_a["href"]
        heading_title = heading_a["title"]
        heading_link = f"{base_url}{heading_link_end}"

        if heading_link not in cache:
            cache.add(heading_link)

        # Escape a trailing ")" so it cannot close the Markdown link early
        if heading_link.endswith(")"):
            heading_link = heading_link[:-1] + "\\)"

        hyperlink = f"[{heading_title}]({heading_link})"
        hyperlinks_list.append(hyperlink)

    return hyperlinks_list
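
For context, a hypothetical usage sketch follows. It assumes parse_search_candidates and a module-level NUM_SEARCH_CANDIDATES are in scope; the HTML string mimics MediaWiki search-result markup, and SetCache is a minimal stand-in for the real Cache (only add() and membership are needed here), so names and values are illustrative rather than taken from the project.

NUM_SEARCH_CANDIDATES = 5  # assumed value, for illustration only


class SetCache:
    """Stand-in for the real Cache; supports just add() and `in`."""

    def __init__(self):
        self._items = set()

    def add(self, item):
        self._items.add(item)

    def __contains__(self, item):
        return item in self._items


sample_html = """
<div class="mw-search-result-heading">
  <a href="/w/Dragon_dagger" title="Dragon dagger">Dragon dagger</a>
</div>
<div class="mw-search-result-heading">
  <a href="/w/Dragon_dagger_(p)" title="Dragon dagger (p)">Dragon dagger (p)</a>
</div>
"""

for link in parse_search_candidates(sample_html,
                                    "https://oldschool.runescape.wiki",
                                    SetCache()):
    print(link)
# [Dragon dagger](https://oldschool.runescape.wiki/w/Dragon_dagger)
# [Dragon dagger (p)](https://oldschool.runescape.wiki/w/Dragon_dagger_(p\))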
Code example #3
    def test_delete_unpopular(self):
        cache = Cache()

        for i in range(10):
            cache.add(i)

        # Access items "3" and "4" twice each (keys default to the value's
        # string form) so that only they count as popular
        cache["3"]
        cache["4"]
        cache.get("3")
        cache.get("4")

        # Everything accessed fewer than 2 times gets deleted
        deleted = cache.delete_unpopular(2)
        self.assertEqual(deleted, 8)
        self.assertEqual(len(cache), 2)
Code example #4
    def test_delete_deprecated(self):
        cache = Cache()
        cache.set_item_lifetime(seconds=1)

        # Items are added 0.5 s apart, so once the loop finishes every item
        # except the newest has outlived the 1-second lifetime
        for i in range(10):
            cache.add(i)
            time.sleep(0.5)

        deleted = cache.delete_deprecated()
        self.assertEqual(len(cache), 1)
        self.assertEqual(deleted, 9)

        # Resetting the lifetime disables expiry, so nothing more is deleted
        cache.reset_item_lifetime()
        self.assertIsNone(cache.item_lifetime)
        cache.delete_deprecated()
        self.assertEqual(len(cache), 1)
Code example #5
from typing import Optional

from bs4 import BeautifulSoup


# Cache is assumed to be imported or defined at module level
def parse_wiki_search_candidates(search_result: str,
                                 base_url: str,
                                 cache: Optional[Cache] = None) -> list:
    """
    Parse potential matches for a wiki search in a "Did you mean?" manner.
    Supports the official osrs wiki and the melvoridle wiki. If a cache
    object is passed, all wiki page urls found are added into it so they
    can be found later.

    :param search_result: Raw HTML of the wiki's search results page
    :param base_url: Base url of the wiki, prepended to each page link
    :param cache: Optional cache that parsed page urls are added to
    :return: List of Markdown hyperlinks to the candidate pages
    """
    hyperlinks_list = []

    results_html = BeautifulSoup(search_result, "html.parser")
    html_headings = results_html.find_all("div",
                                          class_="mw-search-result-heading")

    for heading in html_headings:
        heading_a = heading.find("a")
        heading_link_end = heading_a["href"]
        heading_title = heading_a["title"]
        heading_link = f"{base_url}{heading_link_end}"

        if cache is not None:
            cache.add(heading_link)

        # Escape a trailing ")" so it cannot close the Markdown link early
        if heading_link.endswith(")"):
            heading_link = heading_link[:-1] + "\\)"

        hyperlink = f"[{heading_title}]({heading_link})"
        hyperlinks_list.append(hyperlink)

    return hyperlinks_list
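
A note on the trailing-parenthesis handling in both parsers: the hyperlinks are emitted as Markdown, and in some Markdown renderers (Discord's, notably) a url whose final character is ")" (typical for disambiguated wiki pages) would end the Markdown link one character early. A small illustration with a hypothetical url:

url = "https://example.wiki/w/Some_page_(beta)"

# Unescaped: some renderers treat the url's final ")" as the end of the link
print(f"[Some page]({url})")
# Escaped the way the parsers above do it: the ")" stays part of the url
print(f"[Some page]({url[:-1]}\\))")  # -> [Some page](https://example.wiki/w/Some_page_(beta\))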
Code example #6
    def test_add_delete(self):
        cache = Cache()
        cache.allow_type_override = False

        # Keys default to str(value) when none is given explicitly
        cache.add(1234)
        cache.add(list(range(10)))
        cache.add([random.randint(0, 100) for _ in range(10)], "randList")

        self.assertEqual(len(cache), 3)
        cache.delete("1234")
        self.assertEqual(len(cache), 2)
        del cache["randList"]
        self.assertEqual(len(cache), 1)

        # With type override disabled, re-adding an existing key only works
        # when the new value has the same type as the stored one
        cache.add(123, "123")
        cache.add(123)  # key "123" already holds an int, so this is fine
        cache.add("Does not raise", 123)  # the int key 123 is distinct from "123"
        self.assertRaises(TypeError, cache.add, "Should raise", "123")
        self.assertRaises(TypeError, cache.add, [], "123")
        self.assertRaises(TypeError, cache.add, 0, 123)
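
Taken together, the tests pin the Cache interface down fairly tightly: auto-generated string keys, access counting for delete_unpopular, timestamps for delete_deprecated, and an optional type guard on add. What follows is a minimal sketch that satisfies the tests above under those assumptions; it is an illustration, not the project's actual Cache.

import time


class Cache:
    """Minimal sketch inferred from the tests above, not the real implementation."""

    def __init__(self, name=None):
        # Assumption: the constructor argument is a label, not a first item
        # (test_len expects len() to equal only the number of add() calls)
        self.name = name
        self.allow_type_override = True
        self.item_lifetime = None      # max item age in seconds, or None
        self._items = {}               # key -> [value, created_at, hits]

    def add(self, value, key=None):
        if key is None:
            key = str(value)           # assumption: keys default to str(value)
        if (not self.allow_type_override and key in self._items
                and type(self._items[key][0]) is not type(value)):
            raise TypeError(f"cannot change the type stored under {key!r}")
        self._items[key] = [value, time.monotonic(), 0]

    def __getitem__(self, key):
        entry = self._items[key]
        entry[2] += 1                  # every access counts towards popularity
        return entry[0]

    def get(self, key, default=None):
        if key not in self._items:
            return default
        return self[key]

    def delete(self, key):
        del self._items[key]

    def __delitem__(self, key):
        self.delete(key)

    def __contains__(self, key):
        return key in self._items

    def __len__(self):
        return len(self._items)

    def set_item_lifetime(self, seconds):
        self.item_lifetime = seconds

    def reset_item_lifetime(self):
        self.item_lifetime = None

    def delete_unpopular(self, min_hits):
        doomed = [k for k, (_, _, hits) in self._items.items() if hits < min_hits]
        for key in doomed:
            del self._items[key]
        return len(doomed)

    def delete_deprecated(self):
        if self.item_lifetime is None:
            return 0
        now = time.monotonic()
        doomed = [k for k, (_, created, _) in self._items.items()
                  if now - created > self.item_lifetime]
        for key in doomed:
            del self._items[key]
        return len(doomed)

One design note on the sketch: time.monotonic() is used for item ages so that wall-clock adjustments cannot expire items spuriously.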