def test_safe_map(self):
    """safe_map should silently drop items whose mapper raises and keep the rest."""
    def f(i):
        # Values <= 10 simulate a failing mapper; safe_map must swallow these.
        if i <= 10:
            raise Exception
        return i
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual([20, 30], list(Util.safe_map(f, [1, 5, 20, 10, 30, 4])))
def get_for_keyword(self, keyword):
    """Return quotes for *keyword* from a randomly chosen Goodreads tag page.

    Loads the first tag page, discovers how many pages of results exist,
    then re-fetches a random page before delegating parsing to get_from_soup.
    """
    logger.info(lambda: "Fetching quotes from Goodreads for keyword=%s" % keyword)
    url = iri2uri("https://www.goodreads.com/quotes/tag?utf8=\u2713&id=%s" % keyword)
    soup = Util.html_soup(url)

    # Pagination links carry the page numbers as their text; safe_map drops
    # any link whose text is not a parseable int.
    pagination = soup.find_all(href=re.compile('quotes/tag.*page='))
    numbers = (link.contents[0] for link in pagination)
    pages = list(Util.safe_map(int, numbers))

    if pages:
        chosen = random.randint(1, max(pages))
        url = iri2uri(
            "https://www.goodreads.com/quotes/tag?utf8=\u2713&id=%s&page=%d"
            % (keyword, chosen))
        soup = Util.html_soup(url)

    return self.get_from_soup(url, soup)
def get_for_author(self, author):
    """Return quotes for *author* from a randomly chosen Goodreads search page.

    Fetches the first search-results page, reads the available page numbers
    from the pagination links, then re-fetches one page at random and hands
    it to get_from_soup for extraction.
    """
    logger.info(lambda: "Fetching quotes from Goodreads for author=%s" % author)
    url = iri2uri("https://www.goodreads.com/quotes/search?utf8=\u2713&q=%s" % author)
    soup = Util.html_soup(url)

    # safe_map discards any pagination link whose text is not an integer.
    pagination = soup.find_all(href=re.compile('quotes/search.*page='))
    page_numbers = list(Util.safe_map(int, (a.contents[0] for a in pagination)))

    if page_numbers:
        picked = random.randint(1, max(page_numbers))
        url = iri2uri(
            "https://www.goodreads.com/quotes/search?utf8=\u2713&q=%s&page=%d"
            % (author, picked))
        soup = Util.html_soup(url)

    return self.get_from_soup(url, soup)
def get_for_keyword(self, keyword):
    """Fetch quotes tagged with *keyword* from a random Goodreads tag page."""
    logger.info(lambda: "Fetching quotes from Goodreads for keyword=%s" % keyword)

    url = iri2uri(u"https://www.goodreads.com/quotes/tag?utf8=\u2713&id=%s" % keyword)
    soup = Util.html_soup(url)

    # Collect the page numbers shown in the pagination links; non-numeric
    # link texts are dropped by safe_map.
    link_texts = [a.contents[0]
                  for a in soup.find_all(href=re.compile('quotes/tag.*page='))]
    available_pages = list(Util.safe_map(int, link_texts))

    if available_pages:
        # Re-fetch a uniformly random page so repeated calls vary the quotes.
        page = random.randint(1, max(available_pages))
        url = iri2uri(u"https://www.goodreads.com/quotes/tag?utf8=\u2713&id=%s&page=%d" % (keyword, page))
        soup = Util.html_soup(url)

    return self.get_from_soup(url, soup)
def get_for_author(self, author):
    """Fetch quotes by *author* from a random Goodreads search-results page."""
    logger.info(lambda: "Fetching quotes from Goodreads for author=%s" % author)

    url = iri2uri(u"https://www.goodreads.com/quotes/search?utf8=\u2713&q=%s" % author)
    soup = Util.html_soup(url)

    # Pagination link texts are the page numbers; safe_map filters out any
    # that fail int() conversion.
    link_texts = [a.contents[0]
                  for a in soup.find_all(href=re.compile('quotes/search.*page='))]
    available_pages = list(Util.safe_map(int, link_texts))

    if available_pages:
        # Pick a random page so successive calls return different quotes.
        page = random.randint(1, max(available_pages))
        url = iri2uri(u"https://www.goodreads.com/quotes/search?utf8=\u2713&q=%s&page=%d" % (author, page))
        soup = Util.html_soup(url)

    return self.get_from_soup(url, soup)
def test_safe_map(self):
    """safe_map should silently drop items whose mapper raises and keep the rest."""
    def f(i):
        # Values <= 10 simulate a failing mapper; safe_map must swallow these.
        if i <= 10:
            raise Exception
        return i
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual([20, 30], list(Util.safe_map(f, [1, 5, 20, 10, 30, 4])))