Example No. 1
import crawler


def crawl(start_url: str):
    # Use a breakpoint in the line below to debug your script.
    print(f'start crawling {start_url}')  # Press Ctrl+F8 to toggle the breakpoint.

    crawler_fetcher = crawler.ArticleFetcher(start_url)
    # fetch_all_pages returns a list of lists of articles (one inner list per page).
    pages = crawler_fetcher.fetch_all_pages()

    counter = 0

    # Flatten the list of lists into one stream of articles with a generator expression.
    articles = (article for page in pages for article in page)

    for article in articles:
        counter += 1
        if counter == 8:  # stop after printing the first seven articles
            break
        print(article.emoji + ": " + article.title)
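

# A minimal sketch (not part of the original example) of an equivalent flattening
# step: itertools.chain.from_iterable yields the same stream of articles as the
# generator expression inside crawl(). flatten_pages is a hypothetical helper name.
def flatten_pages(pages):
    from itertools import chain
    return chain.from_iterable(pages)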

fetcher = crawler.ArticleFetcher()

for element in fetcher.fetch():
    print(element.emoji + ": " + element.title)
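
Both snippets assume a local crawler module that is not shown here. A minimal stand-in, saved as crawler.py next to the script, might look like the sketch below; the interface (ArticleFetcher, fetch, fetch_all_pages, and articles with .emoji and .title) is inferred from the calls above, and the hard-coded articles are placeholders rather than real crawling logic.

# crawler.py (hypothetical stub; interface inferred from the examples above)
from dataclasses import dataclass


@dataclass
class Article:
    emoji: str
    title: str


class ArticleFetcher:
    def __init__(self, start_url: str = ""):
        # The first example passes a start URL, the second does not,
        # so the argument is optional here.
        self.start_url = start_url

    def fetch(self):
        # One flat list of placeholder articles.
        return [Article("📰", "Placeholder article one"),
                Article("🐍", "Placeholder article two")]

    def fetch_all_pages(self):
        # A list of lists: one inner list of articles per crawled page.
        return [self.fetch(), self.fetch()]

With this stub in place the module-level loop runs end to end, and crawl can be called with any placeholder URL to exercise the flattening logic.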