Example #1
import scrape_logic  # project-local module providing build_scrapers()

def main(urls):
    # Build scrapers for each URL and print their debug output.
    for url in urls:
        print(url)
        scrapers = scrape_logic.build_scrapers(url)
        for s in scrapers:
            print(s.debug_string())
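A minimal way to drive this example is a small command-line wrapper like the sketch below; the argv handling is an assumption and is not part of the original example.

# Hypothetical entry point (assumed): treat command-line arguments as the URL list.
import sys

if __name__ == '__main__':
    main(sys.argv[1:])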
Example #2
import scrape_logic  # project-local module providing build_scrapers()
import utils         # project-local module providing parallelize()

def scrape_entities_from_url(url, page_source=None, force_fetch_page=False,
        max_results=None, allow_expansion=True, for_guide=False):
    # Build scrapers, cap the count if requested, then convert each to an entity in parallel.
    scrapers = scrape_logic.build_scrapers(url, page_source, force_fetch_page, for_guide=for_guide)
    scrapers = scrapers[:max_results] if max_results else scrapers
    return utils.parallelize(entity_from_scraper, [(scr, url) for scr in scrapers])
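A hedged usage sketch for Example #2: the URL and the max_results cap below are illustrative values, not taken from the original, and entity_from_scraper is assumed to be defined elsewhere in the project.

# Illustrative call (assumed URL and cap); returns one entity per scraper built for the page.
entities = scrape_entities_from_url('http://example.com/some-page', max_results=5)
for entity in entities:
    print(entity)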