def crawl_all(saver=jdump, threads=4):
    # Queue every MEP profile URL from parliamentary terms 1-7, deduplicating URLs.
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    seen = []
    for term in xrange(1, 8):
        for url, name in get_meps(term=term):
            if url not in seen:
                m.addjob(url)
                seen.append(url)
    m.finish()
    logger.info('end of crawl')
def crawl(urls, threads=4):
    m = Multiplexer(scrape, save, threads=threads)
    m.start()
    for url, title in urls:
        m.addjob(url)
    m.finish()
    logger.info('end of crawl')
def crawler(meps, saver=jdump, threads=4):
    # Queue one scrape job per MEP id.
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    for mepid in meps:
        m.addjob(mepid)
    m.finish()
    logger.info('end of crawl')
def crawler(targets, saver=jdump, threads=4, term='7'):
    # targets(term=...) yields (url, data) pairs for the given parliamentary term.
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    for url, data in targets(term=term):
        m.addjob(url, data)
    m.finish()
    logger.info('end of crawl')
def crawl(saver=jdump, threads=4):
    # Queue every MEP profile URL from the current listing.
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    for url, name in get_meps():
        m.addjob(url)
    m.finish()
    logger.info('end of crawl')
def crawler(saver=jdump, threads=4):
    # Queue each committee agenda as a (url, data) pair.
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    for url, data in getComAgendas():
        m.addjob(url, data)
    m.finish()
    logger.info('end of crawl')
def crawler(meps, saver=jdump, threads=4, term=current_term):
    # meps(term=...) yields (url, data) pairs for the given parliamentary term.
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    for url, data in meps(term=term):
        m.addjob(url, data)
    m.finish()
    logger.info("end of crawl")
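# All of the crawl functions above assume a Multiplexer exposing start(), addjob()
# and finish(), plus module-level scrape, save/jdump, logger, get_meps,
# getComAgendas and current_term definitions that are not shown in this section.
# Below is a minimal sketch of what such a Multiplexer could look like (a fixed
# pool of worker threads draining a job queue). The names and signatures here are
# assumptions for illustration, not the project's actual implementation.
import logging
from threading import Thread
try:
    from queue import Queue          # Python 3
except ImportError:
    from Queue import Queue          # Python 2, matching the xrange usage above

logger = logging.getLogger(__name__)

class Multiplexer(object):
    def __init__(self, worker, writer, threads=4):
        self.worker = worker         # e.g. scrape(url) or scrape(url, data) -> result
        self.writer = writer         # e.g. jdump(result) or save(result) persists one result
        self.q = Queue()
        self.threads = [Thread(target=self._run) for _ in range(threads)]

    def start(self):
        # Daemon threads so a hung worker cannot keep the process alive on exit.
        for t in self.threads:
            t.daemon = True
            t.start()

    def addjob(self, url, data=None):
        self.q.put((url, data))

    def _run(self):
        while True:
            url, data = self.q.get()
            try:
                result = self.worker(url, data) if data is not None else self.worker(url)
                self.writer(result)
            except Exception:
                logger.exception('job failed: %s', url)
            finally:
                self.q.task_done()

    def finish(self):
        # Block until every queued job has been processed by some worker thread.
        self.q.join()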