Example #1
def crawl_all(saver=jdump, threads=4):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    seen = []
    # queue each MEP URL once across parliamentary terms 1-7
    # (xrange: this excerpt is Python 2 code)
    for term in xrange(1, 8):
        for url, name in get_meps(term=term):
            if url not in seen:
                m.addjob(url)
                seen.append(url)
    m.finish()
    logger.info('end of crawl')
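
Every example in this listing relies on a Multiplexer object that is not shown here. The snippet below is a minimal sketch of the interface the callers appear to assume (a scrape worker, a saver callback, start(), addjob() and finish()); the queue and threading details are guesses for illustration, written in Python 3 syntax, and are not the actual parltrack class.

# Minimal sketch of the assumed Multiplexer interface (illustration only,
# not the parltrack implementation).
import queue
import threading

class Multiplexer(object):
    def __init__(self, worker, saver, threads=4):
        self.worker = worker   # e.g. scrape(url) or scrape(url, data)
        self.saver = saver     # e.g. jdump(result) or save(result)
        self.threads = threads
        self.jobs = queue.Queue()

    def start(self):
        # spawn the worker threads that consume queued jobs
        for _ in range(self.threads):
            threading.Thread(target=self._run, daemon=True).start()

    def addjob(self, url, data=None):
        # enqueue one job; callers pass either a url or (url, data)
        self.jobs.put((url, data))

    def _run(self):
        while True:
            url, data = self.jobs.get()
            try:
                result = self.worker(url, data) if data is not None else self.worker(url)
                self.saver(result)
            finally:
                self.jobs.task_done()

    def finish(self):
        # block until every queued job has been processed
        self.jobs.join()
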
Example #2
def crawl(urls, threads=4):
    m = Multiplexer(scrape, save, threads=threads)
    m.start()
    [m.addjob(url) for url, title in urls]
    m.finish()
    logger.info('end of crawl')
Example #3
def crawler(meps, saver=jdump, threads=4):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    [m.addjob(mepid) for mepid in meps]
    m.finish()
    logger.info('end of crawl')
Example #4
File: oeil.py  Project: jacob414/parltrack
def crawl(urls, threads=4):
    m = Multiplexer(scrape, save, threads=threads)
    m.start()
    [m.addjob(url) for url, title in urls]
    m.finish()
    logger.info('end of crawl')
Example #5
def crawler(targets, saver=jdump, threads=4, term='7'):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    [m.addjob(url, data) for url, data in targets(term=term)]
    m.finish()
    logger.info('end of crawl')
Example #6
def crawl(saver=jdump, threads=4):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    [m.addjob(url) for url, name in get_meps()]
    m.finish()
    logger.info('end of crawl')
Example #7
def crawler(saver=jdump, threads=4):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    [m.addjob(url, data) for url, data in getComAgendas()]
    m.finish()
    logger.info('end of crawl')
Example #8
File: ep_meps.py  Project: tttp/parltrack
def crawler(meps, saver=jdump, threads=4, term=current_term):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    [m.addjob(url, data) for url, data in meps(term=term)]
    m.finish()
    logger.info("end of crawl")
Example #9
def crawler(meps, saver=jdump, threads=4, term=current_term):
    m = Multiplexer(scrape, saver, threads=threads)
    m.start()
    [m.addjob(url, data) for url, data in meps(term=term)]
    m.finish()
    logger.info('end of crawl')
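
All of the examples follow the same producer/consumer pattern: start() brings up the worker pool, addjob() enqueues one URL (optionally with extra data) per item to scrape, and finish() blocks until the queue has drained before the final log message. The list comprehensions are used purely for their side effect; a plain for loop expresses the same thing more idiomatically. A possible rewrite of Example #2 along those lines, with identical behavior:

def crawl(urls, threads=4):
    m = Multiplexer(scrape, save, threads=threads)
    m.start()
    # iterate instead of building a throwaway list of addjob() return values
    for url, title in urls:
        m.addjob(url)
    m.finish()
    logger.info('end of crawl')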