# Pipeline wiring for the demo crawl: each list is an ordered sequence of
# processing stages consumed by the `pipelined` framework.
link_process = [
    find_links,
    fix_and_unique_urls,
]

image_process = [
    find_images,
    fix_and_unique_urls,
]

web_process = [
    # pipelined.from_input('page_fetcher'),
    fake_get_pages,
    pipelined.tee(link_process, image_process),
    pipelined.chunked(1),
    save_links,
]

source = [
    'http://onet.pl',
    'http://google.pl',
]
# NOTE(review): the URL list above is immediately shadowed by this debug
# input — presumably leftover from experimentation; confirm which is intended.
source = range(12)

# Commented-out driver code, kept as usage documentation:
# p = pipelined.Pipeline(web_process)
# p.feed(source)
# for r in p.run():
#     print(r)
# r = pipelined.run(web_process, source)  #, page_fetcher=fake_get_pages)
def call_FUT(self, size, iterable):
    """Invoke the function under test: apply pipelined.chunked(size) to an
    iterator over *iterable* and return the resulting stream."""
    from pipelined import chunked
    return chunked(size)(iter(iterable))