]  # closes a list literal opened before this chunk (its start is not visible here)

# Sub-pipeline: extract links from a page, then normalise and de-duplicate the URLs.
link_process = [ find_links, fix_and_unique_urls, ]

# Sub-pipeline: extract image URLs, then normalise and de-duplicate them.
image_process = [ find_images, fix_and_unique_urls, ]

# Main pipeline: fetch pages (currently a fake fetcher), fan the page stream out
# to both sub-pipelines via tee, re-chunk the merged results one item at a time,
# and hand them to save_links.
web_process = [
    #pipelined.from_input('page_fetcher'),
    fake_get_pages,
    pipelined.tee(link_process, image_process),
    pipelined.chunked(1),
    save_links,
]

# Seed input for the pipeline.
source = [ 'http://onet.pl', 'http://google.pl', ]
# NOTE(review): this rebinding discards the URL list above — presumably a quick
# switch to synthetic numeric input for testing; confirm which seed is intended.
source = range(12)

# Commented-out driver: build the pipeline, feed it the source, print each result.
#p = pipelined.Pipeline(web_process)
#p.feed(source)
#for r in p.run():
#    print(r)
def call_FUT(self, pipelines, iterable):
    """Invoke the function under test: ``pipelined.tee`` over *pipelines*.

    Builds the tee stage from the given sub-pipelines and applies it to
    *iterable*, returning whatever the stage produces.
    """
    from pipelined import tee
    stage = tee(*pipelines)
    return stage(iterable)