# Example #1
# 0
def process_page(pages):
    """Feed each page through the link pipeline, then the image pipeline."""
    processors = (link_process, image_process)
    for current_page in pages:
        for processor in processors:
            pipelined.run(processor, [current_page])
# Example #2
# 0
    save_links,
]

# Candidate input for the (currently commented-out) web pipeline below.
source = [
    'http://onet.pl',
    'http://google.pl',
]
# NOTE(review): this immediately overwrites the URL list above, so only
# range(12) is ever used; the list is dead unless the web pipeline at the
# bottom is re-enabled — confirm whether the URL source should be kept.
source = range(12)

#p = pipelined.Pipeline(web_process)
#p.feed(source)
#for r in p.run():
#    print(r)

#r = pipelined.run(web_process, source)  #, page_fetcher=fake_get_pages)
#print("FINISHED", r)

def process_numbers(stream):
    """Pass numbers through unchanged, except deliberately raise
    ZeroDivisionError when the sentinel value 2 appears — used to
    exercise exception propagation through the pipeline."""
    for value in stream:
        if value != 2:
            yield value
        else:
            # Intentional failure injection (same effect as the original 1/0).
            1 / 0

# Two-stage pipeline used to test exception handling: process_numbers
# raises ZeroDivisionError partway through the stream (at value 2),
# before sum can consume everything.
exc_test = [
    process_numbers,
    sum,
]

# Run the failing pipeline over 0..9. Whether the ZeroDivisionError
# propagates or is captured in `r` depends on pipelined.run — TODO confirm.
r = pipelined.run(exc_test, range(10))
print(r)