def write_body(target_file, limit):
    # Sequentially read every feed listed in whatsnew.dat and write the
    # resulting items to target_file; returns the counts and a cancel flag.
    canceled = False
    todo = done = 0
    filename = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    for feed_source in feed.iter(filename):
        todo += 1
        try:
            ok, result = feed.read(feed_source, limit)
            if not ok:
                print("Failed to read:", result)
            elif result is not None:
                print("Read {} at {}".format(feed_source.title,
                        feed_source.url))
                for item in result:
                    target_file.write(item)
                done += 1  # count feeds read successfully, not items
        except KeyboardInterrupt:
            print("Cancelling...")
            canceled = True
            break
    return todo, done, canceled
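# A brief usage sketch (not part of the original module): write_body() does
# all the sequential work, so a driver only needs to open an output file,
# call it, and report the result.  The output path "whatsnew-seq.html" and
# the driver name main_sequential are illustrative assumptions.
def main_sequential(limit=None):
    output = os.path.join(os.path.dirname(__file__), "whatsnew-seq.html")
    with open(output, "wt", encoding="utf-8") as target_file:
        todo, done, canceled = write_body(target_file, limit)
    print("Read {}/{} feeds{}".format(done, todo,
            " [canceled]" if canceled else ""))
    if not canceled:
        webbrowser.open(output)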
def main():
    limit, concurrency = handle_commandline()
    print("Starting...")
    filename = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    futures = set()
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=concurrency) as executor:
        # TODO Fix me!
        # BEGIN Write correct implementation here
        for feed_source in feed.iter(filename):
            future = executor.submit(feed.read, feed_source, limit)
            futures.add(future)
        done, filename, canceled = process(futures)
        if canceled:
            executor.shutdown()
        # END
    print("Read {}/{} feeds using {} threads{}".format(done, len(futures),
            concurrency, " [canceled]" if canceled else ""))
    print()
    if not canceled:
        webbrowser.open(filename)
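# process() is called by main() above but not shown here.  A minimal sketch
# of one possible implementation, assuming it drains the completed futures
# (each holding the (ok, result) pair returned by feed.read), writes the
# successful results to an output file, and returns that file's name along
# with the counts the caller unpacks.  The output filename is an illustrative
# assumption, and any HTML header/footer the real implementation writes
# around the items is omitted.
def process(futures):
    canceled = False
    done = 0
    output = os.path.join(os.path.dirname(__file__), "whatsnew.html")
    with open(output, "wt", encoding="utf-8") as target_file:
        try:
            for future in concurrent.futures.as_completed(futures):
                ok, result = future.result()
                if not ok:
                    print("Failed to read:", result)
                elif result is not None:
                    for item in result:
                        target_file.write(item)
                    done += 1
        except KeyboardInterrupt:
            print("Cancelling...")
            canceled = True
    return done, output, canceled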
def add_jobs(filename, jobs):
    # Enqueue every feed source and return how many were queued.
    todo = 0  # guard against an empty feed list
    for todo, feed_source in enumerate(feed.iter(filename), start=1):
        jobs.put(feed_source)
    return todo
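# add_jobs() fills a queue of feed sources, but the consuming side is not
# shown here.  A minimal worker sketch, assuming the queue-based variant
# pairs the jobs queue with a results queue and runs each worker in a daemon
# thread: every worker repeatedly takes a feed source, reads it with
# feed.read(), and hands successful results to whoever drains the results
# queue.  The results-queue protocol and the limit parameter wiring are
# assumptions for illustration.
def worker(limit, jobs, results):
    while True:  # runs until the process exits (daemon thread)
        try:
            feed_source = jobs.get()
            ok, result = feed.read(feed_source, limit)
            if not ok:
                print("Failed to read:", result)
            elif result is not None:
                results.put(result)
        finally:
            jobs.task_done()  # lets jobs.join() return once all work is done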