# NOTE(review): this chunk was whitespace-mangled in extraction and is
# reformatted here. It begins mid-function (``directories``, ``downloaded``
# and ``logger`` are defined upstream — presumably the tail of an
# image-download routine) and is cut off mid-statement at the final
# ``parser.add_argument (`` call.

# Create every target directory that does not already exist.
for directory in directories:
    if not os.path.exists(directory):
        logger.debug('making directory %s', directory)
        os.makedirs(directory)

# Persist each downloaded image; opened in binary mode, so image_data is
# expected to be raw bytes.
logger.info('Writing images to disk.')
for filename, image_data in downloaded:
    with open(filename, 'wb') as outfile:
        outfile.write(image_data)


if __name__ == '__main__':
    # CLI entry point: build the argument parser for the image scraper.
    from common import CommonParser
    parser = CommonParser (
        description='Scrapes images from the given 4chan links.'
    )
    parser.add_argument (
        'link', nargs='+',
        help='boards/pages/threads, may either be full URLs or names like /g/'
    )
    parser.add_argument (
        '-o', '--output', metavar='directory', default='.',
        help='where to create the directory hierarchy, defaults to \'.\''
    )
    # NOTE(review): source chunk is truncated here — the arguments of this
    # final add_argument call are not visible in this view.
    parser.add_argument (
# NOTE(review): whitespace-mangled chunk, reformatted here. It begins
# mid-scope — ``pool``, ``work``, ``e``, ``links``, ``all_boards`` and
# ``classify`` are defined upstream (presumably inside build_cache) — so the
# original indentation of the leading statements is a best guess; confirm
# against the full file.
pool.push(work, e)
if not links:
    # No explicit links were given: fall back to scraping every board.
    links = all_boards
for link in map(classify, links):
    pool.push(work, link)
# Block until every queued work item has completed.
pool.join()
logger.info('Join complete.')


if __name__ == '__main__':
    # CLI entry point for the web-cache builder.
    from common import CommonParser
    parser = CommonParser (
        description='Builds the web cache.',
        epilog='if no links are given all of 4chan is scraped'
    )
    parser.add_argument (
        'link', nargs='*',
        help='boards/pages/threads, may either be full URLs or names like /g/'
    )
    args = parser.parse_args()
    # sanity_check presumably returns truthy on invalid input — abort with
    # a nonzero exit code in that case. TODO confirm against common.py.
    if parser.sanity_check(args):
        exit(1)
    parser.pre_process(args)
    build_cache(*args.link)
    parser.post_process(args)
for directory in directories: if not os.path.exists(directory): logger.debug('making directory %s', directory) os.makedirs(directory) logger.info('Writing images to disk.') for filename, image_data in downloaded: with open(filename, 'w') as outfile: outfile.write(image_data) if __name__ == '__main__': from common import CommonParser parser = CommonParser ( description='Scrapes images from the given 4chan links.' ) parser.add_argument ( 'link', nargs='+', help='boards/pages/threads, may either be full URLs or names like /g/' ) parser.add_argument ( '-o', '--output', metavar='directory', default='.', help='where to create the directory hierarchy, defaults to \'.\'' ) parser.add_argument (