def scrapeShogunThreaded():
    nut.initTitles()
    nut.initFiles()

    scrapeThreads = []
    numThreads = 8

    q = queue.Queue()

    for region in cdn.regions():
        q.put(region)

    for i in range(numThreads):
        t = threading.Thread(target=scrapeShogunWorker, args=[q])
        t.daemon = True
        t.start()
        scrapeThreads.append(t)

    q.join()

    for i in range(numThreads):
        q.put(None)

    for t in scrapeThreads:
        t.join()

    Titles.saveAll()
def scrapeShogun():
    nut.initTitles()
    nut.initFiles()

    for region in cdn.regions():
        cdn.Shogun.scrapeTitles(region)

    Titles.saveAll()
def scrapeShogun(force=False, region=None):
    initTitles()
    initFiles()

    if region is None:
        for region in cdn.regions():
            cdn.Shogun.scrapeTitles(region, force=force)
    else:
        cdn.Shogun.scrapeTitles(region, force=force)

    Titles.saveAll()
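# A hypothetical invocation of the single-region path; the 'US' region code is
# an assumption for illustration, and valid region strings ultimately come
# from cdn.regions():
#
#     scrapeShogun(force=True, region='US')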
def scrapeShogunThreaded(force=False):
    if not hasCdn:
        return

    initTitles()
    initFiles()

    scrapeThreads = []
    numThreads = 8

    # Config.reverse walks the regions in reverse order via a LIFO queue.
    if Config.reverse:
        q = queue.LifoQueue()
    else:
        q = queue.Queue()

    for region in cdn.regions():
        q.put(region)

    # Spawn daemon workers that pull regions off the shared queue.
    for i in range(numThreads):
        t = threading.Thread(target=scrapeShogunWorker, args=[q, force])
        t.daemon = True
        t.start()
        scrapeThreads.append(t)

    Print.info('joining shogun queue')
    q.join()

    # Push one sentinel per worker so each thread can exit its loop.
    for i in range(numThreads):
        q.put(None)

    i = 0
    for t in scrapeThreads:
        i += 1
        t.join()
        Print.info('joined thread %d of %d' % (i, len(scrapeThreads)))

    # q.join()
    Print.info('saving titles')
    Titles.saveAll()
    Print.info('titles saved')
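# scrapeShogunWorker is referenced above but not defined in this section. A
# minimal sketch of what it might look like, assuming each worker drains
# regions from the shared queue until it receives the None sentinel, delegates
# to cdn.Shogun.scrapeTitles, and marks each region done so q.join() can
# return; the exact signature and error handling here are assumptions.
def scrapeShogunWorker(q, force=False):
    while True:
        region = q.get()

        if region is None:
            # sentinel pushed by scrapeShogunThreaded after the queue is joined
            break

        try:
            cdn.Shogun.scrapeTitles(region, force=force)
        except BaseException as e:
            Print.info('shogun worker exception: ' + str(e))

        q.task_done()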