# NOTE(review): this chunk arrived whitespace-mangled (one physical line);
# the indentation below is reconstructed from statement semantics. The
# leading `elif` belongs to a crawler-class dispatch chain inside a
# get_crawlers() definition that starts before this chunk, so the nesting
# depth at the cut is assumed — TODO confirm against the full file.

        # Giphy: build one search-API URI per configured site (the site
        # string doubles as the search keyword via %s substitution)
        elif crawler_class == Giphy:
            crawler_uris = {site: "https://api.giphy.com/v1/gifs/search?q=%s" % site for site in crawler_sites}
        # ensure a per-class sub-dict exists, then (re)build one crawler
        # instance per configured site URI for this class
        if crawler_class_name not in crawlers:
            crawlers[crawler_class_name] = {}
        crawlers[crawler_class_name] = {site: crawler_class(crawler_uris[site], site) for site in crawler_uris}
    return crawlers, factors


# module-level setup: build all crawler instances from the "Sites" config
# section; abort hard if nothing is configured
(sources, factors) = get_crawlers(config, "Sites")
if not sources:
    raise Exception("no sources configured")
if factors:
    # presumably per-crawler weighting factors — verify against
    # Crawler.set_factors, which is defined outside this chunk
    Crawler.set_factors(factors)


# wrapper function for cache filling
def cache_fill_loop():
    """Endlessly re-crawl any configured site whose cached image count has
    dropped below ``min_cache_imgs_before_refill``.

    Reads the module-level ``sources`` mapping (crawler class name ->
    {site -> crawler instance}) and ``Crawler.info()`` statistics; intended
    to run forever (e.g. in a background thread — not visible from here).
    """
    global sources
    while True:
        # fill cache up to min_cache_imgs per site
        info = Crawler.info()
        for crawler in sources:
            for site in sources[crawler]:
                # stats key convention: "<crawler-class-name>_<site>"
                key = crawler + "_" + site
                # refill when the site has no cached images yet, or fewer
                # than the configured minimum
                if key not in info["images_per_site"] or info["images_per_site"][key] < min_cache_imgs_before_refill:
                    try:
                        sources[crawler][site].crawl()
                        # NOTE(review): the matching except clause(s) and
                        # the remainder of the loop body lie beyond this
                        # chunk and are not visible here