import re

import utils


def caching_fn(wrapped, instance, args, kwargs):
    # wrapt-style wrapper: serve the stored index if one exists,
    # otherwise download it via the wrapped function and store it.
    # repo_name is assumed to be defined at module level (or bound
    # in an enclosing scope).
    index_name = utils.index_location_format % repo_name
    index = utils.get_index_from_store(utils.config_dir, index_name)
    if index:
        return index
    print("Downloading index for %s" % repo_name)
    index = wrapped(*args, **kwargs)
    utils.store_index(index, utils.config_dir, index_name)
    return index
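
# A minimal wiring sketch: caching_fn's (wrapped, instance, args, kwargs)
# signature matches wrapt's wrapper protocol, so it is presumably applied
# through wrapt.decorator. download_index is a hypothetical example target,
# not a function from this module.
import wrapt

cached = wrapt.decorator(caching_fn)


@cached
def download_index():
    # Stand-in for the real network fetch; once stored, the decorator
    # serves the cached copy on subsequent calls.
    return {}
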
def get_chapters(url, name):
    # site and site_folder are assumed to be module-level settings for
    # the site being scraped. Build a filesystem-safe index name by
    # replacing spaces and the punctuation between ' ' and '/' with
    # underscores.
    chapters_index_name = utils.index_location_format % re.sub(
        r'[ -/]', '_', name.lower())
    chapters = utils.get_index_from_store(site_folder, chapters_index_name)
    if chapters:
        # Keys come back from the store as strings, so convert them
        # back to ints before returning.
        return {int(chapter): url for chapter, url in chapters.items()}
    soup = utils.get_parsed(site + url)
    div = soup.find(id='chapterlist')
    chapters = {}
    for link in div.find_all('a'):
        href = link.get('href')  # renamed from url to avoid shadowing the parameter
        if href and href.startswith('/') and len(href) > 1:
            # The link text is "<name> <number>"; strip the series name
            # to recover the chapter number.
            chapters[int(link.string.replace(name, '').strip())] = href
    utils.store_index(chapters, site_folder, chapters_index_name)
    return chapters
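
# A hedged usage sketch: the series URL and name below are illustrative,
# and `site` is assumed to hold the scraped site's base URL.
if __name__ == '__main__':
    chapters = get_chapters('/example-series', 'Example Series')
    latest = max(chapters)
    print("Latest chapter: %d -> %s%s" % (latest, site, chapters[latest]))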