def regenerate_all_pages_related_to_videos(video_ids):
    """Same as above, but on a list of videos.

    For every video id, walk from its video pages and related exercise
    pages back up to the topic-tree root, collect the unique set of page
    paths, force-regenerate the cache for each, and return that set.
    """
    paths_to_regenerate = set()  # unique set

    # The topic-tree root path is loop-invariant; the original fetched the
    # entire topic tree once per leaf path, which is needlessly expensive.
    root_path = topic_tools.get_topic_tree()['path']

    for video_id in video_ids:
        # Video pages plus their related exercise pages are the leaves;
        # mirror the leaf_paths pattern used by
        # invalidate_all_pages_related_to_video for consistency.
        leaf_paths = set(get_video_page_paths(video_id=video_id)) \
            .union(get_exercise_page_paths(video_id=video_id))
        for leaf_path in leaf_paths:
            # start at the root
            paths_to_regenerate.update(
                generate_all_paths(path=leaf_path, base_path=root_path))

    # Now, regenerate any page.
    for path in paths_to_regenerate:
        create_cache(path=path, force=True)

    return paths_to_regenerate
def invalidate_all_pages_related_to_video(video_id=None):
    """Given a video file, recurse backwards up the hierarchy and invalidate
    all pages. Also include video pages and related exercise pages.
    """
    # The topic-tree root path is loop-invariant; the original re-fetched
    # the entire topic tree on every iteration of the leaf loop.
    root_path = topic_tools.get_topic_tree()['path']

    # Expire all video files and related paths
    video_paths = get_video_page_paths(video_id=video_id)
    exercise_paths = get_exercise_page_paths(video_id=video_id)
    leaf_paths = set(video_paths).union(set(exercise_paths))

    for leaf_path in leaf_paths:
        # start at the root
        all_paths = generate_all_paths(path=leaf_path, base_path=root_path)
        # Only expire pages that actually have a cache entry.
        for path in filter(has_cache_key, all_paths):
            expire_page(path=path)
def invalidate_all_pages_related_to_video(video_id=None):
    """Given a video file, recurse backwards up the hierarchy and invalidate
    all pages. Also include video pages and related exercise pages.
    """
    # The topic-tree root path is loop-invariant; the original re-fetched
    # the entire topic tree on every iteration of the leaf loop.
    root_path = topic_tools.get_topic_tree()['path']

    # Expire all video files and related paths
    video_paths = get_video_page_paths(video_id=video_id)
    exercise_paths = get_exercise_page_paths(video_id=video_id)
    leaf_paths = set(video_paths).union(set(exercise_paths))

    for leaf_path in leaf_paths:
        # start at the root
        all_paths = generate_all_paths(path=leaf_path, base_path=root_path)
        # Only expire pages that actually have a cache entry.
        for path in filter(has_cache_key, all_paths):
            expire_page(path=path)
from shared import topic_tools

# Module-level snapshots of the topic-tree data, loaded once at import time.
# NOTE(review): these are computed at import; if the underlying topic data
# changes at runtime, these names go stale — confirm that is acceptable.
TOPICS = topic_tools.get_topic_tree()
NODE_CACHE = topic_tools.get_node_cache()
SLUG2ID_MAP = topic_tools.get_slug2id_map()
from shared import topic_tools

# Module-level snapshots of the topic-tree data, loaded once at import time.
# NOTE(review): these are computed at import; if the underlying topic data
# changes at runtime, these names go stale — confirm that is acceptable.
TOPICS = topic_tools.get_topic_tree()
NODE_CACHE = topic_tools.get_node_cache()
SLUG2ID_MAP = topic_tools.get_slug2id_map()
def regenerate_all_pages_related_to_videos(video_ids):
    """Same as above, but on a list of videos.

    For every video id, walk from its video pages and related exercise
    pages back up to the topic-tree root, collect the unique set of page
    paths, force-regenerate the cache for each, and return that set.
    """
    paths_to_regenerate = set()  # unique set

    # The topic-tree root path is loop-invariant; the original fetched the
    # entire topic tree once per leaf path, which is needlessly expensive.
    root_path = topic_tools.get_topic_tree()['path']

    for video_id in video_ids:
        # Video pages plus their related exercise pages are the leaves;
        # mirror the leaf_paths pattern used by
        # invalidate_all_pages_related_to_video for consistency.
        leaf_paths = set(get_video_page_paths(video_id=video_id)) \
            .union(get_exercise_page_paths(video_id=video_id))
        for leaf_path in leaf_paths:
            # start at the root
            paths_to_regenerate.update(
                generate_all_paths(path=leaf_path, base_path=root_path))

    # Now, regenerate any page.
    for path in paths_to_regenerate:
        create_cache(path=path, force=True)

    return paths_to_regenerate
def get_video_ids_for_topic(topic_id, topic_tree=None):
    """Nobody actually calls this, just for utility when using the shell."""
    # Explicit None check: the original used `topic_tree or get_topic_tree()`,
    # which would silently discard a deliberately-passed empty (falsy) tree.
    if topic_tree is None:
        topic_tree = get_topic_tree()
    return utils.videos.get_video_ids_for_topic(topic_id, topic_tree)
def get_video_ids_for_topic(topic_id, topic_tree=None):
    """Nobody actually calls this, just for utility when using the shell."""
    # Explicit None check: the original used `topic_tree or get_topic_tree()`,
    # which would silently discard a deliberately-passed empty (falsy) tree.
    if topic_tree is None:
        topic_tree = get_topic_tree()
    return utils.videos.get_video_ids_for_topic(topic_id, topic_tree)
def get_annotated_topic_tree(request):
    """Return the topic tree as JSON, annotated with per-video status.

    The status mapping is built from VideoFile rows: youtube_id ->
    percent_complete.
    """
    status_by_youtube_id = dict(
        VideoFile.objects.values_list("youtube_id", "percent_complete")
    )
    annotated_tree = annotate_topic_tree(
        get_topic_tree(), statusdict=status_by_youtube_id
    )
    return JsonResponse(annotated_tree)