def test_video_added_with_cache(self):
    """ Add a video in the filesystem, call videoscan to create the videofile object and cache items """
    # Scan the filesystem; auto_cache=True should also populate the cache.
    out = call_command("videoscan", auto_cache=True)

    # Exactly one VideoFile should now exist, matching the video we created.
    video_files = VideoFile.objects.all()
    self.assertEqual(video_files.count(), 1, "Make sure there is now one VideoFile object.")
    self.assertEqual(video_files[0].youtube_id, self.youtube_id, "Make sure the video is the one we created.")

    # The cache must be non-empty and contain every page path for this video.
    self.assertTrue(self.get_num_cache_entries() > 0, "Check that cache is not empty.")
    for path in topic_tools.get_video_page_paths(video_id=self.video_id):
        self.assertTrue(caching.has_cache_key(path), "Check that cache has path %s" % path)
def invalidate_all_pages_related_to_video(video_id=None):
    """Given a video file, recurse backwards up the hierarchy and invalidate all pages.
    Also include video pages and related exercise pages.

    Args:
        video_id: ID of the video whose cached pages should be expired.
    """
    # Expire all video files and related paths
    video_paths = topic_tools.get_video_page_paths(video_id=video_id)
    exercise_paths = topic_tools.get_exercise_page_paths(video_id=video_id)
    leaf_paths = set(video_paths).union(set(exercise_paths))

    # The topic-tree root path is loop-invariant; compute it once instead of
    # re-walking the topic tree for every leaf page.
    base_path = topic_tools.get_topic_tree()['path']

    for leaf_path in leaf_paths:
        # Expand each leaf into all of its ancestor paths, starting at the root,
        # and expire every one that currently has a cache entry.
        all_paths = generate_all_paths(path=leaf_path, base_path=base_path)
        for path in filter(has_cache_key, all_paths):
            expire_page(path=path)
def regenerate_all_pages_related_to_videos(video_ids):
    """Regenerate all webpages related to a specific list of videos.

    This is good for increasing new server performance.

    Args:
        video_ids: iterable of video IDs whose pages should be re-cached.

    Returns:
        The set of all page paths that were regenerated.
    """
    # The topic-tree root path is loop-invariant; compute it once instead of
    # re-walking the topic tree twice per video.
    base_path = topic_tools.get_topic_tree()['path']

    paths_to_regenerate = set()  # unique set
    for video_id in video_ids:
        # Video pages and related exercise pages are both cache leaves; expand
        # each into all of its ancestor paths, starting at the root.
        leaf_paths = list(topic_tools.get_video_page_paths(video_id=video_id))
        leaf_paths += topic_tools.get_exercise_page_paths(video_id=video_id)
        for leaf_path in leaf_paths:
            paths_to_regenerate.update(generate_all_paths(path=leaf_path, base_path=base_path))

    # Now, regenerate any page.
    for path in paths_to_regenerate:
        create_cache_entry(path=path, force=True)

    return paths_to_regenerate
def invalidate_all_pages_related_to_video(video_id=None):
    """Given a video file, recurse backwards up the hierarchy and invalidate all pages.
    Also include video pages and related exercise pages.

    Args:
        video_id: ID of the video whose cached pages should be expired.
    """
    # Expire all video files and related paths
    video_paths = topic_tools.get_video_page_paths(video_id=video_id)
    exercise_paths = topic_tools.get_exercise_page_paths(video_id=video_id)
    leaf_paths = set(video_paths).union(set(exercise_paths))

    # The topic-tree root path is loop-invariant; compute it once instead of
    # re-walking the topic tree for every leaf page.
    base_path = topic_tools.get_topic_tree()['path']

    for leaf_path in leaf_paths:
        # Expand each leaf into all of its ancestor paths, starting at the root,
        # and expire every one that currently has a cache entry.
        all_paths = generate_all_paths(path=leaf_path, base_path=base_path)
        for path in filter(has_cache_key, all_paths):
            expire_page(path=path)
def test_video_deleted_with_cache(self):
    """ Run videoscan to create cache items, then re-run to verify that the cache is cleared. """
    # First scan populates the cache for the video's pages.
    out = call_command("videoscan", auto_cache=True)
    cached_paths = topic_tools.get_video_page_paths(video_id=self.video_id)
    for path in cached_paths:
        self.assertTrue(caching.has_cache_key(path), "Check that cache has path %s" % path)

    # Delete the video file from disk.
    self.assertTrue(os.path.exists(self.fake_video_file), "Check that video file exists.")
    os.remove(self.fake_video_file)
    self.assertFalse(os.path.exists(self.fake_video_file), "Check that video file no longer exists.")

    # Re-scan: the VideoFile object and its cache entries should now be gone.
    out = call_command("videoscan")
    self.assertEqual(VideoFile.objects.all().count(), 0, "Make sure there are now no VideoFile objects.")
    for path in cached_paths:
        self.assertFalse(caching.has_cache_key(path), "Check that cache does NOT have path %s" % path)
def regenerate_all_pages_related_to_videos(video_ids):
    """Regenerate all webpages related to a specific list of videos.

    This is good for increasing new server performance.

    Args:
        video_ids: iterable of video IDs whose pages should be re-cached.

    Returns:
        The set of all page paths that were regenerated.
    """
    # The topic-tree root path is loop-invariant; compute it once instead of
    # re-walking the topic tree twice per video.
    base_path = topic_tools.get_topic_tree()['path']

    paths_to_regenerate = set()  # unique set
    for video_id in video_ids:
        # Video pages and related exercise pages are both cache leaves; expand
        # each into all of its ancestor paths, starting at the root.
        leaf_paths = list(topic_tools.get_video_page_paths(video_id=video_id))
        leaf_paths += topic_tools.get_exercise_page_paths(video_id=video_id)
        for leaf_path in leaf_paths:
            paths_to_regenerate.update(generate_all_paths(path=leaf_path, base_path=base_path))

    # Now, regenerate any page.
    for path in paths_to_regenerate:
        create_cache_entry(path=path, force=True)

    return paths_to_regenerate