def get_playlist_videos(playlist_title):
    """Return a JSON string describing the live videos of the named playlist.

    Videos are returned in playlist order, each as a dict of basic metadata
    plus a canonical khanacademy.org URL.
    """
    playlist = Playlist.all().filter('title =', playlist_title).get()

    association_query = VideoPlaylist.all()
    association_query.filter('playlist =', playlist)
    association_query.filter('live_association = ', True)
    association_query.order('video_position')

    video_dicts = []
    for association in association_query.fetch(500):
        video = association.video
        video_dicts.append({
            'youtube_id': video.youtube_id,
            'youtube_url': video.url,
            'title': video.title,
            'description': video.description,
            'keywords': video.keywords,
            'readable_id': video.readable_id,
            'ka_url': "http://www.khanacademy.org/video/%s?playlist=%s" % (
                video.readable_id, urllib.quote_plus(playlist_title)),
            'video_position': association.video_position,
            'views': video.views,
        })

    return json.dumps(video_dicts, indent=4)
def video_title_dicts():
    """Return [{title, url}] for every video with a live playlist association."""
    # Collect the keys of videos referenced by a live VideoPlaylist.
    # get_value_for_datastore reads the raw key without dereferencing the
    # ReferenceProperty (no per-entity fetch).
    live_keys = {}
    live_query = VideoPlaylist.all().filter('live_association = ', True)
    for association in live_query:
        live_keys[VideoPlaylist.video.get_value_for_datastore(association)] = True

    dicts = []
    for video in Video.all():
        if video.key() in live_keys:
            dicts.append({"title": video.title,
                          "url": "/video/%s" % video.readable_id})
    return dicts
def get(self):
    """Delete up to 200 VideoPlaylist entities whose association is not live."""
    candidates = VideoPlaylist.all().fetch(200)
    # Keep the original != True test: it also treats a None flag as stale.
    stale = [vp for vp in candidates if vp.live_association != True]
    db.delete(stale)
def commitLiveAssociations(self):
    """Flip each VideoPlaylist's live flag to match the latest sync generation.

    An association is live iff its last_live_association_generation is at
    least the generation recorded at the start of the most recent YouTube
    sync. Only entities whose flag actually changes are re-put.
    """
    generation = int(Setting.last_youtube_sync_generation_start())
    changed = []
    for association in VideoPlaylist.all():
        should_be_live = association.last_live_association_generation >= generation
        if association.live_association != should_be_live:
            association.live_association = should_be_live
            changed.append(association)
    db.put(changed)
def commitLiveAssociations(self):
    """Persist updated live_association flags after a YouTube sync pass.

    Associations touched by the most recent sync generation become live;
    everything else goes (or stays) non-live. Unchanged entities are skipped
    to keep the datastore write small.
    """
    current_generation = int(
        Setting.last_youtube_sync_generation_start())

    dirty_entities = []
    for association in VideoPlaylist.all():
        is_live = (association.last_live_association_generation
                   >= current_generation)
        if association.live_association == is_live:
            continue
        association.live_association = is_live
        dirty_entities.append(association)

    db.put(dirty_entities)
def copyTopicsToPlaylist(self):
    """Mirror each content topic's video ordering into its same-titled Playlist.

    For every content topic that has a Playlist with a matching standalone
    title, compare the topic's ordered Video child keys against the
    playlist's current VideoPlaylist entries. If they differ, delete the old
    entries and recreate them in topic order with live_association=True.
    Fix: removed the unused local `child_keys` (the code reads
    topic.child_keys directly).
    """
    topic_list = Topic.get_content_topics()
    for topic in topic_list:
        playlist = Playlist.all().filter(
            "title =", topic.standalone_title).get()
        if not playlist:
            logging.info("Playlist matching topic " +
                         topic.standalone_title + " not found.")
            continue

        logging.info("Copying topic " + topic.standalone_title +
                     " to playlist.")
        vps = VideoPlaylist.all().filter(
            "playlist =", playlist).order("video_position").fetch(10000)

        playlist_keys = []
        for vp in vps:
            try:
                playlist_keys.append(vp.video.key())
            except db.ReferencePropertyResolveError:
                # Tolerate associations that point at deleted videos.
                logging.info(
                    "Found reference to missing video in VideoPlaylist!")

        topic_keys = [
            key for key in topic.child_keys if key.kind() == "Video"
        ]

        if playlist_keys == topic_keys:
            logging.info("Child keys identical. No changes will be made.")
        else:
            logging.info("Deleting old VideoPlaylists...")
            db.delete(vps)
            vps = []
            for i, child_key in enumerate(topic.child_keys):
                if child_key.kind() == "Video":
                    vps.append(
                        VideoPlaylist(video=child_key,
                                      playlist=playlist,
                                      video_position=i,
                                      live_association=True))
            logging.info("Creating new VideoPlaylists...")
            db.put(vps)
def video_title_dicts():
    """Build title/url dicts for all videos that are in a live playlist."""
    # Membership set of video keys that appear in a live VideoPlaylist;
    # raw keys are read so the referenced videos are not fetched here.
    live_key_set = set()
    for association in VideoPlaylist.all().filter('live_association = ', True):
        live_key_set.add(
            VideoPlaylist.video.get_value_for_datastore(association))

    return [{"title": video.title,
             "url": "/video/%s" % video.readable_id}
            for video in Video.all()
            if video.key() in live_key_set]
def get_playlist_videos_json(playlist_title):
    """Render the live videos of the named playlist as a JSON string."""
    playlist = Playlist.all().filter('title =', playlist_title).get()

    # Pre-fetch every Video tagged with this playlist, keyed by datastore key.
    video_query = Video.all()
    video_query.filter('playlists = ', playlist_title)
    video_key_dict = Video.get_dict(video_query, lambda video: video.key())

    # Pre-fetch the live associations for this playlist.
    association_query = VideoPlaylist.all()
    association_query.filter('playlist =', playlist)
    association_query.filter('live_association =', True)
    video_playlist_key_dict = VideoPlaylist.get_key_dict(association_query)

    api_dicts = get_playlist_video_api_dicts(
        playlist, video_key_dict, video_playlist_key_dict)
    return json.dumps(api_dicts, indent=4)
def get_video_library_json_compressed():
    """Build the full topic/video library JSON and return it zlib-compressed."""
    video_key_dict = Video.get_dict(Video.all(), lambda video: video.key())

    live_query = VideoPlaylist.all()
    live_query.filter('live_association =', True)
    video_playlist_key_dict = VideoPlaylist.get_key_dict(live_query)

    playlist_api_dicts = []
    for playlist in get_all_topic_playlists():
        entry = JsonApiDict.playlist(playlist)
        entry["videos"] = get_playlist_video_api_dicts(
            playlist, video_key_dict, video_playlist_key_dict)
        playlist_api_dicts.append(entry)

    # We compress this huge json payload so it'll fit in memcache
    return zlib.compress(json.dumps(playlist_api_dicts, indent=4))
def get(self):
    """Render the SAT Preparation playlist page (sat.html)."""
    playlist_title = "SAT Preparation"
    sat_playlist = Playlist.all().filter('title =', playlist_title).get()

    association_query = VideoPlaylist.all()
    association_query.filter('playlist =', sat_playlist)
    association_query.filter('live_association = ', True)
    association_query.order('video_position')

    template_values = {'videos': association_query.fetch(500)}
    self.render_jinja2_template('sat.html', template_values)
def get_video_library_json_compressed():
    """Return the whole topic/video library as zlib-compressed JSON."""
    playlists = get_all_topic_playlists()
    video_key_dict = Video.get_dict(Video.all(), lambda video: video.key())

    association_query = VideoPlaylist.all().filter('live_association =', True)
    video_playlist_key_dict = VideoPlaylist.get_key_dict(association_query)

    def build_entry(playlist):
        # One API dict per playlist with its ordered video dicts attached.
        entry = JsonApiDict.playlist(playlist)
        entry["videos"] = get_playlist_video_api_dicts(
            playlist, video_key_dict, video_playlist_key_dict)
        return entry

    payload = [build_entry(playlist) for playlist in playlists]

    # The JSON payload is huge; compress it so it fits in memcache.
    return zlib.compress(json.dumps(payload, indent=4))
def copyTopicsToPlaylist(self):
    """Sync each content topic's ordered videos into the same-titled Playlist.

    Rebuilds a playlist's VideoPlaylist entries whenever their video key
    order no longer matches the topic's child Video keys. Fix: dropped the
    unused local `child_keys`; the code already iterates topic.child_keys.
    """
    for topic in Topic.get_content_topics():
        playlist = Playlist.all().filter(
            "title =", topic.standalone_title).get()
        if playlist:
            logging.info("Copying topic " + topic.standalone_title +
                         " to playlist.")
            vps = VideoPlaylist.all().filter(
                "playlist =",
                playlist).order("video_position").fetch(10000)

            playlist_keys = []
            for vp in vps:
                try:
                    playlist_keys.append(vp.video.key())
                except db.ReferencePropertyResolveError:
                    # Skip dangling references to deleted videos.
                    logging.info(
                        "Found reference to missing video in VideoPlaylist!")

            topic_keys = [key for key in topic.child_keys
                          if key.kind() == "Video"]

            if playlist_keys == topic_keys:
                logging.info("Child keys identical. No changes will be made.")
            else:
                logging.info("Deleting old VideoPlaylists...")
                db.delete(vps)
                vps = []
                for i, child_key in enumerate(topic.child_keys):
                    if child_key.kind() == "Video":
                        vps.append(VideoPlaylist(video=child_key,
                                                 playlist=playlist,
                                                 video_position=i,
                                                 live_association=True))
                logging.info("Creating new VideoPlaylists...")
                db.put(vps)
        else:
            logging.info("Playlist matching topic " +
                         topic.standalone_title + " not found.")
def get_playlist_videos(playlist_title):
    """Serialize the live, ordered videos of a playlist to a JSON string."""
    playlist = Playlist.all().filter('title =', playlist_title).get()

    query = VideoPlaylist.all()
    query.filter('playlist =', playlist)
    query.filter('live_association = ', True)
    query.order('video_position')

    def as_dict(association):
        # Flatten one VideoPlaylist + its Video into a plain metadata dict.
        video = association.video
        ka_url = "http://www.khanacademy.org/video/%s?playlist=%s" % (
            video.readable_id, urllib.quote_plus(playlist_title))
        return {
            'youtube_id': video.youtube_id,
            'youtube_url': video.url,
            'title': video.title,
            'description': video.description,
            'keywords': video.keywords,
            'readable_id': video.readable_id,
            'ka_url': ka_url,
            'video_position': association.video_position,
            'views': video.views,
        }

    return json.dumps([as_dict(a) for a in query.fetch(500)], indent=4)
def library_content_html():
    """Regenerate (and return) the HTML for the library content page.

    Fetches all videos, playlists and live video/playlist associations in
    parallel, groups the live videos under the playlists named in
    topics_list, and renders library_content_template.html. Side effects:
    updates the distinct live video count and the content-generation
    timestamp in Setting. Fix: replaced deprecated dict.has_key() with the
    `in` operator (same semantics, Python 3-compatible).
    """
    # No cache found -- regenerate HTML
    smart_history = getSmartHistoryContent()
    all_playlists = []

    dict_videos = {}              # video key -> Video
    dict_videos_counted = {}      # youtube_id -> True (distinct live videos)
    dict_playlists = {}           # playlist key -> Playlist
    dict_playlists_by_title = {}  # title -> Playlist, topics_list only
    dict_video_playlists = {}     # playlist key -> [{"video":..,"playlist":..}]

    async_queries = [
        Video.all(),
        Playlist.all(),
        VideoPlaylist.all().filter('live_association = ',
                                   True).order('video_position'),
    ]

    results = util.async_queries(async_queries)

    for video in results[0].get_result():
        dict_videos[video.key()] = video

    for playlist in results[1].get_result():
        dict_playlists[playlist.key()] = playlist
        if playlist.title in topics_list:
            dict_playlists_by_title[playlist.title] = playlist

    for video_playlist in results[2].get_result():
        # Read the raw keys so the referenced entities are not re-fetched.
        playlist_key = VideoPlaylist.playlist.get_value_for_datastore(
            video_playlist)
        video_key = VideoPlaylist.video.get_value_for_datastore(
            video_playlist)

        if video_key in dict_videos and playlist_key in dict_playlists:
            video = dict_videos[video_key]
            playlist = dict_playlists[playlist_key]
            fast_video_playlist_dict = {"video": video, "playlist": playlist}

            if playlist_key in dict_video_playlists:
                dict_video_playlists[playlist_key].append(
                    fast_video_playlist_dict)
            else:
                dict_video_playlists[playlist_key] = [fast_video_playlist_dict]

            if playlist.title in dict_playlists_by_title:
                # Only count videos in topics_list
                dict_videos_counted[video.youtube_id] = True

    # Update count of all distinct videos associated w/ a live playlist
    Setting.count_videos(len(dict_videos_counted))

    for topic in topics_list:
        if topic in dict_playlists_by_title:
            playlist = dict_playlists_by_title[topic]
            playlist_key = playlist.key()
            playlist_videos = dict_video_playlists.get(playlist_key) or []

            if not playlist_videos:
                logging.error('Playlist %s has no videos!', playlist.title)

            playlist_data = {
                'title': topic,
                'topic': topic,
                'playlist': playlist,
                'videos': playlist_videos,
                'next': None
            }

            all_playlists.append(playlist_data)

    # Thread a 'next' pointer through consecutive playlists.
    playlist_data_prev = None
    for playlist_data in all_playlists:
        if playlist_data_prev:
            playlist_data_prev['next'] = playlist_data
        playlist_data_prev = playlist_data

    # Separating out the columns because the formatting is a little
    # different on each column
    template_values = {
        'App': App,
        'all_playlists': all_playlists,
        'smart_history': smart_history,
    }

    html = shared_jinja.get().render_template(
        "library_content_template.html", **template_values)

    # Set shared date of last generated content
    Setting.cached_library_content_date(str(datetime.datetime.now()))

    return html
def get(self):
    """Clear the live flag on every VideoPlaylist (up to 100000 entities)."""
    associations = VideoPlaylist.all().fetch(100000)
    for association in associations:
        association.live_association = False
    # Re-put the whole batch, including entities that were already False,
    # exactly as the original handler did.
    db.put(associations)
def library_content_html():
    """Rebuild the library content page HTML from the datastore.

    Runs the Video, Playlist and live-VideoPlaylist queries concurrently,
    buckets live videos under the topics named in topics_list, and renders
    the library template. As side effects it stores the distinct live video
    count and the generation timestamp via Setting. Fix: deprecated
    dict.has_key() calls replaced with the equivalent `in` tests.
    """
    # No cache found -- regenerate HTML
    smart_history = getSmartHistoryContent()
    all_playlists = []

    dict_videos = {}
    dict_videos_counted = {}
    dict_playlists = {}
    dict_playlists_by_title = {}
    dict_video_playlists = {}

    async_queries = [
        Video.all(),
        Playlist.all(),
        VideoPlaylist.all().filter('live_association = ',
                                   True).order('video_position'),
    ]
    results = util.async_queries(async_queries)

    for video in results[0].get_result():
        dict_videos[video.key()] = video

    for playlist in results[1].get_result():
        dict_playlists[playlist.key()] = playlist
        if playlist.title in topics_list:
            dict_playlists_by_title[playlist.title] = playlist

    for video_playlist in results[2].get_result():
        # get_value_for_datastore returns the raw key without fetching the
        # referenced entity.
        playlist_key = VideoPlaylist.playlist.get_value_for_datastore(
            video_playlist)
        video_key = VideoPlaylist.video.get_value_for_datastore(
            video_playlist)

        if video_key in dict_videos and playlist_key in dict_playlists:
            video = dict_videos[video_key]
            playlist = dict_playlists[playlist_key]
            fast_video_playlist_dict = {"video": video,
                                        "playlist": playlist}

            if playlist_key in dict_video_playlists:
                dict_video_playlists[playlist_key].append(
                    fast_video_playlist_dict)
            else:
                dict_video_playlists[playlist_key] = [
                    fast_video_playlist_dict]

            if playlist.title in dict_playlists_by_title:
                # Only count videos in topics_list
                dict_videos_counted[video.youtube_id] = True

    # Update count of all distinct videos associated w/ a live playlist
    Setting.count_videos(len(dict_videos_counted))

    for topic in topics_list:
        if topic in dict_playlists_by_title:
            playlist = dict_playlists_by_title[topic]
            playlist_key = playlist.key()
            playlist_videos = dict_video_playlists.get(playlist_key) or []

            if not playlist_videos:
                logging.error('Playlist %s has no videos!', playlist.title)

            playlist_data = {
                'title': topic,
                'topic': topic,
                'playlist': playlist,
                'videos': playlist_videos,
                'next': None
            }
            all_playlists.append(playlist_data)

    # Link each playlist_data to the one that follows it.
    playlist_data_prev = None
    for playlist_data in all_playlists:
        if playlist_data_prev:
            playlist_data_prev['next'] = playlist_data
        playlist_data_prev = playlist_data

    # Separating out the columns because the formatting is a little
    # different on each column
    template_values = {
        'App': App,
        'all_playlists': all_playlists,
        'smart_history': smart_history,
    }

    html = shared_jinja.get().render_template(
        "library_content_template.html", **template_values)

    # Set shared date of last generated content
    Setting.cached_library_content_date(str(datetime.datetime.now()))

    return html
def updateVideoAndPlaylistData(self):
    """Sync playlists and videos from the KhanAcademyHebrew YouTube account.

    Walks the account's playlist feed 50 entries at a time, creating or
    updating Playlist, Video and VideoPlaylist entities to mirror the feed.
    Each touched VideoPlaylist is stamped with the current sync generation so
    commitLiveAssociations() can later flip the live flags.
    Fix: `video_data.decription` typo corrected to `description` (the
    fallback value was silently written to a nonexistent attribute).
    """
    yt_service = gdata.youtube.service.YouTubeService()

    # Now that we run these queries from the App Engine servers, we need to
    # explicitly specify our developer_key to avoid being lumped together w/
    # rest of GAE and throttled by YouTube's "Too many request" quota
    yt_service.developer_key = "AI39si6ctKTnSR_Vx7o7GpkpeSZAKa6xjbZz6WySzTvKVYRDAO7NHBVwofphk82oP-OSUwIZd0pOJyNuWK8bbOlqzJc9OFozrQ"
    yt_service.client_id = "n/a"

    video_youtube_id_dict = Video.get_dict(Video.all(),
                                           lambda video: video.youtube_id)
    video_playlist_key_dict = VideoPlaylist.get_key_dict(VideoPlaylist.all())

    association_generation = int(
        Setting.last_youtube_sync_generation_start())

    logging.info("Fetching playlists")
    playlist_start_index = 1
    playlist_feed = yt_service.GetYouTubePlaylistFeed(
        uri='http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists?start-index=%s&max-results=50'
        % playlist_start_index)

    while len(playlist_feed.entry) > 0:
        for playlist in playlist_feed.entry:
            logging.info("Playlist: %s", playlist.id.text)
            playlist_id = playlist.id.text.replace(
                'http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists/',
                '')
            playlist_uri = playlist.id.text.replace(
                'users/KhanAcademyHebrew/', '')

            # Create the Playlist entity on first sight, update otherwise.
            query = Playlist.all()
            query.filter('youtube_id =', playlist_id)
            playlist_data = query.get()
            if not playlist_data:
                playlist_data = Playlist(youtube_id=playlist_id)
                logging.info('Creating Playlist: %s', playlist.title.text)

            playlist_data.url = playlist_uri
            playlist_data.title = playlist.title.text.decode("utf-8")
            playlist_data.description = playlist.description.text.decode(
                "utf-8")

            playlist_data.tags = []
            for category in playlist.category:
                if "tags.cat" in category.scheme:
                    playlist_data.tags.append(category.term)

            playlist_data.put()

            # Page through the playlist's videos, 50 at a time (max 500).
            for i in range(0, 10):
                start_index = i * 50 + 1
                video_feed = yt_service.GetYouTubePlaylistVideoFeed(
                    uri=playlist_uri + '?start-index=' + str(start_index)
                    + '&max-results=50')
                video_data_list = []

                if len(video_feed.entry) <= 0:
                    # No more videos in playlist
                    break

                for video in video_feed.entry:
                    if not video.media.player:
                        logging.warning(
                            "Could not parse video - skipping... (%s, %s)",
                            video, video.media)
                        continue

                    video_id = cgi.parse_qs(
                        urlparse(video.media.player.url).query)['v'][0].decode('utf-8')

                    video_data = None
                    if video_id in video_youtube_id_dict:
                        video_data = video_youtube_id_dict[video_id]
                        logging.info(
                            'Found Video: %s (%s)',
                            video.media.title.text.decode('utf-8'),
                            video_id)
                    if not video_data:
                        video_data = Video(youtube_id=video_id)
                        logging.info(
                            'Creating Video: %s (%s)',
                            video.media.title.text.decode('utf-8'),
                            video_id)

                    video_data.title = video.media.title.text.decode('utf-8')
                    video_data.url = video.media.player.url.decode('utf-8')
                    video_data.duration = int(video.media.duration.seconds)

                    if video.statistics:
                        video_data.views = int(video.statistics.view_count)

                    if video.media.description.text is not None:
                        video_data.description = \
                            video.media.description.text.decode('utf-8')
                    else:
                        # BUGFIX: was "video_data.decription" (typo), which
                        # silently left description unset for videos with no
                        # feed description.
                        video_data.description = ' '

                    if video.media.keywords.text:
                        video_data.keywords = \
                            video.media.keywords.text.decode('utf-8')
                    else:
                        video_data.keywords = ''

                    video_data.position = video.position
                    video_data_list.append(video_data)

                db.put(video_data_list)

                playlist_videos = []
                for video_data in video_data_list:
                    playlist_video = None
                    if playlist_data.key() in video_playlist_key_dict:
                        inner = video_playlist_key_dict[playlist_data.key()]
                        if video_data.key() in inner:
                            playlist_video = inner[video_data.key()]
                    if not playlist_video:
                        playlist_video = VideoPlaylist(
                            playlist=playlist_data.key(),
                            video=video_data.key())
                        logging.info('Creating VideoPlaylist: %s, %s',
                                     playlist_data.title, video_data.title)
                    else:
                        logging.info('Updating VideoPlaylist: %s, %s',
                                     playlist_video.playlist.title,
                                     playlist_video.video.title)

                    # Stamp with the current generation; live flags are
                    # committed later by commitLiveAssociations().
                    playlist_video.last_live_association_generation = \
                        association_generation
                    playlist_video.video_position = int(
                        video_data.position.text)
                    playlist_videos.append(playlist_video)

                db.put(playlist_videos)

        # Check next set of playlists
        playlist_start_index += 50
        playlist_feed = yt_service.GetYouTubePlaylistFeed(
            uri='http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists?start-index=%s&max-results=50'
            % playlist_start_index)
def updateVideoAndPlaylistData(self):
    """Mirror the KhanAcademyHebrew YouTube playlists/videos into the datastore.

    Pages through the account's playlist feed (50 per request), upserting
    Playlist, Video and VideoPlaylist entities, and stamps each association
    with the current sync generation for a later commitLiveAssociations()
    pass. Fix: `video_data.decription` typo corrected to `description`.
    """
    yt_service = YouTubeService()

    video_youtube_id_dict = Video.get_dict(Video.all(),
                                           lambda video: video.youtube_id)
    video_playlist_key_dict = VideoPlaylist.get_key_dict(VideoPlaylist.all())

    association_generation = int(Setting.last_youtube_sync_generation_start())

    logging.info("Fetching playlists")
    playlist_start_index = 1
    playlist_feed = yt_service.GetYouTubePlaylistFeed(
        uri="http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists?start-index=%s&max-results=50"
        % playlist_start_index
    )

    while len(playlist_feed.entry) > 0:
        for playlist in playlist_feed.entry:
            logging.info("Playlist: %s", playlist.id.text)
            playlist_id = playlist.id.text.replace(
                "http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists/",
                ""
            )
            playlist_uri = playlist.id.text.replace(
                "users/KhanAcademyHebrew/", "")

            # Upsert the Playlist entity keyed by its YouTube id.
            query = Playlist.all()
            query.filter("youtube_id =", playlist_id)
            playlist_data = query.get()
            if not playlist_data:
                playlist_data = Playlist(youtube_id=playlist_id)
                logging.info("Creating Playlist: %s", playlist.title.text)

            playlist_data.url = playlist_uri
            playlist_data.title = playlist.title.text.decode("utf-8")
            playlist_data.description = playlist.description.text.decode(
                "utf-8")

            playlist_data.tags = []
            for category in playlist.category:
                if "tags.cat" in category.scheme:
                    playlist_data.tags.append(category.term)

            playlist_data.put()

            # Fetch the playlist's videos in pages of 50 (at most 500).
            for i in range(0, 10):
                start_index = i * 50 + 1
                video_feed = yt_service.GetYouTubePlaylistVideoFeed(
                    uri=playlist_uri + "?start-index=" + str(start_index)
                    + "&max-results=50"
                )
                video_data_list = []

                if len(video_feed.entry) <= 0:
                    # No more videos in playlist
                    break

                for video in video_feed.entry:
                    if not video.media.player:
                        logging.warning(
                            "Could not parse video - skipping... (%s, %s)",
                            video, video.media)
                        continue

                    video_id = cgi.parse_qs(
                        urlparse(video.media.player.url).query
                    )["v"][0].decode("utf-8")

                    video_data = None
                    if video_id in video_youtube_id_dict:
                        video_data = video_youtube_id_dict[video_id]
                        logging.info(
                            "Found Video: %s (%s)",
                            video.media.title.text.decode("utf-8"),
                            video_id)
                    if not video_data:
                        video_data = Video(youtube_id=video_id)
                        logging.info(
                            "Creating Video: %s (%s)",
                            video.media.title.text.decode("utf-8"),
                            video_id)

                    video_data.title = video.media.title.text.decode("utf-8")
                    video_data.url = video.media.player.url.decode("utf-8")
                    video_data.duration = int(video.media.duration.seconds)

                    if video.statistics:
                        video_data.views = int(video.statistics.view_count)

                    if video.media.description.text is not None:
                        video_data.description = \
                            video.media.description.text.decode("utf-8")
                    else:
                        # BUGFIX: was "video_data.decription" (typo), so the
                        # placeholder never reached the real property.
                        video_data.description = " "

                    if video.media.keywords.text:
                        video_data.keywords = \
                            video.media.keywords.text.decode("utf-8")
                    else:
                        video_data.keywords = ""

                    video_data.position = video.position
                    video_data_list.append(video_data)

                db.put(video_data_list)

                playlist_videos = []
                for video_data in video_data_list:
                    playlist_video = None
                    if playlist_data.key() in video_playlist_key_dict:
                        per_playlist = video_playlist_key_dict[
                            playlist_data.key()]
                        if video_data.key() in per_playlist:
                            playlist_video = per_playlist[video_data.key()]
                    if not playlist_video:
                        playlist_video = VideoPlaylist(
                            playlist=playlist_data.key(),
                            video=video_data.key())
                        logging.info("Creating VideoPlaylist: %s, %s",
                                     playlist_data.title, video_data.title)
                    else:
                        logging.info(
                            "Updating VideoPlaylist: %s, %s",
                            playlist_video.playlist.title,
                            playlist_video.video.title,
                        )

                    # Generation stamp read later by commitLiveAssociations().
                    playlist_video.last_live_association_generation = \
                        association_generation
                    playlist_video.video_position = int(
                        video_data.position.text)
                    playlist_videos.append(playlist_video)

                db.put(playlist_videos)

        # Check next set of playlists
        playlist_start_index += 50
        playlist_feed = yt_service.GetYouTubePlaylistFeed(
            uri="http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists?start-index=%s&max-results=50"
            % playlist_start_index
        )
def updateVideoAndPlaylistData(self):
    """Sync the KhanAcademyHebrew YouTube account into Playlist/Video entities.

    Iterates the playlist feed in pages of 50, creating or updating the
    corresponding datastore entities and stamping every VideoPlaylist with
    the current sync generation (consumed by commitLiveAssociations()).
    Fix: `video_data.decription` typo corrected to `description`.
    """
    yt_service = gdata.youtube.service.YouTubeService()

    # Now that we run these queries from the App Engine servers, we need to
    # explicitly specify our developer_key to avoid being lumped together w/
    # rest of GAE and throttled by YouTube's "Too many request" quota
    yt_service.developer_key = "AI39si6ctKTnSR_Vx7o7GpkpeSZAKa6xjbZz6WySzTvKVYRDAO7NHBVwofphk82oP-OSUwIZd0pOJyNuWK8bbOlqzJc9OFozrQ"
    yt_service.client_id = "n/a"

    video_youtube_id_dict = Video.get_dict(Video.all(),
                                           lambda video: video.youtube_id)
    video_playlist_key_dict = VideoPlaylist.get_key_dict(VideoPlaylist.all())

    association_generation = int(Setting.last_youtube_sync_generation_start())

    logging.info("Fetching playlists")
    playlist_start_index = 1
    playlist_feed = yt_service.GetYouTubePlaylistFeed(
        uri='http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists?start-index=%s&max-results=50'
        % playlist_start_index)

    while len(playlist_feed.entry) > 0:
        for playlist in playlist_feed.entry:
            logging.info("Playlist: %s", playlist.id.text)
            playlist_id = playlist.id.text.replace(
                'http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists/',
                '')
            playlist_uri = playlist.id.text.replace(
                'users/KhanAcademyHebrew/', '')

            # Find or create the Playlist entity for this feed entry.
            query = Playlist.all()
            query.filter('youtube_id =', playlist_id)
            playlist_data = query.get()
            if not playlist_data:
                playlist_data = Playlist(youtube_id=playlist_id)
                logging.info('Creating Playlist: %s', playlist.title.text)

            playlist_data.url = playlist_uri
            playlist_data.title = playlist.title.text.decode("utf-8")
            playlist_data.description = playlist.description.text.decode(
                "utf-8")

            playlist_data.tags = []
            for category in playlist.category:
                if "tags.cat" in category.scheme:
                    playlist_data.tags.append(category.term)

            playlist_data.put()

            # Walk the playlist's video pages, 50 entries each (max 500).
            for i in range(0, 10):
                start_index = i * 50 + 1
                video_feed = yt_service.GetYouTubePlaylistVideoFeed(
                    uri=playlist_uri + '?start-index=' + str(start_index)
                    + '&max-results=50')
                video_data_list = []

                if len(video_feed.entry) <= 0:
                    # No more videos in playlist
                    break

                for video in video_feed.entry:
                    if not video.media.player:
                        logging.warning(
                            "Could not parse video - skipping... (%s, %s)",
                            video, video.media)
                        continue

                    video_id = cgi.parse_qs(
                        urlparse(video.media.player.url).query
                    )['v'][0].decode('utf-8')

                    video_data = None
                    if video_id in video_youtube_id_dict:
                        video_data = video_youtube_id_dict[video_id]
                        logging.info(
                            'Found Video: %s (%s)',
                            video.media.title.text.decode('utf-8'), video_id)
                    if not video_data:
                        video_data = Video(youtube_id=video_id)
                        logging.info(
                            'Creating Video: %s (%s)',
                            video.media.title.text.decode('utf-8'), video_id)

                    video_data.title = video.media.title.text.decode('utf-8')
                    video_data.url = video.media.player.url.decode('utf-8')
                    video_data.duration = int(video.media.duration.seconds)

                    if video.statistics:
                        video_data.views = int(video.statistics.view_count)

                    if video.media.description.text is not None:
                        video_data.description = \
                            video.media.description.text.decode('utf-8')
                    else:
                        # BUGFIX: was "video_data.decription" (typo); the
                        # placeholder was assigned to a nonexistent attribute.
                        video_data.description = ' '

                    if video.media.keywords.text:
                        video_data.keywords = \
                            video.media.keywords.text.decode('utf-8')
                    else:
                        video_data.keywords = ''

                    video_data.position = video.position
                    video_data_list.append(video_data)

                db.put(video_data_list)

                playlist_videos = []
                for video_data in video_data_list:
                    playlist_video = None
                    if playlist_data.key() in video_playlist_key_dict:
                        existing = video_playlist_key_dict[
                            playlist_data.key()]
                        if video_data.key() in existing:
                            playlist_video = existing[video_data.key()]
                    if not playlist_video:
                        playlist_video = VideoPlaylist(
                            playlist=playlist_data.key(),
                            video=video_data.key())
                        logging.info('Creating VideoPlaylist: %s, %s',
                                     playlist_data.title, video_data.title)
                    else:
                        logging.info('Updating VideoPlaylist: %s, %s',
                                     playlist_video.playlist.title,
                                     playlist_video.video.title)

                    # Generation stamp consumed by commitLiveAssociations().
                    playlist_video.last_live_association_generation = \
                        association_generation
                    playlist_video.video_position = int(
                        video_data.position.text)
                    playlist_videos.append(playlist_video)

                db.put(playlist_videos)

        # Check next set of playlists
        playlist_start_index += 50
        playlist_feed = yt_service.GetYouTubePlaylistFeed(
            uri='http://gdata.youtube.com/feeds/api/users/KhanAcademyHebrew/playlists?start-index=%s&max-results=50'
            % playlist_start_index)