def parse_feed_url(self, url):
    feed_parser = FeedParser(url)
    entry = ''
    try:
        # Collect a video type for each feed entry, stopping once the
        # per-import limit is reached.
        for vt, info, entry in feed_parser.items():
            if vt:
                self.video_types.append((vt, info))
            if len(self.video_types) >= self.VIDEOS_LIMIT:
                self.video_limit_routreach = True
                break
        # A document with no feed version was not recognized as a feed
        # by the underlying parser; treat it as invalid.
        invalid_feed = False
        if hasattr(feed_parser.feed, 'version') and feed_parser.feed.version:
            try:
                # Record the feed URL together with the link of the last
                # entry iterated ('' if the feed had no entries).
                self.feed_urls.append((url, entry and entry['link']))
            except KeyError:
                invalid_feed = True
        else:
            invalid_feed = True
        if invalid_feed:
            raise forms.ValidationError(_(
                u'Sorry, we could not find a valid feed at the URL you '
                u'provided. Please check the URL and try again.'))
    except FeedParserError, e:
        # Surface lower-level parsing failures as form validation errors.
        raise forms.ValidationError(e)
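# A minimal sketch of how parse_feed_url() can plug into Django form
# validation. The 'feed_url' field name and this clean_feed_url() hook are
# illustrative assumptions, not taken from the original module.
def clean_feed_url(self):
    # Django calls clean_<fieldname>() during is_valid(); a
    # forms.ValidationError raised inside parse_feed_url() is then
    # attached to the 'feed_url' field instead of surfacing as a 500.
    url = self.cleaned_data.get('feed_url', u'').strip()
    if url:
        self.parse_feed_url(url)
    return url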
def _get_videos_from_query(self, query):
    from urllib import quote_plus
    from videos.feed_parser import FeedParser

    # Encode the query so spaces and '&' do not break the GData URL.
    uri = "http://gdata.youtube.com/feeds/api/videos?q=%s&v=2" % quote_plus(query)

    urls = []
    feed_parser = FeedParser(uri)
    for vt, info, entry in feed_parser.items():
        urls.append(vt.convert_to_video_url())

    # GData paginates results; a rel="next" link points at the next page.
    next_url = [x for x in feed_parser.feed.feed.get('links', [])
                if x['rel'] == 'next']
    while next_url:
        feed_parser = FeedParser(next_url[0].href)
        for vt, info, entry in feed_parser.items():
            urls.append(vt.convert_to_video_url())
        self.log("%s videos loaded" % len(urls))
        next_url = [x for x in feed_parser.feed.feed.get('links', [])
                    if x['rel'] == 'next']
        # Pause between page fetches before requesting the next page.
        self._sleep()

    return VideoUrl.objects.select_related('video').filter(url__in=urls)
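# The fetch-a-page / follow-rel="next" loop above is a generic GData
# pagination pattern. Below is a sketch of the same idea factored into a
# generator, assuming FeedParser keeps the .items() and .feed.feed['links']
# shape used above; this helper is illustrative and not part of the
# original module.
def _iter_feed_pages(self, start_uri):
    from videos.feed_parser import FeedParser
    uri = start_uri
    while uri:
        feed_parser = FeedParser(uri)
        for vt, info, entry in feed_parser.items():
            yield vt, info, entry
        # Follow the rel="next" link when the feed advertises another page.
        next_links = [x for x in feed_parser.feed.feed.get('links', [])
                      if x['rel'] == 'next']
        uri = next_links[0].href if next_links else None
        if uri:
            self._sleep()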