def bulk_import(parsed_feed):
    """Fetch every page of a paginated feed and merge them into one feed.

    Starts from the already-parsed first page and keeps requesting
    ``<base>?page=N`` until a page comes back with no entries.

    :param parsed_feed: a ``feedparser`` result for the feed's first page.
    :returns: the merged feed produced by ``util.join_feeds``.
    """
    # NOTE(review): assumes base_url carries no query string — the '?'
    # below is appended unconditionally; confirm against the feed source.
    base_url = parsed_feed.feed.summary_detail.base
    # Capture the feed link up front: once the loop finishes, parsed_feed
    # is the final (empty) page, whose feed.link may differ from the feed
    # we actually imported — or be absent entirely, in which case
    # attribute access on the FeedParserDict would raise AttributeError.
    feed_link = parsed_feed.feed.get('link')
    feeds = []
    while parsed_feed.entries:
        feeds.append(parsed_feed)
        # Pages are 1-based; we already hold page len(feeds), so request
        # the next one.
        parsed_feed = feedparser.parse('%s?page=%i' % (base_url,
                                                       len(feeds) + 1))
    # clear the count cache so later count queries see the new videos
    if feed_link in _cached_video_count:
        del _cached_video_count[feed_link]
    return util.join_feeds(feeds)
def bulk_import(parsed_feed):
    """Fetch every page of an OpenSearch-paginated feed and merge them.

    Reads the OpenSearch ``startindex``/``itemsperpage``/``totalresults``
    values from *parsed_feed* and fetches each page by appending a
    ``start-index`` query parameter to the feed's URL.

    :param parsed_feed: a ``feedparser`` result carrying OpenSearch
        pagination metadata and an ``href`` attribute.
    :returns: the merged feed produced by ``util.join_feeds``.
    :raises ValueError: if ``itemsperpage`` is not a positive integer.
    """
    startindex = int(_opensearch_get(parsed_feed, 'startindex'))
    itemsperpage = int(_opensearch_get(parsed_feed, 'itemsperpage'))
    totalresults = int(_opensearch_get(parsed_feed, 'totalresults'))
    # A zero page size would make range() raise an opaque "arg 3 must
    # not be zero", and a negative one would silently fetch no pages at
    # all; fail loudly with a clear message instead.
    if itemsperpage <= 0:
        raise ValueError('itemsperpage must be positive, got %d'
                         % itemsperpage)
    # The separator never changes inside the loop; compute it once.
    separator = '&' if '?' in parsed_feed.href else '?'
    feeds = []
    # max() guarantees at least one page is fetched even when the feed
    # reports totalresults as 0.
    for index in range(startindex, max(totalresults, itemsperpage),
                       itemsperpage):
        feeds.append(feedparser.parse(
            '%s%sstart-index=%i' % (parsed_feed.href, separator, index)))
    return util.join_feeds(feeds)
def bulk_import(parsed_feed):
    """Walk a paginated feed page by page and return the merged result.

    *parsed_feed* is the feedparser result for the first page; successive
    pages are fetched via ``<base>?page=N`` until one has no entries.
    """
    base = parsed_feed.feed.summary_detail.base
    pages = []
    page_number = 1
    while parsed_feed.entries:
        pages.append(parsed_feed)
        page_number += 1
        parsed_feed = feedparser.parse('%s?page=%i' % (base, page_number))
    # clear the count cache
    if parsed_feed.feed.link in _cached_video_count:
        del _cached_video_count[parsed_feed.feed.link]
    return util.join_feeds(pages)
def bulk_import(parsed_feed):
    """Merge all pages of an OpenSearch feed into a single feed.

    Page boundaries come from the feed's OpenSearch ``startindex``,
    ``itemsperpage`` and ``totalresults`` values; each page is fetched
    by tacking a ``start-index`` parameter onto the feed URL.
    """
    first = int(_opensearch_get(parsed_feed, 'startindex'))
    page_size = int(_opensearch_get(parsed_feed, 'itemsperpage'))
    total = int(_opensearch_get(parsed_feed, 'totalresults'))
    href = parsed_feed.href
    # Use '&' when the URL already carries a query string, '?' otherwise.
    joiner = '&' if '?' in href else '?'
    pages = [
        feedparser.parse(href + joiner + 'start-index=%i' % (start,))
        for start in range(first, max(total, page_size), page_size)
    ]
    return util.join_feeds(pages)
def bulk_import(parsed_feed):
    """Fetch every page URL for *parsed_feed* and return the merged feed."""
    page_urls = bulk_import_url_list(parsed_feed)
    parsed_pages = list(map(feedparser.parse, page_urls))
    return util.join_feeds(parsed_pages)