def get_feed_from_url(url):
    """Download an RSS 2.0 feed and return a Feed object representing it.

    :param url: URL of the feed to download.
    :return: Feed populated with channel metadata, channel image, and items.
    :raises LookupError: if the parsed feed is missing required element(s).
    """
    rss = get_rss_feed_parser(url)
    if not is_rss_feed_valid(rss):
        raise LookupError("Sorry! We cannot parse the rss feed. Missing required element(s).")

    # Hoist the nested dict lookups once instead of repeating them per field.
    channel = rss["channel"]
    image = channel["image"]

    feed = Feed()
    feed.title = channel["title"]
    feed.link = channel["link"]
    feed.description = channel["description"]
    feed.image = FeedItemImage(
        url=image["url"],
        title=image["title"],
        link=image["link"],
        width=image["width"],
        height=image["height"],
    )
    # Comprehension instead of a manual append loop (same items, same order).
    feed.items = [
        FeedItem(
            title=item["title"],
            link=item["link"],
            guid=item["guid"],
            # published_parsed is a time.struct_time; mktime converts it to
            # a local-time epoch timestamp for datetime.fromtimestamp.
            pub_date=datetime.fromtimestamp(mktime(item["published_parsed"])),
        )
        for item in rss["items"]
    ]
    return feed
def FeedFromDict(dico, config):
    """Build a Feed from a normalized feed dictionary plus app-wide defaults.

    :param dico: raw feed dictionary; normalized before use.
    :param config: configuration object supplying the per-feed defaults.
    :return: Feed
    """
    dico = normalize_feed_dict(dico)

    feed = Feed(dico['url'])

    # Copy the parsed feed metadata attribute by attribute.
    for attr in ('title', 'link', 'subtitle', 'author', 'generator',
                 'encoding', 'updated', 'entries_hash'):
        setattr(feed, attr, dico[attr])

    # Seed the per-feed settings from the global configuration defaults.
    feed.refresh_interval = config.default_refresh_interval
    feed.max_entries = config.default_max_entries
    feed.highlight_news = config.default_highlight_news
    feed.entries_per_page = config.default_entries_per_page
    feed.has_news = False
    return feed