def update_feed_details(self, ids):
    """Refresh stored details for the Feed rows with the given ids.

    For each feed, re-run feed discovery on the feed's topic url and,
    when the discovered url still matches the stored topic, copy the
    fresh FeedInfo onto the row. Commits once after all feeds are
    processed.

    :param ids: iterable of Feed primary keys to refresh
    """
    feeds = Feed.query.filter(Feed.id.in_(ids)).all()
    app.logger.info(u"Admin Updating Details for Feeds: {0}".format(
        u", ".join(map(str, feeds))))
    for feed in feeds:
        # find_feeds may return an empty result (site down, feed gone);
        # the original indexed [0] unconditionally and could raise
        # IndexError mid-batch.
        found = find_feeds(feed.topic, get_feedinfo=True)
        if not found:
            app.logger.warning(
                u"No feedinfo found for feed: {0}".format(feed))
            continue
        feedinfo = found[0]
        if feedinfo.url == feed.topic:
            # Debug print replaced with logger output.
            app.logger.debug(u"Feedinfo: {0}".format(vars(feedinfo)))
            feed.update_from_feedinfo(feedinfo)
            db.session.add(feed)
    # One commit for the whole batch.
    db.session.commit()
    return
def search_feeds():
    """Search for feeds for a given list of urls.

    Reads ``urls[]`` from the POSTed form, skips urls whose domain is in
    ``excluded_domains``, runs feed discovery on each remaining url, and
    syncs already-known Feed rows with the freshly discovered info.
    Discovered feeds (minus comment feeds, duplicates, and EXCLUDED
    rows) are cached in the session and returned as JSON together with
    the urls where nothing was found and the urls that were excluded.
    """
    urls = request.form.getlist('urls[]')
    app.logger.info('Searching for Feeds at Urls: {0}'.format(urls))
    feeds = []
    not_found = []
    excluded = []
    for url in urls:
        if not url:
            continue
        parsed = urllib.parse.urlparse(url)
        # Bare domains (no scheme) parse with an empty netloc and the
        # host left in .path — fall back to that.
        domain_root = parsed.netloc or parsed.path
        if domain_root in excluded_domains:
            app.logger.warning(u'Skipping Url: {0}, excluded domain'
                               .format(url))
            excluded.append(url)
            continue
        found = find_feeds(url, check_all=True, get_feedinfo=True)
        if not found:
            app.logger.info(u'No feeds found at url: {0}'.format(url))
            not_found.append(url)
            continue
        app.logger.info(u'Found feeds at url {0}: {1}'
                        .format(url, list(f.url for f in found)))
        for feedinfo in found:
            # Guard clause: skip comment feeds and duplicates seen at
            # earlier urls.
            if comment_regex.search(feedinfo.url) or feedinfo in feeds:
                continue
            feed = Feed.query.filter_by(topic=feedinfo.url).first()
            if feed:
                app.logger.info('Updating Feed: {0} with info: {1}'
                                .format(feed, feedinfo))
                feed.update_from_feedinfo(feedinfo)
                db.session.add(feed)
                if feed.status == STATUS.EXCLUDED:
                    # Excluded feeds still get their row refreshed but
                    # are never surfaced in the results.
                    continue
                if feed.status == STATUS.SUBSCRIBED:
                    feedinfo.subscribed = True
            feeds.append(feedinfo)
    db.session.commit()
    json_feeds = []
    if feeds:
        feed_info_schema = FeedInfoSchema(many=True)
        # NOTE(review): .data is the marshmallow<3 MarshalResult API —
        # confirm the pinned marshmallow version before upgrading.
        result = feed_info_schema.dump(feeds)
        json_feeds = result.data
    session['feeds'] = json_feeds
    session['feed_urls'] = list(f.url for f in feeds)
    app.logger.debug('Session feeds: {0}'.format(session['feeds']))
    return jsonify({"feeds": json_feeds,
                    "not_found": not_found,
                    "excluded": excluded})