def execute_comments_for_user_rss(self, request, **kwargs):
    """RSS feed of new comments left by other people on content items the user has commented on."""
    user = db.query(User).get(kwargs["id"])
    user_identity_ids = [identity.id for identity in user.identities]

    # Content items the user has commented on under any of his identities.
    content_item_ids_user_commented = db.query(distinct(Comment.content_item_id)).filter(
        Comment.identity_id.in_(user_identity_ids)
    )

    # The 50 most recent comments on those items, excluding the user's own comments.
    comments = db.query(Comment).filter(
        Comment.content_item_id.in_(content_item_ids_user_commented),
        ~Comment.identity_id.in_(user_identity_ids)
    ).order_by(Comment.created_at.desc())[:50]

    items = []
    for comment in comments:
        item_dict = self._item_dict(comment.content_item)
        author_identity = comment.identity.user.default_identity
        # Position of the comment on its page: number of earlier comments on the same item, plus one.
        comment_number = db.query(func.count(Comment.id)).filter(
            Comment.content_item == comment.content_item,
            Comment.created_at < comment.created_at
        ).scalar() + 1
        comment_url = item_dict["url"] + "#comment-%d" % comment_number

        items.append(PyRSS2Gen.RSSItem(
            title = u"%(username)s - %(title)s" % {
                "username" : all_social_service[author_identity.service].get_user_name(author_identity.service_data),
                "title" : item_dict["title"],
            },
            link = comment_url,
            description = self._process_comment_text(comment.text),
            guid = PyRSS2Gen.Guid(comment_url),
            pubDate = comment.created_at
        ))

    rss = PyRSS2Gen.RSS2(
        # "Новые комментарии для %s" = "New comments for %s".
        title = config.build_title(u"Новые комментарии для %s" % (
            all_social_service[user.default_identity.service].get_user_name(user.default_identity.service_data)
        )),
        link = config.url + request.path,
        description = "",
        lastBuildDate = datetime.now(),
        items = items
    )

    rss_string = StringIO.StringIO()
    rss.write_xml(rss_string, "utf-8")
    return Response(rss_string.getvalue(), mimetype="application/rss+xml")
def execute_feed(self, request, **kwargs):
    """Renders a content feed as HTML, JSON or RSS, with optional tag filtering and pagination."""
    feed = self.feeds[kwargs["feed"]]
    format = kwargs.get("format", "html")
    feed_url = feed["url"]

    q = db.query(ContentItem).filter(
        ContentItem.parent_id == None,
        ContentItem.type.in_(feed["types"]),
        ContentItem.permissions_for(request.user)
    ).options(subqueryload("children"), subqueryload("comments"), subqueryload("tags"))

    tag_titles = []
    if "tag" in kwargs:
        tag = db.query(Tag).filter(Tag.url == kwargs["tag"]).first()
        if tag is None:
            raise NotFound()
        feed_url += "/tag/" + kwargs["tag"]
        q = q.filter(ContentItem.id.in_([content_item.id for content_item in tag.content_items]))
        tag_titles.append(tag.title)

    if feed["rss_allow"]:
        rss_url = config.url + "/" + feed["url"] + "/rss/" if feed["url"] != "" else config.url + "/rss/"
        rss_title = config.build_title(feed["title"])

        if format == "rss":
            items = q.order_by(ContentItem.created_at.desc())[:feed["rss_items"]]
            rss = PyRSS2Gen.RSS2(
                title = rss_title,
                link = rss_url,
                description = "",
                lastBuildDate = datetime.now(),
                items = [
                    PyRSS2Gen.RSSItem(
                        title = item_dict["title"],
                        link = item_dict["url"],
                        description = item_dict["description"],
                        guid = PyRSS2Gen.Guid(item_dict["url"]),
                        pubDate = item.created_at
                    )
                    # Pair each item with its rendered dict so pubDate comes from the same item.
                    for (item, item_dict) in [(item, self._item_dict(item)) for item in items]
                ]
            )
            rss_string = StringIO.StringIO()
            rss.write_xml(rss_string, "utf-8")
            return Response(rss_string.getvalue(), mimetype="application/rss+xml")
    else:
        rss_url = None
        rss_title = None

    if format == "json":
        count = int(request.args.get("count", "100"))
        if "before" in request.args:
            q = q.filter(ContentItem.created_at < dateutil.parser.parse(request.args["before"])).order_by(ContentItem.created_at.desc())
        elif "after" in request.args:
            q = q.filter(ContentItem.created_at > dateutil.parser.parse(request.args["after"])).order_by(ContentItem.created_at)
        else:
            q = q.order_by(ContentItem.created_at.desc())
        items = q[:count]
    else:
        title = [feed["title"]] + tag_titles

        if "page" in kwargs:
            page = kwargs["page"]
            items = list(reversed(q.order_by(ContentItem.created_at)[(page - 1) * feed["per_page"] : page * feed["per_page"]]))
            if len(items) == 0:
                raise NotFound()
            items_skipped = q.filter(ContentItem.created_at > items[0].created_at).count()
        else:
            page = None
            items = q.order_by(ContentItem.created_at.desc())[:feed["per_page"]]
            items_skipped = 0

        # Creation dates of all matching items, oldest first.
        dates = [created_at for (created_at,) in q.order_by(ContentItem.created_at).values(ContentItem.created_at)]
        # One (page, oldest date, newest date) tuple per page for the pagination block, newest page first.
        pages = list(reversed([
            (
                page_number,
                dates[page_number * feed["per_page"]],
                dates[min((page_number + 1) * feed["per_page"], len(dates)) - 1]
            )
            for page_number in xrange(0, int(ceil(float(len(dates)) / feed["per_page"])))
        ]))

        # Group consecutive publication dates into (season, item count) runs for the archive navigation.
        seasons = []
        month2season = {
            1 : "winter", 2 : "winter", 3 : "spring", 4 : "spring", 5 : "spring", 6 : "summer",
            7 : "summer", 8 : "summer", 9 : "autumn", 10 : "autumn", 11 : "autumn", 12 : "winter"
        }
        for date in dates:
            if len(seasons) == 0 or seasons[-1][0] != month2season[date.month]:
                seasons.append((month2season[date.month], 1))
            else:
                seasons[-1] = (seasons[-1][0], seasons[-1][1] + 1)
        seasons = list(reversed(seasons))

    items_formatted = [self._render_item_preview(request, kwargs["feed"], "DESC", item) for item in items]

    if format == "json":
        return Response(simplejson.dumps(items_formatted), mimetype="application/json")
    else:
        return self.render_to_response(request, [
            "content/feed/%s/feed.html" % (kwargs["feed"],),
            "content/feed/feed.html",
        ], **{
            "breadcrumbs" : title,
            "rss_url" : rss_url,
            "rss_title" : rss_title,
            "body_class" : "feed " + kwargs["feed"],
            "feed" : kwargs["feed"],
            "feed_url" : feed_url.lstrip("/"),
            "items" : "".join(items_formatted),
            "items_skipped" : items_skipped,
            "pagination" : {
                "page" : page,
                "pages" : pages,
                "seasons" : seasons,
                "url" : re.sub("/page/([0-9]+)/", "/", request.path),
                "per_page" : feed["per_page"],
            },
        })