def notify(header, body, cfg, link):
    """Prepend a new item to the RSS feed configured in *cfg* and write it out.

    The feed's item history is persisted as a pickle next to the RSS file
    (``cfg['rss.file'] + '.pickle'``).  Each call loads the history, caps it
    at the 10 most recent items, prepends the new item, regenerates the RSS
    file, and pickles the history back to disk.

    :param header: title of the new feed item
    :param body: description/body text of the new feed item
    :param cfg: mapping providing ``rss.file``, ``rss.title``, ``rss.description``
    :param link: URL of the new feed item
    """
    fn = cfg['rss.file']
    pickle_fn = fn + '.pickle'
    try:
        # NOTE(security): pickle.load() executes arbitrary code if the file
        # is attacker-controlled; only safe while this pickle file is written
        # exclusively by this function.
        with open(pickle_fn, 'rb') as inp:
            previous = pickle.load(inp)
    except Exception as e:
        # A missing or corrupt history is non-fatal: start an empty feed.
        logging.error(e)
        previous = []
    # Keep only the 10 most recent items.  The previous code used
    # max(10, len(previous)), which never truncated the list and let the
    # history grow without bound.
    previous = previous[:10]
    feed = Feed(
        title=cfg['rss.title'],
        link='',
        description=cfg['rss.description'],
        language=u"en",
    )
    previous.insert(0, Item(header, link, 'author', body))
    for item in previous:
        feed.add_item(title=item.title,
                      description=item.description,
                      link=item.link,
                      author_name=item.author_name,
                      pubdate=item.pubdate)
    with open(fn, 'wb') as out:
        feed.write(out, 'utf-8')
    with open(pickle_fn, 'wb') as p:
        pickle.dump(previous, p)
def feed_rss(self):
    """Return an RSS 2.0 document listing the 20 most recently created datasets."""
    query = db.session.query(Dataset)
    # Non-admin visitors only ever see public datasets.
    if not (c.account and c.account.admin):
        query = query.filter_by(private=False)
    recent = query.order_by(Dataset.created_at.desc()).limit(20)
    feed = Rss201rev2Feed(
        _('Recently Created Datasets'),
        url(controller='home', action='index', qualified=True),
        _('Recently created datasets in the OpenSpending Platform'),
        author_name='Openspending')
    for dataset in recent:
        managers = ', '.join(p.fullname for p in dataset.managers
                             if p.fullname)
        feed.add_item(
            title=dataset.label,
            pubdate=dataset.updated_at,
            link=url(controller='dataset', action='view',
                     dataset=dataset.name, qualified=True),
            description=dataset.description,
            author_name=managers)
    buf = StringIO()
    feed.write(buf, 'utf-8')
    response.content_type = 'application/xml'
    return buf.getvalue()
def public_journal_rss(self):
    """Produce an rss2 feed of the public journal via the feedgenerator module."""
    # Repositories followed by the current user drive the journal contents.
    c.following = (
        self.sa.query(UserFollowing)
        .filter(UserFollowing.user_id == self.rhodecode_user.user_id)
        .options(joinedload(UserFollowing.follows_repository))
        .all()
    )
    journal = self._get_journal_data(c.following)
    feed = Rss201rev2Feed(title=self.title % 'rss',
                          link=url('public_journal_rss', qualified=True),
                          description=_('Public journal'),
                          language=self.language,
                          ttl=self.ttl)
    for journal_entry in journal[:self.feed_nr]:
        verb, verb_extra = h.action_parser(journal_entry, feed=True)
        item_title = "%s - %s %s" % (journal_entry.user.short_contact,
                                     verb,
                                     journal_entry.repository.repo_name)
        feed.add_item(title=item_title,
                      pubdate=journal_entry.action_date,
                      link=url('', qualified=True),
                      author_email=journal_entry.user.email,
                      author_name=journal_entry.user.full_contact,
                      description=verb_extra())
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def rss(self, repo_name):
    """Produce an rss2 feed of recent changesets via the feedgenerator module."""
    feed = Rss201rev2Feed(title=self.title % repo_name,
                          link=url('summary_home', repo_name=repo_name,
                                   qualified=True),
                          description=self.description % repo_name,
                          language=self.language,
                          ttl=self.ttl)
    # Newest first: take the last feed_nr changesets and walk them backwards.
    for changeset in reversed(list(c.rhodecode_repo[-self.feed_nr:])):
        item_desc = '%s - %s<br/><pre>' % (changeset.author, changeset.date)
        item_desc += self.__changes(changeset)
        feed.add_item(
            title=self._get_title(changeset),
            link=url('changeset_home', repo_name=repo_name,
                     revision=changeset.raw_id, qualified=True),
            author_name=changeset.author,
            description=item_desc,
        )
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def feed_rss():
    """Return the 20 newest datasets as an RSS 2.0 XML response."""
    query = db.session.query(Dataset)
    # Private datasets are hidden from everyone except admins.
    if not auth.account.is_admin():
        query = query.filter_by(private=False)
    newest = query.order_by(Dataset.created_at.desc()).limit(20)
    desc = gettext('Recently created datasets on %(site_title)s',
                   site_title=current_app.config.get('SITE_TITLE'))
    feed = Rss201rev2Feed(gettext('Recently Created Datasets'),
                          url_for('home.index'), desc)
    for dataset in newest:
        feed.add_item(
            title=dataset.label,
            pubdate=dataset.updated_at,
            link='/datasets/%s' % dataset.name,
            description=dataset.description,
            author_name=', '.join(p.fullname for p in dataset.managers
                                  if p.fullname))
    out = StringIO()
    feed.write(out, 'utf-8')
    return Response(out.getvalue(), mimetype='application/xml')
def rss_feed(events, name, link, description):
    """Render *events* as an RSS 2.0 feed string.

    :param events: iterable of event objects to paginate into the feed
    :param name: feed title
    :param link: feed URL, also used as the fallback link for items
    :param description: feed description
    :returns: the serialized RSS document (utf-8)
    """
    rss = Feed(name, link.encode('utf-8'), description)

    def event_item(event):
        # Resolve an absolute link for the event; fall back to the feed's
        # own link when the event type cannot produce one.
        try:
            item_link = event.event.link_path(event, absolute=True)
        except Exception:
            # Was a bare ``except:`` which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            item_link = link
        rss.add_item(title=u"%s %s" % (event.user.name,
                                       formatting.as_unicode(event)),
                     link=item_link.encode('utf-8'),
                     pubdate=event.time,
                     description=event.text(),
                     author_name=event.user.name,
                     unique_id=item_link)

    response.content_type = 'application/rss+xml'
    # NamedPager invokes event_item once per event on the current page,
    # populating the feed as a side effect.
    pager.NamedPager('rss', events, event_item, size=50).here()
    return rss.writeString('utf-8')
def make_feed(title, author='OffenesParlament.de', positionen=None,
              debatten=None, limit=10):
    """Build an RSS 2.0 response combining Drucksachen positions and debates.

    Items from both sources are merged, sorted newest-first by ``pubdate``,
    and truncated to *limit* entries.

    :param title: feed title
    :param author: feed author name
    :param positionen: iterable of Position objects (default: none)
    :param debatten: iterable of Debatte objects (default: none)
    :param limit: maximum number of items in the feed
    :returns: Flask Response with ``application/xml`` content
    """
    # Avoid mutable default arguments; treat None as "no items".
    if positionen is None:
        positionen = []
    if debatten is None:
        debatten = []
    items = []
    for position in positionen:
        items.append({
            'title': '[Drs] ' + position.typ + ': ' + position.ablauf.titel,
            'pubdate': position.date,
            'link': url_for('ablauf.view',
                            wahlperiode=position.ablauf.wahlperiode,
                            key=position.ablauf.key,
                            _external=True) + '#' + position.key,
            'description': position.ablauf.abstrakt
        })
    for debatte in debatten:
        # Debates without a number cannot be linked to; skip them.
        if debatte.nummer is None:
            continue
        items.append({
            'title': '[Rede] ' + debatte.titel,
            'pubdate': debatte.sitzung.date,
            'link': url_for('debatte',
                            wahlperiode=debatte.sitzung.wahlperiode,
                            nummer=debatte.sitzung.nummer,
                            debatte=debatte.nummer,
                            _external=True),
            'description': debatte.text
        })
    feed = Rss201rev2Feed(title, url_for('index', _external=True),
                          'Was passiert im Bundestag?', author_name=author)
    items = sorted(items, key=lambda i: i.get('pubdate').isoformat(),
                   reverse=True)
    # Honour the limit parameter (it was previously ignored in favour of a
    # hard-coded 10).
    for item in items[:limit]:
        feed.add_item(**item)
    sio = StringIO()
    feed.write(sio, 'utf-8')
    return Response(sio.getvalue(), status=200, mimetype='application/xml')
def _rss_feed(self, repos, public=True):
    """Build an RSS 2.0 journal feed (public or personal) and return it."""
    journal = self._get_journal_data(repos)
    # Pick the route and label for the requested journal flavour; the
    # description doubles as the feed title.
    route = 'public_journal_atom' if public else 'journal_atom'
    label = _('public journal') if public else _('journal')
    _link = url(route, qualified=True)
    _desc = '%s %s %s' % (c.rhodecode_name, label, 'rss feed')
    feed = Rss201rev2Feed(title=_desc,
                          link=_link,
                          description=_desc,
                          language=self.language,
                          ttl=self.ttl)
    for entry in journal[:self.feed_nr]:
        user = entry.user
        if user is None:
            # Deleted users: substitute a minimal stand-in record so the
            # item can still be rendered.
            user = AttributeDict({
                'short_contact': entry.username,
                'email': '',
                'full_contact': ''
            })
        action, action_extra, ico = h.action_parser(entry, feed=True)
        item_title = "%s - %s %s" % (user.short_contact, action(),
                                     entry.repository.repo_name)
        repo_link = None
        if entry.repository is not None:
            repo_link = url('changelog_home',
                            repo_name=entry.repository.repo_name,
                            qualified=True)
        feed.add_item(title=item_title,
                      pubdate=entry.action_date,
                      link=repo_link or url('', qualified=True),
                      author_email=user.email,
                      author_name=user.full_contact,
                      description=action_extra())
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def _get_feed_from_cache(key):
    """Render the repository changelog as an RSS 2.0 string (cache worker)."""
    feed = Rss201rev2Feed(
        title=self.title % repo_name,
        link=url('summary_home', repo_name=repo_name, qualified=True),
        description=self.description % repo_name,
        language=self.language,
        ttl=self.ttl
    )
    # Emit the last feed_nr changesets, newest first.
    changesets = list(c.rhodecode_repo[-self.feed_nr:])
    changesets.reverse()
    for changeset in changesets:
        feed.add_item(
            title=self._get_title(changeset),
            link=url('changeset_home', repo_name=repo_name,
                     revision=changeset.raw_id, qualified=True),
            author_name=changeset.author,
            description=''.join(self.__get_desc(changeset)),
            pubdate=changeset.date,
        )
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def _get_feed_from_cache(key, kind):
    """Render the repository changelog as an RSS 2.0 string (cache worker)."""
    feed = Rss201rev2Feed(
        title=_('%s %s feed') % (c.site_name, repo_name),
        link=h.canonical_url('summary_home', repo_name=repo_name),
        description=_('Changes on %s repository') % repo_name,
        language=language,
        ttl=ttl)
    # Item count is configurable via CONFIG; fall back to 20.
    per_page = safe_int(CONFIG.get('rss_items_per_page', 20))
    recent = list(c.db_repo_scm_instance[-per_page:])
    for changeset in reversed(recent):
        feed.add_item(
            title=self._get_title(changeset),
            link=h.canonical_url('changeset_home', repo_name=repo_name,
                                 revision=changeset.raw_id),
            author_name=changeset.author,
            description=''.join(self.__get_desc(changeset)),
            pubdate=changeset.date,
        )
    response.content_type = feed.mime_type
    return feed.writeString('utf-8')
def _generate_feed(cache_key):
    """Build the RSS 2.0 feed body; returns ``(mime_type, xml_string)``."""
    feed = Rss201rev2Feed(title=self.title % repo_name,
                          link=url('summary_home', repo_name=repo_name,
                                   qualified=True),
                          description=self.description % repo_name,
                          language=self.language,
                          ttl=self.ttl)
    for commit in reversed(self._get_commits()):
        # Normalize the commit timestamp to the configured timezone.
        when = self._set_timezone(commit.date)
        feed.add_item(
            title=self._get_title(commit),
            author_name=commit.author,
            description=self._get_description(commit),
            link=url('changeset_home', repo_name=repo_name,
                     revision=commit.raw_id, qualified=True),
            pubdate=when,
        )
    return feed.mime_type, feed.writeString('utf-8')
def rss(self, watch, authid, uid, feedType):
    """Serve an RSS or Atom feed for a thread or a (meta)board.

    :param watch: thread id (all digits) or a board/metaboard identifier
    :param authid: token that must match the target user's authid
    :param uid: id of the user whose view/permissions shape the feed
    :param feedType: ``'rss'`` for RSS 2.0, anything else yields Atom 1.0
    """
    if not g.OPT.allowFeeds:
        abort(403)
    if not self.currentUserIsAuthorized():
        # Unauthenticated access: authenticate via the uid/authid pair
        # embedded in the feed URL instead of a session login.
        user = User.getByUid(uid)
        if not user or not int(authid) == user.authid():
            return redirect_to('boardBase')
        # Admin accounts may only use feed auth from whitelisted IPs.
        if user.isAdmin() and not checkAdminIP():
            return redirect_to('boardBase')
        # enable static files downloading
        session['feedAuth'] = True
        session.save()
        self.setCookie()
    else:
        user = self.userInst
    self.userInst = user
    title = u''
    descr = u'%s News Feed' % g.OPT.baseDomain
    posts = []
    if re.compile("^\d+$").match(watch):
        # All-digits `watch` is a thread id: feed contains the thread's
        # opening post followed by all replies visible to this user.
        watch = int(watch)
        thePost = Post.getPost(watch)
        if not thePost:
            abort(404)
        title = _(u"%s: thread #%d") % (g.OPT.title, watch)
        thread = Post.buildThreadFilter(user, thePost.id).first()
        if not thread:
            abort(404)
        replies = thread.filterReplies().all()
        posts = [thread]
        if replies:
            posts += replies
    else:
        # Otherwise `watch` names a board/metaboard: feed contains the
        # most recently bumped threads, up to the user's page size.
        title = _(u"%s: %s") % (g.OPT.title, watch)
        filter = Post.buildMetaboardFilter(watch, user)[0]
        tpp = user.threadsPerPage
        posts = filter.order_by(Post.bumpDate.desc())[0:tpp]
    feed = None
    link = h.url_for('feed', uid=uid, authid=authid, watch=watch,
                     feedType=feedType)
    args = dict(
        title=title,
        link=link,
        description=descr,
        language=u"en",
    )
    if feedType == 'rss':
        feed = Rss201rev2Feed(**args)
        response.content_type = 'application/rss+xml'
    else:
        feed = Atom1Feed(**args)
        response.content_type = 'application/atom+xml'
    for post in posts:
        # Render each post in the context of its parent thread; thread
        # openers get their reply count, replies get a plain "#id" title.
        parent = post.parentPost
        if not parent:
            parent = post
        parent.enableShortMessages = False
        title = None
        if not post.parentPost:
            post.replies = post.replyCount
            title = _(u"Thread #%d") % post.id
        else:
            post.replies = None
            title = _(u"#%d") % post.id
        descr = self.render('rssPost', 'std', thread=parent, post=post,
                            disableFiltering=True)  # .decode('utf-8')
        feed.add_item(title=title,
                      link=h.url_for('thread', post=post.id),
                      description=descr)
    out = feed.writeString('utf-8')
    # Stylesheet injection kept for reference (disabled):
    # css = str(h.staticFile(g.OPT.styles[0] + ".css"))
    # out = out.replace('<?xml version="1.0" encoding="utf-8"?>',
    #                   '<?xml version="1.0" encoding="utf-8"?>'
    #                   '\n<?xml-stylesheet type="text/css" href="%s"?>' % css)
    return out