def revisions_feed(request=None, pagename=None, feedtype="atom"):
    """Serve the revision history of a page as an Atom feed or as JSON.

    :param request: the current request (provides ``site``, ``url`` ...).
    :param pagename: page to list revisions for; defaults to ``'Home'``.
    :param feedtype: ``"atom"`` for an Atom feed, anything else for JSON.
    :raises NotFound: when the page does not exist.
    """
    if pagename is None:
        pagename = 'Home'
    page = get_page(request.site._id, pagename)
    if not page:
        raise NotFound
    # The current page object is treated as the newest revision.
    all_revisions = [page] + page.revisions()

    def _revision_title(rev):
        # Summarize the revision by its last non-unmodified diff hunk,
        # truncated and stripped of tags; fall back to a generic label.
        title = ''
        for change in rev.changes:
            if change['type'] != "unmod":
                title = "\n".join(change['changed']['lines'])
        title = do_truncate(do_striptags(title), 60)
        return title or "Edited."

    if feedtype == "atom":
        feed = AtomFeed(
            title="%s: Latest revisions of %s" % (request.site.cname, page.title),
            subtitle=request.site.subtitle,
            updated=page.updated,
            feed_url=request.url
        )
        for rev in all_revisions:
            # NOTE(review): unlike the JSON branch below, this url_for call
            # does not pass cname — confirm whether the atom URLs are
            # intentionally cname-less.
            _url = "%s%s" % (request.host_url,
                             url_for("revision_page",
                                     pagename=pagename,
                                     nb_revision=rev.nb_revision))
            feed.add(_revision_title(rev), convert_markdown(rev.content),
                     updated=rev.updated,
                     url=_url,
                     id=_url,
                     author=rev.title.replace(' ', '_'))
        return feed.get_response()
    else:
        json = {
            'title': "%s: Latest revisions of %s" % (request.site.cname, page.title),
            'subtitle': request.site.subtitle,
            'updated': datetime_tojson(page.updated),
            'feed_url': request.url,
            'revisions': []
        }
        for rev in all_revisions:
            url = "%s%s" % (request.host_url,
                            url_for("revision_page",
                                    cname=request.site.cname,
                                    pagename=pagename,
                                    nb_revision=rev.nb_revision))
            json['revisions'].append({
                'title': _revision_title(rev),
                'content': rev.content,
                'url': url,
                'updated': datetime_tojson(rev.updated),
                'id': rev.nb_revision
            })
        return send_json(json)
def site_changes(request, feedtype=None):
    """Serve the site's latest changes as Atom, JSON, SIOC RDF, or HTML.

    :param request: the current request (provides ``site``, ``url`` ...).
    :param feedtype: ``"atom"``, ``"json"``, ``"rdf"``, or ``None`` for the
        HTML changes page.
    """
    pages = all_pages(request.site._id)
    changes = get_changes(request.site._id)
    # BUG FIX: the original wrote
    #   "%s: Latest changes" % request.site.title and ... or ...
    # which parses as ("%s..." % title) and title or cname because %
    # binds tighter than `and`, silently dropping the "Latest changes"
    # prefix from the feed title.
    site_title = request.site.title or request.site.cname
    feed_title = "%s: Latest changes" % site_title
    # NOTE(review): changes[0] raises IndexError on a site with no
    # changes — confirm get_changes() never returns an empty list.
    if feedtype == "atom":
        feed = AtomFeed(
            title=feed_title,
            subtitle=request.site.subtitle,
            updated=changes[0].updated,
            feed_url=request.url
        )
        for rev in changes:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=rev.title.replace(' ', '_')))
            feed.add(rev.title, convert_markdown(rev.content),
                     updated=rev.updated,
                     url=_url,
                     id=_url,
                     author=rev.title.replace(' ', '_'))
        return feed.get_response()
    elif feedtype == 'json':
        json = {
            'title': feed_title,
            'subtitle': request.site.subtitle,
            'updated': datetime_tojson(changes[0].updated),
            'feed_url': request.url,
            'pages': []
        }
        for rev in changes:
            url = "%s%s" % (request.host_url,
                            url_for("show_page",
                                    pagename=rev.title.replace(' ', '_')))
            json['pages'].append({
                'title': rev.title,
                'content': rev.content,
                'url': url,
                'updated': datetime_tojson(rev.updated),
                'id': rev.title.replace(' ', '_')
            })
        return send_json(json)
    elif feedtype == 'rdf':
        site_url = request.host_url
        # BUG FIX: the condition was inverted (`if not local.site_url:`),
        # so the extra path was appended only when it was empty/None —
        # a no-op at best, a TypeError on None at worst.
        if local.site_url:
            site_url += local.site_url
        sioc = SiocWiki(site_url, site_title,
                        datetime_tojson(request.site.created))
        for rev in changes:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=rev.title.replace(' ', '_')))
            sioc.add_page(rev.content, rev.title, _url,
                          datetime_tojson(rev.updated))
        return send_sioc(sioc.to_str())
    return render_response('site/changes.html', changes=changes, pages=pages)
def save(self, session):
    """Persist *session* in the CouchDB database.

    Tries to create the session document; when the document already
    exists (create raises on conflict), it re-reads the stored document
    so its revision is preserved and writes the updated fields back.

    :param session: a session object exposing ``sid`` and dict access.
    """
    expire = datetime.now() + timedelta(seconds=settings.SESSION_COOKIE_AGE)
    docid = "session/%s" % session.sid
    session_data = _encode_session_data(dict(session))
    expire_date = datetime_tojson(expire)
    try:
        db[docid] = {
            'session_key': session.sid,
            'session_data': session_data,
            'expire_date': expire_date
        }
    # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
    # are no longer swallowed. NOTE(review): the intended failure here
    # is presumably couchdb's update conflict — confirm and narrow to
    # that exception type if it is importable in this module.
    except Exception:
        s = db[docid]
        s['session_data'] = session_data
        s['expire_date'] = expire_date
        db[docid] = s
def show_page(request=None, pagename=None):
    """Display a wiki page, following aliases and honoring RDF requests.

    Resolution order: exact page, then alias (302 redirect to the real
    page), then a fresh unsaved Page object (unless the name is
    forbidden). Clients whose Accept header prefers RDF get a SIOC
    document instead of HTML.

    :param request: the current request.
    :param pagename: page to display; defaults to ``'home'``.
    """
    mimetypes = request.accept_mimetypes
    if pagename is None:
        pagename = 'home'
    redirect_from = request.values.get('redirect_from', '')
    page = get_page(request.site._id, pagename)
    if not page or page._id is None:
        # Unknown name: maybe it is an alias for an existing page.
        alias = AliasPage.get_alias(request.site._id, pagename)
        if alias is not None:
            page = Page.get(alias.page)
            return redirect(url_for('show_page',
                                    pagename=page.title.replace(' ', '_'),
                                    redirect_from=pagename))
    if not page or page._id is None:
        if pagename.lower() in FORBIDDEN_PAGES:
            redirect_url = "%s?error=%s" % (
                url_for('show_page', pagename='home'),
                u"Page name invalid."
            )
            return redirect(redirect_url)
        # Not found and not forbidden: present an empty, unsaved page.
        page = Page(
            site=request.site._id,
            title=pagename.replace("_", " ")
        )
    if mimeparse.best_match(['application/rdf+xml', 'text/xml', 'text/html'],
                            request.headers['ACCEPT']) == "application/rdf+xml":
        site_title = request.site.title or request.site.cname
        site_url = request.host_url
        # BUG FIX: the condition was inverted (`if not local.site_url:`),
        # so the extra path was appended only when it was empty/None.
        if local.site_url:
            site_url += local.site_url
        sioc = SiocWiki(site_url, site_title,
                        datetime_tojson(request.site.created))
        sioc.add_page(page.content, page.title, request.url,
                      datetime_tojson(page.updated))
        return send_sioc(sioc.to_str())
    # get all pages for the sidebar/navigation
    pages = all_pages(request.site._id)
    response = render_response('page/show.html', page=page, pages=pages,
                               lexers=LEXERS_CHOICE,
                               redirect_from=redirect_from)
    return response
def site_export(request, feedtype="atom"):
    """Export the whole site as an Atom feed, JSON, or a zip archive.

    The zip archive contains each page as Markdown (``markdown/*.txt``),
    as rendered HTML, plus an ``index.html``, all BOM-prefixed UTF-8.

    :param request: the current request.
    :param feedtype: ``"atom"``, ``"json"``, or ``"zip"``.
    """
    def _zinfo(fname, date_time):
        # Build a ZipInfo with deflate compression for one archive member.
        zinfo = zipfile.ZipInfo()
        zinfo.filename = fname
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        zinfo.date_time = date_time
        return zinfo

    pages = all_pages(request.site._id)
    if pages:
        # Oldest-first by modification time (replaces the Python-2
        # cmp-style comparator with an equivalent key sort).
        pages.sort(key=lambda p: p.updated)
    # BUG FIX: the original wrote
    #   "%s: Latest changes" % request.site.title and ... or ...
    # which parses as ("%s..." % title) and title or cname, silently
    # dropping the "Latest changes" prefix from the feed title.
    feed_title = "%s: Latest changes" % (request.site.title or
                                         request.site.cname)
    if feedtype == "atom":
        # NOTE(review): pages is sorted oldest-first, so pages[0] is the
        # OLDEST timestamp; confirm the feed's `updated` should not be
        # the newest one instead. Also crashes on an empty site.
        feed = AtomFeed(
            title=feed_title,
            subtitle=request.site.subtitle,
            updated=pages[0].updated,
            feed_url=request.url
        )
        for page in pages:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=page.title.replace(' ', '_')))
            feed.add(page.title, escape(page.content),
                     updated=page.updated,
                     url=_url,
                     id=_url,
                     author=page.title.replace(' ', '_'))
        return feed.get_response()
    elif feedtype == "json":
        json = {
            'title': feed_title,
            'subtitle': request.site.subtitle,
            'updated': datetime_tojson(pages[0].updated),
            'pages': []
        }
        for page in pages:
            url = url_for("show_page", pagename=page.title.replace(' ', '_'))
            json['pages'].append({
                'title': page.title,
                'content': page.content,
                'url': url,
                'updated': datetime_tojson(page.updated),
                'id': page.title.replace(' ', '_')
            })
        return send_json(json)
    elif feedtype == "zip":
        # (Removed a redundant second all_pages() fetch here; the sorted
        # list from above is used for the archive and the index page.)
        import time, codecs
        zip_content = StringIO()
        zfile = zipfile.ZipFile(zip_content, "w", zipfile.ZIP_DEFLATED)
        now = time.localtime()[:6]  # one timestamp for all members
        for page in pages:
            slug = smart_str(page.title.replace(" ", "_"))
            zinfo = _zinfo("markdown/%s.txt" % slug, now)
            zfile.writestr(zinfo,
                           codecs.BOM_UTF8 + page.content.encode('utf-8'))
            zinfo = _zinfo("%s.html" % slug, now)
            zfile.writestr(zinfo, codecs.BOM_UTF8 +
                           render_template("page/export.html", page=page,
                                           request=request,
                                           pages=pages).encode("utf-8"))
        zinfo = _zinfo("index.html", now)
        zfile.writestr(zinfo, codecs.BOM_UTF8 +
                       render_template("page/export_index.html", pages=pages,
                                       request=request).encode("utf-8"))
        zfile.close()
        response = BCResponse(zip_content.getvalue())
        response.headers['content-type'] = "application/x-zip-compressed"
        return response