def diff_page(request=None, pagename=None):
    """Show the diff between two revisions of a page (HTML or XHR/json)."""
    if pagename is None:
        pagename = 'Home'

    page = get_page(request.site._id, pagename)
    if not page:
        if request.is_xhr:
            return send_json({'ok': False, 'reason': 'not found'})
        raise NotFound

    # default: no diff, both endpoints are the current page
    diff = ''
    rev1 = rev2 = page
    revisions = request.values.getlist('r')
    if revisions and len(revisions) >= 2:
        diff, rev1, rev2 = get_diff(page, revisions[0], revisions[1])

    if request.is_xhr:
        rendered = render_template('page/diff_inc.html', diff=diff,
                                   rev1=rev1, rev2=rev2)
        return send_json({'ok': True, 'diff': rendered})

    all_revisions = [page] + page.revisions()
    # get all pages
    pages = all_pages(request.site._id)
    return render_response('page/diff.html', page=page, pages=pages,
                           diff=diff, rev1=rev1, rev2=rev2,
                           revisions=all_revisions)
def site_address(request):
    """Change the site alias (subdomain).

    XHR requests validate the alias and answer json; a plain POST
    updates the site and redirects to the new address.
    """
    error = None
    if request.is_xhr:
        alias = request.values.get('alias')
        # BUG FIX: the original only rejected a *missing* alias (`is None`),
        # so an empty string or too-short alias slipped through even though
        # the error message promises an emptiness/length check. Use the same
        # validation as the POST branch below.
        # NOTE(review): the message says "length < 3" but the historical POST
        # check is `<= 3`; kept `<= 3` for consistency — confirm intent.
        if not alias or len(alias) <= 3:
            return send_json({
                'ok': False,
                'error': u"alias is empty or length < 3"
            })
        elif (get_site(alias, True) and request.site.alias != alias) \
                or alias in FORBIDDEN_CNAME:
            # alias already taken by another site, or reserved
            return send_json({
                'ok': False,
                'error': u"A site with this name has already been registered in couch.it"
            })
        return send_json({'ok': True})

    if request.method == "POST":
        alias = request.form.get('alias')
        if not alias or len(alias) <= 3:
            error = u"alias is empty or length < 3"
        elif not re_address.match(alias):
            error = u"Address name is invalid. It should only contain string and _ or -."
        elif get_site(alias, True) and request.site.alias != alias:
            error = u"A site with this name has already been registered in couch.it"
        else:
            site = get_site(request.site.cname)
            site.alias = alias
            site.save()
            request.site = site
            redirect_url = "http://%s.%s" % (site.alias, settings.SERVER_NAME)
            return redirect(redirect_url)
    return render_response('site/site_address.html', error=error)
def proxy(request):
    """Simple proxy to manage remote connections via ajax.

    Only hosts whitelisted in settings.ALLOWED_HOSTS may be reached.
    """
    url = request.values.get('url', None)
    host = url.split("/")[2]  # was `host = host = ...` (duplicated target)
    if host not in settings.ALLOWED_HOSTS:
        return send_json({'error': "host isn't allowed"})

    if request.method == "POST" or request.method == "PUT":
        length = request.environ['CONTENT_LENGTH']
        headers = {
            # BUG FIX: these were read from os.environ (the process
            # environment) instead of the WSGI request environ.
            "Content-Type": request.environ["CONTENT_TYPE"],
            "Accept": request.environ["HTTP_ACCEPT"]
        }
        # BUG FIX: `input_stream` was an undefined name; read the request
        # body from the WSGI input stream, bounded by Content-Length.
        body = request.environ['wsgi.input'].read(int(length))
        r = urllib2.Request(url, body, headers)
        y = urllib2.urlopen(r)
    else:
        headers = {
            "Content-Type": request.environ["CONTENT_TYPE"],
            "Accept": request.environ["HTTP_ACCEPT"]
        }
        r = urllib2.Request(url, headers=headers)
        y = urllib2.urlopen(r)

    # forward the upstream content type when present
    i = y.info()
    if i.has_key("Content-Type"):
        content_type = i["Content-Type"]
    else:
        content_type = 'text/plain'
    resp = y.read()
    response = BCResponse(resp)
    response.content_type = content_type
    return response
def revisions_feed(request=None, pagename=None, feedtype="atom"):
    """Feed (atom or json) listing the latest revisions of a page."""
    if pagename is None:
        pagename = 'Home'
    page = get_page(request.site._id, pagename)
    if not page:
        raise NotFound
    all_revisions = [page] + page.revisions()

    def _title_for(rev):
        # Summarize a revision by its last modified hunk, stripped of tags
        # and truncated to 60 chars; fall back to "Edited." when empty.
        summary = ''
        for change in rev.changes:
            if change['type'] != "unmod":
                summary = "\n".join(change['changed']['lines'])
        summary = do_truncate(do_striptags(summary), 60)
        return summary if summary else "Edited."

    feed_title = "%s: Latest revisions of %s" % (request.site.cname,
                                                 page.title)
    if feedtype == "atom":
        feed = AtomFeed(
            title=feed_title,
            subtitle=request.site.subtitle,
            updated=page.updated,
            feed_url=request.url
        )
        for rev in all_revisions:
            _url = "%s%s" % (request.host_url,
                             url_for("revision_page",
                                     pagename=pagename,
                                     nb_revision=rev.nb_revision))
            feed.add(_title_for(rev), convert_markdown(rev.content),
                     updated=rev.updated,
                     url=_url,
                     id=_url,
                     author=rev.title.replace(' ', '_'))
        return feed.get_response()

    payload = {
        'title': feed_title,
        'subtitle': request.site.subtitle,
        'updated': datetime_tojson(page.updated),
        'feed_url': request.url,
        'revisions': []
    }
    for rev in all_revisions:
        url = "%s%s" % (request.host_url,
                        url_for("revision_page",
                                cname=request.site.cname,
                                pagename=pagename,
                                nb_revision=rev.nb_revision))
        payload['revisions'].append({
            'title': _title_for(rev),
            'content': rev.content,
            'url': url,
            'updated': datetime_tojson(rev.updated),
            'id': rev.nb_revision
        })
    return send_json(payload)
def site_check_akismet(request, key):
    """Verify an akismet API key for the current site and answer json."""
    if request.site.alias:
        site_url = "http://%s.%s" % (request.site.alias, settings.SERVER_NAME)
    else:
        site_url = "http://%s/%s" % (settings.SERVER_NAME, request.site.cname)
    ak = Akismet(site_url, key)
    # BUG FIX: the original called `res.verify_key()` — `res` is undefined;
    # the Akismet client just constructed is `ak`.
    resp = ak.verify_key()
    return send_json({"valid": resp})
def site_changes(request, feedtype=None):
    """Render the latest changes of a site as HTML, atom, json or rdf."""
    pages = all_pages(request.site._id)
    changes = get_changes(request.site._id)
    # BUG FIX: `%` binds tighter than `and`/`or`, so the original title
    # expression evaluated to plain `title or cname` and silently dropped
    # the "Latest changes" prefix. Parenthesize the fallback first.
    feed_title = "%s: Latest changes" % (request.site.title
                                         or request.site.cname)
    # NOTE(review): assumes `changes` is non-empty (changes[0].updated),
    # same as the original — confirm callers never hit an empty site.
    if feedtype == "atom":
        feed = AtomFeed(
            title=feed_title,
            subtitle=request.site.subtitle,
            updated=changes[0].updated,
            feed_url=request.url
        )
        for rev in changes:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=rev.title.replace(' ', '_')))
            feed.add(rev.title, convert_markdown(rev.content),
                     updated=rev.updated,
                     url=_url,
                     id=_url,
                     author=rev.title.replace(' ', '_'))
        return feed.get_response()
    elif feedtype == 'json':
        payload = {
            'title': feed_title,
            'subtitle': request.site.subtitle,
            'updated': datetime_tojson(changes[0].updated),
            'feed_url': request.url,
            'pages': []
        }
        for rev in changes:
            url = "%s%s" % (request.host_url,
                            url_for("show_page",
                                    pagename=rev.title.replace(' ', '_')))
            payload['pages'].append({
                'title': rev.title,
                'content': rev.content,
                'url': url,
                'updated': datetime_tojson(rev.updated),
                'id': rev.title.replace(' ', '_')
            })
        return send_json(payload)
    elif feedtype == 'rdf':
        site_title = request.site.title or request.site.cname
        site_url = request.host_url
        # BUG FIX: the original test was inverted (`if not local.site_url`),
        # which only ever appended a falsy/empty value — a no-op.
        if local.site_url:
            site_url += local.site_url
        sioc = SiocWiki(site_url, site_title,
                        datetime_tojson(request.site.created))
        for rev in changes:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=rev.title.replace(' ', '_')))
            sioc.add_page(rev.content, rev.title, _url,
                          datetime_tojson(rev.updated))
        return send_sioc(sioc.to_str())
    return render_response('site/changes.html', changes=changes, pages=pages)
def site_settings(request):
    """Display the site settings page; an XHR POST updates the settings."""
    if request.is_xhr and request.method == "POST":
        data = json.loads(request.data)
        site = get_site(request.site.cname)
        site.title = data.get('title', site.title)
        site.subtitle = data.get('subtitle', site.subtitle)
        site.email = data.get('email', site.email)
        site.privacy = data.get('privacy', site.privacy)
        site.akismet_key = data.get('akismet_key', "")
        # normalize to a strict True/False
        site.allow_javascript = (True if data.get('allow_javascript', False)
                                 else False)
        site.save()
        request.site = site
        return send_json({'ok': True})

    site_address = None
    if request.site.alias:
        site_address = "http://%s.%s" % (request.site.alias,
                                         settings.SERVER_NAME)
    # get all pages
    pages = all_pages(request.site._id)
    return render_response('site/settings.html', pages=pages,
                           site_address=site_address)
def site_export(request, feedtype="atom"):
    """Export all pages of a site as an atom feed, json, or a zip archive."""
    def _zinfo(fname, date_time):
        # build a deflate-compressed ZipInfo entry
        zinfo = zipfile.ZipInfo()
        zinfo.filename = fname
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        zinfo.date_time = date_time
        return zinfo

    pages = all_pages(request.site._id)
    if pages:
        # sort by last-update date; `key=` replaces the py2-only cmp lambda
        pages.sort(key=lambda p: p.updated)
    # BUG FIX: `%` binds tighter than `and`/`or`, so the original title
    # expression evaluated to plain `title or cname` and dropped the
    # "Latest changes" prefix. Parenthesize the fallback first.
    export_title = "%s: Latest changes" % (request.site.title
                                           or request.site.cname)
    # NOTE(review): the atom/json branches assume `pages` is non-empty
    # (pages[0].updated), same as the original — confirm upstream.
    if feedtype == "atom":
        feed = AtomFeed(
            title=export_title,
            subtitle=request.site.subtitle,
            updated=pages[0].updated,
            feed_url=request.url
        )
        for page in pages:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=page.title.replace(' ', '_')))
            feed.add(page.title, escape(page.content),
                     updated=page.updated,
                     url=_url,
                     id=_url,
                     author=page.title.replace(' ', '_'))
        return feed.get_response()
    elif feedtype == "json":
        # renamed from `json` to avoid shadowing the json module
        payload = {
            'title': export_title,
            'subtitle': request.site.subtitle,
            'updated': datetime_tojson(pages[0].updated),
            'pages': []
        }
        for page in pages:
            url = url_for("show_page", pagename=page.title.replace(' ', '_'))
            payload['pages'].append({
                'title': page.title,
                'content': page.content,
                'url': url,
                'updated': datetime_tojson(page.updated),
                'id': page.title.replace(' ', '_')
            })
        return send_json(payload)
    elif feedtype == "zip":
        # (removed a redundant second all_pages() fetch here)
        import time, codecs
        zip_content = StringIO()
        zfile = zipfile.ZipFile(zip_content, "w", zipfile.ZIP_DEFLATED)
        for page in pages:
            fname = smart_str(page.title.replace(" ", "_"))
            now = time.localtime()[:6]
            # raw markdown source, BOM-prefixed so editors detect UTF-8
            zinfo = _zinfo("markdown/%s.txt" % fname, now)
            zfile.writestr(zinfo,
                           codecs.BOM_UTF8 + page.content.encode('utf-8'))
            # rendered HTML version of the page
            zinfo = _zinfo("%s.html" % fname, now)
            zfile.writestr(zinfo, codecs.BOM_UTF8 +
                           render_template("page/export.html", page=page,
                                           request=request,
                                           pages=pages).encode("utf-8"))
        zinfo = _zinfo("index.html", time.localtime()[:6])
        zfile.writestr(zinfo, codecs.BOM_UTF8 +
                       render_template("page/export_index.html",
                                       pages=pages,
                                       request=request).encode("utf-8"))
        zfile.close()
        response = BCResponse(zip_content.getvalue())
        response.headers['content-type'] = "application/x-zip-compressed"
        return response
def edit_page(request, pagename=None):
    """Create, rename or update a page.

    Handles both the ajax (XHR) and plain form POST flows. A rename
    redirects to the new page; content edits are checked against akismet
    when the site has a key configured.
    """
    if pagename is None:
        pagename = 'Home'
    # BUG FIX: `error` must exist before the final redirect at the bottom —
    # a plain GET previously raised NameError there.
    error = ""
    page = get_page(request.site._id, pagename)
    if not page or page._id is None:
        page = Page(site=request.site._id,
                    title=pagename.replace("_", " "))

    if request.is_xhr and request.method == "POST":
        data = json.loads(request.data)
        new_title = data.get('new_title')
        # (`new_title and new_title is not None` was redundant; also removed
        # an unreachable `else:` on the try — the `return` inside the try
        # body means the else clause could never run.)
        if new_title:
            try:
                page.rename(new_title)
                redirect_url = url_for('show_page',
                                       pagename=new_title.replace(' ', '_'))
                return send_json({"ok": True, "redirect_url": redirect_url})
            except PageExist:
                error = "A page already exist with this name"
        else:
            error = u"New title is empty"
        return send_json({"ok": False, "error": error})

    if request.method == "POST":
        if 'new_title' in request.form:
            new_title = request.form['new_title']
            try:
                page.rename(new_title)
                redirect_url = url_for('show_page',
                                       pagename=new_title.replace(' ', '_'))
                return redirect(redirect_url)
            except PageExist:
                error = "A page already exist with this name"
        else:
            content = request.form.get('content', '')
            # check spam with akismet
            if request.site.akismet_key:
                if request.site.alias:
                    site_url = "http://%s.%s" % (request.site.alias,
                                                 settings.SERVER_NAME)
                else:
                    site_url = "http://%s/%s" % (settings.SERVER_NAME,
                                                 request.site.cname)
                ak = Akismet(site_url, request.site.akismet_key)
                try:
                    is_spam = ak.comment_check(
                        request.environ['REMOTE_ADDR'],
                        request.environ['HTTP_USER_AGENT'],
                        content)
                except Exception:
                    # fail silently: treat an akismet outage as ham
                    is_spam = False
            else:
                is_spam = False
            page.content = content
            page.is_spam = is_spam  # flag page
            page.user = UserInfos(ip=request.environ['REMOTE_ADDR'],
                                  ua=request.environ['HTTP_USER_AGENT'])
            page.save()
            # NOTE(review): non-spam edits of "Home" still redirect to
            # show_spam here — preserved from the original; confirm intent.
            if not page.is_spam and page.title != "Home":
                redirect_url = url_for('show_page', pagename=pagename)
            else:
                redirect_url = url_for('show_spam', pagename=pagename)
            return redirect(redirect_url)
    return redirect(url_for('show_page', pagename=pagename, error=error))