def proxy(request):
    """Simple proxy that forwards AJAX requests to whitelisted remote hosts.

    The target URL is read from the ``url`` request parameter; its host part
    must appear in ``settings.ALLOWED_HOSTS`` or a JSON error is returned.
    POST/PUT bodies are forwarded verbatim; the upstream Content-Type is
    propagated back on the response (defaulting to ``text/plain``).
    """
    url = request.values.get('url', None)
    if not url:
        # BUGFIX: the original crashed with AttributeError on a missing url.
        return send_json({'error': "url parameter is required"})
    parts = url.split("/")
    # scheme://host/... -> parts[2] is the host; anything shorter is malformed.
    if len(parts) < 3:
        return send_json({'error': "host isn't allowed"})
    host = parts[2]
    if host not in settings.ALLOWED_HOSTS:
        return send_json({'error': "host isn't allowed"})

    # BUGFIX: request metadata lives in the WSGI environ (request.environ),
    # not in os.environ; the Accept header's WSGI key is HTTP_ACCEPT.
    headers = {
        "Content-Type": request.environ.get("CONTENT_TYPE", "text/plain"),
        "Accept": request.environ.get("HTTP_ACCEPT", "*/*"),
    }
    if request.method == "POST" or request.method == "PUT":
        # CONTENT_LENGTH is a string in the WSGI environ; bound the body read
        # by it (the original read an undefined name `input_stream`).
        length = int(request.environ.get('CONTENT_LENGTH') or 0)
        body = request.environ['wsgi.input'].read(length)
        upstream_req = urllib2.Request(url, body, headers)
        upstream = urllib2.urlopen(upstream_req)
    else:
        upstream_req = urllib2.Request(url, headers=headers)
        upstream = urllib2.urlopen(upstream_req)

    info = upstream.info()
    # rfc822.Message.get returns None when the header is absent.
    content_type = info.get("Content-Type") or 'text/plain'
    response = BCResponse(upstream.read())
    response.content_type = content_type
    return response
def send_json(Body, etag=None):
    """Serialize *Body* to JSON and wrap it in a ``BCResponse``.

    The response carries an auto-computed ETag and an
    ``application/json`` content type.  ``etag`` is accepted for
    interface compatibility but is not used here.
    """
    payload = json.dumps(Body)
    response = BCResponse(payload)
    response.add_etag()
    response.headers['content-type'] = 'application/json'
    return response
def site_export(request, feedtype="atom"):
    """Export every page of the current site in the requested format.

    :param request: current request; ``request.site`` is the site to export.
    :param feedtype: ``"atom"`` (default), ``"json"`` or ``"zip"``.
    :return: an Atom feed response, a JSON response, or a zip archive
        response; ``None`` when the site has no pages or *feedtype* is
        unrecognized.
    """
    def _zinfo(fname, date_time):
        # Build a deflate-compressed ZipInfo entry for *fname*.
        zinfo = zipfile.ZipInfo()
        zinfo.filename = fname
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        zinfo.date_time = date_time
        return zinfo

    pages = all_pages(request.site._id)
    if not pages:
        return None

    # BUGFIX: sort newest first so pages[0].updated really is the latest
    # change (the original sorted ascending, making the feed's "updated"
    # timestamp the *oldest* page).
    pages.sort(lambda a, b: cmp(b.updated, a.updated))

    # BUGFIX: '%' binds tighter than 'and'/'or', so the original expression
    # '"%s: Latest changes" % title and title or cname' collapsed to just
    # the bare title (or cname) and the suffix was always lost.
    title = "%s: Latest changes" % (request.site.title or request.site.cname)

    if feedtype == "atom":
        feed = AtomFeed(
            title=title,
            subtitle=request.site.subtitle,
            updated=pages[0].updated,
            feed_url=request.url
        )
        for page in pages:
            _url = "%s%s" % (request.host_url,
                             url_for("show_page",
                                     pagename=page.title.replace(' ', '_')))
            feed.add(page.title, escape(page.content),
                     updated=page.updated,
                     url=_url,
                     id=_url,
                     author=page.title.replace(' ', '_'))
        return feed.get_response()
    elif feedtype == "json":
        # Renamed the local dict from 'json' to avoid shadowing the module.
        payload = {
            'title': title,
            'subtitle': request.site.subtitle,
            'updated': datetime_tojson(pages[0].updated),
            'pages': []
        }
        for page in pages:
            url = url_for("show_page", pagename=page.title.replace(' ', '_'))
            payload['pages'].append({
                'title': page.title,
                'content': page.content,
                'url': url,
                'updated': datetime_tojson(page.updated),
                'id': page.title.replace(' ', '_')
            })
        return send_json(payload)
    elif feedtype == "zip":
        import time, codecs
        # Note: the original re-fetched all_pages() here, discarding the
        # sorted list already in hand; reuse it instead.
        zip_content = StringIO()
        zfile = zipfile.ZipFile(zip_content, "w", zipfile.ZIP_DEFLATED)
        now = time.localtime()[:6]
        for page in pages:
            slug = smart_str(page.title.replace(" ", "_"))
            # Raw markdown source, BOM-prefixed so editors detect UTF-8.
            zinfo = _zinfo("markdown/%s.txt" % slug, now)
            zfile.writestr(zinfo,
                           codecs.BOM_UTF8 + page.content.encode('utf-8'))
            # Rendered HTML version of the page.
            zinfo = _zinfo("%s.html" % slug, now)
            zfile.writestr(zinfo, codecs.BOM_UTF8 +
                           render_template("page/export.html", page=page,
                                           request=request,
                                           pages=pages).encode("utf-8"))
        zinfo = _zinfo("index.html", now)
        zfile.writestr(zinfo, codecs.BOM_UTF8 +
                       render_template("page/export_index.html", pages=pages,
                                       request=request).encode("utf-8"))
        zfile.close()
        response = BCResponse(zip_content.getvalue())
        response.headers['content-type'] = "application/x-zip-compressed"
        return response
def send_sioc(data):
    """Wrap pre-serialized SIOC RDF/XML *data* in a ``BCResponse``.

    Adds an auto-computed ETag and sets the content type to
    ``application/rdf+xml``.
    """
    response = BCResponse(data)
    response.add_etag()
    response.headers['content-type'] = 'application/rdf+xml'
    return response