def create_single(posts, index):
    """Render the post at *index* to its own HTML file.

    The page contains the post body, links to the neighbouring posts,
    the existing comments, and a comment form.  Output goes to
    "<year>/<url_heading>.html" via write_out().
    """
    post = posts[index]
    meta = post["meta"]

    # the post itself
    body = html.block(
        "".join([
            html.date(meta["datetime"]),
            html.h(2, meta["heading"]),
            html.p(post["html"]),
            html.author()]),
        "single")

    # neighbour navigation (index-1 is labelled "next", index+1 "previous" —
    # presumably the list is ordered newest-first)
    nav_links = []
    if index > 0:
        nav_links.append("next: " + html.post_link(posts[index - 1]))
    if index < len(posts) - 1:
        nav_links.append("previous: " + html.post_link(posts[index + 1]))
    nav = html.block("<br>".join(nav_links), "btw")

    # comments (markdown-rendered) followed by the comment form
    comments = "".join(
        html.block("".join([
            html.p(markdown.markdown(c["text"])),
            html.comment_meta(c)]))
        for c in post["comments"])
    form = html.comment_form(post_id(meta), post_url(meta))

    # write everything out under the post's year directory
    urlpath = str(meta["datetime"].year) + "/" + meta["url_heading"]
    write_out(urlpath + ".html",
              html.render_front(body + nav + comments + form, meta["heading"]))
def create_stats(p, start):
    """Write stats.html: compile timestamp, post count and render duration.

    p     -- list of all posts (only its length is used)
    start -- timestamp taken when rendering began, from time.perf_counter()
    """
    # NOTE(review): the original used time.clock(), which was deprecated in
    # Python 3.3 and removed in 3.8 — replaced with time.perf_counter().
    # The caller must take `start` from the same clock.
    elapsed = time.perf_counter() - start
    write_out("stats.html", html.render_front(
        html.block("".join([
            html.h(2, "Statistics"),
            html.pre("".join([
                "last compile on ",
                dt.datetime.now().strftime("%A, %x %T"),
                "\n\nnumber of posts: ", str(len(p)),
                "\nrendering took ", str(elapsed), " seconds"]))])),
        "statistics"))
def create_errors():
    """Generate the custom HTTP error pages (401, 403, 404, 500)."""
    log_compile("creating error pages")
    pages = {
        401: ("401 Unauthorized", "Go away."),
        403: ("403 Forbidden", "Yep, forbidden."),
        404: ("404 Not Found", "Sorry."),
        500: ("500 Internal Server Error", "Please try again later.")}
    # one "<code>.html" file per status, titled with the status line
    for code, (title, text) in pages.items():
        page = html.block(html.h(2, title) + html.p(text))
        write_out(str(code) + ".html", html.render_front(page, title))
    log_compile("done")
def html():
    """Assemble the "my bookmarks" start page markup.

    Sections: named bookmarks, plain URLs from a file, subreddit links,
    Google news/video searches.  (Name shadows the stdlib `html` module —
    kept for compatibility with existing callers.)
    """
    # entry[0] is the title, entry[1] the URL / subreddit list
    named = [[h.newtabopen(entry[1], entry[0]), h.space()]
             for entry in bookmarks()]
    plain = [[br, h.newtabopen(url, url)]
             for url in u.readfilenn("/home/umar/addedbookmarks")]
    subreddits = h.tabularize(
        [h.newtabopen("http://reddit.com/r/" + "+".join(entry[1]), entry[0])
         for entry in reddits()],
        2)
    return h.h("my bookmarks", [
        h.returntohome(),
        named, br,
        plain, br,
        "reddits", br,
        subreddits, br,
        "google news searches", br,
        h.tabularize(googlenewsurls(), 2),
        "google video searches", br,
        googlevideosearches(),
        h.returntohome()])
def create_index(posts):
    """Render the front index.html listing every post.

    Each entry shows the linked heading, a hex dump of the post text, an
    ls(1)-style metadata line and the comment count.
    """
    log_compile("creating index page")
    # collect blocks in a list and join once — the original built the page
    # with repeated `o += ...`, which is quadratic in the number of posts
    blocks = []
    for p in posts:
        meta = p["meta"]
        blocks.append(html.block("".join([
            html.h(2, html.post_link(p)),
            html.hex_dump(p["text"]),
            html.pre(
                "-rw-r--r-- 1 rwos rwos "
                + str(len(p["text"])) + " "
                + html.date(meta["datetime"]) + " "
                + meta["url_heading"][0:20]),
            html.pre(str(len(p["comments"])) + " comment(s)")])))
    # TODO: pagination, with configurable limit
    write_out("index.html", html.render_front("".join(blocks)))
    log_compile("done")
def show_overview():
    """Print the admin overview: counts, action links and the two logs."""
    posts = psblog.get_all_posts()
    total_comments = sum(len(p["comments"]) for p in posts)

    # summary block with counts and admin actions
    page = html.block("".join([
        html.h(2, "Overview"),
        html.p(html.a("?page=list", str(len(posts)) + " Posts")),
        html.p(html.a("#TODO", str(total_comments) + " Comments")),
        html.p(html.a("?page=add_new", "Add New Post")),
        html.p(html.a("?page=compile", "Re-Compile"))]))

    # most recent compile log, verbatim
    compile_log = psblog.readfile(config.log_dir + "compile.log")
    page += html.block(html.p("last compile log:" + html.pre(compile_log)))

    # blog log, newest line first
    blog_lines = psblog.readfile(config.log_dir + "psblog.log").splitlines()
    page += html.block(
        html.p("blog log:" + html.pre("\n".join(reversed(blog_lines)))))

    print(html.render_admin(page))