def generate_feed(config_agent, req_path, tpl_render):
    """Build an Atom feed (returned as a string) from the cached page list.

    Reads the all-pages-list cache, takes up to the first 100 Markdown page
    names, renders each page to HTML, and wraps them as Atom entries.

    Parameters:
        config_agent: project config accessor (``.config`` is a ConfigParser).
        req_path: unused here; kept for interface compatibility with callers.
        tpl_render: unused here; kept for interface compatibility with callers.

    Returns:
        str: the serialized Atom feed.
    """
    folder_pages_full_path = config_agent.config.get("paths", "pages_path")
    cache_file_full_path = os.path.join(folder_pages_full_path,
                                        ".zw_all_pages_list_cache")
    buf = cache.get_all_pages_list_from_cache(config_agent)
    md_list = buf.split()

    # Feed-level <author><name>…</name></author> element.
    author = config_agent.config.get("main", "maintainer_email") or "Anonymous"
    e_author = atom.Element(name="author")
    child = atom.Element(name="name", text=author)
    e_author.append_children(child)

    # Feed id/updated are derived from the cache file's ctime, so the feed id
    # changes whenever the page list is regenerated.
    ts = os.stat(cache_file_full_path).st_ctime
    updated = atom.generate_updated(ts)
    ts_as_id = "timestamp:" + commons.strutils.md5(updated)
    feed = atom.Feed(author=e_author, id=ts_as_id, updated=updated,
                     title="Testing Feed Output")

    # Cap the feed at the first 100 pages from the cache.
    for md_file_name in md_list[:100]:
        # NOTE: the original clobbered the `req_path` parameter here; use a
        # distinct local name instead (the parameter is unused afterwards).
        entry_req_path = commons.strutils.rstrips(md_file_name, ".md")
        entry_req_path = commons.strutils.rstrips(entry_req_path, ".markdown")
        local_full_path = mdutils.req_path_to_local_full_path(
            entry_req_path, folder_pages_full_path)

        page_title = mdutils.get_title_by_file_path_in_md(
            folder_pages_full_path, entry_req_path)
        # Skip untitled pages BEFORE rendering: the original called md2html
        # first and then discarded the result, wasting the whole render.
        if not page_title:
            continue

        raw_text = commons.shutils.cat(local_full_path)
        static_file_prefix = static_file.get_static_file_prefix_by_local_full_path(
            config_agent=config_agent, local_full_path=local_full_path,
            req_path=entry_req_path)
        view_settings = page.get_view_settings(config_agent)
        page_content = mdutils.md2html(config_agent=config_agent,
                                       req_path=entry_req_path,
                                       text=raw_text,
                                       static_file_prefix=static_file_prefix,
                                       **view_settings)
        # Atom content is type="html": escape the rendered markup.
        text = cgi.escape(commons.strutils.safestr(page_content))
        e_content = atom.Element(name="content", text=text, type="html")

        hash_title_as_id = "md5:" + commons.strutils.md5(page_title)
        updated = atom.generate_updated(os.stat(local_full_path).st_ctime)
        entry = atom.Entry(id=hash_title_as_id, title=page_title,
                           updated=updated, content=e_content)
        feed.append_children(entry)

    return str(feed)
def generate_feed(config_agent, req_path, tpl_render):
    """Render the cached page list as an Atom feed and return its string form.

    NOTE(review): this function appears twice in this file with identical
    bodies; this later definition shadows the earlier one at import time —
    one of the two should probably be removed.
    """
    pages_dir = config_agent.config.get("paths", "pages_path")
    cache_path = os.path.join(pages_dir, ".zw_all_pages_list_cache")
    page_names = cache.get_all_pages_list_from_cache(config_agent).split()

    maintainer = config_agent.config.get("main", "maintainer_email") or "Anonymous"
    author_elem = atom.Element(name="author")
    author_elem.append_children(atom.Element(name="name", text=maintainer))

    # The feed's id/updated track the cache file's change time.
    feed_updated = atom.generate_updated(os.stat(cache_path).st_ctime)
    feed = atom.Feed(author=author_elem,
                     id="timestamp:" + commons.strutils.md5(feed_updated),
                     updated=feed_updated,
                     title="Testing Feed Output")

    # At most the first 100 cached page names become entries.
    for name in page_names[:100]:
        stripped = commons.strutils.rstrips(name, ".md")
        stripped = commons.strutils.rstrips(stripped, ".markdown")
        full_path = mdutils.req_path_to_local_full_path(stripped, pages_dir)

        source = commons.shutils.cat(full_path)
        title = mdutils.get_title_by_file_path_in_md(pages_dir, stripped)
        prefix = static_file.get_static_file_prefix_by_local_full_path(
            config_agent=config_agent, local_full_path=full_path,
            req_path=stripped)
        settings = page.get_view_settings(config_agent)
        rendered = mdutils.md2html(config_agent=config_agent,
                                   req_path=stripped,
                                   text=source,
                                   static_file_prefix=prefix,
                                   **settings)
        content_elem = atom.Element(
            name="content",
            text=cgi.escape(commons.strutils.safestr(rendered)),
            type="html")

        # Pages without a title contribute no entry.
        if not title:
            continue

        feed.append_children(atom.Entry(
            id="md5:" + commons.strutils.md5(title),
            title=title,
            updated=atom.generate_updated(os.stat(full_path).st_ctime),
            content=content_elem))

    return str(feed)
def POST(self, req_path):
    """Dispatch POST requests to the special "~" endpoints.

    Supported paths:
        ~search   — full-text/filename search; renders the results page.
        ~settings — persist display-toggle cookies, then redirect back.
        ~new      — create/update a page from the submitted form, refresh
                    the caches, then redirect to the new page.

    Raises:
        web.NotFound: for any other req_path.
    """
    inputs = web.input()

    if req_path == "~search":
        view_settings = page.get_view_settings(config_agent)
        keywords = web.utils.safestr(inputs.get("k"))
        title = "Search %s" % keywords

        if keywords:
            limit = config_agent.config.getint("pagination", "search_page_limit")
            lines = search.search_by_filename_and_file_content(keywords,
                                                               limit=limit)
            buf = mdutils.sequence_to_unorder_list(seq=lines, **view_settings)
        else:
            buf = None

        if buf:
            content = mdutils.md2html(config_agent=config_agent,
                                      req_path=req_path,
                                      text=buf, **view_settings)
        else:
            content = "matched not found"

        static_files = static_file.get_global_static_files(**view_settings)
        return tpl_render.search(static_files=static_files, title=title,
                                 keywords=keywords, content=content)

    elif req_path == "~settings":
        # Persist the three display toggles as 0/1 cookies for one year
        # (31536000 s).  The original repeated this if/else + setcookie
        # pattern verbatim three times; a data-driven loop keeps the same
        # cookie names, values, and order.
        for field, cookie_name in (("show_full_path", "zw_show_full_path"),
                                   ("auto_toc", "zw_auto_toc"),
                                   ("highlight_code", "zw_highlight")):
            flag = 1 if inputs.get(field) == "on" else 0
            web.setcookie(name=cookie_name, value=flag, expires=31536000)

        # Bounce back to the last page the user visited, if it is a real
        # page path; otherwise go to the wiki root.
        latest_req_path = web.cookies().get("zw_latest_req_path")
        if (latest_req_path
                and (latest_req_path not in consts.g_redirect_paths)
                and latest_req_path != "/"):
            web.setcookie(name="zw_latest_req_path", value="", expires=-1)
            latest_req_path = "/" + latest_req_path
        else:
            latest_req_path = "/"
        web.seeother(latest_req_path)
        return

    elif req_path == "~new":
        real_req_path = inputs.get("path")
        fixed_req_path = web.lstrips(real_req_path, "/")
        content = web.utils.safestr(inputs.get("content"))
        page.update_page_by_req_path(req_path=fixed_req_path, content=content)
        # Keep the derived caches in sync with the newly written page.
        cache.update_recent_change_cache(config_agent)
        cache.update_all_pages_list_cache(config_agent)
        web.seeother(real_req_path)
        return

    else:
        raise web.NotFound()