def copy_assets(self, src, dist):
    """Copy a bundled asset directory into the build output.

    ``src`` is resolved relative to this module's own directory and
    ``dist`` relative to ``self._config.build_dir``. Does nothing when
    the source directory does not exist (best-effort copy).

    NOTE(review): the original signature lacked ``self`` even though the
    body reads ``self._config`` — invoked as a method, the instance would
    have been bound to ``src`` and ``self`` would be an unresolved name.
    Adding ``self`` restores the intended method signature.
    """
    source_dir = unify_joinpath(os.path.dirname(__file__), src)
    dist_dir = unify_joinpath(self._config.build_dir, dist)
    # Silently skip when the packaged asset directory is absent.
    if not os.path.exists(source_dir):
        return
    # os.mkdir creates only one level — assumes build_dir itself already
    # exists at this point in the build — TODO confirm.
    if not os.path.exists(dist_dir):
        os.mkdir(dist_dir)
    # copytree is a project helper (not shutil's) — presumably it merges
    # into an existing destination; verify against its definition.
    copytree(source_dir, dist_dir)
def build_search_cache(self):
    """Build the search-cache JSON file and expose its hash to templates.

    Serializes every non-skipped post and page (title, date, permalink,
    tag-stripped text, plus categories/tags for posts) into one JSON
    document, writes it to ``<build_dir>/<hash>.json`` and stores the
    hash in the template environment as ``search_cache_hash``.
    """
    router = self._router
    html_tag = re.compile(r'<[^>]+>', re.S)  # matches any HTML tag

    def make_link(kind, slug):
        # Categories and tags share the same entry shape; only the
        # permalink kind differs.
        return {
            "name": slug,
            "slug": slug,
            "permalink": router.gen_permalink(kind, slug, 1)
        }

    def to_entry(content):
        entry = {
            "title": content.get_meta('title'),
            "date": str(content.get_meta('date')),
            "path": router.gen_permalink_by_content(content),
            "text": html_tag.sub('', content.parsed),
            "categories": [],
            "tags": []
        }
        # Only posts carry categories and tags.
        if content.get_meta('layout') == 'post':
            entry['categories'] = [
                make_link('category', cate)
                for cate in content.get_meta('categories')
            ]
            entry['tags'] = [
                make_link('tag', tag)
                for tag in content.get_meta('tags')
            ]
        return entry

    cache_str = json.dumps({
        "posts": [to_entry(p) for p in self._posts if not p.skip],
        "pages": [to_entry(p) for p in self._pages if not p.skip]
    })
    search_cache_hash = gen_hash(cache_str)
    safe_write(
        unify_joinpath(self._config.build_dir, search_cache_hash + '.json'),
        cache_str)
    self._env.globals['search_cache_hash'] = search_cache_hash
def tr(str, locale="english"):
    """Translate *str* according to the site's locale file.

    On first use, loads ``locale/<g_conf.language>.json`` located next to
    this module and caches the parsed mapping in the module-global
    ``g_translation``; a missing or empty file yields an empty mapping.
    Keys absent from the mapping fall back to the untranslated input.

    NOTE(review): the ``locale`` argument is never read — the language
    always comes from ``g_conf.language``; confirm whether that is
    intentional. The parameter name ``str`` also shadows the builtin,
    but renaming it would break keyword-argument callers.
    """
    global g_translation
    if g_translation is None:
        # Lazy one-time load of the translation table.
        locale_dir = os.path.dirname(__file__) + '/locale'
        path = unify_joinpath(locale_dir, g_conf.language + ".json")
        g_translation = json.loads(safe_read(path) or '{}')
    return g_translation.get(str, str)