def render_feed(all_articles, category="", filter_langs=False):
    """Render the atom newsfeed for the whole site or one category view.

    all_articles -- ordered list of article objects; only the first
                    FEED_LENGTH entries (all of them if unset) are used.
    category     -- "" for the main feed, or a path prefix such as
                    "tag/<name>", "archive/<date>", or a category key
                    present in settings.CATEGORY.
    filter_langs -- forwarded to the template via **locals().

    NOTE: every local defined below is handed to the template through
    **locals() at the end, so the NAMES here are part of the template
    contract — do not rename or add locals casually.
    """
    # Identity placeholder for a gettext-style translation hook —
    # presumably wired to real translation elsewhere; verify.
    _ = lambda s: s
    a = all_articles[:settings.get("FEED_LENGTH", len(all_articles))]
    author = settings.DEFAULTS['AUTHOR']
    # Naive UTC timestamp trimmed to whole seconds, with an explicit
    # Atom-style "Z" suffix (isoformat() on a naive datetime has none).
    updated = datetime.utcnow().isoformat()[0:19] + "Z"
    link = (settings.URL + category).rstrip("/") + "/"
    title = settings.get("TITLE", "")
    # Decorate the site title according to which kind of sub-feed this is;
    # the slice offsets skip the "tag/" / "archive/" prefixes.
    if category.startswith("tag/"):
        title = _(u"Tag \u201C%(tag)s\u201D \u2014 %(title)s") % {
            'tag': category[4:],
            'title': title
        }
    elif category.startswith("archive/"):
        title = _(u"Archive for %(date)s \u2014 %(title)s") % {
            'date': category[8:],
            'title': title
        }
    elif category != "" and category in settings.CATEGORY:
        title = _(u"Category \u201C%(category)s\u201D \u2014 %(title)s") % {
            'category': settings.CATEGORY[category]['title'],
            'title': title
        }
    # Shadows the builtin deliberately: the template expects a variable
    # literally named "id" (the Atom <id> element).
    id = settings.URL + category
    nolang = True
    # Renders _templates/feed.mako to <category>/feed.xml, exposing every
    # local above as a template variable.
    template_engine.render_template("_templates/feed.mako",
                                    category + "/feed.xml", **locals())
def make_index(self):
    """Generate a search index of all files' contents.

    Reads ``self.indexdata`` (a mapping of page identifier -> rendered
    HTML/text) and, depending on settings["INDEX"], writes the term index
    as JSON ("JSON"), as an SQLite database ("SQLITE"), or both ("ALL")
    into settings.BUILD_TARGET.

    Returns True when an index type was configured, False otherwise.
    """
    # Renamed from the original "type", which shadowed the builtin.
    index_type = settings.get("INDEX", False)
    if index_type:
        data = self.indexdata.copy()
        # Compile both patterns once, outside the per-page loop.
        word_splitter = re.compile(r'\W+', re.U)
        tag_stripper = re.compile(r'<.+?>')
        for page in data:
            # Strip markup, lowercase, split into unique words, and drop
            # empty/one-character terms (useless as search keys).
            words = set(word_splitter.split(
                tag_stripper.sub('', data[page].lower())))
            data[page] = [w for w in words if len(w) > 1]
        if index_type in ("ALL", "JSON"):
            # "with" guarantees the handle is closed even if the write
            # fails (the original leaked it in that case). Mode "wb" is
            # kept: json.dumps yields a byte string under Python 2,
            # which this codebase targets.
            with open(os.path.join(settings.BUILD_TARGET,
                                   "index.json"), "wb") as index_file:
                index_file.write(json.dumps(data))
        if index_type in ("ALL", "SQLITE"):
            sqlite3.enable_callback_tracebacks(settings.DEBUG)
            db = sqlite3.connect(
                os.path.join(settings.BUILD_TARGET, "index.sqlite"))
            try:
                cur = db.cursor()
                cur.execute('CREATE TABLE terms ( p, t )')
                # One executemany instead of one execute per term row.
                cur.executemany(
                    'INSERT INTO terms (p, t) VALUES (?, ?)',
                    ((unicode(page), unicode(term))
                     for page in data for term in data[page]))
                cur.close()
                db.commit()
            finally:
                # Close the connection even when an insert fails
                # (the original left it open on error).
                db.close()
    return bool(index_type)
def render_feed(all_articles, category="", filter_langs=False):
    """Render the atom newsfeed for the whole site or one category view.

    all_articles -- ordered list of article objects; only the first
                    FEED_LENGTH entries (all of them if unset) are used.
    category     -- "" for the main feed, or a path prefix such as
                    "tag/<name>", "archive/<date>", or a category key
                    present in settings.CATEGORY.
    filter_langs -- forwarded to the template via **locals().

    NOTE(review): this definition is a byte-for-byte duplicate of the
    other render_feed in this file — one of the two should probably be
    removed; confirm which one is actually imported/called.

    NOTE: every local defined below is handed to the template through
    **locals() at the end, so the NAMES here are part of the template
    contract — do not rename or add locals casually.
    """
    # Identity placeholder for a gettext-style translation hook —
    # presumably wired to real translation elsewhere; verify.
    _ = lambda s: s
    a = all_articles[:settings.get("FEED_LENGTH", len(all_articles))]
    author = settings.DEFAULTS['AUTHOR']
    # Naive UTC timestamp trimmed to whole seconds, with an explicit
    # Atom-style "Z" suffix (isoformat() on a naive datetime has none).
    updated = datetime.utcnow().isoformat()[0:19]+"Z"
    link = (settings.URL+category).rstrip("/") + "/"
    title = settings.get("TITLE", "")
    # Decorate the site title according to which kind of sub-feed this is;
    # the slice offsets skip the "tag/" / "archive/" prefixes.
    if category.startswith("tag/"):
        title = _(u"Tag \u201C%(tag)s\u201D \u2014 %(title)s") % {
            'tag': category[4:],
            'title': title
        }
    elif category.startswith("archive/"):
        title = _(u"Archive for %(date)s \u2014 %(title)s") % {
            'date': category[8:],
            'title': title
        }
    elif category != "" and category in settings.CATEGORY:
        title = _(u"Category \u201C%(category)s\u201D \u2014 %(title)s") % {
            'category': settings.CATEGORY[category]['title'],
            'title': title
        }
    # Shadows the builtin deliberately: the template expects a variable
    # literally named "id" (the Atom <id> element).
    id = settings.URL + category
    nolang = True
    # Renders _templates/feed.mako to <category>/feed.xml, exposing every
    # local above as a template variable.
    template_engine.render_template("_templates/feed.mako",
                                    category+"/feed.xml", **locals())
def __init__(self, path):
    """Initialize with path to article source.

    path -- article file path relative to the _articles directory;
            a leading "/" is tolerated and stripped.

    Reads the source file, splits it into a header block and body at the
    first blank line, and kicks off header completion and content
    processing.

    Raises ValueError if the file contains no blank-line separator.
    """
    path = path.lstrip("/")
    self.lexers.update(settings.get('LEXERS', {}))
    self.processed = False
    self.category = os.path.dirname(path).strip("/")
    self.url = Url(settings.get("ARTICLE_PATH", "") + path)
    # File extensions that name a configured language; exactly one match
    # means the filename hard-codes this article's language.
    langs = [ext for ext in self.url.get_extensions()
             if ext in settings.languages]
    self.hard_language = None
    if len(langs) == 1:
        self.hard_language = langs[0]
        self.url.fix_language()
    # "with" guarantees the handle is closed even when the header/body
    # split below raises (the original leaked the handle in that case).
    with open("_articles/%s" % path, 'r') as f:
        head, content = f.read().replace("\r\n", "\n").split("\n\n", 1)
    self.headers = ArticleHeaders(head)
    # .decode(): this codebase targets Python 2, where read() yields a
    # byte string.
    self.raw_content = content.decode("utf-8")
    self.soup = BeautifulSoup(self.raw_content, fromEncoding="utf-8")
    self.complete_headers()
    self.process_content()
def copy_statics():
    """Copy static files from project to final site folder"""
    target = settings.BUILD_TARGET
    # Wipe any previous build output first; errors are ignored so a
    # missing target directory is not fatal.
    shutil.rmtree(target, True)
    # Patterns to skip: build-internal names plus anything the project
    # configures via IGNORED_PATTERNS.
    skip = ["_*", ".*swp", ".git*", "*.mako", "Makefile"]
    skip.extend(settings.get("IGNORED_PATTERNS", []))
    copytree(".", target, ignore=ignore_patterns(*skip))