def preBuild(site):
    """Collect every case page under DIR into the global CASES list.

    Parses each html page's headers and markdown body into a template
    Context, sorts the contexts newest first, and links each one to its
    chronological neighbours via 'prev_post' / 'next_post'.
    """
    global CASES

    # Build all the cases
    for case in site.pages():
        if not case.path.startswith(DIR):
            continue
        # Skip non html cases for obvious reasons
        if not case.path.endswith('.html'):
            continue

        # Parse headers and markdown body
        headers, body = parsePost(case.data())

        # Build a context for each post
        caseContext = Context()
        caseContext.update(headers)
        caseContext['raw_body'] = body
        caseContext['path'] = case.path
        caseContext['date'] = parseDate(headers.get('date') or headers.get('created'))
        caseContext['url'] = case.absolute_final_url
        # Comma separated header values become stripped lists; absent -> []
        tags = headers.get('tags')
        caseContext['tags'] = [t.strip() for t in tags.split(',')] if tags else []
        category = headers.get('category')
        caseContext['category'] = [c.strip() for c in category.split(',')] if category else []
        CASES.append(caseContext)

    # Sort the cases by date, newest first
    CASES = sorted(CASES, key=lambda x: x['date'])
    CASES.reverse()

    # Link neighbours: the list is newest-first, so i+1 is the older
    # (previous) post and i-1 the newer (next) one. Direct bounds checks
    # replace the O(n) `in range(...)` membership tests, which made the
    # original loop quadratic on Python 2 lists.
    count = len(CASES)
    for i in range(count):
        if i + 1 < count:
            CASES[i]['prev_post'] = CASES[i + 1]
        if i - 1 >= 0:
            CASES[i]['next_post'] = CASES[i - 1]
def preBuild(site):
    """Collect event pages under DIR and split them into past/upcoming.

    Fills the module globals EVENTS (all events, newest first),
    PAST_EVENTS (date_to before today) and UPCOMING_EVENTS
    (date_to today or later, soonest first).
    """
    global EVENTS
    global UPCOMING_EVENTS
    global PAST_EVENTS

    for event in site.pages():
        if not event.path.startswith(DIR):
            continue
        # Skip non html posts for obvious reasons
        if not event.path.endswith('.html'):
            continue

        # Parse headers and markdown body. parsePost expects the raw page
        # text (it asserts on non-string input), not the page object.
        headers, body = parsePost(event.data())

        ctx = Context()
        ctx.update(headers)
        ctx['raw_body'] = body
        ctx['path'] = event.path
        ctx['url'] = event.absolute_final_url
        ctx['date_from'] = parseDate(headers.get('date_from'))
        ctx['date_to'] = parseDate(headers.get('date_to'))
        # Comma separated tags become a stripped list; absent -> []
        tags = headers.get('tags')
        ctx['tags'] = [t.strip() for t in tags.split(',')] if tags else []
        EVENTS.append(ctx)

    # Sort the posts by date, newest first
    today = datetime.today()
    EVENTS = sorted(EVENTS, key=lambda x: x['date_from'])
    EVENTS.reverse()

    # Materialize the filters so the results are lists on Python 3 too
    # (a bare filter object has no .reverse()).
    PAST_EVENTS = list(filter(lambda x: x['date_to'] < today, EVENTS))
    UPCOMING_EVENTS = list(filter(lambda x: x['date_to'] >= today, EVENTS))
    UPCOMING_EVENTS.reverse()
def preBuild(site):
    """Build the global event lists from every html page beneath DIR.

    Populates EVENTS (all events, newest first), PAST_EVENTS (already
    finished) and UPCOMING_EVENTS (still to come, soonest first).
    """
    global EVENTS
    global UPCOMING_EVENTS
    global PAST_EVENTS

    for page in site.pages():
        if not page.path.startswith(DIR):
            continue
        # Only html pages represent events
        if not page.path.endswith(".html"):
            continue

        # Split the raw page text into metadata headers and markdown body
        headers, body = parsePost(page.data())

        event_ctx = Context()
        event_ctx.update(headers)
        event_ctx["raw_body"] = body
        event_ctx["path"] = page.path
        event_ctx["url"] = page.absolute_final_url
        event_ctx["date_from"] = parseDate(headers.get("date_from"))
        event_ctx["date_to"] = parseDate(headers.get("date_to"))
        # Comma separated tags become a stripped list; missing/empty -> []
        raw_tags = headers.get("tags")
        event_ctx["tags"] = [t.strip() for t in raw_tags.split(",")] if raw_tags else []
        EVENTS.append(event_ctx)

    # Newest events first
    today = datetime.today()
    EVENTS = sorted(EVENTS, key=lambda e: e["date_from"])
    EVENTS.reverse()

    # Partition around today; upcoming events are flipped back to
    # chronological (soonest-first) order.
    PAST_EVENTS = [e for e in EVENTS if e["date_to"] < today]
    UPCOMING_EVENTS = [e for e in EVENTS if e["date_to"] >= today]
    UPCOMING_EVENTS.reverse()
def test_parsePost(self):
    """parsePost accepts only strings and returns a (headers, body) tuple."""
    # Each bad input needs its own assertRaises context: in the original,
    # both calls shared one `with` block, so the second call was dead code
    # (never executed once the first raised).
    with self.assertRaises(AssertionError):
        utils.parsePost({})
    with self.assertRaises(AssertionError):
        utils.parsePost(None)

    body = u"this is an article about testing"

    # The return value is always a tuple
    self.assertIsInstance(utils.parsePost(u"post"), type(()))

    # A blank line separates the headers from the body
    self.assertEqual(utils.parsePost(u"hello: world\n\n{}".format(body)),
                     ({"hello": "world"}, body))

    # Without the blank line, everything is consumed as headers
    self.assertEqual(utils.parsePost(u"hello: world\n{}".format(body)),
                     ({"hello": "world"}, ""))

    # A leading newline means there are no headers at all
    self.assertEqual(utils.parsePost(u"\n{}\nhello:world".format(body)),
                     ({}, body + "\nhello:world"))
def preBuild(site):
    """Build POSTS from every html page under DIR and derive the JSON feeds.

    Loads config.json from the site root, parses each page into a template
    Context, sorts the contexts newest first, links prev/next neighbours,
    and materializes NEWS_JSON / DEVELOPER_NEWS_JSON.
    """
    global POSTS
    global NEWS_JSON
    global DEVELOPER_NEWS_JSON
    global CONFIG

    # Context manager guarantees the config file handle is closed
    # (the original open() was never closed).
    conf = os.path.join(site.path, 'config.json')
    with open(conf, 'r') as fp:
        CONFIG = json.load(fp)

    # Build all the posts
    for page in site.pages():
        if not page.path.startswith(DIR):
            continue
        # Skip non html posts for obvious reasons
        if not page.path.endswith('.html'):
            continue

        # Parse headers and markdown body. parsePost expects the raw page
        # text (it asserts on non-string input), not the page object.
        headers, body = parsePost(page.data())

        # Build a context for each post
        postContext = Context()
        postContext.update(headers)
        postContext['raw_body'] = body
        postContext['path'] = page.path
        postContext['date'] = parseDate(headers.get('date') or headers.get('created'))
        postContext['url'] = page.absolute_final_url
        # Comma separated header values become stripped lists; absent -> []
        tags = headers.get('tags')
        postContext['tags'] = [t.strip() for t in tags.split(',')] if tags else []
        category = headers.get('category')
        postContext['category'] = [c.strip() for c in category.split(',')] if category else []
        POSTS.append(postContext)

    # Sort the posts by date, newest first
    POSTS = sorted(POSTS, key=lambda x: x['date'])
    POSTS.reverse()

    # Link neighbours: i+1 is the older (previous) post, i-1 the newer
    # (next). Direct bounds checks replace the O(n) `in xrange(...)`
    # membership tests that made the original loop quadratic.
    count = len(POSTS)
    for i in xrange(count):
        if i + 1 < count:
            POSTS[i]['prev_post'] = POSTS[i + 1]
        if i - 1 >= 0:
            POSTS[i]['next_post'] = POSTS[i - 1]

    NEWS_JSON = toDict(CONFIG, filterPosts(POSTS, 'news'))
    DEVELOPER_NEWS_JSON = toDict(CONFIG, filterPosts(POSTS, 'developernews'))
def create_contexts(self, pages):
    """Return a list of template Contexts, one per page in *pages*."""
    contexts = []
    for page in pages:
        # Headers (metadata) and markdown body come from the raw page text
        headers, body = parsePost(page.data())

        # Populate a context with the headers plus derived fields
        context = Context()
        context.update(headers)
        context[Collection.CONTEXT_RAW_KEY] = body
        context['path'] = page.path
        context['date'] = Collection.to_datetime(headers)
        context['url'] = page.absolute_final_url
        # Comma separated header fields are normalized to lists
        for key in ('tags', 'category', 'topics'):
            context[key] = Collection.to_list(headers, key)

        contexts.append(context)
    return contexts
def preBuild(site):
    """Build POSTS from every html page under DIR and derive the JSON feeds.

    Loads config.json from the site root, parses each page into a template
    Context, sorts the contexts newest first, links prev/next neighbours,
    and materializes NEWS_JSON / DEVELOPER_NEWS_JSON.
    """
    global POSTS
    global NEWS_JSON
    global DEVELOPER_NEWS_JSON
    global CONFIG

    # Context manager guarantees the config file handle is closed
    # (the original open() was never closed).
    conf = os.path.join(site.path, 'config.json')
    with open(conf, 'r') as fp:
        CONFIG = json.load(fp)

    # Build all the posts
    for page in site.pages():
        if not page.path.startswith(DIR):
            continue
        # Skip non html posts for obvious reasons
        if not page.path.endswith('.html'):
            continue

        # Parse headers and markdown body. parsePost expects the raw page
        # text (it asserts on non-string input), not the page object.
        headers, body = parsePost(page.data())

        # Build a context for each post
        postContext = Context()
        postContext.update(headers)
        postContext['raw_body'] = body
        postContext['path'] = page.path
        postContext['date'] = parseDate(headers.get('date') or headers.get('created'))
        postContext['url'] = page.absolute_final_url
        # Comma separated header values become stripped lists; absent -> []
        tags = headers.get('tags')
        postContext['tags'] = [t.strip() for t in tags.split(',')] if tags else []
        category = headers.get('category')
        postContext['category'] = [c.strip() for c in category.split(',')] if category else []
        POSTS.append(postContext)

    # Sort the posts by date, newest first
    POSTS = sorted(POSTS, key=lambda x: x['date'])
    POSTS.reverse()

    # Link neighbours: i+1 is the older (previous) post, i-1 the newer
    # (next). Direct bounds checks replace the O(n) `in xrange(...)`
    # membership tests that made the original loop quadratic.
    count = len(POSTS)
    for i in xrange(count):
        if i + 1 < count:
            POSTS[i]['prev_post'] = POSTS[i + 1]
        if i - 1 >= 0:
            POSTS[i]['next_post'] = POSTS[i - 1]

    NEWS_JSON = toDict(CONFIG, filterPosts(POSTS, 'news'))
    DEVELOPER_NEWS_JSON = toDict(CONFIG, filterPosts(POSTS, 'developernews'))
def test_parsePost(self):
    """parsePost accepts only strings and returns a (headers, body) tuple."""
    # Each bad input needs its own assertRaises context: in the original,
    # both calls shared one `with` block, so the second call was dead code
    # (never executed once the first raised).
    with self.assertRaises(AssertionError):
        utils.parsePost({})
    with self.assertRaises(AssertionError):
        utils.parsePost(None)

    body = u"this is an article about testing"

    # The return value is always a tuple
    self.assertIsInstance(utils.parsePost(u"post"), type(()))

    # A blank line separates the headers from the body
    self.assertEqual(
        utils.parsePost(u"hello: world\n\n{}".format(body)),
        ({"hello": "world"}, body))

    # Without the blank line, everything is consumed as headers
    self.assertEqual(
        utils.parsePost(u"hello: world\n{}".format(body)),
        ({"hello": "world"}, ''))

    # A leading newline means there are no headers at all
    self.assertEqual(
        utils.parsePost(u"\n{}\nhello:world".format(body)),
        ({}, body + '\nhello:world'))
def preBuild(site):
    """Collect every event page under DIR into the global EVENTS list,
    sorted by start date with the most recent event first."""
    global EVENTS

    for event in site.pages():
        if not event.path.startswith(DIR):
            continue
        # Skip non html posts for obvious reasons
        if not event.path.endswith('.html'):
            continue

        # Parse headers and markdown body. parsePost expects the raw page
        # text (it asserts on non-string input), not the page object.
        headers, body = parsePost(event.data())

        ctx = Context()
        ctx.update(headers)
        ctx['raw_body'] = body
        ctx['path'] = event.path
        ctx['url'] = event.absolute_final_url
        ctx['date_from'] = parseDate(headers.get('date_from'))
        ctx['date_to'] = parseDate(headers.get('date_to'))
        EVENTS.append(ctx)

    # Sort the posts by date, newest first
    EVENTS = sorted(EVENTS, key=lambda x: x['date_from'])
    EVENTS.reverse()