-
Notifications
You must be signed in to change notification settings - Fork 0
/
post_deploy.py
99 lines (82 loc) · 3.19 KB
/
post_deploy.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
# -*- coding:utf-8 -*-
import json
import datetime
import logging
from google.appengine.ext import deferred
import config
import models
import static
import utils
import generators
class PostRegenerator(object):
    """Walks all published blog posts in batches and enqueues deferred
    regeneration of every dependent resource (index pages, tag pages,
    archives, feeds, ...), deduplicating shared dependencies."""

    def __init__(self):
        # Set of (generator class name, dependency) pairs already
        # enqueued, so a dependency shared by many posts is only
        # regenerated once per run.
        self.seen = set()

    def regenerate(self, batch_size=50, start_ts=None):
        """Process one batch of posts, then defer the next batch.

        Args:
          batch_size: Number of posts fetched per task invocation.
          start_ts: Exclusive upper bound on the 'published' property;
            defaults to now, which also excludes drafts (their published
            time is set in the future).
        """
        # Newest-first ordering lets the continuation task resume from
        # the oldest post seen so far.
        q = models.BlogPost.all().order('-published')
        q.filter('published <', start_ts or datetime.datetime.now())
        posts = q.fetch(batch_size)
        for post in posts:
            # regenerate=True forces get_deps() to return every
            # dependency, not just stale ones.
            for generator_class, deps in post.get_deps(True):
                for dep in deps:
                    key = (generator_class.__name__, dep)
                    if key not in self.seen:
                        # Log each regenerated dependency for
                        # traceability.  NOTE: logging.warn is a
                        # deprecated alias; use logging.warning.
                        logging.warning(key)
                        self.seen.add(key)
                        deferred.defer(
                            generator_class.generate_resource, None, dep)
        if len(posts) == batch_size:
            # A full batch means there may be more posts; continue from
            # the oldest post processed in this batch.
            deferred.defer(self.regenerate, batch_size, posts[-1].published)
class PageRegenerator(object):
    """Walks all static pages in batches and enqueues deferred
    regeneration of each page's content."""

    def __init__(self):
        # Kept for interface parity with PostRegenerator; pages have no
        # shared dependencies, so nothing is ever added here.
        self.seen = set()

    def regenerate(self, batch_size=50, start_ts=None):
        """Process one batch of pages, then defer the next batch.

        Args:
          batch_size: Number of pages fetched per task invocation.
          start_ts: Exclusive upper bound on the 'created' property;
            defaults to datetime.max (i.e. all pages).
        """
        q = models.Page.all().order('-created')
        q.filter('created <', start_ts or datetime.datetime.max)
        pages = q.fetch(batch_size)
        for page in pages:
            deferred.defer(generators.PageContentGenerator.generate_resource,
                           page, None)
        if len(pages) == batch_size:
            # A full batch may mean more pages remain; continue from the
            # oldest page processed in this batch.
            deferred.defer(self.regenerate, batch_size, pages[-1].created)
# Zero-argument callables executed after each deployment; run in
# registration order by post_deploy().
post_deploy_tasks = []
def generate_static_pages(pages):
    """Return a zero-argument task that renders and stores static pages.

    Args:
      pages: Iterable of (path, template, indexed) triples.

    Returns:
      A callable suitable for post_deploy_tasks that renders each
      template and stores the result under its path.
    """
    def generate():
        for page_path, template_name, is_indexed in pages:
            html = utils.render_template(template_name)
            static.set(page_path, html, config.html_mime_type, is_indexed)
    return generate
# Register the static pages to (re)build on deploy.  The search page and
# custom-search XML entries are currently disabled; only robots.txt is
# generated.
post_deploy_tasks.append(generate_static_pages([
    # ('/search', 'search.html', True),
    # ('/cse.xml', 'cse.xml', False),
    ('/robots.txt', 'robots.txt', False),
]))
def regenerate_all():
    """Kick off a deferred regeneration pass over every blog post."""
    deferred.defer(PostRegenerator().regenerate)
# Regenerate all posts as part of every deployment.
post_deploy_tasks.append(regenerate_all)
def post_deploy():
    """Run every registered post-deployment task, in registration order."""
    for hook in post_deploy_tasks:
        hook()
def update_lastpost():
    """Publish a JSON summary of the most recent post to /lastpost.json.

    Queries the newest published blog post and stores a small JSON
    document (path, title, summary, tags, publication date) as a static
    resource.  Does nothing when no published post exists.
    """
    q = models.BlogPost.all().order('-published')
    # Drafts carry published == datetime.max; this inequality filters
    # them out.
    q.filter('published !=', datetime.datetime.max)
    post = q.get()
    if post is None:
        # No published posts yet -- avoid AttributeError on None below.
        return
    postobj = {
        'path': post.path,
        'title': post.title,
        # Rewrites relative URLs in the summary to absolute ones.
        'summary': utils.absolutify_url(post.summary),
        'tag_pairs': post.tag_pairs,
        'pubdate': post.published.strftime('%B %d, %Y'),
    }
    static.set('/lastpost.json', json.dumps(postobj, indent=4),
               'text/plain; charset=utf-8', indexed=False,
               last_modified=post.published_tz.replace(second=0,
                                                       microsecond=0))