def regenerate():
    """Invalidate every cached page so the next request rebuilds the site.

    Best-effort: removes the known page keys from memcache and deletes the
    matching ``StaticContent`` datastore entities.  Datastore failures are
    logged but never propagated, so the site keeps serving and the pages are
    simply rebuilt on their next request.
    """
    logging.info('regenerating site')
    contents = ['index.html', '2.html', '3.html', '4.html']
    memcache.delete_multi(contents)
    try:
        # get_by_key_name with a list returns a list that may contain None
        # for missing keys; db.delete can choke on that — hence the guard.
        db.delete(StaticContent.get_by_key_name(contents))
    except Exception as err:
        # Deliberate best-effort cleanup, but log instead of silently
        # discarding the error (the original `pass` hid real failures).
        logging.warning('regenerate: failed to delete static content: %s', err)
def scroll(page):
    """Serve the cached HTML for numbered page *page*, building it on miss.

    Lookup order: memcache, then the StaticContent datastore entity, then a
    fresh render of ``page.html`` which is persisted and cached for 12 hours.
    """
    path = str(page) + '.html'

    # Fast path: already in memcache.
    cached = memcache.get(path)
    if cached is not None:
        return _output(cached)

    # Fall back to the datastore, rendering and persisting on a full miss.
    cached = StaticContent.get_by_key_name(path)
    if cached is None:
        cached = StaticContent(key_name=path,
                               body=str(template('page.html')),
                               content_type='text/html')
        cached.put()

    memcache.set(path, cached, 43200)  # 12h TTL
    return _output(cached)
def scroll(page):
    """Serve the cached listing HTML for *page* (25 bottles per page).

    Lookup order: memcache, then the StaticContent datastore entity, then a
    fresh render of ``page.html`` over a GQL query, persisted and cached for
    12 hours.  Page 1 maps to OFFSET 1 — presumably the newest bottle lives
    on the index page; confirm against the index builder.
    """
    path = str(page) + ".html"

    # Fast path: memcache hit.
    rendered = memcache.get(path)
    if rendered is not None:
        return _output(rendered)

    # Datastore fallback; render and persist on a full miss.
    rendered = StaticContent.get_by_key_name(path)
    if rendered is None:
        bottles = Bottled.gql(
            "ORDER BY created DESC LIMIT 25 OFFSET " + str(page * 25 - 24)
        )
        rendered = StaticContent(
            key_name=path,
            body=str(template("page.html", bottles=bottles)),
            content_type="text/html",
        )
        rendered.put()

    memcache.set(path, rendered, 43200)  # 12h TTL
    return _output(rendered)
def get_content(path):
    """Return cached static content for *path*; only the index is built lazily.

    An empty path is treated as ``index.html``.  Lookup order is memcache,
    then the datastore; a missing ``index.html`` is rendered from the 25
    newest bottles and persisted.  Any other path missing from both stores
    falls through to a bare ``return`` (None) — NOTE(review): callers
    presumably treat None as not-found; confirm against the request handler.
    """
    if path == "":
        path = "index.html"

    # Fast path: memcache hit.
    content = memcache.get(path)
    if content is not None:
        return _output(content)

    content = StaticContent.get_by_key_name(path)
    if content is None:
        if path != "index.html":
            # Nothing stored and only the index can be built on demand.
            return
        bottles = Bottled.gql("ORDER BY created DESC LIMIT 25")
        content = StaticContent(
            key_name=path,
            body=str(template("index.html", bottles=bottles)),
            content_type="text/html",
        )
        content.put()

    memcache.set(path, content, 43200)  # 12h TTL
    return _output(content)
def pages(thread):
    """Generator: discover and yield the not-yet-crawled page URLs of a forum
    thread, updating (or creating) the thread's ``Centipede`` entity and its
    rendered static content as a side effect.

    *thread* is a dict with at least 'url', 'author', 'title', 'comments' and
    'views' keys (presumably produced by an upstream scraper — confirm at the
    caller).  Yields each URL in ``urls`` before the entity is persisted.
    """
    centipede = Centipede.get_by_key_name(thread['url'])
    d = {}          # page-URL -> 1; used as an ordered-ish set of scraped links
    urls = []
    logging.info(thread['url'])
    centipede_url_components = urlparse.urlparse(thread['url'])
    # netloc + path uniquely keys the thread's static page (scheme dropped).
    centipede_url_netloc_path = centipede_url_components.netloc + centipede_url_components.path
    # Scrape the pagination bar; the last link is dropped ([:-1]) —
    # presumably a "next page" control rather than a numbered page.
    for page in scrapemark.scrape("""
        <div class="pages" id="pageDivTop">
        {*
        <a href="{{ [pages] }}"></a>
        *}
        <span></span>
        </div>
        """, url=thread['url'])['pages'][:-1]:
        d[page] = 1
    if centipede is None:
        # First time we see this thread: create the entity and a QR code
        # image pointing back at our mirrored copy of it.
        centipede = Centipede(key_name=thread['url'],
                              species=db.Category(u'天涯经济'),
                              author=thread['author'],
                              title=thread['title'],
                              comments=thread['comments'],
                              views=thread['views'],
                              pedes=[])
        urls = [db.Link(thread['url'])]
        # NOTE(review): d.keys() ordering is arbitrary in Python 2, so the
        # [:-2] / [-2] slices below depend on insertion-order behavior that
        # plain dicts do not guarantee — verify this works as intended.
        urls.extend([db.Link(key) for key in d.keys()[:-2]])
        # QR key: same netloc/path but with a .png extension.
        qr_key = centipede_url_components.netloc + '.'.join([centipede_url_components.path.split('.')[0], 'png'])
        img = urlfetch.fetch('http://chart.apis.google.com/chart?cht=qr&chs=200x200&chl=' + urllib2.quote(host_url + centipede_url_netloc_path))
        qr_content = StaticContent(key_name=qr_key,
                                   body=img.content,
                                   content_type='image/png')
        qr_content.put()
    else:
        # Resume from the recorded "next" page; skip pages already crawled.
        urls = [db.Link(centipede.next)]
        urls.extend([db.Link(url) for url in d.keys()[:-2] if url not in centipede.pedes])
    logging.info(urls)
    centipede.pedes.extend(urls)
    # Second-to-last scraped link becomes the resume point for the next run.
    centipede.next = db.Link(d.keys()[-2])
    # Hand each new URL to the caller before persisting the updated entity.
    for url in urls:
        yield url
    centipede.put()
    content = StaticContent.get_by_key_name(centipede_url_netloc_path)
    stanzas = [stanza for stanza in new_stanzas(thread, centipede)]
    if content is None:
        # First render: the 'template' field holds partially-expanded HTML
        # (template_next=True presumably leaves a hook for later appends).
        content = StaticContent(key_name=centipede_url_netloc_path,
                                template=db.Text(template('centipede.html',
                                                          centipede=centipede,
                                                          stanzas=stanzas,
                                                          template_next=True)),
                                content_type='text/html')
    else:
        # Re-expand the stored partial template with the new stanzas.
        content.template = db.Text(template(content.template,
                                            centipede=centipede,
                                            stanzas=stanzas,
                                            template_next=True))
    # db.put(stanzas)
    content.put()
    # Drop any stale cached copy so readers see the updated page.
    memcache.delete(content.key().name())
def get_content(path):
    """Return cached static content for *path* (empty path means the index).

    On a memcache miss the datastore is consulted; ``index.html`` is then
    always re-rendered from the 25 most recently modified Centipedes (the
    freshly fetched entity is discarded — presumably intentional so the
    index stays current; confirm).  Other paths 404 when absent, otherwise
    their stored partial 'template' is expanded into the final body.
    """
    if path == '':
        path = 'index.html'
    # memcache.delete(path)
    content = memcache.get(path)
    if content is None:
        content = StaticContent.get_by_key_name(path)
        if path == 'index.html':
            # Rebuild the index from the newest entities on every cache miss.
            centipedes = Centipede.gql("ORDER BY modified DESC LIMIT 25")
            content = StaticContent(key_name=path,
                                    body=db.Text(template('index.html', centipedes=centipedes)).encode('utf8'),
                                    content_type='text/html')
            content.put()
        else:
            if content is None:
                # Unknown path: bottle-style 404 (HTTPError is returned, not
                # raised — NOTE(review): confirm the framework handles this).
                return HTTPError(404, "Page not found")
            else:
                if content.template:
                    # Expand the stored partial template into the served body.
                    # NOTE(review): content.template is overwritten with the
                    # expanded result, so expansion appears to be one-way.
                    content.template = db.Text(template(content.template, template_next=False, stanzas=[]))
                    content.body = content.template.encode('utf8')
        # memcache.set(path, content, 10800)
    return _output(content)