def get(self, slug):
    """Render the install page for the app identified by *slug*.

    Packaged apps link to the generated mini-manifest; hosted apps link
    straight to their own URL.
    """
    app = Entry.all().filter('slug =', slug).get()
    if app is None:
        # .get() returns None on a miss -- the original crashed with
        # AttributeError on app.packaged; answer 404 instead.
        return self.error(404)
    if app.packaged:
        self.render_template("install.html",
                             url='/minifest/%s' % slug,
                             packaged=True)
    else:
        self.render_template("install.html", url=app.url, packaged=False)
def get(self):
    """Emit the blog sitemap as XML via the sitemap template."""
    url_records = []

    def collect(loc, lastmod=None, changefreq=None, priority=None):
        # Accumulate one <url> record for the template.
        url_records.append({
            'location': loc,
            'lastmod': lastmod,
            'changefreq': changefreq,
            'priority': priority,
        })

    # Site root first.
    collect(g_blog.baseurl, changefreq='daily', priority=1)

    # Published entries, newest first, capped by the configured limit.
    published = Entry.all().filter('published =', True) \
        .order('-date').fetch(g_blog.sitemap_entries)
    for post in published:
        collect('%s/%s' % (g_blog.baseurl, post.link),
                post.date, 'daily', 0.9)

    if g_blog.sitemap_include_category:
        for cat in Category.all():
            collect('%s/category/%s' % (g_blog.baseurl, cat.slug),
                    None, 'weekly', 0.8)

    if g_blog.sitemap_include_tag:
        for tag in Tag.all():
            collect('%s/tag/%s' % (g_blog.baseurl, urlencode(tag.tag)),
                    None, 'weekly', 0.8)

    self.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
    self.render2('views/sitemap.xml', {'urlset': url_records})
def action_updatelink(self):
    """Re-generate every entry's permalink from the submitted format."""
    link_format = self.param('linkfmt')
    if not link_format:
        self.write('"Please input url format."')
        return
    link_format = link_format.strip()
    g_blog.link_format = link_format
    g_blog.save()
    for entry in Entry.all():
        vals = {
            'year': entry.date.year,
            'month': str(entry.date.month).zfill(2),
            'day': entry.date.day,
            'postname': entry.slug,
            'post_id': entry.post_id,
        }
        # Entries without a slug fall back to the query-string link form.
        if entry.slug:
            newlink = link_format % vals
        else:
            newlink = '?p=%(post_id)s' % vals
        # Only touch the datastore when the link actually changed.
        if entry.link != newlink:
            entry.link = newlink
            entry.put()
    self.write('"ok"')
def get(self, tags=None):
    """Dump entries, categories and tags through the WordPress export template."""
    # Served as a download rather than 'application/atom+xml'.
    self.response.headers['Content-Type'] = 'binary/octet-stream'
    self.render2('views/wordpress.xml', {
        'entries': Entry.all().order('-date'),
        'cates': Category.all(),
        'tags': Tag.all(),
    })
def action_updatecomments(self):
    """Recount each entry's comments and persist any corrected totals."""
    for entry in Entry.all():
        actual = entry.comments().count()
        if actual != entry.commentcount:
            entry.commentcount = actual
            entry.put()
    self.write('"ok"')
def get(self, tags=None):
    """Render the Atom feed of the 20 most recent published posts."""
    entries = Entry.all().filter('entrytype =', 'post') \
        .filter('published =', True).order('-date').fetch(20)
    # Default so the template always gets a defined value; the original
    # raised NameError ('last_updated' unbound) on a blog with no
    # published posts.
    last_updated = None
    if entries and entries[0]:
        last_updated = entries[0].date.strftime('%Y-%m-%dT%H:%M:%SZ')
    for e in entries:
        e.formatted_date = e.date.strftime('%Y-%m-%dT%H:%M:%SZ')
    self.response.headers['Content-Type'] = 'application/atom+xml'
    self.render2('views/atom.xml',
                 {'entries': entries, 'last_updated': last_updated})
def action_init_blog(self, slug=None):
    """Wipe all comments and entries and reset the blog's entry counter."""
    for com in Comment.all():
        com.delete()
    for entry in Entry.all():
        entry.delete()
    g_blog.entrycount = 0
    # Persist the reset counter -- other mutating actions (e.g.
    # action_updatelink) call g_blog.save() after changing g_blog; the
    # original left the zeroed count unsaved.
    g_blog.save()
    self.write('"Init has succeed."')
def get(self, slug=None, postid=None):
    """Show a single published post or page, found by post_id or permalink."""
    if postid:
        query = Entry.all().filter("published =", True) \
            .filter("post_id =", postid)
    else:
        slug = urldecode(slug)
        query = Entry.all().filter("published =", True) \
            .filter("link =", slug)
    entries = query.fetch(1)
    if not entries or len(entries) == 0:
        return self.error(404)
    entry = entries[0]
    comments = Comment.all().filter("entry =", entry)
    commentuser = ["", "", ""]
    # Posts and pages share the same template context; only the
    # template name differs.
    template = "single" if entry.entrytype == "post" else "page"
    self.render(template, {
        "entry": entry,
        "relateposts": entry.relateposts,
        "comments": comments,
        "user_name": commentuser[0],
        "user_email": commentuser[1],
        "user_url": commentuser[2],
        "checknum1": random.randint(1, 10),
        "checknum2": random.randint(1, 10),
    })
def get_recent_data(domain):
    """Return recent entries for *domain*, cached in memcache for an hour."""
    cache_key = '%s:recent' % domain
    cached = memcache.get(cache_key)
    if cached is not None:
        return pickle.loads(cached)
    # Cache miss: pull ~two weeks of data (336 samples), newest first.
    recent = list(Entry.all().filter('domain =', urls[domain])
                  .order('-time')
                  .run(limit=336))
    memcache.add(cache_key, pickle.dumps(recent), time=3600)
    return recent
def action_update_tags(self, slug=None):
    """Rebuild the Tag table from the tags on every post entry."""
    for tag in Tag.all():
        tag.delete()
    for entry in Entry.all().filter('entrytype =', 'post'):
        if entry.tags:
            for t in entry.tags:
                try:
                    logging.info('sss:' + t)
                    Tag.add(t)
                except Exception:
                    # Best-effort: keep the original log-and-continue
                    # behaviour, but the bare 'except:' also swallowed
                    # SystemExit/KeyboardInterrupt -- narrow it.
                    traceback.print_exc()
    self.write('"All tags for entry have been updated."')
def get(self, slug):
    """Serve a mini-manifest (app name, version, package path) built from
    the app's uploaded zip package.

    Each step of the pipeline (load entry, open blob, unzip, read and
    parse manifest.webapp, extract fields) reports its own JSON error.
    """
    self.response.headers.add_header(
        "Content-type", "application/x-web-app-manifest+json")
    # db .get() returns None on a miss rather than raising, so the
    # original try/except never fired and an unknown slug surfaced as
    # "Could not retrieve package." -- test for None explicitly.
    app = Entry.all().filter('slug =', slug).get()
    if app is None:
        self.response.write('{"error":"Not found."}')
        return
    try:
        blob_reader = blobstore.BlobReader(app.url)
    except Exception:
        self.response.write('{"error":"Could not retrieve package."}')
        return
    try:
        package = ZipFile(StringIO(blob_reader.read()))
    except Exception:
        self.response.write('{"error":"Could not retrieve package."}')
        return
    try:
        manifest = package.read("manifest.webapp")
    except Exception:
        self.response.write('{"error":"Could not open manifest."}')
        return
    try:
        unjsoned = json.loads(manifest)
    except Exception:
        self.response.write('{"error":"Could not parse manifest."}')
        return
    try:
        name = unjsoned["name"]
    except Exception:
        self.response.write('{"error":"Could not read app name."}')
        return
    try:
        version = unjsoned["version"]
    except Exception:
        self.response.write('{"error":"Could not read app version."}')
        return
    self.response.write(json.dumps({
        "name": name,
        "package_path": "/serve/%s" % app.url,
        "version": version,
    }))
def get(self, slug=None):
    """List entries carrying the given tag, paginated via Pager."""
    logging.info("browsing by tag %s" % slug)
    if not slug:
        self.error(404)
        return
    try:
        page_index = int(self.param('page'))
    except Exception:
        # Missing or non-numeric ?page= falls back to the first page
        # (was a bare 'except:', which also trapped SystemExit).
        page_index = 1
    # Removed a dead local 'import urllib' -- urldecode is used instead.
    slug = urldecode(slug)
    entries = Entry.all().filter('tags =', slug).order('-date')
    (entries, links) = Pager(query=entries).fetch(page_index)
    self.render('tag', {'entries': entries, 'tag': slug, 'pager': links})
def get(self, number):
    """Render archive page *number* of the blog, 10 entries per page.

    ``number`` arrives from the URL route as a string.  The template
    receives the next page number, or False when this is the last page.
    """
    number = int(number)  # route captures a string; work with an int
    offset = 10 * (number - 1)
    entries_count = Entry.all(keys_only=True).count()
    # Bug fix: the original compared the int count against the *string*
    # route capture, which in Python 2 is always False, so the
    # next-page link never appeared.
    if entries_count / 10 >= number:
        number = number + 1
    else:
        number = False
    entries = db.Query(Entry).filter('draft = ', False) \
        .order('-published').fetch(limit=10, offset=offset)
    # Render markdown bodies in place for the template.
    for entry in entries:
        entry.body = markdown.markdown(entry.body)
    path = os.path.join(template_dir, 'blog.html')
    data = {'entries': entries,
            'sitetitle': 'The Staydecent™ Web Design & Development Blog',
            'number': number}
    self.response.out.write(template.render(path, data))
def get(self, slug=None):
    """List entries in the category identified by *slug*, paginated."""
    if not slug:
        self.error(404)
        return
    try:
        page_index = int(self.param('page'))
    except Exception:
        # Missing or malformed ?page= falls back to page 1 (was a bare
        # 'except:', which also trapped SystemExit/KeyboardInterrupt).
        page_index = 1
    slug = urllib.unquote(slug).decode('utf8')
    cats = Category.all().filter('slug =', slug).fetch(1)
    if cats:
        entries = Entry.all().filter('categorie_keys =',
                                     cats[0].key()).order('-date')
        (entries, links) = Pager(query=entries).fetch(page_index)
        self.render('category',
                    {'entries': entries, 'category': cats[0],
                     'pager': links})
    else:
        # NOTE(review): 414 is "Request-URI Too Long"; 404 looks
        # intended for an unknown category -- confirm before changing.
        self.error(414, slug)
def initialize(self, request, response):
    # Populate the template context shared by every public page: menu
    # pages, blogroll links, the archive date range, all tags and the
    # five most recent comments.  Any failure is logged and the request
    # is answered with a 404.
    try:
        BaseRequestHandler.initialize(self, request, response)
        # Top-level published pages, in configured menu order.
        m_pages = (
            Entry.all()
            .filter("entrytype =", "page")
            .filter("published =", True)
            .filter("entry_parent =", 0)
            .order("menu_order")
        )
        blogroll = Link.all().filter("linktype =", "blogroll")
        # Oldest published post -> start of the archive date range.
        query = Entry.gql("WHERE entrytype = 'post' AND published = TRUE ORDER BY date")
        entries = query.fetch(1)
        start_date = end_date = None
        if entries:
            start_date = entries[0].date
        # Newest published post -> end of the archive date range.
        query = Entry.gql("WHERE entrytype = 'post' AND published = TRUE ORDER BY date DESC")
        entries = query.fetch(1)
        if entries:
            end_date = entries[0].date
        # end_year stays 0 when the blog has no published posts.
        end_year = 0
        if end_date:
            end_year = end_date.year
        self.template_vals.update(
            {
                "dates": self.build_dates(start_date, end_date),
                "end_year": end_year,
                "menu_pages": m_pages,
                "tags": Tag.all().order("tag"),
                "blogroll": blogroll,
                "recent_comments": Comment.all().order("-date").fetch(5),
            }
        )
        logging.info("base public page initialized")
    except:
        # Boundary handler: log the full traceback and serve a 404.
        logging.error(traceback.format_exc())
        return self.error(404)
def posts(self):
    """Return a query over all posts assigned to this category."""
    query = Entry.all().filter('entrytype =', 'post')
    return query.filter('categorie_keys =', self)
def wp_getPages(blogid, num):
    """XML-RPC: return up to min(num, 20) most recent pages as structs."""
    limit = min(num, 20)
    pages = Entry.all().filter('entrytype =', 'page') \
        .order('-date').fetch(limit)
    return [entry_struct(page) for page in pages]
def metaWeblog_getRecentPosts(blogid, num):
    """XML-RPC: return up to min(num, 20) most recent posts as structs."""
    logging.info("getting %s posts of %s" % (str(num), str(blogid)))
    limit = min(num, 20)
    recent = Entry.all().filter('entrytype =', 'post') \
        .order('-date').fetch(limit)
    return [entry_struct(post) for post in recent]