def GET(self, tags=None):
    """Export entries as a WordPress-format XML attachment.

    With the 'all' parameter set, exports every entry; otherwise exports the
    entries between the 'date_begin' and 'date_end' parameters (YYYY-MM-DD).
    Renders a 404 page when the date range is missing or malformed.
    """
    try:
        export_all = self.param('all')  # renamed: 'all' shadowed the builtin
    except Exception:
        export_all = False
    if export_all:
        entries = Entry.all().order('-date')
        filename = 'micolog.%s.xml' % datetime.now().strftime('%Y-%m-%d')
    else:
        str_date_begin = self.param('date_begin')
        str_date_end = self.param('date_end')
        try:
            date_begin = datetime.strptime(str_date_begin, "%Y-%m-%d")
            date_end = datetime.strptime(str_date_end, "%Y-%m-%d")
            entries = Entry.all().filter('date >=', date_begin).filter('date <', date_end).order('-date')
            filename = 'micolog.%s.%s.xml' % (str(str_date_begin), str(str_date_end))
        except Exception:
            # BUG FIX: the original fell through after rendering the 404 page
            # and crashed on the undefined 'entries'/'filename' below.
            self.render2('views/admin/404.html')
            return
    cates = Category.all()
    tags = Tag.all()
    # Force a download rather than inline display of the XML.
    self.response.headers['Content-Type'] = 'binary/octet-stream'  # 'application/atom+xml'
    self.response.headers['Content-Disposition'] = 'attachment; filename=%s' % filename
    self.render2('views/wordpress.xml', {'entries': entries, 'cates': cates, 'tags': tags})
def get(self):
    """Render the admin object-list view for the requested item type.

    The 'item' request parameter selects which model to list ('entry' or
    'country'); anything else falls back to entries with item_type cleared.
    """
    item_type = self.request.get('item')
    model_by_type = {"entry": Entry, "country": Country}
    model = model_by_type.get(item_type)
    if model is None:
        item_type = None
        model = Entry
    object_list = model.all().order('-publish_date')

    # Offer a login or logout link depending on the current session.
    if users.get_current_user():
        url = users.create_logout_url(self.request.uri)
        url_linktext = 'Logout'
    else:
        url = users.create_login_url(self.request.uri)
        url_linktext = 'Login'

    template_values = {
        'object_list': object_list,
        'item_type': item_type,
        'url': url,
        'url_linktext': url_linktext,
    }
    path = os.path.join(os.path.dirname(__file__), 'templates/admin/view.html')
    self.response.out.write(template.render(path, template_values))
def GET(self, slug=None, postid=None):
    """Render a single published post or page, looked up by id or slug.

    Increments the entry's read counter, advertises the pingback endpoint
    when enabled, and renders the 'single' or 'page' template with the
    requested comment page (the 'mp' parameter).
    """
    if postid:
        postid = int(postid)
        entries = Entry.all().filter(published = True).filter(post_id = postid)[0:1]
    else:
        slug = urldecode(slug)
        entries = Entry.all().filter(published = True).filter(link = slug)[0:1]
    if not entries or len(entries) == 0:
        self.error(404)
        return
    mp = self.paramint("mp", 1)
    entry = entries[0]
    if entry.is_external_page:
        # BUG FIX: stop after redirecting; the original kept executing and
        # rendered the entry on top of the redirect.
        self.redirect(entry.external_page_address, True)
        return
    if self.blog.allow_pingback and entry.allow_trackback:
        self.response.headers['X-Pingback'] = "%s/rpc" % str(self.blog.baseurl)
    entry.readtimes += 1
    entry.put()
    self.entry = entry
    comments = entry.get_comments_by_page(mp, self.blog.comments_per_page)
    # BUG FIX: the cookie-based commenter memory was commented out but the
    # template variables below still referenced it (NameError on every view).
    # Default to blanks; re-enable the 'comment_user' cookie read if the
    # remembered-commenter feature is wanted again.
    commentuser = ['', '', '']
    comments_nav = self.get_comments_nav(mp, entry.purecomments().count())
    template = 'single' if entry.entrytype == 'post' else 'page'
    self.render(template, {
        'entry': entry,
        'relateposts': entry.relateposts,
        'comments': comments,
        'user_name': commentuser[0],
        'user_email': commentuser[1],
        'user_url': commentuser[2],
        'checknum1': random.randint(1, 10),
        'checknum2': random.randint(1, 10),
        'comments_nav': comments_nav,
    })
def doget(self, page):
    """Render one page of the post index, or a 404 for out-of-range pages."""
    page = int(page)
    per_page = self.blog.posts_per_page
    entrycount = Entry.postscount()
    # Ceiling division: a partial final page still counts as a page.
    full_pages, leftover = divmod(entrycount, per_page)
    max_page = full_pages + (1 if leftover else 0)
    if page < 1 or page > max_page:
        return self.error(404)

    start = (page - 1) * per_page
    entries = Entry.all().filter(entrytype = 'post') \
        .filter(published = True).order_by('-date')[start:start + per_page]

    # Navigation links: hidden on the first/last page or when nothing matched.
    show_prev = entries and page != 1
    show_next = entries and page != max_page
    self.render('index', {
        'entries': entries,
        'show_prev': show_prev,
        'show_next': show_next,
        'pageindex': page,
        'ishome': True,
        'pagecount': max_page,
        'postscounts': entrycount,
    })
def action_updatecomments(self):
    """Recount the stored comments of every entry and persist changed counts."""
    for entry in Entry.all():
        cnt = entry.comments().count()
        # '!=' replaces the legacy '<>' operator (removed in Python 3).
        if cnt != entry.commentcount:
            entry.commentcount = cnt
            entry.put()
    self.write(_('"All comments updated"'))
def GET(self, tags=None):
    """Build the sitemap: home page, recent entries, and (optionally)
    category and tag archive pages, rendered as XML."""
    urls = []

    def addurl(loc, lastmod=None, changefreq=None, priority=None):
        # Collect one <url> record for the sitemap template.
        urls.append({
            'location': loc,
            'lastmod': lastmod,
            'changefreq': changefreq,
            'priority': priority,
        })

    addurl(self.blog.baseurl, changefreq='daily', priority=0.9)

    entries = Entry.all().filter(published = True).order_by('-date')[0:self.blog.sitemap_entries]
    for item in entries:
        addurl("%s/%s" % (self.blog.baseurl, item.link),
               item.mod_date or item.date, 'never', 0.6)

    if self.blog.sitemap_include_category:
        for cat in Category.all():
            addurl("%s/category/%s" % (self.blog.baseurl, cat.slug), None, 'weekly', 0.5)

    if self.blog.sitemap_include_tag:
        for tag in Tag.all():
            addurl("%s/tag/%s" % (self.blog.baseurl, urlencode(tag.tag)), None, 'weekly', 0.5)

    self.render('/admin/views/sitemap.xml', {'urlset': urls}, content_type='text/xml')
def GET(self, tags=None):
    """Render the RSS 2.0 feed of the ten newest published posts."""
    rfc822 = "%a, %d %b %Y %H:%M:%S +0000"  # RFC 822 dates, as RSS requires
    entries = Entry.all().filter(entrytype = 'post').filter(published = True).order_by('-date')[0:10]
    # BUG FIX: 'last_updated' was undefined (NameError at render) when the
    # blog had no published posts; default to an empty string.
    last_updated = ''
    if entries and entries[0]:
        last_updated = entries[0].date.strftime(rfc822)
    for e in entries:
        e.formatted_date = e.date.strftime(rfc822)
    self.response.headers['Content-Type'] = 'application/rss+xml; charset=utf-8'
    self.render2('/admin/views/rss.xml', {'entries': entries, 'last_updated': last_updated})
def action_init_blog(self, slug=None):
    """Reset the blog: delete every comment and entry, zero the entry count."""
    for comment in Comment.all():
        comment.delete()
    for entry in Entry.all():
        entry.delete()
    self.blog.entrycount = 0
    self.blog.save()
    self.write(_('"Init has succeed."'))
def clean_gae():
    """Bulk-delete stored data: the first 1000 each of Entry, Country, Tag."""
    for model in (Entry, Country, Tag):
        batch = model.all().fetch(1000)
        db.delete(batch)
def GET(self):
    """Render the public index with the first page of published posts,
    rewriting each entry link to be rooted at the current app."""
    params = {}
    page = 1
    per_page = self.blog.posts_per_page
    entries = Entry.all().filter('entrytype =', 'post') \
        .filter("published =", True).order('-date') \
        .fetch(per_page, offset=(page - 1) * per_page)
    app_prefix = self.app_context.current_app
    for entry in entries:
        entry.link = '%s/%s' % (app_prefix, entry.link)
    params['entries'] = entries
    return self.render('index.html', params)
def clean_gae():
    """Wipe the datastore models: up to 1000 each of Entry, Country, Tag."""
    entry_batch = Entry.all().fetch(1000)
    db.delete(entry_batch)
    country_batch = Country.all().fetch(1000)
    db.delete(country_batch)
    tag_batch = Tag.all().fetch(1000)
    db.delete(tag_batch)
def action_update_tags(self, slug=None):
    """Rebuild the Tag table from the tag lists recorded on every post."""
    for tag in Tag.all():
        tag.delete()
    posts = Entry.all().filter('entrytype =', 'post')
    for entry in posts:
        if not entry.tags:
            continue
        for name in entry.tags:
            # Best-effort: a failure on one tag must not abort the rebuild.
            try:
                Tag.add(name)
            except:
                traceback.print_exc()
    self.write(_('"All tags for entry have been updated."'))
def GET(self, slug=None):
    """List published entries tagged with *slug*, paginated 20 per page."""
    if not slug:
        self.error(404)
        return
    # Missing or non-numeric 'page' parameter falls back to page 1.
    try:
        page_index = int(self.param('page'))
    except Exception:
        page_index = 1
    # NOTE: removed an unused function-scope 'import urllib'.
    slug = urldecode(slug)
    entries = Entry.all().filter(published = True).filter(tags = slug).order_by("-date")
    entries, links = Pager(query=entries, items_per_page=20).fetch(page_index)
    self.render('tag', {'entries': entries, 'tag': slug, 'pager': links})
def initialize(self, request):
    """Populate the template context shared by every page: the page menu
    plus sidebar widgets (categories, blogroll, archives, tags, comments)."""
    menu_pages = (Entry.all()
                  .filter('entrytype =', 'page')
                  .filter('published =', True)
                  .filter('entry_parent =', 0)
                  .order('menu_order'))
    self.template_vals.update({
        'menu_pages': menu_pages,
        'categories': Category.all(),
        'blogroll': Link.all().filter('linktype =', 'blogroll'),
        'archives': Archive.all().order('-year').order('-month').fetch(12),
        'alltags': Tag.all(),
        'recent_comments': Comment.all().order('-date').fetch(5),
    })
def GET(self, slug=None):
    """Show one page of published entries in the category named *slug*."""
    if not slug:
        self.error(404)
        return
    try:
        page_index = int(self.param('page'))
    except:
        page_index = 1
    slug = urldecode(slug)
    cats = Category.all().filter(slug = slug)[0:1]
    if not cats:
        self.error(404, slug)
        return
    category = cats[0]
    entries = Entry.all().filter(published = True) \
        .filter(categorie_keys = category.key()).order_by("-date")
    entries, links = Pager(query=entries, items_per_page=20).fetch(page_index)
    self.render('category', {'entries': entries, 'category': category, 'pager': links})
def initialize(self, request):
    """Set up the shared template context on top of BaseRequestHandler state:
    page menu plus sidebar widgets."""
    BaseRequestHandler.initialize(self, request)
    menu_pages = (Entry.all()
                  .filter(entrytype = 'page')
                  .filter(published = True)
                  .filter(entry_parent = 0)
                  .order_by('menu_order'))
    sidebar = {
        'menu_pages': menu_pages,
        'categories': Category.all(),
        'blogroll': Link.all().filter(linktype = 'blogroll'),
        'archives': Archive.all().order_by('-year', '-month')[0:12],
        'alltags': Tag.all(),
        'recent_comments': Comment.all().order_by('-date')[0:5],
    }
    self.template_vals.update(sidebar)
def GET(self, slug='post'):
    """Admin listing of entries of one type ('post' or 'page'), 15 per page."""
    try:
        page_index = int(self.param('page'))
    except:
        page_index = 1
    query = Entry.all().filter('entrytype =', slug).order('-date')
    entries, links = Pager(query=query, items_per_page=15).fetch(page_index)
    context = {
        'current': slug + 's',
        'entries': entries,
        'pager': links,
    }
    self.render2('views/admin/' + slug + 's.html', context)
def action_updatelink(self):
    """Re-apply the permalink format from the 'linkfmt' parameter to every
    entry, saving only the entries whose link actually changed."""
    link_format = self.param('linkfmt')
    if not link_format:
        self.write(_('"Please input url format."'))
        return
    link_format = link_format.strip()
    self.blog.link_format = link_format
    self.blog.save()
    for entry in Entry.all():
        vals = {
            'year': entry.date.year,
            'month': str(entry.date.month).zfill(2),
            'day': entry.date.day,
            'postname': entry.slug,
            'post_id': entry.post_id,
        }
        # Entries without a slug fall back to the blog's default format.
        if entry.slug:
            newlink = link_format % vals
        else:
            newlink = self.blog.default_link_format % vals
        # '!=' replaces the legacy '<>' operator (removed in Python 3).
        if entry.link != newlink:
            entry.link = newlink
            entry.put()
    self.write(_('"Link formated succeed"'))
def action_update_archives(self, slug=None):
    """Rebuild the monthly Archive records by counting every post's month."""
    for archive in Archive.all():
        archive.delete()
    entries = Entry.all().filter('entrytype =', 'post')
    archives = {}
    for entry in entries:
        my = entry.date.strftime('%B %Y')  # e.g. "September 2008" (dict key)
        sy = entry.date.strftime('%Y')     # e.g. "2008"
        sm = entry.date.strftime('%m')     # e.g. "09"
        # 'in' replaces dict.has_key(), which was removed in Python 3.
        if my in archives:
            archives[my].entrycount += 1
        else:
            archives[my] = Archive(monthyear=my, year=sy, month=sm, entrycount=1)
    for ar in archives.values():
        ar.put()
    self.write(_('"All entries have been updated."'))
def sticky_entrys(self):
    """Return a query for published, sticky posts, newest first."""
    query = Entry.all()
    query = query.filter('entrytype =', 'post')
    query = query.filter('published =', True)
    query = query.filter('sticky =', True)
    return query.order('-date')
def POST(self, slug=None, postid=None):
    """Handle an incoming trackback ping for the entry named by slug/postid.

    Validates that trackbacks are enabled, that the target entry exists and
    that the 'code' query parameter matches the entry's datastore key, then
    stores the ping as a Comment. Responds with the standard trackback XML
    envelope: <error>0</error> on success, <error>1</error> plus a message
    on rejection.
    """
    error = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<error>1</error>
<message>%s</message>
</response>
'''
    success = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<error>0</error>
</response>
'''
    if not self.blog.allow_trackback:
        self.response.out.write(error % "Trackback denied.")
        return
    self.response.headers['Content-Type'] = "text/xml"
    if postid:
        entries = Entry.all().filter(published = True).filter(post_id = postid)[0:1]
    else:
        slug = urldecode(slug)
        entries = Entry.all().filter(published = True).filter(link = slug)[0:1]
    if not entries or len(entries) == 0:
        self.response.out.write(error % "empty slug/postid")
        return
    entry = entries[0]
    logging.info(self.request.remote_addr + self.request.path + " " + entry.trackbackurl)
    # Anti-spam check: the 'code' query parameter must equal the entry's
    # datastore key, and the entry must accept trackbacks.
    from urlparse import urlparse
    query_string = urlparse(self.request.uri)[4]
    params = cgi.parse_qs(query_string)
    code = query_string
    # 'in' replaces dict.has_key(), which was removed in Python 3.
    if 'code' in params:
        code = params['code'][0]
    if (not str(entry.key()) == code) or entry.is_external_page or not entry.allow_trackback:
        self.response.out.write(error % "Invalid trackback url.")
        return
    coming_url = self.param('url')
    blog_name = myfilter.do_filter(self.param('blog_name'))
    excerpt = myfilter.do_filter(self.param('excerpt'))
    title = myfilter.do_filter(self.param('title'))
    if not coming_url or not blog_name or not excerpt or not title:
        self.response.out.write(error % "not enough post info")
        return
    import time
    # Wait half a second in case the other side has not published yet.
    time.sleep(0.5)
    # (A non-standard urlfetch check of the pinging page was disabled here;
    # it verified the coming_url actually linked back to this entry.)
    comment = Comment.all().filter(entry = entry).filter(weburl = coming_url).get()
    if comment:
        self.response.out.write(error % "has pinged before")
        return
    comment = Comment(author=blog_name,
                      content="...<strong>" + title[:250] + "</strong> " + excerpt[:250] + '...',
                      weburl=coming_url,
                      entry=entry)
    comment.ip = self.request.remote_addr
    comment.ctype = COMMENT_TRACKBACK
    try:
        comment.save()
        memcache.delete("/" + entry.link)
        self.write(success)
        self.blog.tigger_action("pingback_post", comment)
    except Exception:
        self.response.out.write(error % "unknow error")
def home(request):
    """Render the home page with upcoming events and the five newest entries."""
    query = Entry.all()
    query.order('-published')
    # BUG FIX: fetch() returns the result list; the original discarded it and
    # passed the unrestricted query object to the template instead of 5 items.
    entries = query.fetch(limit=5)
    events = fetch_events()
    return render_to_response('home.html', {'events': events[0:5], 'entries': entries})
def action_updatecommentno(self):
    """Refresh the cached comment number on every entry."""
    entries = Entry.all()
    for entry in entries:
        entry.update_commentno()
    self.write(_('"All comments number Updates."'))
def sticky_entrys(self):
    """Query for sticky, published posts, ordered newest-first."""
    query = Entry.all().filter(entrytype = 'post')
    query = query.filter(published = True).filter(sticky = True)
    return query.order_by('-date')
def items(self):
    """Return every Entry in the datastore."""
    entries = Entry.all()
    return entries
def archive_index(request):
    """Render the archive index page.

    NOTE(review): the original built, ordered, and fetched an Entry query
    whose result was discarded — the template context that would have used
    it is commented out below. That dead datastore work is removed here;
    restore the query when the context is re-enabled.
    """
    return render_to_response('base.html')  # , {'entries': entries, 'events': events})
def show_entry(request, key):
    """Detail view for a single Entry identified by *key*."""
    queryset = Entry.all()
    return object_detail(request, queryset, key)