def action_pingback_ping(self):
    """Try to notify the server behind `target_uri` that `source_uri`
    points to `target_uri`. If that fails, a `PingbackError` is raised.
    """
    source_uri = self.param('source')
    target_uri = self.param('target')
    try:
        # fetch_result() wraps urlfetch.fetch(target_uri) and retries up to 5 times
        response = fetch_result(target_uri)
    except:
        raise base.PingbackError(32)
    try:
        # the target may advertise its pingback endpoint in a response header...
        pingback_uri = response.headers['X-Pingback']
    except KeyError:
        # ...or in a <link rel="pingback"> tag in the page body
        _pingback_re = re.compile(r'(?i)<link rel="pingback" href="([^"]+)" ?/?>')
        match = _pingback_re.search(response.content)
        if match is None:
            raise base.PingbackError(33)
        pingback_uri = base.urldecode(match.group(1))
    rpc = xmlrpclib.ServerProxy(pingback_uri)
    try:
        return rpc.pingback.ping(source_uri, target_uri)
    except Fault, e:
        raise base.PingbackError(e.faultCode)
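# The fetch_result() helper used above is not shown in this section. A minimal
# sketch, assuming it simply wraps google.appengine.api.urlfetch.fetch() and
# retries up to 5 times on transient download errors (the helper's signature,
# retry count, and behavior are assumptions based on the comment above, not
# the original implementation):
def fetch_result(target_uri, retries=5):
    from google.appengine.api import urlfetch
    last_error = None
    for _ in range(retries):
        try:
            return urlfetch.fetch(target_uri)
        except urlfetch.DownloadError, e:
            last_error = e  # transient fetch failure, try again
    raise last_error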
def GET(self, slug=None, postid=None):
    if postid:
        postid = int(postid)
        entries = Entry.all().filter(published=True).filter(post_id=postid)[0:1]
    else:
        slug = urldecode(slug)
        entries = Entry.all().filter(published=True).filter(link=slug)[0:1]
    if not entries or len(entries) == 0:
        self.error(404)
        return
    mp = self.paramint("mp", 1)
    entry = entries[0]
    if entry.is_external_page:
        self.redirect(entry.external_page_address, True)
    if self.blog.allow_pingback and entry.allow_trackback:
        self.response.headers['X-Pingback'] = "%s/rpc" % str(self.blog.baseurl)
    entry.readtimes += 1
    entry.put()
    self.entry = entry
    comments = entry.get_comments_by_page(mp, self.blog.comments_per_page)
    # restore remembered commenter info from the cookie, fall back to blanks
    commentuser = self.request.cookies.get('comment_user', '')
    if commentuser:
        commentuser = commentuser.split('#@#')
    else:
        commentuser = ['', '', '']
    comments_nav = self.get_comments_nav(mp, entry.purecomments().count())
    template = 'single' if entry.entrytype == 'post' else 'page'
    self.render(template, {
        'entry': entry,
        'relateposts': entry.relateposts,
        'comments': comments,
        'user_name': commentuser[0],
        'user_email': commentuser[1],
        'user_url': commentuser[2],
        'checknum1': random.randint(1, 10),
        'checknum2': random.randint(1, 10),
        'comments_nav': comments_nav,
    })
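# get_comments_nav() is used above but not defined in this section. A minimal
# sketch of what such a helper could look like, assuming it is a method on the
# same handler and builds a simple prev/current/next structure from the page
# number and total comment count (the return shape is an illustrative
# assumption, not the original implementation):
def get_comments_nav(self, current_page, total_comments):
    per_page = self.blog.comments_per_page
    last_page = max(1, (total_comments + per_page - 1) // per_page)
    return {
        'current': current_page,
        'prev': current_page - 1 if current_page > 1 else None,
        'next': current_page + 1 if current_page < last_page else None,
    }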
def GET(self, slug=None):
    if not slug:
        self.error(404)
        return
    try:
        page_index = int(self.param('page'))
    except:
        page_index = 1
    slug = urldecode(slug)
    entries = Entry.all().filter(published=True).filter(tags=slug).order_by("-date")
    entries, links = Pager(query=entries, items_per_page=20).fetch(page_index)
    self.render('tag', {'entries': entries, 'tag': slug, 'pager': links})
def GET(self, slug=None):
    if not slug:
        self.error(404)
        return
    try:
        page_index = int(self.param('page'))
    except:
        page_index = 1
    slug = urldecode(slug)
    cats = Category.all().filter(slug=slug)[0:1]
    if cats:
        entries = Entry.all().filter(published=True).filter(
            categorie_keys=cats[0].key()).order_by("-date")
        entries, links = Pager(query=entries, items_per_page=20).fetch(page_index)
        self.render('category', {'entries': entries, 'category': cats[0], 'pager': links})
    else:
        self.error(404, slug)
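# Pager is used by the tag and category handlers above but not shown in this
# section. A minimal sketch, assuming it slices the query for the requested
# page and returns that page's items plus simple prev/next page numbers (the
# fetch(limit, offset=...) call and the 'links' shape are assumptions made for
# illustration, not the original implementation):
class Pager(object):
    def __init__(self, query, items_per_page=20):
        self.query = query
        self.items_per_page = items_per_page

    def fetch(self, page_index):
        offset = (page_index - 1) * self.items_per_page
        # fetch one extra item to detect whether a next page exists
        items = self.query.fetch(self.items_per_page + 1, offset=offset)
        links = {
            'prev': page_index - 1 if page_index > 1 else None,
            'next': page_index + 1 if len(items) > self.items_per_page else None,
        }
        return items[:self.items_per_page], links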
def get(self, slug=None, postid=None):
    if postid:
        # cast to int so the datastore filter matches the stored integer post_id
        postid = int(postid)
        entries = Entry.all().filter("published =", True).filter("post_id =", postid).fetch(1)
    else:
        slug = urldecode(slug)
        entries = Entry.all().filter("published =", True).filter("link =", slug).fetch(1)
    if not entries or len(entries) == 0:
        return self.error(404)
    entry = entries[0]
    comments = Comment.all().filter("entry =", entry)
    commentuser = ["", "", ""]
    template = "single" if entry.entrytype == "post" else "page"
    self.render(template, {
        "entry": entry,
        "relateposts": entry.relateposts,
        "comments": comments,
        "user_name": commentuser[0],
        "user_email": commentuser[1],
        "user_url": commentuser[2],
        "checknum1": random.randint(1, 10),
        "checknum2": random.randint(1, 10),
    })
def post(self):
    if not self.is_login:
        self.redirect(users.create_login_url(self.request.uri))
    try:
        # pending import items are queued in memcache under "imt";
        # pop one item per request and write the rest back
        imt = memcache.get("imt")
        import_data = imt.pop()
        memcache.set('imt', imt)
        if import_data:
            try:
                if import_data[0] == 'cat':
                    _cat = import_data[1]
                    nicename = _cat['slug']
                    cat = Category.get_by_key_name(nicename)
                    if not cat:
                        cat = Category(key_name=nicename)
                    cat.name = _cat['name']
                    cat.slug = nicename
                    cat.put()
                elif import_data[0] == 'entry':
                    _entry = import_data[1]
                    logging.debug('importing:' + _entry['title'])
                    hashkey = str(hash(_entry['title']))
                    entry = Entry.get_by_key_name(hashkey)
                    if not entry:
                        entry = Entry(key_name=hashkey)
                    entry.title = _entry['title']
                    entry.author = self.login_user
                    entry.is_wp = True
                    # pubDate may be RFC 822 style or "YYYY-MM-DD HH:MM:SS"
                    try:
                        entry.date = datetime.strptime(
                            _entry['pubDate'][:-6], "%a, %d %b %Y %H:%M:%S")
                    except:
                        try:
                            entry.date = datetime.strptime(
                                _entry['pubDate'][0:19], "%Y-%m-%d %H:%M:%S")
                        except:
                            entry.date = datetime.now()
                    entry.entrytype = _entry['post_type']
                    entry.content = _entry['content']
                    entry.excerpt = _entry['excerpt']
                    entry.post_id = _entry['post_id']
                    entry.slug = urldecode(_entry['post_name'])
                    entry.entry_parent = _entry['post_parent']
                    entry.menu_order = _entry['menu_order']
                    for cat in _entry['categories']:
                        c = Category.get_by_key_name(cat['slug'])
                        if c:
                            entry.categorie_keys.append(c.key())
                    entry.settags(','.join(_entry['tags']))
                    if _entry['published']:
                        entry.save(True)
                    else:
                        entry.save()
                    for com in _entry['comments']:
                        try:
                            date = datetime.strptime(com['date'][0:19], "%Y-%m-%d %H:%M:%S")
                        except:
                            date = datetime.now()
                        comment = Comment(author=com['author'],
                                          content=com['content'],
                                          entry=entry,
                                          date=date)
                        try:
                            comment.email = com['email']
                            comment.weburl = com['weburl']
                        except:
                            pass
                        comment.save()
            finally:
                # chain the next import step through the "import" task queue
                queue = taskqueue.Queue("import")
                queue.add(taskqueue.Task(url="/admin/wp_import"))
    except Exception, e:
        logging.info("import error: %s" % e.message)
def POST(self, slug=None, postid=None):
    '''handle trackback'''
    error = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<error>1</error>
<message>%s</message>
</response>
'''
    success = '''<?xml version="1.0" encoding="utf-8"?>
<response>
<error>0</error>
</response>
'''
    if not self.blog.allow_trackback:
        self.response.out.write(error % "Trackback denied.")
        return
    self.response.headers['Content-Type'] = "text/xml"
    if postid:
        entries = Entry.all().filter(published=True).filter(post_id=postid)[0:1]
    else:
        slug = urldecode(slug)
        entries = Entry.all().filter(published=True).filter(link=slug)[0:1]
    if not entries or len(entries) == 0:
        self.response.out.write(error % "empty slug/postid")
        return
    # check the 'code' query parameter to reject spam
    entry = entries[0]
    logging.info(self.request.remote_addr + self.request.path + " " + entry.trackbackurl)
    import cgi
    from urlparse import urlparse
    param = urlparse(self.request.uri)
    code = param[4]
    param = cgi.parse_qs(code)
    if 'code' in param:
        code = param['code'][0]
    if (not str(entry.key()) == code) or entry.is_external_page or not entry.allow_trackback:
        self.response.out.write(error % "Invalid trackback url.")
        return
    coming_url = self.param('url')
    blog_name = myfilter.do_filter(self.param('blog_name'))
    excerpt = myfilter.do_filter(self.param('excerpt'))
    title = myfilter.do_filter(self.param('title'))
    if not coming_url or not blog_name or not excerpt or not title:
        self.response.out.write(error % "not enough post info")
        return
    import time
    # wait half a second in case the other side hasn't been published yet
    time.sleep(0.5)
    comment = Comment.all().filter(entry=entry).filter(weburl=coming_url).get()
    if comment:
        self.response.out.write(error % "has pinged before")
        return
    comment = Comment(author=blog_name,
                      content="...<strong>" + title[:250] + "</strong> " + excerpt[:250] + '...',
                      weburl=coming_url,
                      entry=entry)
    comment.ip = self.request.remote_addr
    comment.ctype = COMMENT_TRACKBACK
    try:
        comment.save()
        memcache.delete("/" + entry.link)
        self.write(success)
        self.blog.tigger_action("pingback_post", comment)
    except:
        self.response.out.write(error % "unknown error")
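# For reference, a trackback client pings the handler above with a
# form-encoded POST carrying url, blog_name, title, and excerpt. A minimal
# sketch using only the Python 2 standard library (the send_trackback() helper
# is hypothetical, and the trackback URL it is given must include the
# ?code=<entry key> query parameter that the handler checks):
import urllib
import urllib2

def send_trackback(trackback_url, source_url, blog_name, title, excerpt):
    body = urllib.urlencode({
        'url': source_url,          # the page that links to the entry
        'blog_name': blog_name,
        'title': title,
        'excerpt': excerpt,
    })
    request = urllib2.Request(trackback_url, body,
                              {'Content-Type': 'application/x-www-form-urlencoded'})
    # the server answers with one of the XML <response> documents defined above
    return urllib2.urlopen(request).read()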
def post(self):
    if not self.is_login:
        self.redirect(users.create_login_url(self.request.uri))
    try:
        # pending import items are queued in memcache under "imt";
        # pop one item per request and write the rest back
        imt = memcache.get("imt")
        import_data = imt.pop()
        memcache.set('imt', imt)
        try:
            cmtimport = memcache.get("cmtimport")
        except:
            cmtimport = False
        if import_data:
            try:
                if import_data[0] == 'cat':
                    _cat = import_data[1]
                    nicename = _cat['slug']
                    cat = Category.get_by_key_name(nicename)
                    if not cat:
                        cat = Category(key_name=nicename)
                    cat.name = _cat['name']
                    cat.slug = nicename
                    cat.put()
                elif import_data[0] == 'entry':
                    _entry = import_data[1]
                    logging.debug('importing:' + _entry['title'])
                    hashkey = str(hash(_entry['title']))
                    entry = Entry.get_by_key_name(hashkey)
                    if not entry:
                        entry = Entry(key_name=hashkey)
                    entry.title = _entry['title']
                    entry.author = self.login_user
                    entry.is_wp = True
                    # pubDate may be RFC 822 style or "YYYY-MM-DD HH:MM:SS"
                    try:
                        entry.date = datetime.strptime(
                            _entry['pubDate'][:-6], "%a, %d %b %Y %H:%M:%S")
                    except:
                        try:
                            entry.date = datetime.strptime(
                                _entry['pubDate'][0:19], "%Y-%m-%d %H:%M:%S")
                        except:
                            entry.date = datetime.now()
                    entry.entrytype = _entry['post_type']
                    entry.content = _entry['content']
                    entry.excerpt = _entry['excerpt']
                    entry.post_id = _entry['post_id']
                    entry.slug = urldecode(_entry['post_name'])
                    entry.entry_parent = _entry['post_parent']
                    entry.menu_order = _entry['menu_order']
                    for cat in _entry['categories']:
                        c = Category.get_by_key_name(cat['slug'])
                        if c:
                            entry.categorie_keys.append(c.key())
                    entry.settags(','.join(_entry['tags']))
                    if _entry['published']:
                        entry.save(True)
                    else:
                        entry.save()
                    if cmtimport:
                        for com in _entry['comments']:
                            try:
                                date = datetime.strptime(com['date'][0:19], "%Y-%m-%d %H:%M:%S")
                            except:
                                date = datetime.now()
                            comment = Comment(author=com['author'],
                                              content=com['content'],
                                              entry=entry,
                                              date=date)
                            try:
                                comment.email = com['email']
                                comment.weburl = com['weburl']
                            except:
                                pass
                            try:
                                # keep the original commenter IP when it looks valid
                                if len(com['ip']) > 4:
                                    comment.ip = com['ip']
                            except:
                                pass
                            comment.store()
            finally:
                # chain the next import step through the "import" task queue
                queue = taskqueue.Queue("import")
                queue.add(taskqueue.Task(url="/admin/wp_import"))
    except Exception, e:
        logging.info("import error: %s" % e.message)
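# The handler above pops one pending item per task-queue request. A minimal
# sketch of how the chain might be started, assuming an earlier upload step
# has parsed the WordPress export into ('cat'/'entry', data) tuples; the
# start_wp_import() name is hypothetical, and the memcache/taskqueue calls are
# the same modules already used above:
def start_wp_import(parsed_items, import_comments=False):
    memcache.set("imt", parsed_items)            # pending ('cat'/'entry', data) tuples
    memcache.set("cmtimport", import_comments)   # whether to import comments too
    taskqueue.Queue("import").add(taskqueue.Task(url="/admin/wp_import"))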