def grab_by_spider(spider_class):
    """Crawl all posts published by *spider_class* and sync them into the DB.

    Creates the Blog row the first time this spider's URL is seen, inserts
    articles we have never stored, and refreshes articles whose publish
    time changed. Returns the number of newly created posts.
    """
    created = 0
    blog = Blog.query.filter(Blog.url == spider_class.url).first()
    if blog is None:
        # First encounter with this blog: register it as approved.
        blog = Blog(url=spider_class.url,
                    title=spider_class.title,
                    is_approved=True,
                    subtitle=spider_class.subtitle,
                    author=spider_class.author,
                    has_spider=True)
        db.session.add(blog)
        db.session.commit()
    for entry in spider_class.get_posts_():
        url = entry['url']
        title = entry['title']
        published_at = entry['published_at']
        stored = Post.query.filter(Post.url == url).first()
        if stored is None:
            # Unseen article: download the full content and attach it.
            created += 1
            fresh = Post(url=url,
                         title=title,
                         published_at=published_at,
                         content=spider_class.get_post_(url))
            blog.posts.append(fresh)
            print(" new - %s" % title)
        elif published_at != stored.published_at:
            # Publish time moved: treat as an edit and re-download content.
            stored.title = title
            stored.published_at = published_at
            stored.content = spider_class.get_post_(url)
            db.session.add(stored)
            print(" update - %s" % title)
    db.session.add(blog)
    db.session.commit()
    return created
def grab_by_spider(spider_class):
    """Crawl *spider_class*'s feed and sync blog + posts into the database.

    Creates the Blog row on first sight, marks special-purpose blogs as
    unapproved, fans new posts out to subscribers' unread lists, and
    stamps the blog's last-updated time. Returns the count of new posts.
    """
    new_posts_count = 0
    blog = Blog.query.filter(Blog.url == spider_class.url).first()
    # Create the blog record if it does not exist yet.
    if not blog:
        blog = Blog(url=spider_class.url, title=spider_class.title, is_approved=True,
                    subtitle=spider_class.subtitle, author=spider_class.author, has_spider=True)
        if spider_class.for_special_purpose:
            # Special purpose: hidden from the approved listing.
            blog.is_approved = False
            blog.for_special_purpose = True
        db.session.add(blog)
        db.session.commit()
    # logging.debug(blog.title)
    print(blog.title)
    # Probe whether the blog is currently reachable.
    blog.offline = check_offline(blog.url)
    # Used to compute the blog's last update time.
    # NOTE(review): if the spider yields no posts, updated_at below gets
    # clobbered with datetime.min — confirm that is intended.
    last_updated_at = datetime.datetime.min
    for p in spider_class.get_posts_():
        url = p['url']
        title = p['title']
        published_at = p['published_at']
        post = Post.query.filter(Post.url == url).first()
        # New article
        if not post:
            new_posts_count += 1
            content = spider_class.get_post_(url)
            post = Post(url=url, title=title, published_at=published_at, content=content)
            blog.posts.append(post)
            # logging.debug(" new - %s" % title)
            print(" new - %s" % title)
            # Insert into each subscriber's unread-articles list.
            for blog_user in blog.blog_users:
                user_read_post = UserReadPost(user_id=blog_user.user_id)
                post.readers.append(user_read_post)
        else:
            # Existing article: unconditionally refresh title/time/content.
            # NOTE(review): this re-downloads every known post on every run
            # (the sibling version only updates when published_at changed).
            post.title = title
            post.published_at = published_at
            post.content = spider_class.get_post_(url)
            db.session.add(post)
        if published_at > last_updated_at:
            last_updated_at = published_at
    blog.updated_at = last_updated_at
    db.session.add(blog)
    db.session.commit()
    return new_posts_count
def deletepage():
    """Delete every post whose id appears as a key in the submitted form,
    then rebuild the tag and post caches."""
    ids = [int(key) for key in request.form]
    for post_id in ids:
        victim = Post.all().filter('post_id', post_id).get()
        if victim:
            victim.remove()
    # Refresh caches so the removed posts disappear immediately.
    Tag.updatecache()
    Post.updatecache()
    return json.dumps({'status':1})
def getfullpost(post_id):
    """Render the full page for *post_id*; 404 when the post is missing."""
    post = Post.cached_get_by_id(post_id)
    if not post:
        abort(404)
    comments = Comment.cached_get_by_id(post_id)
    return render_template('page.html', post=post, comments=comments)
def getpost(post_id):
    """Return the post's JSON, or ``{'post_id': -1}`` when unavailable.

    A post is unavailable when it does not exist or is still a draft
    (its ``saveonly`` flag is set).
    """
    post = Post.all().filter('post_id', post_id).get()
    if not (post and post.saveonly == False):
        return json.dumps({'post_id':-1})
    return post.getjson()
def index(page=1):
    """Home page: paginated listing of published (non-draft) posts."""
    posts = Post.cached_get(False, User.PER_PAGE_IN_HOME, page)
    # Ceiling of PostCount / PER_PAGE_IN_HOME, computed py2-style:
    # add one page, then drop it again when the count divides evenly.
    pages = Post.PostCount / User.PER_PAGE_IN_HOME + 1
    if Post.PostCount % User.PER_PAGE_IN_HOME == 0:
        pages -= 1
    if not posts and page != 1:
        return render_template("error/index_not_found.html")
    return render_template("index.html",
                           postlist=posts,
                           currentpage=page,
                           pagecount=pages)
def searchtagname(page=1):
    """Render posts matching the comma-separated tag list in ``?tagname=``.

    Every tag must match (GAE query filters are ANDed); drafts are
    excluded. Renders a not-found page when the requested page is empty.
    """
    if 'tagname' not in request.args:
        # Fix: kwarg was misspelled 'allposst', leaving the template's
        # 'allpost' undefined; pass the empty list under the right name.
        return render_template('tag.html', allpost=[], pagecount=0, currentpage=1, tagname="")
    tagname = urllib.unquote(request.args['tagname']).decode('utf-8')
    allpost = Post.all().filter('saveonly', False)
    for eachtag in tagname.split(','):
        # GAE Query.filter mutates the query in place.
        allpost.filter('tags', eachtag.strip())
    # Hoisted: count() issues a datastore RPC, so compute it once
    # instead of twice.
    total = allpost.count()
    pagecount = total / User.SHOW_TAGSEARCH_NUMBER + 1
    if total % User.SHOW_TAGSEARCH_NUMBER == 0:
        pagecount = pagecount - 1
    allpost = allpost.fetch(User.SHOW_TAGSEARCH_NUMBER, (page - 1) * User.SHOW_TAGSEARCH_NUMBER)
    if allpost:
        return render_template('tag.html', allpost=allpost, pagecount=pagecount,
                               currentpage=page, tagname=tagname)
    return render_template('error/tag_not_found.html', tagname=tagname)
def updatepost():
    """Update an existing post from the query args and the raw body.

    Expects 'post_id', 'title', 'tags', 'posttype' (and optionally
    'allowcomment') in the query string; the new content arrives as the
    URL-encoded UTF-8 request body.
    """
    form = request.args
    if not form.has_key('post_id'):
        return "no such key exsits"
    post = Post.getone(form['post_id'])
    if post is None:
        # Fix: previously a missing post crashed with AttributeError on
        # the attribute assignments below.
        return json.dumps({'message': 'no such post', 'post_id': form['post_id']})
    post.title = form['title']
    post.content = urllib.unquote(request.data).decode('utf-8')
    post.settags(form['tags'])
    post.update_time = int(time.time())
    # 'saveonly' marks the post as an unpublished draft.
    post.saveonly = form['posttype'] == 'saveonly'
    post.allowcomment = form.has_key('allowcomment') and form['allowcomment'] == 'on'
    post.put_into()
    Tag.updatecache()
    # Consistency fix: newpost and deletepage both refresh the post cache;
    # without this an edited post keeps serving stale cached content.
    Post.updatecache()
    return json.dumps({'message':'success','post_id':form['post_id']})
def dosearch(page=1):
    """Full-text search over posts via the GAE Search API (``?search=``)."""
    if 'search' not in request.args:
        return render_template('search.html', searchtag="", pagecount=0)
    searchtag = request.args['search']
    # Sort results by title; ids_only avoids fetching full documents
    # since we re-load posts from our own cache below.
    query_options = search.QueryOptions(
        sort_options=search.SortOptions(
            expressions=[search.SortExpression(expression='title', default_value="")],
        ),
        ids_only=True
    )
    query_obj = search.Query(query_string=searchtag, options=query_options)
    allpost = Index.search(query=query_obj)
    allpostid = [int(i.doc_id) for i in allpost.results]
    allpost = Post.cached_get_by_id_list(allpostid)
    #allpost=Post.all().filter('post_id in',allpostid)
    # NOTE(review): allpost now comes from cached_get_by_id_list; if that
    # returns a plain list, list.count() requires an argument and the two
    # calls below raise TypeError — confirm its return type has a
    # zero-argument count() like a GAE query does.
    pagecount = allpost.count() / User.SHOW_TAGSEARCH_NUMBER + 1
    if allpost.count() % User.SHOW_TAGSEARCH_NUMBER == 0:
        pagecount = pagecount - 1
    return render_template('search.html',
                           allpost=allpost,
                           searchtag=searchtag,
                           pagecount=pagecount)
def test_new_post_with_category(self):
    """A saved post keeps a live reference to its category."""
    category = Category(name='Python')
    category.put()
    post = Post(title="Primo Post", category=category)
    post.put()
    self.assertEqual(post.category.name, 'Python')
def test_new_post(self):
    """A saved post retains its title."""
    post = Post(title="Primo post")
    post.put()
    self.assertEqual(post.title, "Primo post")
def newpost():
    """Create a new post from the query args and the URL-encoded body."""
    args = request.args
    post = Post(title=args['title'],
                content=urllib.unquote(request.data).decode('utf-8'),
                num_lookup=0)
    # Boolean flags straight from the form: draft marker and comment toggle.
    post.saveonly = args['posttype'] == 'saveonly'
    post.allowcomment = args.has_key('allowcomment') and args['allowcomment'] == 'on'
    post.post_id = Post.properid()
    post.settags(args['tags'])
    post.create_date = int(time.time())
    post.update_time = post.create_date
    post.put_into()
    Tag.updatecache()
    Post.updatecache()
    return json.dumps({'message':'success','post_id':post.post_id})
def newpost():
    """Persist a new post built from request.args plus the request body."""
    params = request.args
    body = urllib.unquote(request.data).decode('utf-8')
    entry = Post(title=params['title'], content=body, num_lookup=0)
    # Draft flag ('saveonly') and the comment toggle from the form.
    entry.saveonly = (params['posttype'] == 'saveonly')
    if params.has_key('allowcomment') and params['allowcomment'] == 'on':
        entry.allowcomment = True
    else:
        entry.allowcomment = False
    entry.post_id = Post.properid()
    entry.settags(params['tags'])
    now = int(time.time())
    entry.create_date = now
    entry.update_time = now
    entry.put_into()
    Tag.updatecache()
    Post.updatecache()
    return json.dumps({'message': 'success', 'post_id': entry.post_id})
def test_posts(self):
    """A post persists and is linked to its author via user_id."""
    u = User(username='******', email='*****@*****.**')
    p = Post(body='first post', author=u)
    db.session.add_all([u, p])
    # Fix: flush explicitly so primary/foreign keys are assigned even when
    # the session's autoflush is disabled (the original silently relied on
    # the query below triggering an autoflush).
    db.session.flush()
    self.assertEqual(db.session.query(Post.id).count(), 1)
    self.assertEqual(Post.query.first().user_id, u.id)