def get(self, cls, bookmark=''):
    """Render the article list for one classification with simple
    bookmark-based paging (PAGESIZE items per page).

    `bookmark` is a double-url-quoted postdate string marking the page start.
    """
    cls = int(cls)
    # O(1) bounds check; the original `cls not in range(len(...))` scanned
    # the whole range object on Python 2.
    if not 0 <= cls < len(MukioTools.namelist):
        self.error(404)
        return
    next = None
    prev = None
    if bookmark:
        # Bookmark arrives double-quoted in the URL.
        bookmark = MukioTools.dt_from_str(urllib.unquote(urllib.unquote(bookmark)))
        arts = Article.all().order('-postdate').filter('classify =', cls).filter('postdate <=', bookmark).fetch(PAGESIZE + 1)
    else:
        arts = Article.all().order('-postdate').filter('classify =', cls).fetch(PAGESIZE + 1)
    # Fetching one extra row tells us whether a next page exists.
    if len(arts) == PAGESIZE + 1:
        next = arts[-1].postdate
        arts = arts[:PAGESIZE]
    ars = []
    for a in arts:
        a.keyname = a.key().name()
        a.postdate += timedelta(hours=+8)  # shift to UTC+8 for display
        a.tagname = MukioTools.tagname(a.classify)
        ars.append(a)
    if bookmark:
        # Probe in the opposite direction to decide whether a previous
        # page exists.
        arts = Article.all().order('postdate').filter('classify =', cls).filter('postdate >=', bookmark).fetch(PAGESIZE + 1)
        if len(arts) == PAGESIZE + 1:
            prev = arts[-1].postdate
    tmpvars = {'arts': ars, 'next': next, 'prev': prev,
               'title': u'分类:' + MukioTools.namelist[cls],
               'iscls': True, 'cls': cls}
    self.render('articles.html', tmpvars)
def get(self):
    """Story landing page: the active story, the stories already used,
    plus recommendation/user sidebars."""
    username = self.get_current_user()
    user = User.get_user_by_name(username)
    order = get_order()
    story = Story.get_by_porder(order)
    sid = self.get_argument("sid", None)
    if sid is not None:
        story = Story.get_by_sid(sid)
    article = Article.get_all_Astory(200)
    # Keep only stories whose porder falls in (0, order].
    usedStory = [s for s in Story.get_all_story() if 0 < s.porder <= order]
    Rarticle = sorted(Article.get_all(200), BaseHandler.rank)[:6]
    Ruser = User.get_all_user(100)[:9]
    usedStory = usedStory[:3]
    self.render("story.html", user=user, Rarticle=Rarticle, Article=article,
                usedStory=usedStory, Ruser=Ruser, story=story)
def get(self, bookmark=''):
    """Paged list of all articles, newest first (simple bookmark paging)."""
    next = None
    prev = None
    if bookmark:
        bookmark = MukioTools.dt_from_str(urllib.unquote(urllib.unquote(bookmark)))
        query = Article.all().order('-postdate').filter('postdate <=', bookmark)
    else:
        query = Article.all().order('-postdate')
    arts = query.fetch(PAGESIZE + 1)
    # One extra row signals that a next page exists.
    if len(arts) == PAGESIZE + 1:
        next = arts[-1].postdate
        arts = arts[:PAGESIZE]
    ars = []
    for art in arts:
        art.keyname = art.key().name()
        art.postdate += timedelta(hours=+8)  # display in UTC+8
        art.tagname = MukioTools.tagname(art.classify)
        ars.append(art)
    if bookmark:
        back = Article.all().order('postdate').filter('postdate >=', bookmark).fetch(PAGESIZE + 1)
        if len(back) == PAGESIZE + 1:
            prev = back[-1].postdate
    self.render('articles.html', {'arts': ars, 'next': next, 'prev': prev})
def get(self):
    """Card landing page: the active card, used cards, and sidebars."""
    username = self.get_current_user()
    user = User.get_user_by_name(username)
    order = get_order()
    card = Card.get_by_porder(order)
    cid = self.get_argument("cid", None)
    if cid is not None:
        card = Card.get_by_cid(cid)
    article = Article.get_all_Acard(200)
    # Cards already used: porder in (0, order].
    usedCard = [c for c in Card.get_all_card() if 0 < c.porder <= order]
    Rarticle = sorted(Article.get_all(200), BaseHandler.rank)[:6]
    Ruser = User.get_all_user(100)[:9]
    usedCard = usedCard[:3]
    self.render("card.html", user=user, Rarticle=Rarticle, Article=article,
                usedCard=usedCard, Ruser=Ruser, card=card)
def add():
    """Persist a submitted article; reject requests failing validation."""
    form = request.form
    # Guard clause: abort() raises, so the happy path needs no else.
    if not validate_legitimacy(form, request.cookies):
        abort(403)
    Article.save_article(form)
    return redirect(url_for('.index'))
def POST(self, id):
    """Handle an article-edit submission; re-show the form on errors."""
    post_id = int(id)
    form = New.form()
    post = Article.get_post(post_id)
    if form.validates():
        Article.update_post(post_id, form.d.article_title, form.d.article_content)
        raise web.seeother('/')
    return render.edit(post, form)
def post_index():
    """Dispatch the index-page POST on the submitted 'form' field."""
    action = request.form['form']
    if action == 'Add feed':
        url = request.form['feed']
        try:
            parsed = main_feed.parsing_method(url)
            source = main_feed.source_get(parsed)
            s = Source.insert_feed(url, source)
            if s:
                articles = main_feed.articles_get(parsed)
                Article.insert_feed(s.id, articles)
        except Exception:
            # Best-effort import: a broken feed must not break the page.
            # Narrowed from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt.
            pass
        return redirect('/')
    elif action == 'Set filter':
        filter_dir.set_filter = request.form["filter"]
        return redirect('/')
    elif action == 'Submit':
        Answer.insert_answer(request.form['answer1'], request.form['answer2'],
                             request.form['answer3'], request.form['comment'])
        return redirect('/')
    elif action == 'Search':
        article_list, sources = search(request.form['search'])
        return render_template('index.html', articles=article_list,
                               sources=sources,
                               set_filter=filter_dir.set_filter)
    else:
        return redirect('/')
def get_article(id):
    """Serialize a public article; raise 403 for restricted ones."""
    doc = mongo.db.article.find_one_or_404({'_id': id})
    article = Article(entries=doc)
    # access_level 0 means publicly readable.
    if article.access_level != 0:
        raise ApiError(NO_AUTH, 403)
    return article.serialize()
def post(odata):
    """Create a CrudObj from `odata`, commit it, and return (dump, 201)."""
    session = db_session()
    obj = CrudObj(**odata)
    session.add(obj)
    session.commit()
    # Dump after commit; per the original author's note this can come
    # back as {} (likely an expired/detached instance) — unresolved.
    payload = obj.dump()
    return payload, 201
def get(self, bookmark=''):
    """Simple bookmark paging over all articles, newest first."""
    next = None
    prev = None
    if bookmark:
        bookmark = MukioTools.dt_from_str(
            urllib.unquote(urllib.unquote(bookmark)))
        arts = Article.all().order('-postdate').filter(
            'postdate <=', bookmark).fetch(PAGESIZE + 1)
    else:
        arts = Article.all().order('-postdate').fetch(PAGESIZE + 1)
    # The extra fetched row indicates another page follows.
    has_more = len(arts) == PAGESIZE + 1
    if has_more:
        next = arts[-1].postdate
        arts = arts[:PAGESIZE]
    ars = []
    for item in arts:
        item.keyname = item.key().name()
        item.postdate += timedelta(hours=+8)  # UTC+8 for display
        item.tagname = MukioTools.tagname(item.classify)
        ars.append(item)
    if bookmark:
        older = Article.all().order('postdate').filter(
            'postdate >=', bookmark).fetch(PAGESIZE + 1)
        if len(older) == PAGESIZE + 1:
            prev = older[-1].postdate
    self.render('articles.html', {'arts': ars, 'next': next, 'prev': prev})
def update(self, id, request_data):
    """Apply `request_data` to the article when the caller owns it."""
    article = Article().find(id)
    if article.user_id == Auth.user().id:
        article.update(request_data)
        return RedirectResponse(article.url())
    # NOTE(review): no response is produced for non-owners — mirrors the
    # original behavior; confirm the framework's handling of None.
def delete(self, id):
    """Delete the article when it belongs to the current user, then
    redirect back to the article index."""
    article = Article().find(id)
    if Auth.user().id == article.user().id:
        article.delete()
        return RedirectResponse(request.base_uri + '/articles')
    # NOTE(review): non-owners get no response — preserved as-is.
def edit(self, id):
    """Show the edit view to the owner; an error response otherwise."""
    article = Article().find(id)
    if Auth.user().id == article.user().id:
        return view('articles.edit', article=article)
    # NOTE(review): 500 is an odd status for an authorisation failure
    # (403 would be conventional); preserved as-is.
    return ErrorResponse('Unauthorised', 500)
def get(self, email, bookmark=''):
    """Paged upload list for one author, identified by e-mail."""
    email = urllib.unquote(urllib.unquote(email))
    usr = users.User(unicode(email).strip())
    next = None
    prev = None
    if bookmark:
        bookmark = MukioTools.dt_from_str(urllib.unquote(urllib.unquote(bookmark)))
        arts = Article.all().order('-postdate').filter('author = ', usr).filter('postdate <=', bookmark).fetch(PAGESIZE + 1)
    else:
        arts = Article.all().order('-postdate').filter('author = ', usr).fetch(PAGESIZE + 1)
    # One extra row means a next page exists.
    if len(arts) == PAGESIZE + 1:
        next = arts[-1].postdate
        arts = arts[:PAGESIZE]
    ars = []
    for art in arts:
        art.keyname = art.key().name()
        art.postdate += timedelta(hours=+8)  # UTC+8 display offset
        art.tagname = MukioTools.tagname(art.classify)
        ars.append(art)
    if bookmark:
        forward = Article.all().order('postdate').filter('author = ', usr).filter('postdate >=', bookmark).fetch(PAGESIZE + 1)
        if len(forward) == PAGESIZE + 1:
            prev = forward[-1].postdate
    self.render('articles.html', {
        'arts': ars,
        'author': usr,
        'next': next,
        'prev': prev,
        'title': usr.nickname() + unicode('的上传列表', 'utf-8'),
        'edit': usr == users.get_current_user(),
    })
def test_get_comments_per_tag(self):
    """Average comments per article for a tag: (0 + 2) / 2 == 1."""
    tag = Tag("tagname1")
    for comments in (0, 2):
        tag.register_article(Article('t', 0, 0, comments, None, ["tagname1"]))
    self.assertEqual(
        {tag: 1},
        tag_analytics.get_comments_per_article_for_each_tag([tag]))
def test_get_views_per_tag(self):
    """Average views per article for a tag: (300 + 500) / 2 == 400."""
    tag = Tag("tagname1")
    for views in (300, 500):
        tag.register_article(Article('t', views, 0, 0, None, ["tagname1"]))
    self.assertEqual(
        {tag: 400},
        tag_analytics.get_views_per_article_for_each_tag([tag]))
def new():
    """GET: show the new-article form; POST: save the submission."""
    if request.method == 'POST':
        article = Article(request.form)
        article.save()
        return redirect(url_for('index.index'))
    if request.method == 'GET':
        return render_template('admin/new.html')
def test_get_reactions_per_tag(self):
    """Average reactions per article for a tag: (6 + 8) / 2 == 7."""
    tag = Tag("tagname1")
    for reactions in (6, 8):
        tag.register_article(Article('t', 0, reactions, 0, None, ["tagname1"]))
    self.assertEqual(
        {tag: 7},
        tag_analytics.get_reactions_per_article_for_each_tag([tag]))
def sources_post():
    """Register a feed source from the posted URL and import its articles."""
    url = request.form['feed']
    parsed = feed.parse(url)
    source = FeedSource.insert_from_feed(url, feed.get_source(parsed))
    Article.insert_from_feed(source.sid, feed.get_articles(parsed))
    return redirect('/sources')
def set_user_from_climate_feedback_user_scrap(user, path, store=None):
    """Scrape a Climate Feedback profile page at `path` and copy the
    scraped fields (name, title/affiliation, expertise, ORCID, website,
    peer publications) onto `user`.

    Fixes over the previous revision:
    - `situation_line` was read before it was assigned (NameError);
    - the publication lookup referenced an undefined `data` variable;
    - `last_name` was left as a *list* when the name had several chunks.
    """
    if store is None:
        store = {}
    result = requests.get('https://climatefeedback.org{}'.format(path))
    soup = BeautifulSoup(result.text, 'html.parser')
    info = soup.find("div", class_="med-body")

    # Name: first chunk is the given name, the remainder the family name.
    name = info.find("h2", class_="noborder").text
    first_name = None
    last_name = name
    if ' ' in name:
        name_chunks = name.split(' ')
        first_name = name_chunks[0]
        last_name = ' '.join(name_chunks[1:])
    user.firstName = first_name
    user.lastName = last_name

    paragraphs = info.find_all("p")
    # First paragraph reads "<title>, <affiliation>".
    situation_line = paragraphs[0].text
    user.title = situation_line.split(",")[0]
    user.affiliation = situation_line.split(",")[1]
    user.external_thumb_url = soup.find("img", class_="avatar")['src']

    expertise_line = paragraphs[1].text
    if 'Expertise:' in expertise_line:
        expertise = expertise_line.split('Expertise: ')[1]
    else:
        expertise = None
    user.expertise = expertise

    orcid = info.find("a", href=re.compile("https://orcid.org/(.*)"))
    if orcid:
        user.orcidId = orcid['href'].split('https://orcid.org/')[1]

    website = info.find("a", text="Website")
    if website:
        user.websiteUrl = website['href']

    publication_image = info.find("img", alt="publication")
    if publication_image:
        for publication_anchor in publication_image.parent.find_all("a"):
            publication_dict = {
                "tags": "isValidatedAsPeerPublication",
                "url": publication_anchor['href'],
            }
            publication = Publication.query.filter_by(
                url=publication_dict['url']).first()
            if not publication:
                publication = Article(**publication_dict)
                publication.populate_from_dict(
                    resolve_with_url(publication.url))
            UserArticle(article=publication, user=user)
def get(self, pagenum=0):
    """Admin article list, one page at a time."""
    total = Article.all().count()
    pg = Pager(int(pagenum), PERPAGE, LINKDISTANCE, total)
    articles = Article.all().order('-postdate').fetch(pg.len, pg.frm)

    def decorate(art):
        # Attach template-only fields.
        art.keyname = art.key().name()
        art.tagname = MukioTools.tagname(art.classify)
        art.postdate += timedelta(hours=+8)
        return art

    self.render('admin/articles.html',
                {'pg': pg, 'articles': map(decorate, articles)})
def get_article():
    """Return the article for the requested chapter as JSON, or a stub
    article when the chapter has none."""
    form = json.loads(request.get_data(as_text=True))
    article = Article.find_by(chapter_id=form["chapter_id"])
    if article is None:  # `is None`, not `== None` (PEP 8)
        article = Article()
        article.title = "空"
        article.content = "当前章节没有文章,请直接刷题"
        return Response(json.dumps(article.__dict__, ensure_ascii=False),
                        content_type='application/json')
    else:
        # Stored articles may carry BSON types (e.g. ObjectId), so use
        # json_util for serialization.
        return Response(json_util.dumps(article.__dict__, ensure_ascii=False),
                        content_type='application/json')
def test_put(self):
    # Smoke test (Python 2 / GAE datastore): a stored Article can be
    # queried back by testid and its content round-trips intact.
    print 'test start'
    article = Article(testid=1, sectionid=2, content='test')
    article.put()
    query = article.all()
    # Exactly one entity exists after the put.
    self.assertEquals(query.count(), 1)
    query = query.filter("testid =", 1)
    self.assertEquals(query.count(), 1)
    for result in query:
        self.assertEquals(result.content, 'test')
def get(self):
    """Free-writing page with recommendation and user sidebars."""
    user = User.get_user_by_name(self.get_current_user())
    article = Article.get_all_Afree(200)
    # Top six ranked recommendations, top nine users.
    Rarticle = sorted(Article.get_all(200), BaseHandler.rank)[:6]
    Ruser = User.get_all_user(100)[:9]
    self.render("free.html", user=user, Rarticle=Rarticle,
                Article=article, Ruser=Ruser)
def post_article(title, content):
    """Store a new Article under `title` and append it to the memcached
    list for that path; returns the stored entity."""
    logging.error("DB QUERY BEING RUN")
    article = Article(path=title, content=content)
    article.put()
    # The cache entry may be absent (evicted or never set); start a
    # fresh list instead of calling .append() on None.
    cached = memcache.get(title)
    if cached is None:
        cached = []
    cached.append(article)
    memcache.set(title, cached)
    return article
def test_get_articles_json(self):
    """/json/articles returns every article's title as JSON."""
    author = User("laura_miller", "laura", "miller")
    first = Article("Some text", "A Title", author)
    second = Article("Other text", "Other article", author)
    self.session.add_all([author, first, second])
    self.session.commit()
    response = self.client.get("/json/articles")
    self.assertEqual(response.status_code, 200)
    expected = [{"title": "A Title"}, {"title": "Other article"}]
    self.assertEqual(response.get_json(), expected)
def post(self):
    """Create a new article from the parsed request payload.

    Returns 201 on success, 500 when persistence fails.
    """
    data = NewArticle.parse.parse_args()
    article = Article(data['id_author'], data['title'], data['content'])
    try:
        article.save_to_db()
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return {"message": "An error occurred creating the article."}, 500
    return {"message": "New article created successfully."}, 201
def setUp(self):
    """Build ten fixture articles where "titleN"'s stats are derived from
    a single factor f: views f*100, reactions f*10, comments f —
    deliberately listed out of order."""
    spec = [
        ("title1", 1), ("title2", 9), ("title3", 3), ("title4", 4),
        ("title5", 5), ("title6", 6), ("title7", 7), ("title8", 8),
        ("title9", 2), ("title10", 10),
    ]
    self.articles = [
        Article(title, f * 100, f * 10, f, None) for title, f in spec
    ]
def test_get_most_comments_per_article_tag(self):
    """The tag whose articles average the most comments is returned."""
    winner = Tag("tagname1")
    for comments in (3, 3):  # average 3
        winner.register_article(Article('t', 0, 0, comments, None, ["tagname1"]))
    runner_up = Tag("tagname2")
    for comments in (3, 1):  # average 2
        runner_up.register_article(Article('t', 0, 0, comments, None, ["tagname2"]))
    self.assertEqual(
        winner,
        tag_analytics.get_most_comments_per_article_tag([winner, runner_up]))
def post(self):
    """Store a comment on an article and bump its comment counter;
    anonymous visitors are sent to the login page."""
    user = User.get_user_by_name(self.get_current_user())
    if not user:
        self.redirect("/login")
        return
    aid = self.get_argument("aid", None)
    touname = self.get_argument("touname", None)
    touid = User.get_user_by_name(touname)[0].uid
    comment = self.get_argument("comment")
    posted_at = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    Comment.add_one_comment(aid, user[0].uid, touid, comment, posted_at)
    Article.add_one_ncomment(aid)
    self.redirect("/article?aid=" + str(aid) + "#comment-id")
def test_get_most_reactions_per_article_tag(self):
    """The tag averaging the most reactions per article is returned."""
    winner = Tag("tagname1")
    for reactions in (6, 8):  # average 7
        winner.register_article(Article('t', 0, reactions, 0, None, ["tagname1"]))
    runner_up = Tag("tagname2")
    for reactions in (2, 8):  # average 5
        runner_up.register_article(Article('t', 0, reactions, 0, None, ["tagname2"]))
    self.assertEqual(
        winner,
        tag_analytics.get_most_reactions_per_article_tag([winner, runner_up]))
def get(self, pagenum=0):
    """Paginated admin listing of articles, newest first."""
    pager = Pager(int(pagenum), PERPAGE, LINKDISTANCE, Article.all().count())
    fetched = Article.all().order('-postdate').fetch(pager.len, pager.frm)

    def with_template_fields(article):
        # Derived fields consumed by the template.
        article.keyname = article.key().name()
        article.tagname = MukioTools.tagname(article.classify)
        article.postdate += timedelta(hours=+8)  # UTC+8 display
        return article

    self.render('admin/articles.html', {
        'pg': pager,
        'articles': map(with_template_fields, fetched),
    })
def fetch_and_save_articles_in_date_range(from_date, to_date):
    """Fetch raw articles for [from_date, to_date], build Article objects,
    and archive their content as a dataset keyed by from_date.

    Note: despite the .csv extension the file is written tab-separated.
    """
    # Fetch raw articles
    raw_articles = RawArticle.get_raw_articles(from_date.strftime('%Y-%m-%d'),
                                               to_date.strftime('%Y-%m-%d'))

    # Build articles and insert into database
    articles = Article.build_articles(raw_articles)
    # Article.bulk_insert(articles)

    # Store raw article content in datasets for later analysis.
    # Rows missing title or description are dropped; duplicates are
    # collapsed by URL.
    df = pd.DataFrame.from_records([
        {
            'article_title': x.title,
            'article_uuid': x.article_uuid,
            'article_url': x.url,
            'article_description': x.description,
            'source_id': x.source_id,
            'published_at': x.published_at,
            'named_entities': x.named_entities,
            'raw_content': x.raw_content,
        }
        for x in articles
        if x.title is not None and x.description is not None
    ]).drop_duplicates(subset='article_url').reset_index(drop=True)

    tmp = tempfile.NamedTemporaryFile()
    # NOTE(review): `f` is opened but unused — to_csv writes via tmp.name
    # directly; presumably the with-block just scopes the temp file's
    # lifetime. Confirm intent before touching.
    with open(tmp.name, 'w') as f:
        df.to_csv(tmp.name, sep='\t', encoding='utf-8', index=False)
    date_str = from_date.strftime('%Y-%m-%d')
    datasets.put(tmp.name, f'/input/article_content/{date_str}.csv')
def post(self, articlekey=None):
    """Save edited article metadata, then sync its video list.

    Bug fix: every error branch now returns after redirecting; the
    original fell through and kept executing the handler.
    """
    article = Article.get_by_key_name(unicode(articlekey).strip())
    if not article:
        self.redirect('/admin/error')
        return
    article.title = unicode(self.request.get('title')).strip()
    article.abs = unicode(self.request.get('abs')).strip()
    article.classify = int(self.request.get('classify'))
    article.tags = unicode(self.request.get('tags')).strip().split()
    try:
        article.put()
    except Exception:
        self.redirect('/admin/error/failtosavearticle')
        return
    try:
        videostring = unicode(self.request.get('videolist')).strip()
        videos = read(videostring)
        # Diff the submitted list against the stored ones.
        oldvideos = article.video_set.order('postdate')
        self.compareAndUpdate(article, videos, oldvideos)
    except Exception:
        self.redirect('/admin/error/failtosavevideos')
        return
    # Re-render the edit page with the fresh data.
    self.get(articlekey)
def post(self, articlekey=None):
    """Save edited article metadata, then sync its video list.

    Bug fix: every error branch now returns after redirecting; the
    original fell through and kept executing the handler.
    """
    article = Article.get_by_key_name(unicode(articlekey).strip())
    if not article:
        self.redirect('/admin/error')
        return
    article.title = unicode(self.request.get('title')).strip()
    article.abs = unicode(self.request.get('abs')).strip()
    article.classify = int(self.request.get('classify'))
    article.tags = unicode(self.request.get('tags')).strip().split()
    try:
        article.put()
    except Exception:
        self.redirect('/admin/error/failtosavearticle')
        return
    try:
        videostring = unicode(self.request.get('videolist')).strip()
        videos = read(videostring)
        # Diff the submitted list against the stored ones.
        oldvideos = article.video_set.order('postdate')
        self.compareAndUpdate(article, videos, oldvideos)
    except Exception:
        self.redirect('/admin/error/failtosavevideos')
        return
    # Re-render the edit page with the fresh data.
    self.get(articlekey)
def get_articles_from_climate_feedback_feedbacks_scrap(articles_max=3,
                                                       editor_user=None,
                                                       store=None):
    """Scrape up to `articles_max` evaluation rows from the Climate
    Feedback feedbacks page and return them as Article objects.

    `store` accumulates objects created during the scrape (e.g. users),
    so a reviewer appearing twice maps to a single object.
    """
    if store is None:
        store = {"users": []}
    page = requests.get('https://climatefeedback.org/feedbacks')
    soup = BeautifulSoup(page.text, 'html.parser')
    rows = soup.find("main").find_all("div", class_="row")
    logger.info('{} evaluation rows and we pick {} max'.format(
        len(rows), articles_max))
    articles = []
    for index, row in enumerate(rows[:articles_max]):
        logger.info('article {}...'.format(index))
        media_body = row.find("div", class_="media-body")
        article = Article()
        evaluation_url = media_body.find("a")['href']
        set_article_from_climate_feedback_evaluation_scrap(
            article, evaluation_url, editor_user, store)
        articles.append(article)
    return articles
def get_random_article(self):
    """Get a random article on the page

    :rtype: Article
    """
    links = self.soup.select(".article-img-link")
    # random.choice states the intent directly instead of randint-based
    # indexing. (Edge case: an empty page now raises IndexError rather
    # than randint's ValueError — both were crashes before.)
    return Article(random.choice(links).get('href'))
def test_article_of_tag_is_registered(self):
    """Registering a matching article accumulates its stats on the tag."""
    article = Article("article", 100, 10, 1, None, ["tagname1"])
    tag = self.tags[0]
    tag.register_article(article)
    self.assertEqual(tag.views, 100)
    self.assertEqual(tag.reactions, 10)
    self.assertEqual(tag.comments, 1)
    self.assertEqual(tag.articles, [article])
def get(self, aid):
    """Show the add-video page for an article owned by the current user.

    Bug fix: return immediately after the ownership redirect; the
    original fell through and could still render the page.
    """
    art = Article.get_by_key_name(aid.strip())
    usr = users.get_current_user()
    if (not art) or (art.author != usr):
        self.redirect('/')
        return
    art.keyname = art.key().name()
    videos = art.video_set
    videos.order('postdate')
    vds = []
    for v in videos:
        v.keyname = v.key().name()
        # Number of comment blocks attached to the video.
        v.cbk = v.cblock_set.count()
        vds.append(v)
    tmpvars = {
        'art': art,
        'title': unicode('添加视频 - ', 'utf-8') + art.title,
        'vds': vds
    }
    self.render('addvideo.html', tmpvars)
def test_article_of_different_tag_is_not_registered(self):
    """An article tagged differently leaves the tag untouched."""
    foreign = Article("article", 100, 10, 1, None, ["tagname0"])
    tag = self.tags[0]
    tag.register_article(foreign)
    self.assertEqual(tag.views, 0)
    self.assertEqual(tag.reactions, 0)
    self.assertEqual(tag.comments, 0)
    self.assertEqual(len(tag.articles), 0)
def article_list(self, unused_request):
    """Return an ArticleCollection of title/author pairs for every
    stored Article."""
    query = Article.query()
    articles = []
    for article_model in query.fetch():
        # NOTE(review): `Aticle` looks like a typo for `Article`, but it
        # may be a (misspelled) message class defined elsewhere — confirm
        # before renaming.
        article = Aticle(title=article_model.title,author=article_model.author)
        articles.append(article)
    return ArticleCollection(items=articles)
def get(self):
    """Reading page: recommendations, the 'suiyue circle', and users."""
    user = User.get_user_by_name(self.get_current_user())
    # Top six ranked recommendations.
    Rarticle = sorted(Article.get_all(200), BaseHandler.rank)[:6]
    # Suiyue circle feed.
    circle = Article.get_all(100)
    # Suiyou (users) sidebar — top nine.
    Ruser = User.get_all_user(100)[:9]
    self.render("read.html", user=user, Rarticle=Rarticle, Circle=circle,
                Ruser=Ruser, Title="岁阅美文")
def delete_article_by_key_name(keyname):
    """Delete an article together with its videos and chat records."""
    article = Article.get_by_key_name(keyname)
    if not article:
        return
    # Cascade: videos first, then the article's chat log, then the
    # article itself.
    for video in article.video_set:
        MukioTools.delete_video_by_key_name(video.key().name())
    MukioTools.delete_chat_by_artkey_name(keyname)
    article.delete()
def post(self):
    """Create an article from the submitted form and jump to its
    add-video page."""
    title = unicode(self.request.get('title')).strip()
    abs = unicode(self.request.get('abs')).strip()
    tags = unicode(self.request.get('tags')).strip()
    classify = int(self.request.get('classify'))
    usr = users.get_current_user()
    # All of title, abstract and a signed-in user are required.
    if title and abs and usr:
        art = Article(key_name=MukioTools.rndvid(5),
                      author=usr,
                      title=title,
                      abs=abs,
                      classify=classify,
                      tags=tags.split())
        if art:  # kept from the original (constructor always truthy)
            art.put()
            self.redirect('/addvideo/' + art.key().name() + '/')
def get(self):
    """Article detail page: writer info, comments, follow state."""
    aid = self.get_argument("aid")
    user = User.get_user_by_name(self.get_current_user())
    article = Article.get_by_aid(aid)
    following = False
    if len(article) > 0:
        if len(user) > 0:
            # Is the viewer following the article's author?
            for focused in User.get_all_focuson(user[0].uid):
                if focused.uid == article[0].uid:
                    following = True
                    break
        writer = User.get_user_by_id(article[0].uid)
        comments = Comment.get_all_comments(article[0].aid, 200)
        Article.add_one_nvisit(aid)
        self.render("article.html", user=user, writer=writer,
                    article=article, comments=comments, F=following)
def get(self, article_key=None):
    """List the revisions of an active article.

    Renders via locals(), so every local name below is a template
    variable — do not rename them.
    """
    article = Article.get(article_key)
    if article is None or not article.is_active:
        return self.error(404)
    # Whether the viewer created the article (enables edit controls —
    # presumably; confirm against the template).
    creator_view = self.get_current_user() == article.creator
    qs = article.revision_set
    qs.order('-date_created')
    revisions = each_profiles(qs)
    return self.render('revision_list.html', locals())
def get(self):
    """List articles, optionally filtered to one user via ?user=nickname.

    Renders via locals(); local names are template variables — do not
    rename them.
    """
    article_qs = Article.all()
    nickname = self.request.get('user')
    if nickname:
        p = ProfileByNickView(nickname).get()
        if p:
            article_qs.filter('creator =', p.get_user())
    article_qs.order('-date_modified')
    # Attach creator profiles for display.
    articles = each_profiles(article_qs, field='creator')
    return self.render('article_list.html', locals())
def all_articles(self):
    """Articles from this user's subscriptions whose topics the user
    follows, newest first, capped at 200.

    The LEFT OUTER join plus `UserArticle.state >> None` (peewee IS NULL)
    keeps articles with no UserArticle row or a NULL state — presumably
    "not yet acted on"; confirm the state semantics.
    """
    return (Article.select()
            .distinct()
            .join(UserSubscription, on=(UserSubscription.subscription == Article.source))
            .join(ArticleTopic, on=(ArticleTopic.article == Article.id))
            .join(UserTopic, on=((UserTopic.topic == ArticleTopic.topic) & (UserTopic.user == UserSubscription.user)))
            # LEFT OUTER so articles without a UserArticle row survive.
            .join(UserArticle, JOIN_LEFT_OUTER, on=(UserArticle.article == Article.id) & (UserArticle.user == UserTopic.user))
            .where(UserTopic.user == self, UserArticle.state >> None)
            .order_by(Article.publish_date.desc())
            .limit(200))
def articles(self, limit=50, offset=0):
    """This user's scored article feed, highest UserArticle.score first,
    paged by `limit`/`offset`.

    Unlike `all_articles`, the UserArticle join is inner (a score row
    must exist), still filtered to `state >> None` (peewee IS NULL).
    """
    return (Article.select(Article, UserArticle.score)
            .distinct()
            .join(UserSubscription, on=(UserSubscription.subscription == Article.source))
            .join(ArticleTopic, on=(ArticleTopic.article == Article.id))
            .join(UserTopic, on=((UserTopic.topic == ArticleTopic.topic) & (UserTopic.user == UserSubscription.user)))
            .join(UserArticle, on=(UserArticle.article == Article.id))
            .where(UserTopic.user == self, UserArticle.user == self, UserArticle.state >> None)
            .order_by(UserArticle.score.desc())
            .limit(limit)
            .offset(offset))
def get(self, article_key=None, errors=None):
    """Show the add-revision form, pre-filled from a base revision.

    Renders via locals(); local names are template variables — do not
    rename them.
    """
    article = Article.get(article_key)
    if article is None or not article.is_active:
        return self.error(404)
    base_rev_key = self.request.get('base')
    base_rev = None
    if base_rev_key:
        base_rev = ArticleRevision.get(base_rev_key)
    # Missing or invalid ?base falls back to the current revision.
    if base_rev is None:
        base_rev = article.get_current_revision()
    content = base_rev.content
    return self.render('revision_add.html', locals())
def get(self, artkeynamestr):
    """Render the chat list for one article, oldest first; 404 when the
    article does not exist."""
    art = Article.get_by_key_name(artkeynamestr.strip())
    if art:
        chats = art.chat_set
        chats.order('postdate')
        chatstr = ''
        # Index-based iteration: renderchat receives the running index
        # (as a string) for each chat entry.
        for i in range(chats.count()):
            chatstr += self.renderchat(chats[i], str(i))
        self.render('chats.html', {'chatstr': chatstr, 'art': art})
    else:
        self.error(404)
def post(self, article_key=None):
    """Make the posted revision the article's current revision."""
    article = Article.get(article_key)
    if article is None or not article.is_active:
        return self.error(404)
    rev_key = self.request.get('revision_key')
    if not rev_key:
        return self.error(404)
    revision = ArticleRevision.get(rev_key)
    # The revision must exist and belong to this very article.
    if revision is None or revision.article.key() != article.key():
        return self.error(401)
    article.current_rev_key = str(revision.key())
    article.put()
    return self.redirect(article.href())
def domain_today():
    """Per-domain counts of today's articles, sorted descending."""
    rows = []
    total = 0
    for entry in Domain.get_all():
        count = Article.count(entry['domain'], today=True)
        rows.append({'domain': entry['domain'], 'article_num': count})
        total += count
    rows.sort(key=lambda row: row['article_num'], reverse=True)
    return jsonify(article_total=total, data=rows)
def get(self, articlekey=None):
    """Render the admin edit page for one article.

    Bug fix: return after the not-found redirect; the original fell
    through and crashed on `article.key()`.
    """
    article = Article.get_by_key_name(unicode(articlekey).strip())
    if not article:
        self.redirect('/admin/error')
        return
    article.keyname = article.key().name()
    article.tagname = MukioTools.tagname(article.classify)
    article.postdate += timedelta(hours=+8)  # UTC+8 for display
    article.tagstring = ' '.join(article.tags)
    videos = article.video_set
    videos.order('postdate')

    def addkey(v):
        v.keyname = v.key().name()
        return v

    self.render('admin/article_edit.html',
                {'title': u'编辑主题', 'article': article,
                 'videos': map(addkey, videos)})
def get(self, article_key=None):
    """Display an article at a specific (or the current) revision.

    Renders via locals(); local names are template variables — do not
    rename them.
    """
    article = Article.get(article_key)
    if article is None or not article.is_active:
        return self.error(404)
    revision = None
    rev_key = self.request.get('rev', '')
    if rev_key:
        revision = ArticleRevision.get(rev_key)
    # Missing or invalid ?rev falls back to the current revision.
    if not revision:
        revision = article.get_current_revision()
    return self.render('article_view.html', locals())
def post(self):
    """Create an article of the given Class ('story'/'card'/'free'),
    bump the author's per-class counter, then redirect; without content
    the user is sent back to the write page."""
    uid = self.get_argument("uid", 10000000)  # fallback author id — confirm
    Class = self.get_argument("Class", None)
    kid = self.get_argument("kid", None)
    title = self.get_argument("title", "无题")
    content = self.get_argument("content", None)
    label = self.get_argument("label", "")
    date = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    if content is not None:
        # NOTE(review): `kid` is passed twice — presumably two distinct
        # id parameters of add_one_article; verify its signature.
        Article.add_one_article(Class, uid, kid, kid, "1", label, title, content, "none", date)
        if Class == "story":
            User.add_one_nstory(uid)
        if Class == "card":
            User.add_one_ncard(uid)
        if Class == "free":
            User.add_one_nfree(uid)
        self.redirect("/" + Class)
    elif Class == "story":
        self.redirect("/write?sid=" + str(kid))
    elif Class == "card":
        self.redirect("/write?cid=" + str(kid))
    else:
        self.redirect("/write")
def get(self):
    """Home page: recommendations, today's best, circle feed, users."""
    user = User.get_user_by_name(self.get_current_user())
    # Overall recommendations, ranked, top six.
    Rarticle = sorted(Article.get_all(200), BaseHandler.rank)[:6]
    # Best of today.
    today = time.strftime("%Y-%m-%d", time.localtime())
    Barticle = sorted(Article.get_by_date(today, 200), BaseHandler.rank)
    # Circle: articles from followed users when signed in, else global.
    if len(user) > 0:
        circle = Article.get_article_by_user_focus(user[0].uid, 25)
    else:
        circle = Article.get_all(200)
    circle = circle[:100]
    Ruser = User.get_all_user(100)[:9]
    self.render("index.html", user=user, Rarticle=Rarticle,
                Barticle=Barticle, Circle=circle, Ruser=Ruser)
def test_add_article():
    """add_article links the seed user to a freshly created article."""
    seed_user = User.get()
    article = Article.create(source=1,
                             publish_date=datetime.now(),
                             url='www.test.com/article',
                             author='Tester',
                             title='Testing test.',
                             body='I am testing this article.')

    def lookup():
        return UserArticle.get(UserArticle.user == seed_user,
                               UserArticle.article == article)

    # No link exists before the call...
    with pytest.raises(UserArticle.DoesNotExist):
        lookup()
    seed_user.add_article(article)
    # ...and one exists afterwards.
    assert lookup() is not None
def get(self, aid, prt):
    """Play page for article `aid`, part `prt` (empty string -> part 0).

    Increments the article's click counter on every view.
    """
    if prt == '':
        prt = 0
    else:
        prt = int(prt)
    art = Article.get_by_key_name(unicode(aid).strip())
    if not art:
        self.redirect('/articles.php')
    else:
        art.clickstatis += 1  # view counter
        art.put()
        # Template-only derived fields.
        art.keyname = art.key().name()
        art.tagname = MukioTools.tagname(art.classify)
        art.postdate += timedelta(hours=+8)  # UTC+8 for display
        videos = art.video_set
        # Out-of-range part numbers fall back to part 0.
        if prt > videos.count():
            prt = 0
        videos.order('postdate')
        vdlinks = []
        # Part-selection links are only built when there are 2+ parts.
        if videos.count() > 1:
            for i in range(videos.count()):
                vdlinks.append({'n': i, 'ptitle': videos[i].parttitle, 'selected': i == prt})
        vds = videos.fetch(1, prt)
        vd = None
        if len(vds):
            vd = vds[0]
            vd.keyname = vd.key().name()
        tmpvars = {
            'art': art,
            'video': vd,
            'title': art.title,
            'links': vdlinks,
            'part': prt,
            'host': self.request.headers['host']
        }
        self.render('videos.html', tmpvars)
def post(self):
    """Create an article from the posted title/content and announce it
    with a Blip.

    Renders via locals() on validation failure, so the local names
    (errors, title, content) are template variables — do not rename.
    """
    errors = []
    title = self.request.get('title', '').strip()
    if not title:
        errors.append('Title cannot be empty')
    content = self.request.get('content', '').strip()
    if not content:
        errors.append('Content cannot be empty')
    if errors:
        return self.render('article_add.html', locals())
    user = self.get_current_user()
    article = Article.new(user, title, content)
    # Absolute URL for the announcement blip.
    full_article_url = urlparse.urljoin(self.request.uri, article.href())
    blip = Blip.new(user, u'added article %s %s' % (article.title, full_article_url))
    return self.redirect(article.href())