def test_show_news(self):
    """A stored news item should be viewable at /news/<id> with HTTP 200."""
    stored = News(title="News",
                  short_description="Short Description",
                  description="Description1")
    stored.put()
    url = "/news/%s" % stored.key().id()
    result = self.testapp.get(url)
    self.assertEqual(200, result.status_int)
def get(self, tags, page):
    """Return one 10-item page of serialized news, optionally tag-filtered.

    ``tags`` is either the literal 'all' or a '+'-separated tag list; in the
    latter case only items carrying every listed tag are returned.
    """
    window = slice(10 * page, 10 * (page + 1))
    if tags == 'all':
        query = News.objects()
    else:
        query = News.objects(tags__all=tags.split('+'))
    page_items = query.exclude('content', 'comments').order_by('-date')[window]
    return news_list_serialize(page_items)
def post(self):
    """Update a news entry from form arguments and reply with a JSON status.

    Each missing required field maps to a distinct error code; code 0 is
    returned on a successful update.
    """
    news_id = int(self.get_argument('id', 0))
    category_id = int(self.get_argument('category_id', 0))
    title = self.get_argument('title', '')
    content = self.get_argument('content', '')
    create_uid = self.get_secure_cookie('admin_user_id')
    status = int(self.get_argument('status', 0))

    # Validate required fields in the original order; bail out on the first
    # missing one with its dedicated error code.
    for value, code in ((category_id, '110'), (title, '111'),
                        (content, '112'), (create_uid, '113')):
        if not value:
            payload = {'code': int(code), 'msg': self.error_message[code]}
            return self.write(json_encode(payload))

    ok = News.update(news_id, category_id, title, content, create_uid, status)
    if ok:
        payload = {'code': 0, 'msg': '添加成功'}
    else:
        payload = {'code': 114, 'msg': self.error_message['114']}
    return self.write(json_encode(payload))
def get(self, *args):
    """Delete the news entity whose numeric id is args[0], then redirect.

    args[0] is the id segment captured from the URL route.
    """
    news = News.get_by_id(long(args[0]))
    if news:
        news.delete()
    # Redirect back to the listing regardless of whether the entity existed.
    # NOTE(review): the source formatting was collapsed; confirm against
    # history whether the redirect belongs inside the `if` instead.
    self.redirect('/news')
def post(self):
    """Overwrite an existing news document with the submitted fields.

    Requires 'id' and 'title'; aborts with 400 when either is missing or
    when no document matches the id. Invalidates the title-keyed cache.
    """
    args = newsParser.parse_args()
    # Tags come from the raw JSON body, read up front as before.
    tags = request.json['tags']
    if args['id'] is None or args['title'] is None:
        abort(400)
    doc = News.objects(id=args['id']).exclude('comments').first()
    if doc is None:
        abort(400)
    doc.title = args['title']
    doc.abstract = args['abstract']
    doc.news_pic = args['news_pic']
    doc.content = args['content']
    doc.tags = tags
    doc.save()
    cache.delete(args['title'])
    return news_serialize(doc)
def post(self):
    """Search news by optional title substring and tag filter, paginated.

    Returns the requested 10-item page of serialized results, newest first.
    """
    args = searchParser.parse_args()
    search = args['search']
    tags = request.json['tags']
    page = args['page']
    if tags is None or len(tags) == 0:
        news_list = News.objects().exclude(
            'content', 'comments').order_by('-date')
    else:
        news_list = News.objects(tags__all=tags).exclude(
            'content', 'comments').order_by('-date')
    # BUG FIX: `is not ''` compares object identity, not equality — it only
    # appeared to work because CPython interns short strings. Use != instead.
    if search is not None and search != '':
        news_list = news_list.filter(title__icontains=search)
    return news_list_serialize(news_list[10 * page: 10 * (page + 1)])
def get(self, news_id):
    """Render one news page, or the 404 template when the id is unknown."""
    item = News.news_get(news_id)
    if not item:
        self.template = '404.html'
        self.render()
    else:
        self.render(news=item)
def get(self, catagory=0):
    """Render the news list, optionally restricted to a numeric category."""
    spec = {'catagory': int(catagory)} if catagory else {}
    self.render(news_list=News.news(spec=spec))
def test_new_news(self):
    """POSTing the creation form should persist exactly one News entity."""
    form = {
        'title': "Event",
        'short_description': "Short description",
        'description': "Description",
    }
    self.testapp.post('/news/new', form)
    self.assertEqual(1, News.all().count())
def post(self):
    """Create a News entity from the submitted form and return to /news.

    An optional uploaded image is resized to 300px and stored as a Blob.
    """
    item = News()
    item.title = self.request.get("title")
    item.short_description = self.request.get("short_description")
    item.description = self.request.get("description")
    item.created_at = datetime.now().date()
    raw_image = self.request.get("image")
    if raw_image:
        item.image = db.Blob(images.resize(raw_image, 300))
    item.put()
    self.redirect("/news")
def _get_item(self, soup):
    """Extract linked headlines from the page's <li> items into news_list.

    Each qualifying item must contain a <div> holding an <h4>; the anchor
    inside the <h4> supplies both the headline text and the relative href.
    """
    for li_item in soup.findAll('li'):
        # Hoist the repeated .find() chains — the original re-navigated
        # div -> h4 -> a up to five times per item.
        div = li_item.find('div')
        if not div:
            continue
        h4 = div.find('h4')
        if not h4:
            continue
        anchor = h4.find('a')  # assumed present when the h4 exists, as before
        href = anchor.get('href')
        news = News(str(anchor.string),
                    self.dns + href.replace('../', ''),
                    '')
        self.news_list.append(news)
def put(self): args = newsParser.parse_args() title = args['title'] abstract = args['abstract'] news_pic = args['news_pic'] content = args['content'] tags = request.json['tags'] if title is None: abort(400) try: news = News(title=title, abstract=abstract, news_pic=news_pic, content=content, tags=tags) news.save() except: print title abort(400) return news_serialize(news)
def post(self, ident):
    """Update an existing News entity from the edit form, then redirect.

    ident is the numeric entity id taken from the URL.
    """
    news = News.get_by_id(long(ident))
    news.title = self.request.get("title")
    news.short_description = self.request.get("short_description")
    news.description = self.request.get("description")
    if self.request.get("image"):
        news.image = db.Blob(images.resize(self.request.get("image"), 300))
    # Consistency: save via the entity's own put(), as the create handler
    # does, instead of the equivalent module-level db.put(news).
    news.put()
    self.redirect("/news")
def get(self, catagory):
    """Render the news list for 'startup' or the default news category."""
    key = (const.NEWS_CATAGORY.STARTUP if catagory == 'startup'
           else const.NEWS_CATAGORY.NEWS)
    self.render(li=News.news(spec=dict(catagory=key)),
                title=const.NEWS_CATAGORY_CN.get(key))
def _scrap():
    """Fetch unseen Exame articles, build News records, and persist them."""
    LogUtils.start('Exame')
    hrefs, titles = get_news_ignoring_fetched_links(db.get_fetched_links())
    news = []
    # Idiom: iterate the pairs directly instead of indexing via range(len(...)).
    for href, title in zip(hrefs, titles):
        paragraphs, publish_date = get_news_content_by_href(href)
        news.append(News(title, href, paragraphs, 'EXAME Notícias',
                         'https://abrilexame.files.wordpress.com/2019/08/logo-exame.png?w=150',
                         publish_date))
    db.save_all_news(news)
    LogUtils.end('Exame', hrefs)
def delete(self):
    """Delete the news document named by the 'id' argument; 400 if absent."""
    parsed = newsParser.parse_args()
    doc_id = parsed['id']
    if doc_id is None:
        abort(400)
    target = News.objects(id=doc_id).first()
    if target is None:
        abort(400)
    target.delete()
    return {'status': 'success'}
def getNews(ident):
    """Return a JSON-friendly dict describing the News entity with id ident."""
    entity = News.get_by_id(long(ident))
    info = {
        'title': entity.title,
        'short_description': entity.short_description,
        'description': entity.description,
    }
    if entity.image:
        info['image_link'] = '/news/images/' + str(ident)
    return info
def get(self, ident):
    """Serve a single news entity as JSON, with the image base64-encoded."""
    item = News.get_by_id(long(ident))
    self.response.headers['Content-Type'] = 'application/json'
    payload = json.dumps({
        "title": item.title,
        "short_description": item.short_description,
        "description": item.description,
        "created_at": item.created_at.strftime('%d-%m-%Y'),
        "image": base64.b64encode(str(item.image)),
    })
    self.response.write(payload)
def get(self, news_id=None):
    """Render the news edit form; a blank form when no (valid) id is given."""
    news = News.news_get(news_id) if news_id else None
    if not news:
        # Empty skeleton so the template can render a creation form.
        news = {'title': '', 'summary': '', 'img': 0,
                'content': '', 'catagory': 1}
    self.render(news=news)
def test_delete_news(self):
    """Hitting the delete URL should remove the only stored News entity."""
    entity = News()
    entity.put()
    self.assertEqual(1, News.all().count())
    self.testapp.get("/news/delete/%s" % entity.key().id())
    self.assertEqual(0, News.all().count())
def get(self):
    """Return a paginated JSON listing of news rows matching optional filters.

    Query params: page (1-based), rows (page size), plus optional
    title/begin/end filters passed through to News.gets().
    """
    page = self.get_argument('page', 1)
    rows = self.get_argument('rows', 20)
    title = self.get_argument('title', '')
    begin = self.get_argument('begin', '')
    end = self.get_argument('end', '')
    query = {}
    if title:
        query['title'] = title
    if begin:
        query['begin'] = begin
    if end:
        query['end'] = end
    offset = (int(page) - 1) * int(rows)
    # BUG FIX: get_argument returns a string when the client supplies 'rows';
    # the limit must be an int, as the offset computation already assumes.
    limit = int(rows)
    records = News.gets(offset, limit, **query)
    records = [obj2dict(r) for r in records]
    total = News.get_count()
    response = {'total': total, 'rows': records}
    return self.write(date_encode(response))
def _scrap():
    """Fetch unseen InfoMoney articles, build News records, and persist them."""
    LogUtils.start('Info Money')
    news = []
    hrefs, titles = get_news_ignoring_fetched_links(db.get_fetched_links())
    # Idiom: iterate the pairs directly instead of indexing via range(len(...)).
    for href, title in zip(hrefs, titles):
        paragraphs, publish_date = get_news_content_by_href(href)
        news.append(
            News(
                title, href, paragraphs, 'InfoMoney',
                'https://is2-ssl.mzstatic.com/image/thumb/Purple123/v4/5c/df/a9/5cdfa9b4-913f-8b4d-a99d-1c6f2662061e/AppIcon-0-1x_U007emarketing-0-0-85-220-0-4.png/1200x630wa.png',
                publish_date))
    db.save_all_news(news)
    LogUtils.end('Info Money', hrefs)
def _scrap():
    """Fetch unseen Suno articles, build News records, and persist them."""
    LogUtils.start('Suno')
    news = []
    hrefs, titles = get_news_ignoring_fetched_links(db.get_fetched_links())
    # Idiom: iterate the pairs directly instead of indexing via range(len(...)).
    for href, title in zip(hrefs, titles):
        paragraphs, publish_date = get_news_content_by_href(href)
        news.append(
            News(
                title, href, paragraphs, 'Suno Notícias',
                'https://www.sunoresearch.com.br/wp-content/uploads/2019/12/suno-research.jpg',
                publish_date))
    db.save_all_news(news)
    LogUtils.end('Suno', hrefs)
def put(self):
    """Append a comment to the news item with the given title.

    Requires title, username and content; aborts with 400 when any is
    missing or no document matched. Invalidates the comment cache.
    """
    args = commnetParser.parse_args()
    title = args['title']
    username = args['username']
    content = args['content']
    if title is None or username is None or content is None:
        abort(400)
    comment = Comment(username=username, content=content)
    success = News.objects(title=title).only(
        'comments').update_one(push__comments=comment)
    # BUG FIX: `is 0` tests identity and only works via CPython's small-int
    # cache; equality is the correct check for the update count.
    if success == 0:
        abort(400)
    cache.delete(title + "_comment")
    return {'status': 'success'}
def get(self):
    """Return serialized comments for a title, served from cache when possible."""
    args = commnetParser.parse_args()
    title = args['title']
    if title is None:
        abort(400)
    cached = cache.get(title + "_comment")
    if cached is not None:
        return comments_serialize(cached)
    doc = News.objects(title=title).only('comments').first()
    if doc is None:
        abort(400)
    cache.set(title + "_comment", doc.comments, timeout=360000)
    return comments_serialize(doc.comments)
def get(self):
    """Return one serialized news item by title, with a short-lived cache.

    A cache miss increments the view counter and repopulates the cache.
    """
    args = newsParser.parse_args()
    title = args['title']
    if title is None:
        abort(400)
    cached = cache.get(title)
    if cached is not None:
        return news_serialize(cached)
    item = News.objects(title=title).exclude('comments').first()
    if item is None:
        abort(400)
    item.update(inc__news_views=1)
    cache.set(title, item, timeout=60)
    return news_serialize(item)
def test_edit_news(self):
    """POSTing the edit form should change the description and redirect."""
    original = News(title="News",
                    short_description="Short Description",
                    description="Description1")
    original.put()
    ident = original.key().id()
    form = {
        'title': "News",
        'short_description': "Short Description",
        'description': "Description2",
    }
    response = self.testapp.post("/news/edit/%s" % ident, form)
    reloaded = News.get_by_id(ident)
    self.assertEqual(1, News.all().count())
    self.assertEqual(302, response.status_int)
    self.assertEqual("Description2", str(reloaded.description))
def get(self):
    """Emit every news entity (newest first) as a JSON array."""
    self.response.headers['Content-Type'] = 'application/json'
    records = [
        {
            "id": item.key().id(),
            "title": item.title,
            "short_description": item.short_description,
            "description": item.description,
            "created_at": item.created_at.strftime('%d-%m-%Y'),
            "image": base64.b64encode(str(item.image)),
        }
        for item in News.all().order("-created_at")
    ]
    self.response.write(json.dumps(records))
def getAllNews(self):
    """Build a list of news dicts (newest first) with admin action links."""
    result = []
    for entity in News.all().order("-created_at"):
        base_url = self.request.url
        ident = str(entity.key().id())
        result.append({
            'title': entity.title,
            'short_description': entity.short_description,
            'description': entity.description,
            'date': entity.created_at.strftime('%d-%m-%Y'),
            'delete_link': base_url + '/delete/' + ident,
            'edit_link': base_url + '/edit/' + ident,
            'direct_link': base_url + '/' + ident,
        })
    return result
def run_updater():
    """Refresh stored news articles for every stock in the links file.

    Creates a News record for unseen stocks, updates existing ones, and
    sleeps between stocks to throttle external requests.
    """
    # BUG FIX: the JSON file was opened and parsed twice, the second copy
    # (stock_list) was never used, and neither handle was ever closed.
    with open('resources/stock_links.json') as fh:
        stock_dict = json.load(fh)
    for stock in stock_dict.keys():
        try:
            news = News.get_by_stock_code(collection="news_articles",
                                          stock_code=stock)
            if news is None:
                new_news = News(stock_code=stock)
                new_news.load_articles(query=stock_dict[stock]['name'])
                new_news.update_articles()
            else:
                news.load_articles(stock_dict[stock]['name'])
                news.update_articles()
            print("Updated: " + stock)
        except ConnectionFailure:
            print("Unable to connect Mongodb...")
        finally:
            time.sleep(10)
    print("Update completed...")
def create_news(self, json_news):
    """Wrap each raw JSON news record in a News model instance."""
    return [News(record) for record in json_news]
def get(self):
    """Render the news edit page with the record and the category list."""
    ident = self.get_argument('id', '')
    self.render(self.url,
                info=News.get(ident),
                categorys=NewsCategory.gets())
if d > bdeg: bdeg = d best = world.agents[a] degree_centrality.append(best) s2 = {} for agent in degree_centrality: s2[agent] = news.name return s2 def activate_agents(agents): for agent in agents.keys(): agent.states[agents[agent]] = AgentState.ACTIVE fake_news = News(0, 0.9, 0.5) counter_news = News(1, 0.5, 0.1) news_cycle = {fake_news.name: fake_news, counter_news.name: counter_news} number_agents = 500 names_agents = [_ for _ in range(number_agents)] threshold = np.clip(np.random.normal(0.5, 0.1, number_agents), 0.0, 1.0) #mu = 0.5, sigma = 0.1 independence = np.full( number_agents, 0.1 ) #np.clip(np.random.normal(0.5, 0.1, number_agents), 0.0, 1.0) #mu = 0.5, sigma = 0.1 w = construct_world(names_agents, threshold, independence, news_cycle) fakenews_spreader = find_degreecentral_nodes(w, 1, [], fake_news) counternews_spreader = find_degreecentral_nodes( w, 5, [a for a in fakenews_spreader.keys()], counter_news) steps = 30
def post(self):
    """Upsert the posted JSON payload as a news record and echo it back."""
    payload = self.json
    News.news_upsert(payload)
    self.finish(payload.__dict__)
def post(self):
    """Soft-delete: flag the posted news record as deleted and upsert it."""
    record = self.json
    record.__dict__.update(is_del=DEL_FLAG.TRUE)
    News.news_upsert(record)
    self.finish()
def get(self, ident):
    """Stream the stored image blob for a news entity, when one exists."""
    item = News.get_by_id(long(ident))
    if item.image:
        self.response.headers['Content-Type'] = 'image/*'
        self.response.out.write(item.image)