def create_tables():
    """Create the database tables for the news models (User is skipped)."""
    from models import User, News, OursNews
    # User.create_table() is intentionally not run here.
    for model in (News, OursNews):
        model.create_table()
def addNewsArticle(companyModel,newsSourceModel,url): tree = html.fromstring(requests.get(url).text) # Need the article title title = tree.xpath('//h1/text()')[0] # Need the text of the article textElements = tree.xpath("//*[@id='articleText']//*/text()") text = "" for t in textElements: text += t # Need the date field dateText = tree.xpath("//div[@id='articleInfo']//span[@class='timestamp']/text()")[0] date = datetime.datetime.strptime(dateText[:-4], "%a %b %d, %Y %I:%M%p").date() News.create( company=companyModel, newsSource=newsSourceModel, title=title, text=text, url=url, date = date ) print "Added: "+title
def create(request, template="news/create.html"):
    """Create a News entry authored by the current user, then redirect."""

    def get_slug(text, numb=0):
        "Create unique slug"
        # Append an increasing numeric suffix until the slug is unused.
        if numb:
            text += "_%d" % numb
        s = slugify(text)
        try:
            News.objects.get(slug=s)
        except News.DoesNotExist:
            return s
        return get_slug(text, numb + 1)

    u = request.user
    if request.POST:
        n = News(author=u, show=False, slug=get_slug(request.POST['title']))
        f = NewsForm(request.POST, instance=n)
        if f.is_valid():
            n = f.save()
            return HttpResponseRedirect(n.get_absolute_url() + "show/")
    else:
        f = NewsForm()
    # Invalid POST falls through with the bound form so errors are shown.
    return render_to_response(template, {"form": f},
                              context_instance=RequestContext(request))
def add_news(request):
    """Create a News entry from a submitted NewsForm (Django view).

    Anonymous users get an error page; on success redirects to '/'.
    """
    # Idiom fix: `not ...` instead of `... == False`.
    if not request.user.is_authenticated():
        # User-facing message (Ukrainian): "please log in to perform this".
        return HttpResponse("<h2>Для виконання операції, авторизуйтесь</h2>")
    a = News()
    if request.method == 'POST':
        form = NewsForm(request.POST, instance=a)
        if form.is_valid():
            title = form.cleaned_data['title']
            text = form.cleaned_data['text']
            # date = form.cleaned_data['date']
            link = form.cleaned_data['link']
            author = form.cleaned_data['author']
            category = form.cleaned_data['category']
            comm = form.cleaned_data['comments']
            # user = form.cleaned_data['user']
            n = News(title=title, text=text, date=datetime.datetime.now(),
                     link=link, author=author, category=category,
                     user=request.user)
            # n.comments.add(comm)
            n.save()
            return HttpResponseRedirect('/')
    else:
        form = NewsForm(instance=a, initial={'author': request.user})
    # Invalid POST or GET: render the page with the (possibly bound) form.
    photo1 = Photo.objects.random()
    photo2 = Photo.objects.random()
    vars = {'weblink': 'news_add.html', 'sel_menu': 'main',
            'photo1': photo1, 'photo2': photo2,
            'entry': get_funn(), 'form': form}
    calendar = embeded_calendar()
    vars.update(calendar)
    return render_to_response('index.html', vars,
                              context_instance=RequestContext(
                                  request, processors=[custom_proc]))
def getNewsList(request):
    """Return one 15-item JSON page of news posted by the user's friends."""
    # Page number comes from the query string; pages depend on the friends list.
    page = int(request.GET['num'])
    friends = Account.objects(pk__in=request.user.friends).all()
    total = News.objects(author__in=friends).count()
    if page * 15 > total:
        # Past the last page: empty result.
        return HttpResponse(dumps([]))
    offset = page * 15
    rows = (News.objects(author__in=friends)[offset:offset + 15]
            .order_by("-time").as_pymongo())
    rows = list(rows)
    for news in rows:
        del news['_types']
        del news['_cls']
        news['picture'] = endpoint + "news/getPicture?id=" + str(news['_id'])
        news['voice'] = endpoint + "news/getVoice?id=" + str(news['_id'])
        uid = news['author']
        user = News.objects(author=uid).first().author
        gid = news['good']
        good = News.objects(good=gid).first().good
        news['good'] = endpoint + "goods/getGoods?id=" + str(good.pk)
        news['author'] = {"portrait": endpoint + "users/getPortrait?id=" + str(user.pk),
                          "name": user.username}
        news['comments'] = endpoint + "news/getComments?id=" + str(news['_id'])
        # news['_id'] = endpoint + "news/getNewsDetail?id=" + str(news['_id'])
        del news['_id']
        news['time'] = str(news['time'])
    return HttpResponse(dumps(rows))
def delete_news(news_id):
    """Delete the News document with the given id and report success."""
    News.objects(id=news_id).delete()
    return jsonify({'status': 'success', 'news_id': news_id}), 200
def get_news():
    """returns JSON with news model"""

    def _news_json(returned_news):
        # Shared serialization, previously duplicated in both branches:
        # either an error marker or a jsonify'able list of dicts.
        if returned_news is False:
            return jsonify(news='no news obtained')
        return jsonify(news=[news.serialize_news() for news in returned_news])

    # checks if a specific company / stock symbol has been requested
    if request.json:
        # checks that a stock_symbol was provided with JSON
        stock_symbol = request.json.get('stock_symbol')
        if stock_symbol:
            # request for stock object; aborts with 404 if unknown
            returned_stock_details = Stock.query.get_or_404(stock_symbol)
            if returned_stock_details:
                # get news for stock
                return _news_json(News.get_news(stock_symbol))
    # if no stock symbol passed as json
    return _news_json(News.get_news())
def get_news_search_queryset(city, q):
    """Run a Sphinx full-text search over news for one city. (Python 2.)

    Returns News instances hydrated from the Sphinx match attributes,
    each with a `.photo` attribute (the display_order==0 NewsPhoto, or
    None). Raises Exception('search', <sphinx error>) on failure.
    """
    try:
        sphinx.SetFilter('city', [int(city)], exclude=0)
        search_result = sphinx.Query(q, 'article', '')
        sphinx.ResetFilters()
        items = []
        for match in search_result['matches']:
            news = News()
            news.id = match['id']
            for key, value in match['attrs'].iteritems():
                setattr(news, key, value)
            items.append(news)
    except:
        # Catch-all kept deliberately: any sphinx failure is converted to
        # a single 'search' exception carrying sphinx's own error text.
        error = sphinx.GetLastError()
        raise Exception('search', error)
    #queryset = News.objects.filter(is_displayed=True, city=city, title__icontains=q).order_by('-date_added')
    if items:
        photos = NewsPhoto.objects.filter(display_order=0, subnews_id__isnull=True)
        photos_dict = {}
        for p in photos:
            photos_dict[p.news_id] = p
        for item in items:
            # dict.get instead of scanning `.keys()` — O(1) and clearer.
            item.photo = photos_dict.get(item.id)
        return items
    return []
def createDB(): print "Creating the initial db" Company.create_table() NewsSource.create_table() News.create_table() Price.create_table() OpinionAPI.create_table() OpinionAPIResponse.create_table()
def news_delete(id: int):
    """Delete a news item; only its owner may do so."""
    if not auth.is_authorized():
        return redirect('/login')
    news = News.query.filter_by(id=id).first()
    if news is None:
        # Previously an AttributeError (HTTP 500) on a missing row; a
        # nonexistent resource is properly a 404.
        abort(404)
    if news.user_id != auth.get_user().id:
        abort(403)
    News.delete(news)
    return redirect('/news')
def delete(self, id):
    """API delete for a news item; owner-only. Returns {"deleted": True}."""
    if not self._auth.is_authorized():
        abort(401)
    news = News.query.filter_by(id=id).first()
    if news is None:
        # Missing row used to raise AttributeError (500); 404 is correct.
        abort(404)
    if news.user_id != self._auth.get_user().id:
        abort(403)
    News.delete(news)
    return jsonify({"deleted": True})
def add_news(self):
    """Validate title/text and persist a News row.

    Returns 'ok' on success, or the (Russian) error string when either
    field is empty. The model is now only constructed after validation.
    """
    if self.title and self.text:
        News(news_title=self.title, news_text=self.text).save_to_db()
        return 'ok'
    return "Произошла ошибка!"
def project(news_id):
    """Render one PROJECT article plus the ten newest PROJECT items."""
    recent = (News.select()
              .where(News.category == 'PROJECT')
              .order_by(News.time.desc())
              .limit(10))
    article = News.select().where(News.id == news_id).get()
    return render_template('articles/project.html',
                           news=article, project_list=recent)
def tags(tag, page=1):
    """Paginated listing of news in one category (tag)."""
    page = int(page)
    total = News.objects(category=tag).count()
    offset = (page - 1) * NEWS_ON_PAGE
    posts = (News.objects(category=tag)
             .order_by('-date')
             .skip(offset)
             .limit(NEWS_ON_PAGE))
    pagination = Pagination(page=page, per_page=NEWS_ON_PAGE, total=total,
                            record_name='News', format_total=True,
                            format_number=True)
    return render_template("news.html", posts=posts, pagination=pagination,
                           title="Cryptocurrency news today")
def news_create_form():
    """Show the news-creation form; create the item on a valid submit."""
    if not auth.is_authorized():
        return redirect('/login')
    form = NewsCreateForm()
    if form.validate_on_submit():
        News.add(title=form.title.data,
                 content=form.content.data,
                 user=auth.get_user())
        return redirect('/')
    return render_template('news-create.html',
                           title='Создать новость', form=form)
def news_form():
    """Create a News card from the submitted form, then re-render it.

    BUG FIX: the original tested `form.validate` — the bound method
    object, which is always truthy — instead of calling it, so invalid
    input was silently accepted.
    """
    form = NewsForm()
    if request.method == 'POST' and form.validate():
        title = request.form['title']
        description = request.form['description']
        image_path = save_file(form.news_image.data)
        news = News(title, image_path, description)
        news.save()
    return render_template('card_form.html', form=form)
def post(self, request):
    """Create a short news item for the logged-in user; JSON response."""
    title = request.POST['title']
    shortNews = request.POST['shortNews']
    try:
        # DatabaseError anywhere in this section maps to a FAIL response.
        author = AuthUser.objects.get(id=request.user.id)
        stamp = timezone.localtime(timezone.now()).strftime('%d %b %Y')
        News(title=title, datatime=stamp, shortnews=shortNews,
             auth_user=author).save()
        return JsonResponse({'response': 'OK'})
    except DatabaseError:
        return JsonResponse({'response': 'FAIL'})
def events(news_id):
    """Render one event article plus the ten newest ACTIVITY items."""
    article = News.select().where(News.id == news_id).get()
    recent = (News.select()
              .where(News.category == 'ACTIVITY')
              .order_by(News.time.desc())
              .limit(10))
    return render_template('articles/events.html',
                           news=article, events_list=recent)
def notification():
    """Render the six newest news summaries and six newest activities."""
    latest_news = (News.select()
                   .where(News.category == 'NEWS')
                   .order_by(News.time.desc())
                   .limit(6))
    latest_events = (News.select()
                     .where(News.category == 'ACTIVITY')
                     .order_by(News.time.desc())
                     .limit(6))
    return render_template('notification.html',
                           news_list=latest_news,
                           events_list=latest_events)
def news_post():
    """Save a new News entity from the submitted form (GAE datastore)."""
    form = NewsForm()
    if form.validate_on_submit():
        entry = News(view=0, title=form.title.data, url=form.url.data,
                     hot=False)
        try:
            entry.put()
            flash(u'저장 성공', 'success')
        except CapabilityDisabledError:
            flash(u'App Engine Datastore is currently in read-only mode.',
                  'failure')
        # Either way the user is sent back to the list page.
        return redirect(url_for('new_list'))
    return render_template('news_post.html', form=form,
                           title=request.args.get('title'),
                           url=request.args.get('url'))
def news(news_id):
    """Render the requested news article plus the ten newest NEWS items."""
    article = News.select().where(News.id == news_id).get()
    recent = (News.select()
              .where(News.category == 'NEWS')
              .order_by(News.time.desc())
              .limit(10))
    return render_template('articles/news.html',
                           news=article, news_list=recent)
def news_post():
    """Save a News entity and add it to the full-text search index (GAE)."""
    form = NewsForm()
    if form.validate_on_submit():
        entry = News(view=0, title=form.title.data, url=form.url.data,
                     hot=False)
        try:
            entry.put()
            doc = create_doc(entry.key.id(), entry.title, entry.url,
                             entry.post_time)
            search.Index(name=_INDEX_NAME).add(doc)
            flash(u'저장 성공', 'success')
        except CapabilityDisabledError:
            flash(u'App Engine Datastore is currently in read-only mode.',
                  'failure')
        # Either way the user is sent back to the list page.
        return redirect(url_for('new_list'))
    return render_template('news_post.html', form=form,
                           title=request.args.get('title'),
                           url=request.args.get('url'),
                           counter=counter.load_and_get_count("view"),
                           acc=ACC)
def add_news(req):
    """Upsert a crawled news item plus its keyword and referrer rows.

    Skips the update entirely when the stored copy is newer than the
    posted one. Removes the dead `mnews = ""` sentinel: both branches
    always assign mnews before use.
    """
    check = News.objects.filter(link=req.POST['link'])
    if check.count() != 0:
        # record already exist
        if check[0].time > int(req.POST['time']):
            # Stored copy is newer; nothing to do.
            return HttpResponse(json.dumps({'status': 'OK'}, indent=4),
                                content_type='application/json')
        mnews = check[0]
    else:
        mnews = News()
    mnews.title = req.POST['title']
    mnews.content = req.POST['content']
    mnews.link = req.POST['link']
    mnews.time = int(req.POST['time'])
    mnews.save()
    keywords = json.loads(req.POST['keywords'])
    refers = json.loads(req.POST['refers'])
    for word in keywords:
        mkeywords = Keywords()
        mkeywords.news = mnews
        mkeywords.words = word
        mkeywords.save()
    for refer in refers:
        mrefer = Refers()
        mrefer.news = mnews
        mrefer.refer = refer
        mrefer.save()
    return HttpResponse(json.dumps({'status': 'OK'}),
                        content_type='application/json')
def index():
    """Front page: latest notifications, latest news, and all pictures."""
    notifications = (News.select()
                     .where(News.category == 'NOTIFICATION')
                     .order_by(News.time.desc())
                     .limit(5))
    latest = (News.select()
              .where(News.category == 'NEWS')
              .order_by(News.time.desc())
              .limit(6))
    pics = News.select().where(News.category == 'PICTURE')
    return render_template('index.html',
                           notification_list=notifications,
                           news_list=latest,
                           pics=pics,
                           pic_num=pics.count())
def post(self, *args):
    """Create a news item in the category given by the URL argument."""
    cat_id = args[0]
    News.create(
        title=self.get_argument("news-title"),
        body=self.get_argument("news-body"),
        date=self.get_argument("news-date"),
        author=self.GetAuthorIdByEmail(self.current_user),
        category=cat_id,
    )
    self.write("Item saved...")
    self.redirect("/category")
def post(self):
    """Create a News entity from the request fields and echo it as JSON."""
    news = News()
    for field in ("title", "slug_title", "content"):
        setattr(news, field, self.request.get(field))
    news.when_published = datetime.utcnow()
    news.put()
    body = news.to_json("title", "is_deleted", "is_active", "is_starred")
    self.response.out.write(body)
def populate_news(request):
    """Import HackerNews items, creating only rows that don't exist yet.

    Returns the raw scraped data as JSON.
    """
    data = get_from_hackernews()
    for dx in data:
        try:
            News.objects.get(headline=dx['news'])
        except News.DoesNotExist:
            # Narrowed from a bare `except:` so genuine errors (missing
            # keys, DB failures) are no longer treated as "not found".
            n = News(headline=dx['news'], source=dx['url'],
                     points=dx['pts'], author=dx['user'])
            n.save()
    return HttpResponse(json.dumps({'data': data}))
def add_news(self):
    """Persist a News row built from this object's title/text.

    Returns the (Russian) error string on failure, None on success —
    same contract as before.
    """
    new_news = News(news_title=self.title, news_text=self.text)
    try:
        app.session.add(new_news)
        app.session.commit()
    except Exception:
        # Narrowed from a bare `except:`; roll back so the session stays
        # usable after a failed commit.
        app.session.rollback()
        return "Произошла ошибка!"
def news_list():
    """Render the twenty newest NEWS items."""
    recent = (News.select()
              .where(News.category == 'NEWS')
              .order_by(News.time.desc())
              .limit(20))
    return render_template('articles/n_list.html', news_list=recent)
def index(request):
    """Guestbook front page: login link, latest greetings and news."""
    current = users.get_current_user()
    if current:
        url = users.create_logout_url("/")
        url_linktext = 'Logout'
        username = current
    else:
        url = users.create_login_url("/")
        url_linktext = 'Login'
        username = ''
    guestbook_name = request.GET.get('guestbook_name', DEFAULT_GUESTBOOK_NAME)
    # Ancestor queries are strongly consistent on the High Replication
    # Datastore; without the ancestor a just-written Greeting might not
    # show up in the query yet.
    greetings = (Greeting.query(ancestor=guestbook_key(guestbook_name))
                 .order(-Greeting.date)
                 .fetch(10))
    news = News.query().order(-News.date).fetch(5)
    return render_to_response('index.html', {
        'url': url,
        'url_linktext': url_linktext,
        'username': username,
        'greetings': greetings,
        'guestbook_name': guestbook_name,
        'news': news,
    })
def news_release():
    """Publish a news article (POST) or render the release form (GET)."""
    user = g.user
    # new = News.query.all()
    category = Category.query.all()
    if request.method == 'POST':
        title = request.form.get('title')
        category_id = request.form.get('category_id')
        digest = request.form.get('digest')  # article summary/abstract
        index_image = request.files['index_image']
        image_name = photos.save((index_image))  # upload the cover image
        print(image_name)
        image_url = 'static/upload/' + image_name
        content = request.form.get('content')
        userid = User.query.filter(User.id == user.id).first()
        news = News(title=title, category_id=category_id, digest=digest,
                    index_image_url="/" + image_url, content=content,
                    user_id=userid.id)
        db.session.add(news)
        # BUG FIX: the row was added but never committed, so the article
        # was silently lost when the session ended.
        db.session.commit()
        resp = {'errno': RET.OK, 'errmsg': error_map[RET.OK]}
        return jsonify(resp)
    data = {'categories': category}
    return render_template('news/user_news_release.html', data=data)
def get_list(request):
    """Search news/PTT posts for the given term and render the list page."""
    # NOTE: variable names matter here — the template context is locals().
    search_list = request.GET['search']
    news_list, ptt_list = News().getNews(search_list)
    # return render(request,'post_list.html',{})
    return render_to_response('post_list.html', locals())
def post(self):
    """Deactivate a news item if it exists and is currently active."""
    news = News.get_by_id(self.request.get_range('news_id'))
    if not news:
        self.abort(400)
    if news.status == STATUS_ACTIVE:
        news.deactivate()
def get_news_content(url): html = download(url) soup = BeautifulSoup(html,"html.parser") # print soup.prettify("utf-8") print soup.find(attrs={"class":"pg-headline"}) title = soup.find(attrs={"class":"pg-headline"}) print soup.find(attrs={"class":"metadata__byline__author"}) author = soup.find(attrs={"class":"metadata__byline__author"}) print soup.find(attrs={"class":"update-time"}) update_time = soup.find(attrs={"class":"update-time"}) contents = soup.find_all(attrs={"class":"zn-body__paragraph"}) content = "" for i in contents: print i.text content += i.text #存入mongoDB session = Session.connect('runoob') #session.clear_collection(News) news = News(title=str(title), author=str(author), update_time=str(update_time),content=str(content),url=str(url), site=str("http://edition.cnn.com/")) print news.title session.save(news) print '查询结果' result = session.query(News).skip(3).limit(2) for news in session.query(News).skip(3).limit(2): print news.title,news.update_time
def update_sentiment_info_on_existing_news():
    """Backfill sentiment fields on news rows missing a word count."""
    pending = News.select().where(News.word_count.is_null())
    for article in pending:
        info = sentence_info(article)
        article.sent_score = info["sent_score"]
        article.word_count = info["word_count"]
        article.words = json.dumps(info["words"])
        article.save()
def test_get_news_model_no_stock(self):
    """test adding news model"""
    # returns a list of 20 news objects
    articles = News.get_news()
    # spot-check a spread of articles: each sampled field must be set
    for field in ("headline", "category", "summary", "id"):
        for idx in (0, 5, 10, 19):
            self.assertIsNotNone(getattr(articles[idx], field))
def news_details_edit():
    """Create (news_id == 0) or edit (news_id > 0) a news article.

    Returns status=0 with the backend message when add/edit reports a
    problem, otherwise status=1 with "success".
    """
    if request.method == "POST":
        info = request.values
        news_id = int(info.get("news_id")) if info.get("news_id") else 0
        small_pic = info.get("small_pic", "")
        subtitle = info.get("subtitle", "")
        title = info.get("title", "")
        author = info.get("author", "")
        content = info.get("content", "")
        desc = info.get("desc", "")
        read_num = int(info.get("read_num")) if info.get("read_num") else 0
        cur_time = int(time.time())
        # Normalise line breaks and quote style before storing.
        content = content.replace("\n", '').replace("'", '"')
        tdate = tt.today()
        if news_id == 0:
            # New article.
            last_weight = News.get_news_weight()
            weight = 0
            # weight = last_weight + 1
            addstatus = News.add_news(title=title, subtitle=subtitle,
                                      author=author, read_num=read_num,
                                      tdate=tdate, cur_time=cur_time,
                                      desc=desc, content=content,
                                      weight=weight, small_pic=small_pic)
            if addstatus:
                return jsonify(message=addstatus, status=0)
        elif news_id > 0:
            editstatus = News.edit_news(title=title, subtitle=subtitle,
                                        author=author, read_num=read_num,
                                        cur_time=cur_time, desc=desc,
                                        content=content, news_id=news_id,
                                        small_pic=small_pic)
            if editstatus:
                return jsonify(message=editstatus, status=0)
    return jsonify(message="success", status=1)
def get_news_info(news_file, session, is_diff_file):
    """Parse a crawl dump and upsert News rows into the DB session.

    The file alternates JSON metadata lines (starting with '{') with the
    item's title line and then its content line. In diff mode existing
    items get their title/content refreshed; otherwise only unseen items
    are inserted.

    BUG FIX: `session.commit` was referenced without calling it, so new
    rows were added to the session but never committed.
    """
    title = ''
    content = ''
    is_exists_id = False
    already_get_id = False
    for line in news_file:
        if inspect_josn_format(line) and line.startswith('{'):
            # Metadata line: reset per-item state and load the JSON.
            already_get_id = True
            is_exists_id = False
            title = ''
            content = ''
            meta = json.loads(line)
            id = meta['id']
            create_time = date.fromtimestamp(int(
                meta['created_at'])).strftime("%Y-%m-%d")
            try_get = session.query(News).filter_by(news_id=id).first()
            if try_get and is_diff_file:
                # Already stored and we're diffing: bump its change count.
                is_exists_id = True
                # if not try_get.inspect_expiration_date(create_time):
                session.query(News).filter_by(
                    news_id=id).first().update_changed_count()
            elif (not try_get) and (not is_diff_file):
                # New item on a full import: remember fields for insert.
                url = meta['url']
                source_media = media_list[int(meta['source'])]
            else:
                already_get_id = False
                print('Error item!')
        else:
            if already_get_id == False:
                continue
            if is_exists_id == True:
                # Existing item: first non-meta line is the title, the
                # next is the content; then update the stored row.
                if content == '' and title != '':
                    content = line
                    already_get_id = False
                    if not (("404" in content) or ("404" in title)):
                        session.query(News).filter_by(news_id=id).update({
                            'title': title,
                            'content': content
                        })
                if title == '':
                    title = line
            else:
                # New item: collect title then content, then insert.
                if content == '' and title != '':
                    content = line
                    already_get_id = False
                    news_add = News(news_id=id, url=url, title=title,
                                    source_media=source_media,
                                    content=content,
                                    create_time=create_time)
                    session.add(news_add)
                    session.commit()
                if title == '':
                    title = line
    return session
def get_context_data(self, **kwargs):
    """Provide the five newest news items; a placeholder row when empty.

    The fallback variable was named `list`, shadowing the builtin —
    renamed.
    """
    news = News.objects.all().order_by('id').reverse()[:5]
    if news.count() <= 0:
        news = [News(id=0, title="Dodaj artykul", shortnews="test",
                     datatime="", imageurl="")]
    return {'news': news}
def get(self):
    """Render the admin news list with formatted creation timestamps."""
    news = News.query().order(-News.start).fetch()
    for item in news:
        item.created_str = item.created.strftime(STR_DATETIME_FORMAT)
    self.render('/notifications/news_list.html',
                news=news,
                utc_time=datetime.utcnow().strftime(STR_DATETIME_FORMAT),
                NOTIFICATION_STATUS_MAP=NOTIFICATION_STATUS_MAP,
                STATUS_ACTIVE=STATUS_ACTIVE,
                STATUS_CREATED=STATUS_CREATED)
def post(self, *args):
    """Apply title/body/date edits from the form to an existing news row."""
    record = News.select().where(News.id == args[0]).get()
    record.title = self.get_argument("title")
    record.body = self.get_argument("body")
    record.date = self.get_argument("date")
    record.save()
    self.write("Saved Changes.")
def get(self, **kwargs):
    """Render the news page with all news, newest first."""
    context = request_context({'all_news': News.all().order('-date')})
    return render_response('news.html', **context)
def get(self):
    """Front page: latest ten news items plus a logout link if signed in."""
    logout_url = (users.create_logout_url('/')
                  if users.get_current_user() else None)
    news = News.get_latest(10)
    self.response.out.write(
        render_template('index.html', logout_url=logout_url, news=news))
def admin_news_uploads():
    """Create a news item with an image stored on S3 under its row id."""
    form = UploadNewsForm()
    if form.validate_on_submit():
        news = News()
        # First commit assigns news.id, which names the uploaded file.
        db.session.add(news)
        db.session.commit()
        news.title = form.title.data
        news.content = form.content.data
        news.author = current_user.username
        extension = secure_filename(form.image.data.filename).split('.')[-1]
        file_name = '.'.join([str(news.id), extension])
        news.image = upload_s3(file_name, form.image.data,
                               app.config['S3_NEWS_IMAGE_DIR'])
        db.session.commit()
    return redirect(url_for('news_and_resources'))
def post(self):
    # Periodic maintenance endpoint (GAE, Python 2); the `type` request
    # parameter selects which job to run.
    type = self.request.get('type', '')
    if type == 'stats':
        pass
    elif type == 'cleanup':
        # Reference dates for pruning. NOTE(review): only last_quarter is
        # actually used below; last_year / last_month look vestigial.
        last_year = datetime.datetime.now() - datetime.timedelta(days=365)
        last_quarter = datetime.datetime.now() - datetime.timedelta(
            days=92)
        last_month = datetime.datetime.now() - datetime.timedelta(days=31)
        # Old news: delete up to 500 items older than one quarter.
        old_news = News.query(News.date < last_quarter).order(
            News.date).fetch(500, keys_only=True)
        #logging.info('Cleaning up old news %s' % News.query().order(News.date).count(100,keys_only=True))
        ndb.delete_multi(old_news)
    elif type == 'tag_cloud':
        # Build tag frequencies (one count per distinct channel URL) and
        # cache the sorted (tag, count) pairs for the tag cloud.
        channel_urls = []
        tags = {}
        extras = Extra.query(Extra.tag != None)
        for extra in extras:
            if extra.channelurl not in channel_urls:
                channel_urls.append(extra.channelurl)
                tag = extra.tag
                if tag in tags:
                    tags[tag] += 1
                else:
                    tags[tag] = 1
        tags_sorted = sorted(tags.iteritems(), key=operator.itemgetter(1),
                             reverse=True)
        memcache.set("tag_cloud", tags_sorted)
        logging.debug('Tags: %s' % (tags_sorted))
    elif type == 'fix':
        # One-off repair job: purge all posts/rates/extras and ChannelUrl
        # rows belonging to a hard-coded test channel.
        test_channel = '#kanava'
        channel = Channel.query(Channel.name == test_channel).get()
        channelurls = ChannelUrl.query(
            ChannelUrl.channel == channel.key).fetch(50)
        for channelurl in channelurls:
            url = channelurl.url.get()
            logging.debug('Channel: %s, channelurl: %s (id %s)' %
                          (test_channel, url, channelurl))
            posts = Post.query(Post.channelurl == channelurl.key)
            for post in posts:
                logging.debug(' * posted by %s' % (post.user))
                post.key.delete()
            rates = Rate.query(Rate.channelurl == channelurl.key)
            for rate in rates:
                logging.debug(' * rate %s' % (rate))
                rate.key.delete()
            extras = Extra.query(Extra.channelurl == channelurl.key)
            for extra in extras:
                logging.debug(' * extra %s, by %s' % (extra, extra.user))
                extra.key.delete()
            channelurl.key.delete()
def get_news():
    """Scrape Google results for each configured keyword, store unseen
    hits in the database, and return them as JSON sorted by keyword
    frequency (descending)."""
    results = []
    for keyword in keywords:
        # Random result-page offset (multiples of 10 up to 170).
        start = random.randrange(0, 180, 10)
        URL = "https://google.com/search?q=" + keyword + "&start=" + str(start)
        search = requests.get(URL, headers=HEADERS)
        if search.status_code != 200:
            continue
        response = BeautifulSoup(search.content, "html.parser")
        # Result entries share the 'rc' class; take at most three.
        for new in response.find_all('div', class_='rc', limit=3):
            anchors = new.find_all('a')
            if not anchors:
                continue
            # Target URL of the result.
            reference = anchors[0]['href']
            # Skip results already stored in the database.
            find_reference = News.query.filter_by(
                reference=reference).first()
            if find_reference:
                continue
            title = new.find('h3').text
            excerpt = new.find('span', class_='st').text
            news = {
                "keyword": keyword,
                "title": title,
                "reference": reference,
                "excerpt": excerpt
            }
            # Lowercased title+excerpt gives a fairer frequency count.
            full_text = news['excerpt'].lower() + news['title'].lower()
            FreqDistBody = FreqDist(full_text.split(" "))
            news.update({'frequency': FreqDistBody[keyword]})
            results.append(news)
            # Persist the scraped result.
            new = News.create(news['title'], news['excerpt'],
                              news['reference'], news['keyword'],
                              news['frequency'])
    return jsonify(sorted(results, key=lambda k: k['frequency'],
                          reverse=True))
def add_news(req):
    """Store or refresh a crawled news item and its keyword/refer rows."""
    existing = News.objects.filter(link=req.POST['link'])
    if existing.count() != 0:
        # record already exist
        if existing[0].time > int(req.POST['time']):
            # Stored copy is newer than the posted one; nothing to do.
            return HttpResponse(json.dumps({'status': 'OK'}, indent=4),
                                content_type='application/json')
        mnews = existing[0]
    else:
        mnews = News()
    mnews.title = req.POST['title']
    mnews.content = req.POST['content']
    mnews.link = req.POST['link']
    mnews.time = int(req.POST['time'])
    mnews.save()
    # Both payloads are parsed up front so a malformed one aborts before
    # any child rows are written.
    keywords = json.loads(req.POST['keywords'])
    refers = json.loads(req.POST['refers'])
    for word in keywords:
        kw = Keywords()
        kw.news = mnews
        kw.words = word
        kw.save()
    for refer in refers:
        rf = Refers()
        rf.news = mnews
        rf.refer = refer
        rf.save()
    return HttpResponse(json.dumps({'status': 'OK'}),
                        content_type='application/json')
def getNewsDetail(request):
    """Return one news document as JSON with media/author links expanded."""
    nid = request.GET['id']
    doc = News.objects(pk=nid).first().to_mongo()
    del doc['_types']
    del doc['_cls']
    oid = str(doc['_id'])
    doc['picture'] = endpoint + "news/getPicture?id=" + oid
    doc['voice'] = endpoint + "news/getVoice?id=" + oid
    author = News.objects(author=doc['author']).first().author
    good = News.objects(good=doc['good']).first().good
    doc['good'] = endpoint + "goods/getGoods?id=" + str(good.pk)
    doc['author'] = {"portrait": endpoint + "users/getPortrait?id=" + str(author.pk),
                     "name": author.username}
    doc['comments'] = endpoint + "news/getComments?id=" + oid
    # doc['_id'] = endpoint + "news/getNewsDetail?id=" + oid
    del doc['_id']
    doc['time'] = str(doc['time'])
    return HttpResponse(dumps(doc))
def getCommentVoice(request):
    """Stream the voice attachment of the comment matching the given vid."""
    vid = str(request.GET['vid'])
    parent = News.objects(comments__voice=ObjectId(vid)).first()
    voice = None
    for comment in parent.comments:
        if str(comment.to_mongo()['voice']) == vid:
            voice = comment.voice
    return HttpResponse(voice.read(), mimetype="audio/mpeg")
def fetch(link, dt): try: news = News.objects.get(url=link) news = None print ">>", "exists", link except DoesNotExist, e: print ">>", "fetch", link try: res = requests.get(link, timeout=3) print ">>", "get done", link res.encoding = res.apparent_encoding if not "text" in res.headers.get("content-type", "none"): return if res.apparent_encoding is not None: html = unicode(res.content, res.apparent_encoding) news = News(url=link, html=html, published_at=dt) news.save() news = None except UnicodeDecodeError, e: print ">>", "error", link, e
def post(self, request): try: api_news_ids = request.data top_news_id_list = [] for news_id in api_news_ids: row = News.objects(news_id=news_id).first() if not row: top_news_id_list.append(news_id) return Response({"data": top_news_id_list, "statusCode" : 200}) except Exception, e: log.debug(str(e) + " IN TopNewsIdApi ") return Response({"data": "failure", "statusCode" : 404})
def post(self, request): try: news_detail_list = request.data if news_detail_list: for news_details in news_detail_list: news = News( news_id=long(news_details["news_id"]), username=news_details["username"], title=news_details["title"], url=news_details["url"] if "url" in news_details else '', score=int(news_details["score"]) if "score" in news_details else 0, description=news_details["description"] if "description" in news_details else '', descendants=int(news_details["descendants"]) if "descendants" in news_details else 0, confidence=float(news_details["confidence"]) if "confidence" in news_details else 0, sentiment=news_details["sentiment"] if "sentiment" in news_details else '', ) news.save() return Response({"statusCode" : 200}) else: return Response({"statusCode" : 505, "message" : "wrong type of arguments : " + news_detail_list}) except Exception, e: log.debug(str(e) + " IN SaveDetailsApi ") return Response({"statusCode" : 404, "message" : "Exception occured while saving : " + str(e)})
def addNews(request):
    """Create a news post with picture/voice uploads linked to a good."""
    url = request.POST['url']
    news = News()
    news.author = request.user  # change to request.user
    news.picture = open(request.FILES['picture'].temporary_file_path(), "rb")
    news.voice = open(request.FILES['voice'].temporary_file_path(), "rb")
    # The shared good's id is carried in the query string of the posted URL.
    parsed = urlparse.urlparse(url)
    gid = urlparse.parse_qs(parsed.query, True)['id']
    news.good = Goods.objects(pk=gid[0]).first()
    news.save()
    return HttpResponse("success")
def post(self, request): try: top_news_ids = request.data top_news_list = [] for news_id in top_news_ids: a = News.objects(news_id=news_id).first() if a: news_details_json = {"news_id": a.news_id, "username": a.username, "title": a.title, "url": a.url, "score": a.score, "description": a.description, "descendants": a.descendants, "confidence": a.confidence, "sentiment": a.sentiment} top_news_list.append(news_details_json) return Response({"data": top_news_list, "statusCode" : 200}) except Exception, e: log.debug(str(e) + " IN DisplayNewsApi ") return Response({"statusCode" : 404})
def post(self, request): try: news_details = request.data news_details = json.dumps(news_details) news_details = ast.literal_eval(news_details) news_details = news_details.keys()[0] news_details = ast.literal_eval(news_details) news = News( news_id=long(news_details["news_id"]), username=news_details["username"], title=news_details["title"], url=news_details["url"] if "url" in news_details else '', score=int(news_details["score"]) if "score" in news_details else 0, description=news_details["description"] if "description" in news_details else '', descendants=int(news_details["descendants"]) if "descendants" in news_details else 0, confidence=float(news_details["confidence"]) if "confidence" in news_details else 0, sentiment=news_details["sentiment"] if "sentiment" in news_details else '', ) news.save() return Response({"status": "success"}) except Exception, e: log.debug(str(e) + " IN SaveDetailsApi ") return Response({"status": "failure"})
def getComments(request):
    """Return a news item's comments as JSON with author info expanded."""
    nid = request.GET['id']
    news = News.objects(pk=nid).as_pymongo()
    comments = list(news)[0]["comments"]
    for comment in comments:
        del comment["_types"]
        del comment["_cls"]
        uid = comment["author"]
        user = Account.objects(pk=uid.id).first()
        # Idiom fix: identity check instead of `!= None`.
        if comment["voice"] is not None:
            comment["voice"] = endpoint + "news/getCommentVoice?vid=" + str(comment["voice"])
        else:
            comment["voice"] = ""
        comment["username"] = user.username
        comment["portrait"] = endpoint + "users/getPortrait?id=" + str(user.pk)
        del comment["author"]
    return HttpResponse(dumps(comments))
def addComment(request):
    """Append a text and/or voice comment to the news item referenced by
    the posted comment_url. (Python 2.)"""
    parsed = urlparse.urlparse(request.POST['comment_url'])
    nid = urlparse.parse_qs(parsed.query, True)['id']
    comment = Comment()
    comment.content = ""
    if request.POST.has_key("content"):
        comment.content = request.POST['content']
    if request.FILES.has_key("voice"):
        # change to data from request
        comment.voice = open(request.FILES['voice'].temporary_file_path(), "rb")
    comment.author = request.user
    news = News.objects(pk=nid[0]).first()
    news.comments = news.comments + [comment]
    news.save()
    return HttpResponse("success")
def post(self, request): try: top_news_ids = request.data top_news_ids = json.dumps(top_news_ids) top_news_ids = ast.literal_eval(top_news_ids) top_news_ids = top_news_ids.keys()[0] top_news_ids = ast.literal_eval(top_news_ids) top_new_news_id_list = [] for news_id in top_news_ids: news_id = long(news_id) a = News.objects(news_id=news_id).first() # News.objects(news_id=news_id).delete() if not a: top_new_news_id_list.append(news_id) return Response({"new_news_id": top_new_news_id_list}) except Exception, e: log.debug(str(e) + " IN TopNewsIdApi ") return Response({"status": "failure"})