def index(request):
    main_article = Article.get_main()
    popular_articles = Article.get_most_popular()
    newest_articles = Article.get_n_newest()
    categories = Category.objects.all()
    return render(request, 'base_content.html', {
        'main_article': main_article,
        'popular_articles': popular_articles,
        'newest_articles': newest_articles,
        'categories': categories,
    })
def populate_db(db):
    role_admin, role_user = db_create(db)
    user1, user2 = [
        User(nickname=name,
             email="*****@*****.**" % name,
             pwdhash=name,
             roles=[role_user],
             activation_key="")
        for name in ["user1", "user2"]
    ]
    db.session.add(user1)
    db.session.add(user2)
    db.session.commit()

    # Give each user three feeds, and each feed three articles.
    for user in (user1, user2):
        for feed_name in ['feed1', 'feed2', 'feed3']:
            feed = Feed(link=feed_name,
                        user_id=user.id,
                        title="%r %r" % (user.nickname, feed_name))
            db.session.add(feed)
            db.session.commit()
            for article in ['article1', 'article2', 'article3']:
                entry = "%s %s %s" % (user.nickname, feed.title, article)
                article = Article(entry_id=entry,
                                  link=article,
                                  feed_id=feed.id,
                                  user_id=user.id,
                                  title=entry,
                                  content=article)
                db.session.add(article)
                db.session.commit()
    db.session.commit()
def __query_database__(self, company: Company, query_value: str, set_size: int) -> QuerySet:
    '''
    Populates the db with news from start_date until today.
    Returns the query set of the newly added articles that are financial.
    '''
    db_key = ''
    query_set = NewsApiClient(api_key=db_key).get_everything(
        q=query_value,
        from_param=self.start_date.strftime('%Y-%m-%d'),
        language='en',
        sort_by='relevancy',
        page_size=set_size,
        page=1)['articles']

    # The models expect an array but the API returns a list of dicts, hence we must convert it.
    query_list = np.array([[article['source']['name'], article['title'],
                            article['description'], article['content']]
                           for article in query_set])

    fin_outcomes = self.__is_fin__(query_list)
    if len(fin_outcomes) == 0:
        # None of what we got is financial: return what is already stored.
        return company.article_set.filter(date__range=[self.start_date, self.today], isFin=True)

    sentiment_outcomes = np.zeros(len(query_list))
    sentiment_outcomes[fin_outcomes] = self.__get_sentiment__(query_list[fin_outcomes])

    for sentiment, is_fin, article in zip(sentiment_outcomes, fin_outcomes, query_set):
        try:
            Article(company_id=company,
                    title=article['title'],
                    source=article['source']['name'],
                    date=article['publishedAt'].split('T', 1)[0],
                    subtitle=article['description'],
                    content=article['content'],
                    url=article['url'],
                    isFin=is_fin,
                    sentiment=int(sentiment)).save()
        except IntegrityError:
            # Skip articles that are already stored.
            continue
    return company.article_set.filter(date__range=[self.start_date, self.today], isFin=True)
def addArticle(request):
    # Read the fields from the POST data.
    title = request.POST['title']
    content = request.POST['content']
    type_id = request.POST['type_id']
    flag = request.POST['flag']
    usercount = request.POST['user_account']
    # Foreign key: look up the publishing user.
    account = User.objects.get(user_account=usercount)
    article = Article(title=title,
                      content=content,
                      type_id=type_id,
                      user_account=account,
                      flag=flag,
                      create_time=datetime.now())
    article.save()
    return HttpResponseRedirect("/article/articleCurd")
def article(request, article_id=0):
    form = ArticleForm()
    model = None
    categories = Category.objects.filter(is_deleted=False) \
        .order_by('level', 'sort', 'create_time') \
        .values('id', 'name', 'spell')

    if request.method == 'POST':
        form = ArticleForm(request.POST)
        if form.is_valid():
            category_ids = ','.join(
                str(i['id']) for i in form.get_categories().values('id'))
            model = Article(title=form.get_title(),
                            subject=form.get_subject(),
                            cover=form.get_cover(),
                            content=form.get_content(),
                            category_ids=category_ids,
                            topping=form.get_topping(),
                            tag_ids='',
                            hits=0,
                            score=0)
            if article_id > 0:
                model.id = article_id
            Article.objects.save_new(model, request.user)
            CacheArticles.delete(model.id)
    elif article_id > 0:
        model = Article.objects.first(id=article_id)
        if model:
            form = ArticleForm({
                'title': model.title,
                'subject': model.subject,
                'content': model.content,
                'cover': model.cover,
                'topping': model.topping
            })
            category_ids = model.category_ids.split(',')
            for category in categories:
                if str(category['id']) in category_ids:
                    category['checked'] = 'checked'

    # Only redirect after a successful save; model stays None if the form was invalid.
    if request.method == 'POST' and model is not None and model.id > 0:
        return HttpResponseRedirect('/reading/%d/' % model.id, {
            'article': form,
            'categories': categories
        })
    return render(request, 'web/article.html', {
        'article': form,
        'categories': categories
    })
def articleCreator(request, idk=None):
    if idk:
        article = get_object_or_404(Article, pk=idk)
    else:
        article = Article()

    if request.method == 'POST':
        article_form = ArticleForm(request.POST, instance=article, files=request.FILES)
        components_idks = request.POST.getlist('component_idk')
        position_index = request.POST.getlist('component_position')
        id_and_position = zip(components_idks, position_index)
        if article_form.is_valid():
            article = article_form.save()
            # Attach the submitted components to the article in the given order.
            for component_idk, position in id_and_position:
                component = get_object_or_404(Component, pk=component_idk)
                component.position = position
                component.save()
                article.components.add(component)
            article.author = request.user
            article.save()
            return redirect(reverse('article_creator', kwargs={'idk': article.id}))
        else:
            article_form = ArticleForm(instance=article, files=request.FILES)
    else:
        if idk:
            article_form = ArticleForm(request.POST or None, request.FILES or None, instance=article)
        else:
            article_form = ArticleForm()

    return render(request, 'article/article_creator.html', {
        'article_form': article_form,
        'article': article,
        'idk': article.id,
        'image_upload_form': ImageForm(),
        'videoForm': VideoForm(),
        'paragraphForm': ParagraphForm(),
        'quoteForm': QuoteForm(),
    })
def saveArticle(request):
    # Read the fields from the POST data.
    article_id = request.POST['article_id']
    title = request.POST['title']
    content = request.POST['content']
    type_id = request.POST['type_id']
    flag = request.POST['flag']
    # Foreign key: the publishing user.
    account = User.objects.get(user_account='S02')
    article = Article(article_id=article_id,
                      title=title,
                      content=content,
                      type_id=type_id,
                      user_account=account,
                      flag=flag,
                      create_time=datetime.now())
    if len(article_id) > 0:
        print("article_id is not null")
        article.save()
    return HttpResponseRedirect("/article/articleCurd")
def import_json(nickname, json_content):
    """
    Import an account from a JSON file.
    """
    user = User.query.filter(User.nickname == nickname).first()
    json_account = json.loads(json_content.decode("utf-8"))
    nb_feeds, nb_articles = 0, 0

    # Create feeds:
    for feed in json_account:
        if Feed.query.filter(Feed.user_id == user.id,
                             Feed.link == feed["link"]).first() is not None:
            continue
        new_feed = Feed(
            title=feed["title"],
            description="",
            link=feed["link"],
            site_link=feed["site_link"],
            created_date=datetime.datetime.fromtimestamp(int(feed["created_date"])),
            enabled=feed["enabled"],
        )
        user.feeds.append(new_feed)
        nb_feeds += 1
    db.session.commit()

    # Create articles:
    for feed in json_account:
        user_feed = Feed.query.filter(Feed.user_id == user.id,
                                      Feed.link == feed["link"]).first()
        if user_feed is not None:
            for article in feed["articles"]:
                if Article.query.filter(
                        Article.user_id == user.id,
                        Article.feed_id == user_feed.id,
                        Article.link == article["link"]).first() is None:
                    new_article = Article(
                        entry_id=article["link"],
                        link=article["link"],
                        title=article["title"],
                        content=article["content"],
                        readed=article["readed"],
                        like=article["like"],
                        retrieved_date=datetime.datetime.fromtimestamp(
                            int(article["retrieved_date"])),
                        date=datetime.datetime.fromtimestamp(int(article["date"])),
                        user_id=user.id,
                        feed_id=user_feed.id,
                    )
                    user_feed.articles.append(new_article)
                    nb_articles += 1
    db.session.commit()
    return nb_feeds, nb_articles
def add_article():
    if request.method == 'GET':
        return render_template('manage/add_article.html')
    elif request.method == 'POST':
        article = Article(
            content=request.form.get('fancy-editormd-html-code'),
            mark_code=request.form.get('fancy-editormd-markdown-code'),
            title=request.form.get('title'),
            type=request.form.get('type'))
        db.session.add(article)
        db.session.commit()
        return redirect(url_for('index'))
def add_article(request):
    if request.method == 'POST':
        title = request.POST.get('title', '')
        text = request.POST.get('text', '')
        if title == '':
            return HttpResponse(json.dumps({'status': 0, 'error': 'title required'}))
        elif text == '':
            return HttpResponse(json.dumps({'status': 0, 'error': 'paragraphs not found'}))
        else:
            # Split the submitted text into paragraphs on blank lines.
            paragraphs = text.split("\n\n")
            print(paragraphs)
            article = Article.create(title, paragraphs)
            return HttpResponse(json.dumps({'status': 1, 'error': ''}))
    else:
        context = {}
        return render(request, 'article_form.html', context)
def articleDetails(request, idk=None):
    article = get_object_or_404(Article, pk=idk)
    articles = Article.objects.all()[0:5]
    categories = Category.objects.all()
    popular_articles = Article.get_most_popular()
    # An empty queryset is fine here; ordering by position keeps the components in display order.
    components = article.components.all().order_by('position')
    return render(request, 'article/article.html', {
        'article': article,
        'components': components,
        'articles': articles,
        'popular_articles': popular_articles,
        'categories': categories
    })
def handle(self, *args, **kwargs):
    time = timezone.now().strftime('%X')
    self.stdout.write("It's now %s" % time)
    logger.info("the job scheduler started")
    logger.info('starting the scheduled RSS update task')

    # Pick which feeds to refresh this run: the higher a site's star rating,
    # the more hours of the 4-hour cycle it is crawled in.
    now = datetime.now()
    if now.hour % 4 == 0:
        feeds = Site.objects.filter(status='active', creator='user').order_by('-star')
    elif now.hour % 4 == 1:
        feeds = Site.objects.filter(status='active', creator='user', star__gte=50).order_by('-star')
    elif now.hour % 4 == 2:
        feeds = Site.objects.filter(status='active', creator='user', star__gte=20).order_by('-star')
    elif now.hour % 4 == 3:
        feeds = Site.objects.filter(status='active', creator='user', star__gte=9).order_by('-star')

    for site in feeds:
        logger.info(f"RSS feed `{site.rss}")
        feed_obj = feedparser.parse(site.rss)
        for entry in feed_obj.entries[:10]:
            try:
                title = entry.title
                link = entry.link
                # logger.info(f"RSS feed `{title}")
            except AttributeError:
                logger.warning(f'failed to read required attributes from: `{site.rss}')
                continue
            # if is_crawled_url(link):
            #     continue
            try:
                author = entry['author'][:11]
                logger.info(f"RSS entry author `{author}")
            except Exception:
                author = None
                # logger.info(f"RSS entry author `{author}")
            try:
                value = entry.content[0].value
            except Exception:
                value = entry.get('description') or entry.link
            try:
                article = Article(site=site,
                                  title=title,
                                  author=author,
                                  src_url=link,
                                  uindex=current_ts(),
                                  content=value)
                article.save()
                # mark_crawled_url(link)
            except django.db.utils.IntegrityError:
                logger.info(f'duplicate insert skipped: `{title}`{link}')
            except Exception:
                logger.warning(f'failed to insert article: `{title}`{link}')
    logger.info('scheduled RSS update task finished')