def save_clone_page(request):
    # Rebuild an Article from the payload stashed in the cache by the clone step.
    save_article = Article()
    save_data = Cache().get(request.POST.get('save_key'))
    save_article.group = ArticleGroup.objects.get(groupid=0)
    save_article.title = save_data['title']
    save_article.comment = '转发文章'  # "reposted article"
    save_article.summary = '本文转自{0},侵权删除'.format(save_data['url'])  # "reposted from {0}; removed on request"
    save_article.context = save_data['article']
    save_article.save()
    # Attach scraped images, creating Image records only when they do not exist yet.
    if 'pic_source' in save_data:
        for item in save_data['pic_source']:
            try:
                save_img = Image.objects.get(name=str(item))
            except Image.DoesNotExist:
                save_img = Image()
                save_img.name = item
                save_img.path.save(
                    save_img.name,
                    File(open(os.path.join(PIC_TMP_PATH, item), 'rb')))
                save_img.save()
            save_article.image.add(save_img)
    # Articles containing code blocks need the prettify assets for highlighting.
    if 'code_source' in save_data:
        need_js = Script.objects.get(name='prettify.js')
        need_css = Script.objects.get(name='prettify.css')
        save_article.script.add(need_js, need_css)
    Cache().remove('tops')
    return save_article.articleid
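# A minimal sketch of the cached payload save_clone_page expects. The key names
# mirror the lookups above; the concrete values (URL, filenames, HTML) are
# illustrative assumptions only.
save_data_example = {
    'title': 'Some cloned title',            # becomes Article.title
    'url': 'https://example.com/post/42',    # interpolated into the summary
    'article': '<p>cloned HTML body</p>',    # becomes Article.context
    'pic_source': ['cover.png'],             # filenames expected under PIC_TMP_PATH
    'code_source': ['snippet-1'],            # presence triggers the prettify.js/css attach
}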
def scrap_article(url: str, source: Source):
    """
    Extract an article from the web.

    :param url: URL of the article
    :param source: Source of the article
    :return: the saved Article, or None on failure
    """
    logger.info('Scraping %s', url)
    response = requests.get(url)
    if response.status_code == 200:
        soup = bs4.BeautifulSoup(response.text, 'html.parser')
        article = Article()
        # url.encode() already yields bytes, so no extra bytes() wrapper is needed.
        article.uuid = hashlib.md5(url.encode()).hexdigest()
        article.url = url
        article.source = source
        try:
            # The CSS class names for each fragment come from the Source record.
            article.title = soup.select('.' + source.article_title_class)[0].text
            article.body = soup.select('.' + source.article_body_class)[0].text
            article.image = soup.select('.' + source.article_image_class)[0]['src']
            article.save()
            return article
        except IndexError:
            logger.error('Error while scraping article %s', url)
            return None
        except ValidationError:
            logger.error('Error while validating article %s', url)
            return None
    return None
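# Hedged usage sketch for scrap_article. The Source field names
# (article_title_class, article_body_class, article_image_class) are taken from
# the selectors above; the 'name' field, the concrete class values, and the URL
# are illustrative assumptions, not part of the original code.
source = Source(
    name='Example News',
    article_title_class='post-title',
    article_body_class='post-body',
    article_image_class='post-image',
)
article = scrap_article('https://news.example.com/some-story', source)
if article is None:
    logger.warning('Could not scrape the article; skipping')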
def publish_article():
    if request.method == 'POST':
        title = request.form.get('title')
        type_id = request.form.get('type')
        content = request.form.get('content')
        article = Article()
        article.title = title
        article.type_id = type_id
        article.content = content
        article.user_id = g.user.id
        db.session.add(article)
        db.session.commit()
        return redirect(url_for('user.index'))
    # Non-POST requests fall through here with no response body.
def publish_article():
    if request.method == 'POST':
        title = request.form.get('title')
        type_id = request.form.get('type')
        content = request.form.get('content')
        article = Article()
        article.title = title
        article.type_id = type_id
        article.content = content.encode('utf-8')  # the content column stores UTF-8 bytes here
        article.userId = g.user.id
        db.session.add(article)
        db.session.commit()
        return redirect(url_for('user.index'))
    return render_template('user/user_center.html')
def publish_article():
    if request.method == 'POST':
        title = request.form.get('title')
        content = request.form.get('content')
        uid = request.form.get('uid')
        # Create the article
        article = Article()
        article.title = title
        article.content = content
        article.user_id = uid
        db.session.add(article)
        db.session.commit()
        return '添加成功!'  # "Added successfully!"
    else:
        users = User.query.filter(User.isdelete == False).all()
        return render_template('article/add_article.html', users=users)
def publish_article():
    if request.method == 'POST':
        title = request.form.get('title')
        type_id = request.form.get('type')
        content = request.form.get('content')
        # Create the article
        article = Article()
        article.title = title
        article.content = content
        article.type_id = type_id
        article.user_id = g.user.id
        # Without add(), commit() persists nothing.
        db.session.add(article)
        db.session.commit()
        return redirect(url_for('user.index'))
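# Hedged sketch of exercising one of the publish_article views with Flask's test
# client. The 'app' object, the '/article/publish' route, and the logged-in
# session handling are assumptions, since the route decorators are not shown above.
with app.test_client() as client:
    resp = client.post('/article/publish', data={
        'title': 'Hello',
        'type': '1',
        'content': 'First post body',
    })
    assert resp.status_code in (200, 302)  # most variants redirect on success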