def _post_comment(self, index_url, comment_str, author_ip, user):
    """Create and save a Comment for *index_url* on its CommentBoard.

    Authenticated users are linked via get_author(); anonymous
    visitors get a comment with no author reference.
    """
    # The board title is just the host part of the commented page URL.
    title = urlparse(index_url).netloc
    # get_or_create() already persists a freshly created row, so the
    # old `comment_board.save() if created else None` was redundant.
    comment_board, _created = CommentBoard.objects.get_or_create(
        url=index_url, title=title)
    # One timestamp for both fields keeps added == modified on creation.
    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    if user.is_authenticated():
        comment = Comment(
            time_added=now,
            time_modified=now,
            comment_str=comment_str,
            comment_board=comment_board,
            author_ip=author_ip,
            title=title,
            author=get_author(user))
    else:
        # Anonymous visitor: no author is stored on the comment.
        comment = Comment(
            time_added=now,
            time_modified=now,
            comment_str=comment_str,
            comment_board=comment_board,
            title=title,
            author_ip=author_ip)
    # Tag the comment with the top keywords extracted from its text
    # (topK=3, despite what the old "top 5" comment claimed).
    comment.tags = jieba.analyse.extract_tags(comment_str, topK=3)
    # Lazy %-args replace the py2-only repr().decode("unicode-escape").
    logger.info('tags: %r', comment.tags)
    comment.save()
def _post_comment(url_b64, comment_str, author_ip, user):
    """Decode *url_b64*, create a Comment for that page, and tag it.

    Returns None (after logging) when the decoded URL cannot be parsed.
    """
    # Page URLs arrive base64-encoded with the '+-' alternate alphabet.
    index_url = base64.b64decode(url_b64.encode('ascii', 'ignore'), '+-')
    try:
        title = urlparse(index_url).netloc
    except Exception as e:
        # Log instead of print so failures show up in the app log.
        logger.warning('bad index_url %r: %s', index_url, e)
        return None
    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    comment = Comment(
        time_added=now,
        time_modified=now,
        comment_str=comment_str,
        url_b64=url_b64,
        author_ip=author_ip,
        title=title,
    )
    comment.save()  # must save first so comment.id exists for tag links
    # Cross-link a Tag record for each keyword (topK=3) extracted from
    # the comment text; the linking logic is shared by both branches.
    for stag in jieba.analyse.extract_tags(comment_str, topK=3):
        tags = Tag.objects.filter(name=stag)
        if len(tags) > 0:
            tag = tags[0]
        else:
            tag = Tag.objects.create(name=stag, time_added=now)
        tag.comments.append(comment.id)
        tag.save()
        comment.tags.append(tag)
    comment.save()
def _post_comment(url_b64, comment_str, author_ip, user): index_url = base64.b64decode(url_b64.encode('ascii', 'ignore'), '+-') try: title = urlparse(index_url).netloc except Exception as e: print index_url, e return None comment = Comment( time_added=datetime.datetime.utcnow().replace(tzinfo=utc), time_modified=datetime.datetime.utcnow().replace(tzinfo=utc), comment_str=comment_str, url_b64=url_b64, author_ip=author_ip, title=title, ) comment.save() #need to save here to create the ID # Generate top 5 tags for comment. stags = jieba.analyse.extract_tags(comment_str, topK=3) for stag in stags: tags = Tag.objects.filter(name=stag) if len(tags) > 0: comment.tags.append(tags[0]) tags[0].comments.append(comment.id) tags[0].save() else: tag = Tag.objects.create( name=stag, time_added=datetime.datetime.utcnow().replace(tzinfo=utc), ) tag.comments.append(comment.id) tag.save() comment.tags.append(tag) #logger.info('tags: ' + repr(comment.tags).decode("unicode-escape")) comment.save()
def _post_comment(index_url, comment_str, author_ip, user):
    """Create and persist a Comment for *index_url*.

    Side effects: credits the author's stats when logged in, spawns a
    screenshot worker process for the page, and cross-links Tag
    records for the extracted keywords. Returns None on a bad URL.
    """
    try:
        title = urlparse(index_url).netloc
    except Exception as e:
        # Log instead of print so the failure is captured in the app log.
        logger.warning('bad index_url %r: %s', index_url, e)
        return None
    logger.info(u'评论:' + comment_str
                + u'IP:' + author_ip
                + u' 用户:' + str(user)
                + u' TITLE:' + title
                + u' INDEX:' + index_url
                + u' UTC:' + str(datetime.datetime.utcnow().replace(tzinfo=utc))
                + u' Time:' + str(datetime.datetime.now()))
    # get_or_create() already saves a freshly created row; the old
    # `comment_board.save() if created else None` was redundant.
    comment_board, _created = CommentBoard.objects.get_or_create(
        url=index_url, title=title)
    now = datetime.datetime.utcnow().replace(tzinfo=utc)
    if user and user.is_authenticated():
        author = get_author(user)
        comment = Comment(
            time_added=now,
            time_modified=now,
            comment_str=comment_str,
            comment_board=comment_board,
            author_ip=author_ip,
            title=title,
            author=author)
        # Reward the commenter: one more comment, five points.
        author.comments_sum += 1
        author.points += 5
        author.save()
    else:
        # Anonymous visitor: comment carries no author reference.
        comment = Comment(
            time_added=now,
            time_modified=now,
            comment_str=comment_str,
            comment_board=comment_board,
            title=title,
            author_ip=author_ip)
    # Screenshot the page in a worker process; the b64-encoded URL is
    # both the screenshot key and the comment's page key.
    url_b64 = base64.b64encode(index_url, '+-')
    Process(target=save_pageshot, args=(index_url, url_b64)).start()
    comment.url_b64 = url_b64
    comment.save()  # must save first so comment.id exists for tag links
    # Link a Tag record for each keyword (topK=3) from the comment text.
    for stag in jieba.analyse.extract_tags(comment_str, topK=3):
        tags = Tag.objects.filter(name=stag)
        if len(tags) > 0:
            tag = tags[0]
        else:
            tag = Tag.objects.create(name=stag, time_added=now)
        tag.comments.append(comment.id)
        tag.save()
        comment.tags.append(tag)
    comment.save()
def get(self, request, *args, **kwargs): update_last_request(request) #print request index_page = request.GET.get('page', 1) index_url = kwargs.get('index_url', self.index_default_str) url_b64 = kwargs.get('url_b64', self.base64_default_str) flag = kwargs['flag'] #HOT comments_hot = Comment.objects.order_by('-time_added').filter( time_added__gte=datetime.datetime.now() - timedelta(days=30)) comments_hot = sorted(comments_hot, key=lambda o: len(o.up_users), reverse=True) #comments_hot = filter(lambda x:urlparse(index_url).netloc in x.comment_board.title, # Comment.objects.order_by('-time_added')) p_hot = Paginator(comments_hot, 3).page(1) #NEW #comments_new = Comment.objects.order_by('-time_added').all() if url_b64 == self.base64_default_str: comments_new = Comment.objects.all()[0:10] else: comments_new = Comment.objects.filter(url_b64=url_b64) if len(comments_new) == 0: try: title = urlparse(index_url).netloc except Exception as e: print index_url, e return None comment_board, created = CommentBoard.objects.get_or_create( url=index_url, title=title) comment_board.save() if created else None comment = Comment( time_added=datetime.datetime.utcnow().replace(tzinfo=utc), time_modified=datetime.datetime.utcnow().replace(tzinfo=utc), comment_str="快来抢沙发啊~~", comment_board=comment_board, title=title, author_ip='藤蔓评论') comments_new._result_cache.append(comment) p_new = Paginator(comments_new, 3).page(1) #RELEVANT index_url = base64.b64decode(url_b64.encode('ascii', 'ignore'), '+-') url_objects = Url.objects.filter(url=index_url) if len(url_objects) == 0: print "No tag" # return HttpResponse('No tag: '+index_url, mimetype='plain/text') p_tag = p_hot else: tags = filter(lambda x: url_objects[0].id in x.urls, Tags.objects.order_by('-time_added')) if len(tags) == 0: print "No tag" p_tag = p_hot else: tags = sorted(tags, key=lambda x: len(x.comments), reverse=True)[0:10] count = len(tags) if count > 3: tags = sample(tags, 3) else: tags = sample(tags, count) for tag in tags: 
comments = filter(lambda x: x.id in tag.comments, Comment.objects.all()) #print "comments:", comments tag.comments_list = sorted( comments, key=lambda x: len(x.up_users) - len(x.down_users), reverse=True)[0:3] index_page = request.GET.get('page', 1) print "index_page:", index_page logger.info(str(len(tags)) + ': ' + str(tags)) p_tag = Paginator(tags, 3).page(1) template_name = kwargs.get('template', self.template_meta) form = CaptchaTestForm() user = request.user if user.is_authenticated(): author = get_author(user) is_not_human = author.is_not_human else: # CAPTCHA-FIXME: forbid annoymous user to comment. is_not_human = False if flag == 'raw': template_name = 'plugin/comment_list_raw.html' return render( request, template_name, { 'p_comment_hot': p_hot, 'p_comment_new': p_new, 'p_comment_tag': p_tag, 'index_url': index_url, 'url_b64': url_b64, 'form': form, 'is_not_human': is_not_human, })
def _post_comment(index_url, comment_str, author_ip, user): try: title = urlparse(index_url).netloc except Exception as e: print index_url, e return None # print comment_str, user logger.info(u'评论:' + comment_str + u'IP:' + author_ip + u' 用户:' + str(user) + u' TITLE:' + title + u' INDEX:' + index_url + u' UTC:' + str(datetime.datetime.utcnow().replace(tzinfo=utc)) + u' Time:' + str(datetime.datetime.now())) comment_board, created = CommentBoard.objects.get_or_create( url=index_url, title=title) comment_board.save() if created else None if user and user.is_authenticated(): author = get_author(user) comment = Comment( time_added=datetime.datetime.utcnow().replace(tzinfo=utc), time_modified=datetime.datetime.utcnow().replace(tzinfo=utc), comment_str=comment_str, comment_board=comment_board, author_ip=author_ip, title=title, author=author) author.comments_sum += 1 author.points += 5 author.save() else: ''' Annoymous User access the site. ''' comment = Comment( time_added=datetime.datetime.utcnow().replace(tzinfo=utc), time_modified=datetime.datetime.utcnow().replace(tzinfo=utc), comment_str=comment_str, comment_board=comment_board, title=title, author_ip=author_ip) # get the page's screenshot and save url_b64 to comment url_b64 = base64.b64encode(index_url, '+-') shot_process = Process(target=save_pageshot, args=(index_url, url_b64)) shot_process.start() comment.url_b64 = url_b64 comment.save() #need to save here to create the ID # Generate top 5 tags for comment. stags = jieba.analyse.extract_tags(comment_str, topK=3) for stag in stags: tags = Tag.objects.filter(name=stag) if len(tags) > 0: comment.tags.append(tags[0]) tags[0].comments.append(comment.id) tags[0].save() else: tag = Tag.objects.create( name=stag, time_added=datetime.datetime.utcnow().replace(tzinfo=utc), ) tag.comments.append(comment.id) tag.save() comment.tags.append(tag) #logger.info('tags: ' + repr(comment.tags).decode("unicode-escape")) comment.save()