def new_post():
    """Render the new-post form and, on valid submission, save a new Post.

    Always returns the rendered 'new.html' template; the Post is created
    as a side effect of a valid POST.
    """
    form = PostForm()
    if form.validate_on_submit():
        # Renamed local (was `new_post`) so it no longer shadows this view
        # function's own name; also dropped a leftover debug print().
        post = Post()
        post.title = form.title.data
        post.text = form.text.data
        post.publish_date = datetime.datetime.now()
        # Re-fetch the full User document so the reference field stores a
        # real User object rather than the login-manager proxy.
        user = User.objects(id=current_user.id).first()
        post.user = user
        post.save()
    return render_template('new.html', form=form)
def process_submission(self, post):
    """Persist a reddit "[review]" post and the reviews parsed from it.

    Posts already stored (matched by id) are skipped.  Returns False when
    the Django app registry is not ready or the post is a duplicate, [] when
    no Markdown review table is found in the selftext, and None (implicitly)
    after the reviews have been saved.
    """
    print('Post Received')  # fixed typo: message previously read 'Recieved'
    try:
        from webapp.models import Post, Review
    except django.core.exceptions.AppRegistryNotReady:
        # Models cannot be imported before Django finishes app loading.
        return False
    if "[review]" in post.title.lower():
        try:
            p_exists = Post.objects.get(id=post.id)
            print('Post in Database')
            return False
        except Post.DoesNotExist:
            pass
        p = Post(user=post.author.name,
                 date=post.created,
                 link=post.url,
                 id=post.id,
                 title=post.title)
        p.save()
        print(post.title + ' is not in the database, adding')
        # The review table spans from the first to the last selftext line
        # containing a '|' (Markdown table rows).
        split_selftext = post.selftext.split('\n')
        review_start_index, review_end_index = -1, -1
        for index, line in enumerate(split_selftext):
            if "|" in line:
                if review_start_index == -1:
                    review_start_index = index
                else:
                    review_end_index = index
        if review_start_index == -1 or review_end_index == -1:
            # No table found (or only one '|' line): nothing to parse.
            return []
        r_list = parse.parse_review(
            split_selftext[review_start_index:review_end_index + 1], p)
        for r in r_list:
            r.save()
def scrape_post(post):
    """Store a "[review]" reddit post plus every review row found in its
    Markdown table.  Posts already present in the database are skipped.
    """
    if "[review]" not in post.title.lower():
        return
    print("Run Scraper")
    try:
        Post.objects.get(id=post.id)
        # Already scraped — nothing to do.
        return
    except Post.DoesNotExist:
        pass
    p = Post(user=post.author.name,
             date=parse.parse_date(post.created),
             link=post.url,
             id=post.id,
             title=post.title)
    p.save()
    pprint(('Post: ' + str(p)))
    # The review table spans from the first to the last selftext line
    # containing a '|' (Markdown table rows).
    lines = post.selftext.split('\n')
    start, end = -1, -1
    for idx, line in enumerate(lines):
        if "|" in line:
            if start == -1:
                start = idx
            else:
                end = idx
    if start == -1 or end == -1:
        return []
    for r in parse.parse_review(lines[start:end + 1], p):
        p.review_set.create(user=r.user,
                            date=post.created,
                            itemName=r.itemName,
                            itemLink=r.itemLink,
                            itemReview=r.itemReview,
                            itemSize=r.itemSize,
                            itemPic=r.itemPic)
def add(request):  # broker
    """Broker endpoint: record a clickstream event or generate recommendations.

    Expects a JSON POST body.  When data["to"] == 1 the click is saved to the
    ClickStream table (and, if it originated from a recommendation link, also
    logged in RecomendacaoAcessada).  Otherwise the active adapter whose rid
    matches data["iden"] is invoked to produce a list of recommended links,
    which are stored via RecomendacaoGerada and returned as JSON.
    Responds 204 when there is nothing to return.
    """
    if request.method != 'POST':
        return HttpResponse(status=204)

    data = json.loads(request.body.decode('UTF-8'))
    active_adapters = Adapters.objects.filter(ativo=1).values("rid")

    if data["to"] == 1:
        # --- store the click ------------------------------------------------
        is_recommendation = False
        adapter = None
        clicked_ids = data["id"].split(" ")
        for row in active_adapters:
            if row["rid"] in clicked_ids:
                # The click came from a recommendation link.
                is_recommendation = True
                adapter = row["rid"]
                break
        click = Post(data["ip"], data["idUser"], data["idClick"],
                     data["classe"], data["texto"], data["current"],
                     data["href"], data["timestamp"], data["dateTimestamp"],
                     data["dateR"])
        click.save()  # persist the click in the ClickStream table
        if is_recommendation:
            # Record that a recommended link was actually visited.
            RecomendacaoAcessada(adapter, data["idClick"],
                                 date=data["dateR"]).save()
        return HttpResponse(status=204)

    # --- generate recommendations -------------------------------------------
    dados = Post.objects.get(idClick=data["idClick"])
    for row in active_adapters:
        if row["rid"] != data["iden"]:
            continue
        # Load the recommender function <rid> from webapp.algorithms.<rid>.
        # SECURITY: this replaces the previous eval() of a DB-derived string
        # ("<rid>.<rid>"), which was an arbitrary-code-execution risk; that
        # eval could only NameError into this same import fallback anyway.
        # The old bare `except:` + `finally:` combination would also have
        # *called* the unresolved string if the import itself failed — now an
        # ImportError/AttributeError propagates instead of a confusing crash.
        module = __import__("webapp.algorithms." + row["rid"],
                            fromlist=[row["rid"]])
        recomendador = getattr(module, row["rid"])
        # The recommender's output is expected to be a list of links.
        function_output = recomendador(request, dados)
        for each_link in function_output:
            # Store each suggested recommendation.
            RecomendacaoGerada(row["rid"], data["idClick"],
                               each_link, data["dateR"]).save()
        return JsonResponse({row["rid"]: function_output})
    return HttpResponse(status=204)