Example #1
def diary(request):
    if request.method == "POST":
        diary_text = request.POST.get("diary")
        user_id = request.POST.get("name")

        # Model-based prediction is logged for debugging only; ignore failures.
        try:
            print('predict----->', predict(diary_text))
        except Exception:
            pass

        # Map the TextBlob polarity score onto -1 / 0 / +1.
        polarity = TextBlob(diary_text).sentiment.polarity
        if polarity < 0:
            sentimentp = -1
        elif polarity > 0:
            sentimentp = 1
        else:
            sentimentp = 0

        try:
            user = UserModel.objects.get(id=user_id)
            DiaryModel.objects.create(
                description=diary_text, polarity=sentimentp, user=user)
            return redirect("home")
        except Exception:
            return HttpResponse("failed")

    return render(request, 'diary.html')
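A Django view like this is normally exposed through the project's URLconf. A minimal sketch follows; the path and the view behind the "home" URL name are assumptions chosen for illustration (the name itself is required by the redirect above):

from django.urls import path
from . import views

urlpatterns = [
    path('diary/', views.diary, name='diary'),  # exposes the view above; path assumed
    path('', views.home, name='home'),          # target of redirect("home"); view name assumed
]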
Example #2
    def POST(self, name):
        # Log the raw request body, run the sentiment model on the submitted
        # text, and render the prediction into the index template.
        logging.debug(web.data())

        text = web.input(text="").text
        prediction_result = sentiment.predict(text)

        render = web.template.render('templates/')
        return render.index(prediction_result)
Example #3
    def GET(self, name):
        logging.debug(web.data())
        sentence = web.input(sentence="").sentence
        logging.debug('sentence: ' + sentence)
        start_time = current_milli_time()

        result = sentiment.predict(sentence)

        res = {
            'status': 'ok',
            'took': current_milli_time() - start_time,
            'result': str(result)
        }
        web.header('Content-Type', 'application/json')
        return json.dumps(res)
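Handlers like the GET and POST methods above are web.py class methods; they only become reachable once a URL pattern is mapped to their class. A minimal sketch, with the path and class name chosen for illustration:

import web

urls = (
    '/predict/(.*)', 'Predict',  # captured group is passed to GET/POST as `name`; class name assumed
)

if __name__ == '__main__':
    app = web.application(urls, globals())
    app.run()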
Example #4
        def callback(ch, method, properties, body):
            # Score every tweet in the batch; predict() is truthy for
            # positive sentiment.
            tweets = json.loads(body.decode())
            for t in tweets['tweets']:
                pred = predict(_deemojify(t['tweet']))
                t['sentiment'] = 1 if pred else 0

            # Fraction of positive tweets in the batch (0 for an empty batch).
            if len(tweets['tweets']) > 0:
                sent_perc = (sum(t["sentiment"] for t in tweets["tweets"])
                             / len(tweets["tweets"]))
            else:
                sent_perc = 0
            logger.info(
                f'{tweets["id_hackadeira"][:8]} :: {100 * sent_perc:.2f}%'
            )  # noqa

            # Acknowledge the message and forward the annotated batch downstream.
            ch.basic_ack(delivery_tag=method.delivery_tag)
            _send_to_queue(tweets)
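The callback above matches pika's consumer signature (channel, method, properties, body) and acks manually, so it is presumably attached to a queue roughly as follows; the connection parameters and queue name here are assumptions, not part of the original example:

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))  # host assumed
channel = connection.channel()
channel.basic_consume(queue='tweets', on_message_callback=callback)  # queue name assumed
channel.start_consuming()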
Example #5
def ask():
    # Kernel is ready for use; dispatch on the incoming chat message.
    message = str(request.form['messageText'])
    if message == "quit":
        aimodule.record()
    elif message == "save":
        aimodule.saveBrain("bot_brain.brn")
    elif message == "#elp":
        aimodule.incident()
    else:
        bot_response = aimodule.respond(message)
        sen_response = sentiment.predict(message)
        resp = bot_response + "---" + sen_response
        logger.debug(sen_response)
        return jsonify({'status': 'OK', 'answer': resp})
    # The commands above produce no bot reply; return a plain acknowledgement.
    return jsonify({'status': 'OK', 'answer': ''})
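Since ask() reads request.form and returns jsonify(), it is presumably registered as a POST route on a Flask app; a minimal sketch, with the URL rule chosen for illustration:

from flask import Flask

app = Flask(__name__)
app.add_url_rule('/ask', view_func=ask, methods=['POST'])  # URL rule assumed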
Example #6
    def GET(self, name):
        logging.debug(web.data())
        search = web.input(search="").search
        logging.debug('search: ' + search)
        start_time = current_milli_time()

        # Score each tweet returned by the Twitter search for the query.
        result = {}
        twitter_search = twitterClient.search(search)
        for idx, twit in enumerate(twitter_search):
            result[str(idx)] = {'text': twit, 'rate': sentiment.predict(twit)}

        res = {
            'status': 'ok',
            'took': current_milli_time() - start_time,
            'result': result
        }
        web.header('Content-Type', 'application/json')
        return json.dumps(res,
                          sort_keys=True,
                          indent=4,
                          separators=(',', ': '))
Example #7
def calculate_would_buy():
    friendships, reviews = import_data()
    clusters, scores, purchased = import_results()

    with open('model.pkl', 'rb') as f:
        model = pickle.load(f)
        vocab_index = model['vocabulary_index']
        term_probability_matrix = model['term_probability_matrix']
        class_prob = model['class_probability']

    with open('community_detection/communities_test.p', 'rb') as f:
        communities = pickle.load(f)

    logger.debug('Loaded data')

    # Replace each review text with its predicted label.
    for user, review in reviews.items():
        data = preprocessing(review)
        reviews[user] = predict(data, vocab_index, term_probability_matrix,
                                class_prob)

    logger.debug('Predicted reviews')

    would_purchase = calculate_answer(communities, friendships, reviews)
    logger.debug('Calculated would purchase')

    scores = convert_to_balanced(scores)

    print_cluster_yes_percentage(communities, would_purchase)
    print_cluster_accuracy(communities, clusters)
    print_review_accuracy(reviews, scores)
    print_review_precision(reviews, scores, 1)
    print_review_recall(reviews, scores, 1)
    print_review_precision(reviews, scores, 0)
    print_review_recall(reviews, scores, 0)
    print_purchase_accuracy(would_purchase, purchased)

    with open('would_purchase.pkl', 'wb') as f:
        pickle.dump(would_purchase, f)
Example #8
    def test(self):
        s = "Dear Alex,\n\nMahalo. The movie is awesome. I didn't ruin it. \n\nEnjoy,\n\nD to the J https://t.co/rDyir8hgzi"
        prediction = sentiment.predict(s)
        print(prediction)
        self.assertGreater(prediction['pos'], prediction['neg'])