Example #1
    def test_comments(self):
        """ Tests the insertion and deletion of comments """

        bat = queries.find_media_by_creator('Dario')
        queries.insert_comment(bat[0].media_id, 11, 7, 0.9, 0.4, -0.5, 0.6, 0.3)
        comments = queries.find_comments_for_media(bat[0].media_id)

        # can we find the comments?
        self.assertEqual(comments[0].item_id, bat[0].media_id,
                         msg='Retrieved incorrect id')
        self.assertEqual(comments[0].relevancy, 7,
                         msg='Retrieved incorrect relevancy')
        self.assertAlmostEqual(float(comments[0].pleasantness), 0.9,
                               msg='Retrieved incorrect pleasantness')
        self.assertAlmostEqual(float(comments[0].attention), 0.4,
                               msg='Retrieved incorrect attention')
        self.assertAlmostEqual(float(comments[0].sensitivity), -0.5,
                               msg='Retrieved incorrect sensitivity')
        self.assertAlmostEqual(float(comments[0].aptitude), 0.6,
                               msg='Retrieved incorrect aptitude')
        self.assertAlmostEqual(float(comments[0].polarity), 0.3,
                               msg='Retrieved incorrect polarity')

        # is the number of comments incremented?
        bat = queries.find_media_by_creator('Dario')

        self.assertEqual(bat[0].number_of_comments, 1,
                         msg='Retrieved incorrect number of comments')
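
Both this test and the next one exercise a queries module that is not shown in these examples. The sketch below is only an assumption inferred from how the tests call it (the names, the return shapes, and the insert_comment argument order item_id, date, relevancy, pleasantness, attention, sensitivity, aptitude, polarity); the real module presumably talks to a database rather than to in-memory lists.

# Hypothetical, in-memory stand-in for the queries module used by these tests.
from dataclasses import dataclass
from typing import List, Tuple


@dataclass
class Media:
    media_id: int
    asin: str
    creator: str
    number_of_comments: int = 0


@dataclass
class Comment:
    item_id: int
    date: int
    relevancy: int
    pleasantness: float
    attention: float
    sensitivity: float
    aptitude: float
    polarity: float


_media: List[Media] = []
_comments: List[Comment] = []
_emotions: List[Tuple[int, str]] = []


def find_media_by_creator(creator):
    return [m for m in _media if m.creator == creator]


def find_media_by_asin(asin):
    return next(m for m in _media if m.asin == asin)


def insert_comment(item_id, date, relevancy, pleasantness, attention,
                   sensitivity, aptitude, polarity):
    _comments.append(Comment(item_id, date, relevancy, pleasantness,
                             attention, sensitivity, aptitude, polarity))
    for m in _media:
        if m.media_id == item_id:
            m.number_of_comments += 1


def insert_media_emotion(media_id, emotion):
    _emotions.append((media_id, emotion))


def find_comments_for_media(media_id):
    return [c for c in _comments if c.item_id == media_id]


def find_emotions_for_media(media_id):
    return [e for e in _emotions if e[0] == media_id]


def clean_media(media_id):
    # drop every comment and emotion attached to the given media item
    _comments[:] = [c for c in _comments if c.item_id != media_id]
    _emotions[:] = [e for e in _emotions if e[0] != media_id]

In the real project the number_of_comments counter is most likely maintained by the database layer; it is incremented inline here only so that the assertion in test_comments still holds against this stand-in.
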
Example #2
    def test_clean(self):
        """ Tests cleaning the database of comments and emotions """

        bat = queries.find_media_by_asin('0440419395')
        queries.insert_comment(bat.media_id, 11, 7, 0.9, 0.4, -0.5, 0.6, 0.3)

        queries.clean_media(bat.media_id)

        comments = queries.find_comments_for_media(bat.media_id)
        emotions = queries.find_emotions_for_media(bat.media_id)

        self.assertEqual(len(comments), 0)
        self.assertEqual(len(emotions), 0)

import collections
import html

# queries, emotions, ConceptError, NoEmotionsFoundError and the tokenizing
# helpers are project-local and defined or imported elsewhere in this module


def calculateVectorsForAllComments(dictFromJSON, g):
    # per-emotion tallies across all of this product's comments
    compound_emotion_dict = collections.defaultdict(int)
    sentic_emotion_dict = collections.defaultdict(int)

    processed_comments = list()
    overall_rating = 0.0

    # the product model from the DB
    product = queries.find_media_by_asin(dictFromJSON["asin"])

    tokenized_docs = buildListOfTokenizedDocuments(dictFromJSON)
    # the description vector does not depend on the individual comment, so build it once
    vectorized_desc = calculateVector(tokenizeDocument(dictFromJSON["description"]), tokenized_docs)

    for comment in dictFromJSON["comments"]:
        vectorized_comment = calculateVector(tokenizeDocument(comment["text"]), tokenized_docs)
        comment["vector_space"] = vectorized_comment
        relevancy = getCosine(vectorized_comment, vectorized_desc)

        # skip comments whose text barely overlaps with the product description
        if relevancy < 0.15:
            continue

        comment["relevancy"] = relevancy

        # add emotional score
        try:
            comment_emotion = emotions(comment["text"], g)
        except ConceptError:
            print("Not enough concepts to do anything useful - skipping this product")
            continue

        comment["emotion_vector"] = comment_emotion.emotion_vector

        compound_emotions = comment_emotion.get_compound_emotion()
        sentic_values = comment_emotion.get_all_sentic_values()

        sentic_values = [value.name for value in sentic_values if value is not None]

        compound_emotions_list = []
        for compound_emotion, strength in compound_emotions:
            compound_emotions_list.append(
                {"compound_emotion": compound_emotion.name, "strength": strength.name}
            )
        comment["compound_emotions"] = compound_emotions_list

        comment["sentic_emotions"] = sentic_values

        # add all compound emotions to the compound_emotion_dict tally
        for compound in comment["compound_emotions"]:
            compound_emotion_dict[compound["compound_emotion"]] += 1

        for sentic in comment["sentic_emotions"]:
            sentic_emotion_dict[sentic] += 1

        overall_rating += float(comment["rating"])

        # add the comment to the database
        # insert_comment(item_id, date, relevancy, pleasantness, attention, sensitivity, aptitude, polarity)
        queries.insert_comment(product.media_id,
                                comment["unixtime"],
                                comment["relevancy"],
                                comment["emotion_vector"]["pleasantness"],
                                comment["emotion_vector"]["attention"],
                                comment["emotion_vector"]["sensitivity"],
                                comment["emotion_vector"]["aptitude"],
                                comment["emotion_vector"]["polarity"])

        comment["text"] = html.unescape(comment["text"])
        processed_comments.append(comment)

    popular_compound_emotions = []
    if len(compound_emotion_dict) == 0:
        raise NoEmotionsFoundError("No compound emotions found")

    # keep the (up to) three most frequent compound emotions
    for _ in range(min(3, len(compound_emotion_dict))):
        popular_emotion = max(compound_emotion_dict, key=compound_emotion_dict.get)
        popular_compound_emotions.append(popular_emotion)
        compound_emotion_dict.pop(popular_emotion)

    popular_sentic_emotions = []
    if len(sentic_emotion_dict) == 0:
        raise NoEmotionsFoundError("No sentic emotions found")

    # likewise keep the (up to) three most frequent sentic emotions
    for _ in range(min(3, len(sentic_emotion_dict))):
        popular_sentic = max(sentic_emotion_dict, key=sentic_emotion_dict.get)
        popular_sentic_emotions.append(popular_sentic)
        sentic_emotion_dict.pop(popular_sentic)
        # add the emotion to the database
        queries.insert_media_emotion(product.media_id, popular_sentic)

    dictFromJSON["popular_compound_emotions"] = popular_compound_emotions
    dictFromJSON["popular_sentic_emotions"] = popular_sentic_emotions

    if len(processed_comments) > 0:
        rating = overall_rating / len(processed_comments)
        rating = float("{0:.2f}".format(rating))
        dictFromJSON["overall_rating"] = rating

    dictFromJSON["comments"] = sort_list_of_dicts(processed_comments)

    # get rid of escaped html characters in description
    dictFromJSON["description"] = html.unescape(dictFromJSON["description"])

    return dictFromJSON
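
calculateVectorsForAllComments leans on several text-similarity helpers (tokenizeDocument, buildListOfTokenizedDocuments, calculateVector, getCosine) whose implementations are not part of this excerpt. The sketch below is only one plausible reading, assuming plain TF-IDF weighting and cosine similarity; the function names match the calls above, but the weighting scheme itself is an assumption, not taken from the original project.

# Hypothetical implementations of the helpers used above.
import collections
import math
import re


def tokenizeDocument(text):
    # lowercase word tokens; the real tokenizer may also stem or drop stopwords
    return re.findall(r"[a-z0-9']+", text.lower())


def buildListOfTokenizedDocuments(dictFromJSON):
    # the comments plus the product description form the document collection
    docs = [tokenizeDocument(c["text"]) for c in dictFromJSON["comments"]]
    docs.append(tokenizeDocument(dictFromJSON["description"]))
    return docs


def calculateVector(tokens, tokenized_docs):
    # sparse {term: weight} vector: term frequency scaled by inverse document frequency
    tf = collections.Counter(tokens)
    n_docs = len(tokenized_docs)
    vector = {}
    for term, count in tf.items():
        df = sum(1 for doc in tokenized_docs if term in doc)
        vector[term] = count * (math.log(n_docs / (1 + df)) + 1)
    return vector


def getCosine(vec1, vec2):
    # cosine similarity between two sparse vectors; 0.0 if either is empty
    shared = set(vec1) & set(vec2)
    numerator = sum(vec1[t] * vec2[t] for t in shared)
    norm1 = math.sqrt(sum(v * v for v in vec1.values()))
    norm2 = math.sqrt(sum(v * v for v in vec2.values()))
    return numerator / (norm1 * norm2) if norm1 and norm2 else 0.0

Under these assumptions, the relevancy computed in the loop is simply the cosine between a comment's term-weight vector and the description's vector, which is why the 0.15 cut-off discards comments that share almost no vocabulary with the product description.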