def analyse_entry(self, entry, activity):
    """Annotate *entry* with PAD (pleasure/arousal/dominance) values.

    Extracts V/A/D features from the entry text using the dictionary for
    the requested language and attaches a single EmotionSet with one
    Emotion expressed in the EmotionML PAD vocabulary.
    """
    params = activity.params
    raw_text = entry.text
    preprocessed = self._my_preprocessor(raw_text)
    lang_dictionary = self._dictionary[params['language']]
    feature_set = self._extract_features(preprocessed, lang_dictionary,
                                         params['language'])

    emotions = EmotionSet()
    emotions.id = "Emotions0"
    emotion1 = Emotion(id="Emotion0")
    # Map the V/A/D features onto the EmotionML PAD dimensions.
    emotion1["emoml:pad-dimensions_pleasure"] = feature_set['V']
    emotion1["emoml:pad-dimensions_arousal"] = feature_set['A']
    emotion1["emoml:pad-dimensions_dominance"] = feature_set['D']

    # Record provenance on both the emotion and its containing set.
    emotion1.prov(activity)
    emotions.prov(activity)
    emotions.onyx__hasEmotion.append(emotion1)
    entry.emotions = [emotions]
    yield entry
def test_turtle(self):
    """Any model should be serializable as a turtle file"""
    ana = EmotionAnalysis()
    res = Results()
    res.analysis.append(ana)
    entry = Entry(text='Just testing')
    eSet = EmotionSet()
    emotion = Emotion()
    entry.emotions.append(eSet)
    res.entries.append(entry)
    eSet.onyx__hasEmotion.append(emotion)
    eSet.prov__wasGeneratedBy = ana.id
    # Human-readable sketch of the triples expected in the serialized
    # graph; only its length is compared against the parsed graph below.
    triples = ('ana a :Analysis',
               'entry a :entry',
               '      nif:isString "Just testing"',
               '      onyx:hasEmotionSet eSet',
               'eSet a onyx:EmotionSet',
               '      prov:wasGeneratedBy ana',
               '      onyx:hasEmotion emotion',
               'emotion a onyx:Emotion',
               'res a :results',
               # Fixed typo: was "me:AnalysisInvoloved".
               '      me:AnalysisInvolved ana',
               '      prov:used entry')
    t = res.serialize(format='turtle')
    print(t)
    # Round-trip: the turtle output must be parseable and contain
    # exactly one triple per expected statement.
    g = rdflib.Graph().parse(data=t, format='turtle')
    assert len(g) == len(triples)
def test_turtle(self):
    """Any model should be serializable as a turtle file"""
    ana = EmotionAnalysis()
    res = Results()
    res.activities.append(ana)
    entry = Entry(text='Just testing')
    eSet = EmotionSet()
    emotion = Emotion()
    entry.emotions.append(eSet)
    res.entries.append(entry)
    eSet.onyx__hasEmotion.append(emotion)
    eSet.prov__wasGeneratedBy = ana.id
    # Human-readable sketch of the triples expected in the serialized
    # graph; only its length is compared against the parsed graph below.
    triples = ('ana a :Analysis',
               # Fixed garbled text: was "ent[]ry a :entry".
               'entry a :entry',
               '      nif:isString "Just testing"',
               '      onyx:hasEmotionSet eSet',
               'eSet a onyx:EmotionSet',
               '      prov:wasGeneratedBy ana',
               '      onyx:hasEmotion emotion',
               'emotion a onyx:Emotion',
               'res a :results',
               '      me:AnalysisInvolved ana',
               '      prov:used entry')
    t = res.serialize(format='turtle')
    print(t)
    # Round-trip: the turtle output must be parseable and contain
    # exactly one triple per expected statement.
    g = rdflib.Graph().parse(data=t, format='turtle')
    assert len(g) == len(triples)
def analyse_entry(self, entry, activity):
    """Attach a randomly drawn big6 emotion (happiness or anger) to *entry*."""
    # Clamp a gaussian sample to [-1, 1]; positive samples become anger,
    # everything else stays at the default happiness category.
    sample = max(-1, min(1, random.gauss(0, 0.5)))
    category = "emoml:big6anger" if sample > 0 else "emoml:big6happiness"

    emotionSet = EmotionSet()
    emotionSet.onyx__hasEmotion.append(
        Emotion({"onyx:hasEmotionCategory": category}))
    emotionSet.prov(activity)
    entry.emotions.append(emotionSet)
    yield entry
def analyse_entry(self, entry, params):
    """Attach a randomly drawn big6 emotion (happiness or anger) to *entry*."""
    # Clamp a gaussian sample to [-1, 1]; positive samples become anger,
    # everything else stays at the default happiness category.
    sample = max(-1, min(1, random.gauss(0, 0.5)))
    category = "emoml:big6anger" if sample > 0 else "emoml:big6happiness"

    emotionSet = EmotionSet()
    emotionSet.onyx__hasEmotion.append(
        Emotion({"onyx:hasEmotionCategory": category}))
    # Older provenance style: record only the generating plugin id.
    emotionSet.prov__wasGeneratedBy = self.id
    entry.emotions.append(emotionSet)
    yield entry
def analyse(self, **params):
    """Run the hashtag-SVM emotion pipeline on ``params['input']``.

    Extracts features from the input text, predicts per-category emotion
    values with the configured estimator and returns a Results object
    holding a single Entry with one EmotionSet.
    """
    logger.debug("Hashtag SVM Analysing with params {}".format(params))

    text_input = params.get("input", None)
    self.ESTIMATOR = params.get("estimator", 'LinearSVC')

    # EXTRACTING FEATURES
    text = self._text_preprocessor(text_input)
    X = self._convert_text_to_vector(text=text,
                                     text_input=text_input,
                                     Dictionary=self._Dictionary)
    feature_text = self._extract_features(X=X,
                                          classifiers=self._classifiers,
                                          estimator=self.ESTIMATOR)

    # GENERATING RESPONSE
    response = Results()
    entry = Entry()
    entry.nif__isString = text_input

    emotionSet = EmotionSet()
    emotionSet.id = "Emotions"

    if self.ESTIMATOR == 'SVC':
        # SVC yields intensities: derive an aggregate VAD emotion as the
        # centroid average weighted by those intensities ('surprise' has
        # no centroid and is skipped).
        emotionSet.onyx__maxIntensityValue = float(100.0)
        emotion1 = Emotion()
        for dimension in ['V', 'A', 'D']:
            weights = [feature_text[i] for i in feature_text
                       if i != 'surprise']
            if any(w != 0 for w in weights):
                value = np.average([self.centroids[i][dimension]
                                    for i in feature_text
                                    if i != 'surprise'],
                                   weights=weights)
            else:
                value = 5.0  # neutral midpoint when nothing fired
            emotion1[self.centroid_mappings[dimension]] = value
        emotionSet.onyx__hasEmotion.append(emotion1)

    # One categorical emotion per feature; intensities are reported only
    # for the SVC estimator, class predictions only when positive.
    for i in feature_text:
        if self.ESTIMATOR == 'SVC':
            emotionSet.onyx__hasEmotion.append(Emotion(
                onyx__hasEmotionCategory=self.wnaffect_mappings[i],
                onyx__hasEmotionIntensity=feature_text[i]))
        elif feature_text[i] > 0:
            emotionSet.onyx__hasEmotion.append(Emotion(
                onyx__hasEmotionCategory=self.wnaffect_mappings[i]))

    entry.emotions = [emotionSet]
    response.entries.append(entry)
    return response
def test(self, info=None):
    """Exercise forward and backwards centroid conversions."""
    if not info:
        # Default fixture: four centroids, one per quadrant of a
        # 2-D (V1, V2) space, aliased to X/Y dimension names.
        info = {
            "name": "CentroidTest",
            "description": "Centroid test",
            "version": 0,
            "centroids": {
                "c1": {"V1": 0.5, "V2": 0.5},
                "c2": {"V1": -0.5, "V2": 0.5},
                "c3": {"V1": -0.5, "V2": -0.5},
                "c4": {"V1": 0.5, "V2": -0.5}
            },
            "aliases": {
                "V1": "X-dimension",
                "V2": "Y-dimension"
            },
            "centroids_direction": ["emoml:big6", "emoml:fsre-dimensions"]
        }
    c = CentroidConversion(info)

    # Forward: a single category maps straight onto its centroid.
    es1 = EmotionSet()
    e1 = Emotion()
    e1.onyx__hasEmotionCategory = "c1"
    es1.onyx__hasEmotion.append(e1)
    res = c._forward_conversion(es1)
    assert res["X-dimension"] == 0.5
    assert res["Y-dimension"] == 0.5

    # Forward with two categories: dimensions combine (c1 + c2 cancels
    # out on X and doubles on Y).
    e2 = Emotion()
    e2.onyx__hasEmotionCategory = "c2"
    es1.onyx__hasEmotion.append(e2)
    res = c._forward_conversion(es1)
    assert res["X-dimension"] == 0
    assert res["Y-dimension"] == 1

    # Backwards: a point is assigned a category (nearest quadrant).
    e = Emotion()
    e["X-dimension"] = -0.2
    e["Y-dimension"] = -0.3
    res = c._backwards_conversion(e)
    assert res["onyx:hasEmotionCategory"] == "c3"

    e = Emotion()
    e["X-dimension"] = -0.2
    e["Y-dimension"] = 0.3
    res = c._backwards_conversion(e)
    assert res["onyx:hasEmotionCategory"] == "c2"
def test_convert_emotions(self):
    """Check the 'full', 'nested' and 'filtered' conversion modes."""
    self.senpy.activate_all(sync=True)
    plugin = Plugin({
        'id': 'imaginary',
        'onyx:usesEmotionModel': 'emoml:fsre-dimensions'
    })
    eSet1 = EmotionSet()
    activity = plugin.activity()
    eSet1.prov(activity)
    # Source emotion expressed in FSRE dimensions.
    eSet1['onyx:hasEmotion'].append(
        Emotion({
            'emoml:arousal': 1,
            'emoml:potency': 0,
            'emoml:valence': 0
        }))
    response = Results({
        'activities': [activity],
        'entries': [
            Entry({
                'nif:isString': 'much ado about nothing',
                'onyx:hasEmotionSet': [eSet1]
            })
        ]
    })
    params = {
        'emotion-model': 'emoml:big6',
        'algorithm': ['conversion'],
        'conversion': 'full'
    }

    # 'full': the converted set is added next to the original one.
    r1 = deepcopy(response)
    r1.parameters = params
    self.senpy.analyse(r1)
    assert len(r1.entries[0].emotions) == 2

    # 'nested': a single set remains, linked back to its source.
    params['conversion'] = 'nested'
    r2 = deepcopy(response)
    r2.parameters = params
    self.senpy.analyse(r2)
    assert len(r2.entries[0].emotions) == 1
    assert r2.entries[0].emotions[0]['prov:wasDerivedFrom'] == eSet1

    # 'filtered': only the converted set remains.
    params['conversion'] = 'filtered'
    r3 = deepcopy(response)
    r3.parameters = params
    self.senpy.analyse(r3)
    assert len(r3.entries[0].emotions) == 1
    r3.jsonld()
def analyse(self, **params):
    """Analyse a media file (local name or URL) and return VAD emotions."""
    logger.debug("emotionService with params {}".format(params))
    filename = params.get("i", None)

    ## FILE MANIPULATIONS ------------------------------- \
    # Remote resources are downloaded into the storage folder; local
    # names are resolved against it.
    if validators.url(filename):
        filename = self._download_file(saveFolder=self._storage_path,
                                       url=filename)
    else:
        filename = os.path.join(self._storage_path, filename)
    logger.info("{} {}".format(datetime.now(), filename))
    if not os.path.isfile(filename):
        raise Error("File %s does not exist" % filename)

    ## EXTRACTING FEATURES ------------------------------- \
    feature_set = self._extract_features(filename, convert=True)
    # self._remove_file(filename)

    ## GENERATING OUTPUT --------------------------------- \
    response = Results()
    entry = Entry()
    entry['filename'] = os.path.basename(filename)

    emotionSet = EmotionSet()
    emotionSet.id = "Emotions"
    emotion1 = Emotion()
    for dimension in self._dimensions:
        # 5*(1+x) maps a feature assumed to lie in [-1, 1] onto [0, 10]
        # — TODO confirm the extractor's output range.
        emotion1[self._centroid_mappings[dimension]] = \
            5 * (1 + feature_set[dimension])
    emotionSet.onyx__hasEmotion.append(emotion1)

    entry.emotions = [emotionSet]
    response.entries.append(entry)
    return response
def analyse_entry(self, entry, params):
    """Annotate *entry* with an ANEW-based emotion category and VAD values."""
    text_input = entry.get("text", None)
    text = self._my_preprocessor(text_input)

    lang = params.get("language", "auto")
    dictionary = {}
    # Load the ANEW lexicon for the requested language into
    # {word: {'V': ..., 'A': ..., 'D': ...}}; the Spanish and English
    # files use different column layouts.
    # NOTE(review): files are opened in binary mode but fed to
    # csv.reader — this is a Python 2 idiom and fails on Python 3;
    # confirm the supported interpreter.
    if lang == 'es':
        with open(self.anew_path_es, 'rb') as tabfile:
            for row in csv.reader(tabfile, delimiter='\t'):
                dictionary[row[2]] = {'V': row[3], 'A': row[5], 'D': row[7]}
    else:
        with open(self.anew_path_en, 'rb') as tabfile:
            for row in csv.reader(tabfile, delimiter='\t'):
                dictionary[row[0]] = {'V': row[2], 'A': row[4], 'D': row[6]}

    feature_set = self._extract_features(text, dictionary, lang)

    emotions = EmotionSet()
    emotions.id = "Emotions0"
    emotion1 = Emotion(id="Emotion0")
    emotion1["onyx:hasEmotionCategory"] = \
        self.emotions_ontology[feature_set['emotion']]
    # VAD values are expressed in the project's ANEW vocabulary.
    anew_ns = "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/anew/ns#"
    emotion1[anew_ns + "valence"] = feature_set['V']
    emotion1[anew_ns + "arousal"] = feature_set['A']
    emotion1[anew_ns + "dominance"] = feature_set['D']
    emotions.onyx__hasEmotion.append(emotion1)
    entry.emotions = [emotions]
    yield entry
def test_convert_emotions(self):
    """Check conversion modes through convert_emotions directly."""
    self.senpy.activate_all(sync=True)
    plugin = Plugin({
        'id': 'imaginary',
        'onyx:usesEmotionModel': 'emoml:fsre-dimensions'
    })
    eSet1 = EmotionSet()
    eSet1.prov__wasGeneratedBy = plugin['id']
    # Source emotion expressed in FSRE dimensions.
    eSet1['onyx:hasEmotion'].append(
        Emotion({
            'emoml:arousal': 1,
            'emoml:potency': 0,
            'emoml:valence': 0
        }))
    response = Results({
        'entries': [Entry({
            'text': 'much ado about nothing',
            'emotions': [eSet1]
        })]
    })
    params = {'emotionModel': 'emoml:big6', 'conversion': 'full'}

    # 'full': the converted set is added next to the original one.
    r1 = deepcopy(response)
    self.senpy.convert_emotions(r1, [plugin], params)
    assert len(r1.entries[0].emotions) == 2

    # 'nested': a single set remains, linked back to its source.
    params['conversion'] = 'nested'
    r2 = deepcopy(response)
    self.senpy.convert_emotions(r2, [plugin], params)
    assert len(r2.entries[0].emotions) == 1
    assert r2.entries[0].emotions[0]['prov:wasDerivedFrom'] == eSet1

    # 'filtered': only the converted set remains.
    params['conversion'] = 'filtered'
    r3 = deepcopy(response)
    self.senpy.convert_emotions(r3, [plugin], params)
    assert len(r3.entries[0].emotions) == 1
def test_convert_emotions(self):
    """Conversion modes ('full'/'nested'/'filtered') via the analysis API."""
    self.senpy.activate_all(sync=True)
    plugin = Plugin({
        'id': 'imaginary',
        'onyx:usesEmotionModel': 'emoml:fsre-dimensions'
    })
    eSet1 = EmotionSet()
    activity = plugin.activity()
    eSet1.prov(activity)
    eSet1['onyx:hasEmotion'].append(Emotion({
        'emoml:arousal': 1,
        'emoml:potency': 0,
        'emoml:valence': 0
    }))
    response = Results({
        'activities': [activity],
        'entries': [Entry({
            'nif:isString': 'much ado about nothing',
            'onyx:hasEmotionSet': [eSet1]
        })]
    })
    params = {
        'emotion-model': 'emoml:big6',
        'algorithm': ['conversion'],
        'conversion': 'full'
    }

    # 'full' mode keeps both the original and the converted set.
    r1 = deepcopy(response)
    r1.parameters = params
    self.senpy.analyse(r1)
    assert len(r1.entries[0].emotions) == 2

    # 'nested' mode keeps one set that records its derivation.
    params['conversion'] = 'nested'
    r2 = deepcopy(response)
    r2.parameters = params
    self.senpy.analyse(r2)
    assert len(r2.entries[0].emotions) == 1
    assert r2.entries[0].emotions[0]['prov:wasDerivedFrom'] == eSet1

    # 'filtered' mode keeps only the converted set.
    params['conversion'] = 'filtered'
    r3 = deepcopy(response)
    r3.parameters = params
    self.senpy.analyse(r3)
    assert len(r3.entries[0].emotions) == 1
    r3.jsonld()
def convert(self, emotionSet, fromModel, toModel, params):
    """Convert *emotionSet* between the plugin's two centroid models.

    Raises Error when the (fromModel, toModel) pair does not match the
    configured centroids_direction in either orientation.
    """
    cf, ct = self.centroids_direction
    logger.debug('{}\n{}\n{}\n{}'.format(emotionSet, fromModel, toModel,
                                         params))
    converted = EmotionSet()
    if (fromModel, toModel) == (cf, ct):
        # Categories -> dimensions: the whole set collapses into one emotion.
        converted.onyx__hasEmotion.append(self._forward_conversion(emotionSet))
    elif (fromModel, toModel) == (ct, cf):
        # Dimensions -> categories: each emotion converts independently.
        for original in emotionSet.onyx__hasEmotion:
            converted.onyx__hasEmotion.append(
                self._backwards_conversion(original))
    else:
        raise Error('EMOTION MODEL NOT KNOWN')
    yield converted
def analyse_entry(self, entry, params):
    """Attach WordNet-Affect emotion categories found in the entry text."""
    text_input = entry.get("text", None)
    preprocessed = self._my_preprocessor(text_input)
    feature_text = self._extract_features(preprocessed)

    emotionSet = EmotionSet(id="Emotions0")
    # One Emotion per detected category, carrying its raw score.
    for category in feature_text:
        emotionSet.onyx__hasEmotion.append(
            Emotion(onyx__hasEmotionCategory=self._wnaffect_mappings[category],
                    onyx__hasEmotionIntensity=feature_text[category]))
    entry.emotions = [emotionSet]
    yield entry
def analyse_entry(self, entry, activity):
    """Attach WordNet-Affect emotion categories found in the entry text.

    The *activity* argument is kept for API compatibility with the
    analyse_entry contract; this plugin does not read its parameters.
    (Fix: removed the unused local ``params = activity.params``.)
    """
    text_input = entry['nif:isString']
    text = self._my_preprocessor(text_input)
    feature_text = self._extract_features(text)

    emotionSet = EmotionSet(id="Emotions0")
    # One Emotion per detected category, carrying its raw score.
    for category in feature_text:
        emotionSet.onyx__hasEmotion.append(
            Emotion(onyx__hasEmotionCategory=self._wnaffect_mappings[category],
                    onyx__hasEmotionIntensity=feature_text[category]))
    entry.emotions = [emotionSet]
    yield entry
def analyse(self, **params):
    """Run the wassaRegression emotion pipeline on ``params['input']``.

    Depending on the 'estimator' parameter the per-emotion scores come
    from the LSTM model, the SVR model, or the element-wise average of
    both ('averaged').
    """
    logger.debug(
        "wassaRegression LSTM Analysing with params {}".format(params))
    st = datetime.now()

    text_input = params.get("input", None)
    text = self._text_preprocessor(text_input)
    self.ESTIMATOR = params.get("estimator", 'LSTM')

    if self.ESTIMATOR == 'LSTM':
        X_lstm = self._lists_to_vectors(text=text)
        feature_text = self._extract_features(X_lstm)
    elif self.ESTIMATOR == 'averaged':
        # Run both models and average their scores per emotion name.
        X_lstm = self._lists_to_vectors(text=text)
        X_svr = self._convert_text_to_vector(text=text,
                                             text_input=text_input)
        lstm_scores = self._extract_features(X_lstm)
        svr_scores = self._extract_features_svr(X_svr)
        feature_text = {
            emo: np.mean([lstm_scores[emo], svr_scores[emo]])
            for emo in self._emoNames
        }
    else:
        X_svr = self._convert_text_to_vector(text=text,
                                             text_input=text_input)
        feature_text = self._extract_features_svr(X_svr)

    logger.info("{} {}".format(datetime.now() - st, "string analysed"))

    response = Results()
    entry = Entry()
    entry.nif__isString = text_input

    emotionSet = EmotionSet()
    emotionSet.id = "Emotions"
    emotionSet.onyx__maxIntensityValue = float(100.0)

    # Aggregate VAD emotion: centroid average weighted by the scores.
    emotion1 = Emotion()
    for dimension in ['V', 'A', 'D']:
        weights = [feature_text[name] for name in feature_text]
        if any(w != 0 for w in weights):
            value = np.average(
                [self.centroids[name][dimension] for name in feature_text],
                weights=weights)
        else:
            value = 5.0  # neutral midpoint when every score is zero
        emotion1[self.centroid_mappings[dimension]] = value
    emotionSet.onyx__hasEmotion.append(emotion1)

    # One categorical emotion per name, scaled to the max intensity.
    for name in feature_text:
        emotionSet.onyx__hasEmotion.append(
            Emotion(onyx__hasEmotionCategory=self.wnaffect_mappings[name],
                    onyx__hasEmotionIntensity=float(feature_text[name]) *
                    emotionSet.onyx__maxIntensityValue))

    entry.emotions = [emotionSet]
    response.entries.append(entry)
    return response
def test_emotion_set(self):
    """A bare EmotionSet fails validation until required fields are set."""
    emotion_set = EmotionSet()
    # Missing required properties -> schema validation must fail.
    self.assertRaises(jsonschema.ValidationError, emotion_set.validate)
    # Filling in anchor text and provenance makes the set valid.
    emotion_set.nif__anchorOf = "so much testing"
    emotion_set.prov__wasGeneratedBy = ""
    emotion_set.validate()
def test_emotion_set(self):
    """EmotionSet validation: invalid when empty, valid once populated."""
    eset = EmotionSet()
    # Validation of an empty set must raise a schema error.
    self.assertRaises(jsonschema.ValidationError, eset.validate)
    eset.nif__anchorOf = "so much testing"
    eset.prov__wasGeneratedBy = ""
    # With anchor and provenance in place, validation passes.
    eset.validate()
def analyse(self, **params):
    """Run the hashtag-LSTM emotion pipeline on ``params['input']``."""
    logger.debug("Hashtag LSTM Analysing with params {}".format(params))

    text_input = params.get("input", None)
    self._ESTIMATION = params.get("estimation", 'Probabilities')

    # EXTRACTING FEATURES
    text = self._text_preprocessor(text_input)
    X = self._lists_to_vectors(text=text)
    feature_text = self._extract_features(X=X)

    # GENERATING RESPONSE
    response = Results()
    entry = Entry()
    entry.nif__isString = text_input

    emotionSet = EmotionSet()
    emotionSet.id = "Emotions"

    if self._ESTIMATION == 'Probabilities':
        # Probabilistic output: add an aggregate VAD emotion computed as
        # the centroid average weighted by the probabilities ('surprise'
        # has no centroid and is skipped).
        emotionSet.onyx__maxIntensityValue = float(100.0)
        emotion1 = Emotion()
        for dimension in ['V', 'A', 'D']:
            weights = [feature_text[i] for i in feature_text
                       if i != 'surprise']
            if any(w != 0 for w in weights):
                value = np.average(
                    [self.centroids[i][dimension]
                     for i in feature_text if i != 'surprise'],
                    weights=weights)
            else:
                value = 5.0  # neutral midpoint when nothing fired
            emotion1[self.centroid_mappings[dimension]] = value
        emotionSet.onyx__hasEmotion.append(emotion1)

    # Categorical emotions: probabilities carry an intensity in [0, 100];
    # class output only reports categories that actually fired.
    for i in feature_text:
        if self._ESTIMATION == 'Probabilities':
            emotionSet.onyx__hasEmotion.append(
                Emotion(onyx__hasEmotionCategory=self.wnaffect_mappings[i],
                        onyx__hasEmotionIntensity=float(feature_text[i]) * 100))
        elif self._ESTIMATION == 'Classes':
            if feature_text[i] > 0:
                emotionSet.onyx__hasEmotion.append(
                    Emotion(
                        onyx__hasEmotionCategory=self.wnaffect_mappings[i]))
                # onyx__hasEmotionIntensity=int(feature_text[i])))

    entry.emotions = [emotionSet]
    response.entries.append(entry)
    return response