def callSentenceGen(reply, k):
    """Build a trigram model from strarf_serif.txt and generate a reply sentence.

    Parameters:
        reply: status/tweet object; ``reply.text`` is used for keyword extraction.
        k: kept for backward compatibility with existing callers; no longer used
           (see NOTE below).

    Returns:
        The sentence produced by sentenceGenerator.generateSentence().
    """
    # Read the corpus one line per entry and terminate it with an EOS marker.
    with codecs.open("strarf_serif.txt", "rb", "utf-8") as f:
        srctxt = []
        for line in f:
            srctxt.append(line)
        srctxt.append(u"EOS")
    # NOTE(review): the original constructed modelgenerator(),
    # GeneratorForUnigram(self.itemlist) and SpaceSaving(gen, k) here, but
    # `self` is undefined in this plain function (NameError at runtime) and
    # none of those results were ever used — removed as dead, broken code.
    freq1 = trigramModelGenerator.generateModel(srctxt)
    keyword1 = keywordext.extraction(reply.text, freq1)
    sentenceGen = sentenceGenerator.sentenceGenerator(freq1)
    sentence = sentenceGen.generateSentence(keyword1)
    return sentence
def callSentenceGen_com(reply, k):
    """Build a trigram model from strarf_serif.txt, pickle it, and generate a sentence.

    Unlike callSentenceGen, the raw ``reply`` object (not ``reply.text``) is
    passed to keyword extraction, and the model is persisted via cpickler so
    callSentenceGen_pickle can reuse it later.

    Parameters:
        reply: text (or object) handed straight to keywordext.extraction.
        k: unused; kept so existing call sites keep working.

    Returns:
        The sentence produced by sentenceGenerator.generateSentence().
    """
    # NOTE(review): removed two commented-out alternate-corpus `with` lines and
    # the unused `bytesrctxt = " ".join(srctxt)` variable — dead code.
    with codecs.open("strarf_serif.txt", "rb", "utf-8") as f:
        srctxt = []
        for line in f:
            srctxt.append(line)
        srctxt.append(u"EOS")
    freq1 = trigramModelGenerator.generateModel(srctxt)
    # Persist the freshly built model for later pickle-based runs.
    cpickler.topickle(freq1)
    keyword1 = keywordext.extraction(reply, freq1)
    sentenceGen = sentenceGenerator.sentenceGenerator(freq1)
    sentence = sentenceGen.generateSentence(keyword1)
    return sentence
def testSentenceGenerator(self):
    """A sentence built from this model starts at the keyword and then
    repeats u"foo"; total length is 12 tokens."""
    model = {
        u"テスト": {u"foo": {u"foo": 1}},
        u"foo": {u"foo": {u"foo": 1}},
    }
    generator = sentenceGenerator.sentenceGenerator(model, u"テスト")
    result = generator.generateSentence()
    self.assertEqual(len(result), 12)
    self.assertEqual(result[0], u"テスト")
    # Positions 1..9 are all the filler token u"foo".
    for pos in range(1, 10):
        self.assertEqual(result[pos], u"foo")
def callSentenceGen_pickle(reply, modelpath):
    """Generate a reply sentence from a pre-pickled trigram model.

    Parameters:
        reply: status object; keywords are extracted from ``reply.text``.
        modelpath: path to the pickled frequency model.

    Returns:
        The generated sentence.
    """
    model = cpickler.frompickle(modelpath)
    extracted = keywordext.extraction(reply.text, model)
    generator = sentenceGenerator.sentenceGenerator(model)
    return generator.generateSentence(extracted)
def testBigram1(self):
    """bigram() on a single self-looping entry returns the keyword itself."""
    model = {u"テスト": {u"テスト": {u"テスト": 1}}}
    generator = sentenceGenerator.sentenceGenerator(model, u"テスト")
    self.assertEqual(generator.bigram(u"テスト"), u"テスト")
def testNoWord2(self):
    """When the chain cannot continue past the keyword's successors,
    the generated sentence contains the "Error2" marker."""
    model = {u"あーてすてす": {u"foo": {u"bar": 1}}}
    generator = sentenceGenerator.sentenceGenerator(model, u"あーてすてす")
    result = generator.generateSentence()
    self.assertIn("Error2", result)
for id in range(0, 9): botList.append(algaeChatbot(id)) #segment seg = segment() #database dbMgr.initConnect("35.236.188.139", "root", "threesththreesththreesth", "threesth") #deepAI deepAI = deepAIMgr() #sentaence sentenceMgr = [] for id in range(0, 9): sentenceMgr.append(sentenceGenerator(id)) #algae algaeDeviceList = [] algaeDeviceList.append(algaeATT('muCg1jZhIZY6s961aJ03rcxk', '4LpvkqOyciVeG0lqFyC6yXg4gvel80JLErW6hK70')) algaeDeviceList.append(algaeATT('1yIBQapqIvJmKpAEX2IJKgc5', '4O8qd506sCJMW1VeVvE41lqDtwntCIz2YZsuALS')) algaeDeviceList.append(algaeATT('xlvfKg52QzsAmJn7XSVqBvSF', '4PwZsM3RGJzuG0lqFzagjYK8CgDnIdpanumy2PJ1')) algaeDeviceList.append(algaeATT('q35bNns5XPEqJ5DbgwvsRW3Z', '4UzyyY2mPXm1i0ByXzX1LF5QyCjwD1KTl58BSr8A')) algaeDeviceList.append(algaeATT('XNQ90OLBUmEzHZh0Iv8rC64r', '4JovjWsPB5kfW1VeVnTQWPHUB7Si8qDwUwAAyeE')) algaeDeviceList.append(algaeATT('4qcN9roOwWQHHYwKEPJrlbpf', '4NKhlbNfSs5qW1VeVzQz7VQSyS45L17EnqiSFzo2')) algaeDeviceList.append(algaeATT('O4xQniLbfzOWRxJC2UyP4q50', '4QKgrGvpJOobW1VeVq8gmj9S5M4KJqhMwUrDKy8')) algaeDeviceList.append(algaeATT('QLyIkkOBVvieLmVYsBNjL2M4', '4NnFth5L5d0aW1VeVyYynXw2dJGu71ID5sQngmb')) algaeDeviceList.append(algaeATT('sJFbVrGQKSahaUsKW3CisEHh', '4Smr2oddHVhUW1VeVvw4J7KVnqezCGYgFiuXso6')) time.sleep(3) for id in range(0, 9):
def callSentenceGen_pickle(self, reply):
    """Generate a sentence from the model already loaded on this instance.

    Parameters:
        reply: text (or object) handed straight to keywordext.extraction.

    Returns:
        The generated sentence. Uses ``self.freq1`` as the frequency model.
    """
    extracted = keywordext.extraction(reply, self.freq1)
    generator = sentenceGenerator.sentenceGenerator(self.freq1)
    return generator.generateSentence(extracted)