Code example #1
from Queue import Queue   # Python 2 module; on Python 3 this is "from queue import Queue"


def Main(filename):
    # Each input line is a comma-separated record: the entity to search for,
    # followed by the query strings associated with it.
    with open(filename) as f:
        entLine = f.readlines()

    que = Queue()
    entProDict = {}
    proList = []
    entList = []
    for e in entLine:
        print e
        entLineList = e.strip().split(',')
        entToSearch = entLineList[0]
        entList.append(entToSearch)

        # The remaining fields on the line are the query strings for this entity.
        queryStrings = entLineList[1:]

        #webScraping(entToSearch, queryStrings)

    # Run the remaining pipeline stages for every entity read above.
    # ReplaceCorefPointers, collectEntities, entityClusterAndNormalise and
    # inference_test are defined elsewhere in the project.
    success = False
    for ent in entList:
        ReplaceCorefPointers(ent)
        collectEntities(ent)
        entityClusterAndNormalise(ent)
        success = inference_test(ent)
    return success
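
A minimal usage sketch for Main, assuming each line of the input file has the form "<entity>,<query string>,<query string>,..."; the filename entities.csv below is a hypothetical example, not part of the original code:

if __name__ == '__main__':
    # entities.csv is an assumed name for the comma-separated input file.
    ok = Main('entities.csv')
    print ok   # result of the last inference_test call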
Code example #2
                                        # Substitute the resolved noun for the pronoun token.
                                        stringToken[w_index] = noun
                                        replaceList.append([pronoun, noun])

                                    # Rebuild the extraction field and its "score: (...)" output line.
                                    ereList[l_index] = ' '.join(stringToken)
                                    newline_i = score + ': (' + ereList[0] + ';' + ereList[1] + ';' + ereList[2] + ')'
                                    extList[i] = newline_i
                                elif isReplace and w_index == -1:
                                    # No token index was found, so fall back to a plain string replace,
                                    # preferring the primary entity when the noun is marked for replacement.
                                    derefString = ereList[l_index]
                                    if noun in setForReplacement:
                                        derefString = derefString.replace(pronoun, primaryEnt)
                                        replaceList.append([pronoun, primaryEnt])
                                    else:
                                        derefString = derefString.replace(pronoun, noun)
                                        replaceList.append([pronoun, noun])

                                    ereList[l_index] = derefString
                                    newline_i = score + ': (' + ereList[0] + ';' + ereList[1] + ';' + ereList[2] + ')'
                                    extList[i] = newline_i
                    # Record the (possibly rewritten) extractions for this sentence.
                    sentenceToExtractionMap[sentNo] = extList
  
        xmlfileName = str(dicts)
        storeInDb(sentenceToExtractionMap,sentencewiseCorefResultDict,xmlfileName)    
        #######################################################
        ##           Write the output to the files           ##
        #######################################################
        # xmlfileName = str(dicts) +'.txt'
        # printToFile(sentenceToExtractionMap,sentencewiseCorefResultDict,xmlfileName)
    dbObj.client.close()
    collectEntities(primaryEntity, url)
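
A minimal, self-contained sketch of the substitution step that code example #2 applies to each extraction: the function deref_field, the sample triple, and all values below are hypothetical and only illustrate the technique, not the project's real data structures:

def deref_field(field, pronoun, noun, primary_ent, set_for_replacement):
    # Replace a pronoun in one extraction field with its resolved noun,
    # preferring the primary entity when the noun is marked for replacement.
    target = primary_ent if noun in set_for_replacement else noun
    return field.replace(pronoun, target)

# Hypothetical extraction triple and coreference decision:
triple = ['He', 'founded', 'the company']
triple[0] = deref_field(triple[0], 'He', 'Turing', 'Alan Turing', {'Turing'})
line = '0.93' + ': (' + triple[0] + ';' + triple[1] + ';' + triple[2] + ')'
# line == "0.93: (Alan Turing;founded;the company)"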