def __init__(self, folder, langs):
    """Load shared synset/domain vector models plus per-language word and lemma models.

    Parameters:
        folder: root directory holding ``synsets.txt``, ``domains.txt`` and one
            sub-directory per language code.
        langs:  iterable of language codes; each must have ``words.txt`` and
            ``lexemes.txt`` under ``folder/<lang>/``.
    """
    self.folder = folder
    self.langs = langs
    self.map = ['word', 'lemma']
    self.get = {}
    # Language-independent models live directly under the root folder;
    # they are keyed with lang=None.
    for kind, fname in (('synset', "synsets.txt"), ('domain', "domains.txt")):
        model = util.Shared()
        model.loadTxtModel(self.folder + "/" + fname)
        self.get[(kind, None)] = model
    # Per-language models: note the asymmetric file names
    # ('word' -> words.txt, 'lemma' -> lexemes.txt).
    for lang in langs:
        for kind, fname in (('word', "words.txt"), ('lemma', "lexemes.txt")):
            model = util.Shared()
            model.loadTxtModel(self.folder + "/" + lang + "/" + fname)
            self.get[(kind, lang)] = model
    print("MultilingualWordVector READY!")
def __init__(self, file_name, folder, lang):
    """Initialize a per-language WordNet index.

    Parameters:
        file_name: base file name used by this object.
        folder:    working directory for input/output files.
        lang:      language code; must be one of ``wn.langs()``.

    Raises:
        ValueError: if *lang* is not a supported WordNet language.
    """
    self.file_name = file_name
    self.folder = folder
    if lang in wn.langs():
        self.lang = lang
    else:
        # BUG FIX: the original code only printed this warning and left
        # self.lang unset, which deferred the failure to a confusing
        # AttributeError at first use. Fail fast instead.
        print("language: '%s' is not supported, try another language" % lang)
        raise ValueError("language: '%s' is not supported" % lang)
    # initialize empty lookup tables; populated later by the loader methods.
    self.WordIndex = {}
    self.SynsetIndex = {}
    # WordNet part-of-speech tags: adjective, satellite adj., adverb, noun, verb.
    self.pos_list = ['a', 's', 'r', 'n', 'v']
    self.Shared = util.Shared()
def __init__(self, file_name, folder, lang):
    """Initialize a per-language WordNet index with pointer-relation support.

    Parameters:
        file_name: base file name used by this object.
        folder:    working directory for input/output files.
        lang:      language code; must be one of ``wn.langs()``.

    Raises:
        ValueError: if *lang* is not a supported WordNet language.
    """
    self.file_name = file_name
    self.folder = folder
    if lang in wn.langs():
        self.lang = lang
    else:
        # BUG FIX: the original code only printed this warning and left
        # self.lang unset, which deferred the failure to a confusing
        # AttributeError at first use. Fail fast instead.
        print("language: '%s' is not supported, try another language" % lang)
        raise ValueError("language: '%s' is not supported" % lang)
    # initialize empty lookup tables; populated later by the loader methods.
    self.WordIndex = {}
    self.SynsetIndex = {}
    # WordNet part-of-speech tags: adjective, satellite adj., adverb, noun, verb.
    self.pos_list = ['a', 's', 'r', 'n', 'v']
    # WordNet pointer symbols mapped to human-readable relation names.
    self.pointer_map = {"@": "hypernym", "&": "similar", "$": "verbGroup", "!": "antonym"}
    self.Shared = util.Shared()
def __init__(self, file_name, folder, wnd_file, wndh_file):
    """Initialize a WordNet-Domains loader.

    Parameters:
        file_name: base file name used by this object.
        folder:    working directory for input/output files.
        wnd_file:  path of the WordNet-Domains mapping file.
        wndh_file: path of the WordNet-Domains hierarchy file.
    """
    # FIX: assign plain attributes before calling the load helpers.
    # Originally file_name/folder were set only AFTER loadWNDFile /
    # loadWNDHFile ran, so any helper reading those attributes would
    # have hit an AttributeError.
    self.file_name = file_name
    self.folder = folder
    # synset->domain and category maps plus hypernym/hyponym tables,
    # created empty before the loaders populate them.
    self.dictS = {}
    self.dictC = {}
    self.hype = {}
    self.hypo = {}
    self.loadWNDFile(wnd_file)
    self.loadWNDHFile(wndh_file)
    # initialize remaining lookup tables (kept after the loads, as in the
    # original order — presumably the loaders do not touch these; verify).
    self.SynsetIndex = {}
    self.CategoryIndex = {}
    self.Shared = util.Shared()
def __init__(self, folder, langs, generality=False):
    """Load multilingual synset/word/lemma vectors, optionally with generality models.

    Parameters:
        folder:     root directory holding ``synsets.txt`` and one
            sub-directory per language code.
        langs:      iterable of language codes to load.
        generality: when truthy, additionally load generality models; the
            attribute is then replaced by a dict keyed like ``self.get``.
    """
    self.folder = folder
    self.langs = langs
    self.map = ['word', 'lemma']
    self.N = 30
    self.generality = generality
    self.get = {}

    def _load_txt(path):
        # Small local helper: build a Shared model from a text file.
        shared = util.Shared()
        shared.loadTxtModel(path)
        return shared

    self.get[('synset', None)] = _load_txt(self.folder + "/synsets.txt")
    for lang in langs:
        base = self.folder + "/" + lang
        self.get[('word', lang)] = _load_txt(base + "/words.txt")
        self.get[('lemma', lang)] = _load_txt(base + "/lemmas.txt")
    print("MultilingualWordVector READY!")

    # Guard clause: nothing more to do without generality models.
    if not self.generality:
        print("MultilingualLemmaGenerality DON'T USE!")
        return

    # Swap the truthy flag for a lookup table mirroring self.get
    # (the attribute stays truthy, so later checks behave the same).
    self.generality = {}
    model = util.Shared()
    model.loadGenerality(self.folder + "/generality/synsets.txt")
    self.generality[('synset', None)] = model
    for lang in langs:
        gbase = self.folder + "/" + lang + "/generality"
        model = util.Shared()
        model.loadGenerality(gbase + "/words.txt")
        self.generality[('word', lang)] = model
        # Lemmas use a dedicated loader, unlike synsets/words.
        model = util.Shared()
        model.loadLemmaGenerality(gbase + "/lemmas.txt")
        self.generality[('lemma', lang)] = model
    print("MultilingualLemmaGenerality READY!")