le_obj.sense_el.attrib['annotator'] = '' #STEP X: CHANGE VERSION INFO my_parser.lexicon_el.attrib['label'] = 'ODWN-ORBN-LMF-1.2' #STEP X: REMOVE LE IDS remove_le_ids() #STEP X: REMOVE _SUB_ remove_sub_ids() #STEP X: one_lemma_per_synset() #STEP X: REMOVE EMPTY LEAVE ODWN SYNSETS remove_empty_leave_odwn_synsets() #STEP X: ADD ADJECTIVE SYNSETS add_adjective_synsets() #STEP X: ADD LE_IDS IN ADJECTIVE SYNSETS add_adjective_le_ids() #STEP X: RUN STATS #my_parser.get_stats(verbose=True) #STEP X: export it to version 1.2 my_parser.export(output_path) logger.info('finished conversion')
# --- Tail of the ODWN 1.1 build script (reconstructed) ---
# NOTE(review): this chunk was whitespace-collapsed onto one physical line;
# line breaks below are reconstructed. The first statements belong to the
# body of a loop over (provenance, synset_identifier, lemma, counter, ...)
# whose for-header is outside this chunk — the loop-body indentation cannot
# be recovered here; confirm against the full file.

# Progress logging: emit a marker every 100 entries processed.
if counter % 100 == 0:
    log_it("_".join([provenance, str(counter), synset_identifier, lemma]))

# Map the one-letter POS tag to its long form and register the lexical entry.
long_pos = pos_dict[short_pos]
instance.les_add_le(lemma, long_pos, short_pos, synset_identifier, provenance, definition="", sense_id=None, sense_number=None)

# --- post-loop steps (presumably dedented in the original) ---
log_it("adding antoni and google")
add_antoni()

log_it("gathering stats")
instance.get_stats(verbose=True)
instance.clean()

#export
# Build the destination path for the 1.1 LMF file and write it out.
output_path = os.path.join(cwd, 'odwn', 'resources', 'odwn', 'odwn_orbn_gwg-LMF_1.1.xml')
instance.export(output_path)
# --- Tail of the ODWN 1.1 build script, POS-filtered variant (reconstructed) ---
# NOTE(review): near-duplicate of the preceding chunk, with an added filter
# that skips everything except nouns and verbs. This chunk was
# whitespace-collapsed onto one physical line; line breaks below are
# reconstructed. The leading statements (including the `continue`) belong to
# a for-loop whose header is outside this chunk, so this fragment is not
# runnable standalone — confirm indentation against the full file.

# Derive the one-letter POS tag from the synset id suffix and keep only
# nouns ('n') and verbs ('v').
short_pos = synset_identifier[-1]
if short_pos not in ['n','v']:
    continue

# Progress logging: emit a marker every 100 entries processed.
if counter % 100 == 0:
    log_it("_".join([provenance,str(counter),synset_identifier,lemma]))

# Map the one-letter POS tag to its long form and register the lexical entry.
long_pos = pos_dict[short_pos]
instance.les_add_le(lemma, long_pos, short_pos, synset_identifier, provenance, definition="", sense_id=None, sense_number=None)

# --- post-loop steps (presumably dedented in the original) ---
log_it("adding antoni and google")
add_antoni()

log_it("gathering stats")
instance.get_stats(verbose=True)
instance.clean()

#export
# Build the destination path for the 1.1 LMF file and write it out.
output_path = os.path.join(cwd,'odwn','resources','odwn','odwn_orbn_gwg-LMF_1.1.xml')
instance.export(output_path)