read_terms(open('/bibdev/travail/typage/typage_biotope_task3.4/data/expe_20120912/bacteria_habitat_OntoBiotope-34'))
read_heads(open('/bibdev/travail/typage/typage_biotope_task3.4/data/expe_20120912/heads_tolearn_onto'), action='learn')
#test.cleaning_helper()
print '#' * 100
#test.cleaning_helper()

# LEARNING
print 'LEARNING'
print "Reading lemma"
read_lemma(open('/bibdev/travail/typage/typage_biotope_task3.4/data/expe_20120912/lemma'))
print "Reading types"
read_types(open('/bibdev/travail/typage/typage_biotope_task3.4/data/expe_20120912/types'))
print "Reading heads"
read_heads(open('/bibdev/travail/typage/typage_biotope_task3.4/data/expe_20120912/heads_tolearn_dico'), action='learn')
print "Saving"
test.save(prefix='/bibdev/travail/typage/typage_biotope_task3.4/dumps/expe1_20120912/expe1_20120912_after_learning')

# TAGGING
print "\nTAGGING"
print "Tagging heads"
read_heads(open('/bibdev/travail/typage/typage_biotope_task3.4/data/expe_20120912/heads_totag'), action='tag')
print "Saving"
test.save(prefix='/bibdev/travail/typage/typage_biotope_task3.4/dumps/expe1_20120912/expe1_20120912_after_tagging')

#trouves = 0
#non_trouves = 0
#for k,v in test.terms.items():
#    print k, ':', v
#    if hasattr(v, 'head') and hasattr(v, '_subsets'):
#        trouves += 1
#    else:
#read_terms(open('../data/mbto.obo'))
#read_heads(open('../data/mbto.heads'), action='learn')
#test.cleaning_helper()
print '#' * 100
#test.cleaning_helper()

# LEARNING
print 'LEARNING'
print "Reading lemma"
read_lemma(open('../data/expe1_20120910/lemma'))
print "Reading types"
read_types(open('../data/expe1_20120910/types'))
print "Reading heads"
read_heads(open('../data/expe1_20120910/heads_tolearn'), action='learn')
print "Saving"
test.save(prefix='../dumps/expe1_20120910/expe1_20120910_after_learning')

# TAGGING
print "\nTAGGING"
print "Tagging heads"
read_heads(open('../data/expe1_20120910/heads_totag'), action='tag')
print "Saving"
test.save(prefix='../dumps/expe1_20120910/expe1_20120910_after_tagging')

#trouves = 0
#non_trouves = 0
#for k,v in test.terms.items():
#    print k, ':', v
#    if hasattr(v, 'head') and hasattr(v, '_subsets'):
#        trouves += 1
#    else:
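# The driver scripts above are identical except for their data and dump
# paths. A minimal sketch of a parameterized runner under that observation;
# the name run_experiment and its arguments are hypothetical, and it assumes
# read_lemma, read_types, read_heads and test are in scope as in the scripts.
def run_experiment(data_dir, dump_prefix):
    # LEARNING: ingest lemmas, types and heads for this experiment
    read_lemma(open(data_dir + '/lemma'))
    read_types(open(data_dir + '/types'))
    read_heads(open(data_dir + '/heads_tolearn'), action='learn')
    test.save(prefix=dump_prefix + '_after_learning')
    # TAGGING: tag the held-out heads and dump the result
    read_heads(open(data_dir + '/heads_totag'), action='tag')
    test.save(prefix=dump_prefix + '_after_tagging')

# Example call, mirroring the script above:
#run_experiment('../data/expe1_20120910', '../dumps/expe1_20120910/expe1_20120910')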
print "Reading onto" read_terms(codecs.open(BASE_DATA + u'onto')) print '%d terms currently in memory.\n' % len(test.terms) print 'The following inconsistencies were found in the ontology:' test.cleaning_helper() print print "Reading onto heads" read_heads(codecs.open(BASE_DATA + u'heads_tolearn_onto', encoding='UTF-8'), action='learn') print '%d terms currently in memory.\n' % len(test.terms) print 'The following inconsistencies were found in the ontology:' test.cleaning_helper() print print "Saving after learning onto" test.save(prefix=BASE_DUMPS + EXPE + u'_after_learning_onto') print '-' * 80 if FLAT_OK: ############################################################################# # LEARNING FLAT RESOURCES print 'LEARNING FLAT RESOURCES' print '-' * 80 ############################################################################# print "Reading flat resources types" read_types(codecs.open(BASE_DATA + u'types', encoding='UTF-8')) print '%d terms currently in memory.\n' % len(test.terms) print "Reading flat resources heads" read_heads(codecs.open(BASE_DATA + u'heads_tolearn_dico', encoding='UTF-8'), action='learn')
def vote_person(id):
    names[id]["likes"] += 1
    test.save(names)
    return redirect("/info/" + id)
def vote(id):
    names[id]["likes"] += 1
    test.save(names)
    return redirect("/")
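# vote_person and vote read like Flask view functions (redirect is the Flask
# helper of that name). A minimal sketch of how they might be wired up; the
# app object, route paths and HTTP methods are assumptions, not taken from
# the original, and names/test are expected to be defined elsewhere.
from flask import Flask, redirect

app = Flask(__name__)

# Hypothetical routes; <id> is passed through to the view functions above.
app.add_url_rule('/vote_person/<id>', view_func=vote_person, methods=['POST'])
app.add_url_rule('/vote/<id>', view_func=vote, methods=['POST'])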
# read_terms(open('../data/mbto.obo'))
# read_heads(open('../data/mbto.heads'), action='learn')
# test.cleaning_helper()
print "#" * 100
# test.cleaning_helper()

# LEARNING
print "LEARNING"
print "Reading lemma"
read_lemma(open("../data/expe1_20120908/lemma"))
print "Reading types"
read_types(open("../data/expe1_20120908/types"))
print "Reading heads"
read_heads(open("../data/expe1_20120908/heads_tolearn"), action="learn")
print "Saving"
test.save(prefix="../dumps/expe1/expe1_20120908_after_learning")

# TAGGING
print "\nTAGGING"
print "Tagging heads"
read_heads(open("../data/expe1_20120908/heads_totag"), action="tag")
print "Saving"
test.save(prefix="../dumps/expe1/expe1_20120908_after_tagging")

# trouves = 0
# non_trouves = 0
# for k,v in test.terms.items():
#     print k, ':', v
#     if hasattr(v, 'head') and hasattr(v, '_subsets'):
#         trouves += 1
#     else:
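# The commented-out audit loop above breaks off at its else branch. A hedged
# completion of what it appears to count: terms carrying both a head and a
# _subsets attribute versus those that do not. Variable names are kept from
# the original; the final prints are an assumption.
trouves = 0
non_trouves = 0
for k, v in test.terms.items():
    if hasattr(v, 'head') and hasattr(v, '_subsets'):
        trouves += 1
    else:
        non_trouves += 1
print 'trouves:', trouves
print 'non trouves:', non_trouves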
elif prompt == 'update':
    # Loop through all items and ask for a new value
    for category, info in scentsyItems.items():
        print(f"\nCategory: {category.title()}")
        for item in info.keys():
            try:
                newValue = int(input(f"How many {item.title()} do you have? >> "))
            except ValueError:
                newValue = 0
            print(f"Update: {item.title()}: {newValue}\n")
            scentsyItems[category][item] = newValue
    save(scentsyItems)
    continue
elif prompt == 'save':
    # Save into .json file
    save(scentsyItems)
elif prompt == 'delete':
    # Delete an item from a category
    category = input("What category are you looking for? >> ")
    for key, value in scentsyItems.items():
        if category == key:
            print(f"You have accessed the '{category.title()}' category.\n")
            for nested_key, nested_value in value.items():
                # Assumed completion: the original fragment ends mid-print.
                print(f"{nested_key.title()}: {nested_value}")
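# The branches above call save(scentsyItems), described in the source as
# saving into a .json file. A minimal sketch of such a helper, assuming the
# standard json module; the filename is a placeholder.
import json

def save(data, path='scentsy_items.json'):
    # Serialize the inventory dict to disk as pretty-printed JSON.
    with open(path, 'w') as f:
        json.dump(data, f, indent=2)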