import save_arrays


def encoder(domains_list):
    # Collect the unique elements of each domain attribute (apn, cit, loc,
    # obj, tru), preserving first-seen order, and pickle one list per
    # attribute under ./files/.
    for attr in ("apn", "cit", "loc", "obj", "tru"):
        unique = []
        for dom in domains_list:
            for element in getattr(dom, attr):
                if element not in unique:
                    unique.append(element)
        save_arrays.save(unique, "./files/" + attr + ".obj")
import save_arrays


def encoder(domains_list):
    # Variant of the encoder above: uppercase every element and de-duplicate
    # with set(). Unlike the order-preserving version, set() yields the
    # elements in arbitrary order.
    for attr in ("apn", "cit", "loc", "obj", "tru"):
        merged = []
        for dom in domains_list:
            merged += [element.upper() for element in getattr(dom, attr)]
        save_arrays.save(list(set(merged)), attr + ".obj")
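# The scripts in this repo all rely on a project-local save_arrays module
# whose implementation is not shown here. The sketch below is an assumption:
# a minimal pickle-based helper consistent with the save(obj, path) call
# sites above and with how the other scripts read these files back via
# pickle.load(open(path, "rb")).
import pickle


def save(obj, path):
    # Serialize obj to path with pickle.
    with open(path, "wb") as f:
        pickle.dump(obj, f)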
# (Fragment: tail of the training branch. The dict below supplies the nine
# named output heads as targets to net.fit(...), whose opening lines sit
# above this excerpt.)
            'action_type': train_y[0],
            'type1': train_y[1],
            'param1': train_y[2],
            'type2': train_y[3],
            'param2': train_y[4],
            'type3': train_y[5],
            'param3': train_y[6],
            'type4': train_y[7],
            'param4': train_y[8]
        },
        batch_size=128,
        epochs=10,
        verbose=2,
        validation_split=0.1)
    save_arrays.save(history, 'history')
    net.save('model')
else:
    # Evaluation branch: load a trained model and run it on the pickled
    # "mini_test" set.
    name = sys.argv[1]
    model = load_model(name)
    testing = open("mini_test", "rb")
    test = pickle.load(testing)
    test_x, test_y = neuralNet.split(test)
    res_y = model.predict(test_x)
    l = []
    for i in range(len(res_y[0])):
        pred = np.array([])
        correct = np.array([])
        for j in range(len(test_y)):
import utils
import save_arrays
import sys

if __name__ == '__main__':
    name = "goals"
    read_folder, save_path = utils.get_folders(sys.argv, name)
    plans = utils.get_plans(read_folder)
    goals = []
    for p in plans:
        if p.goals not in goals:
            goals.append(p.goals)
    save_arrays.save(goals, save_path)
    print(len(goals))
# (Fragment: tail of the per-plan action-count histogram; the enclosing
# function, loop and if open above this excerpt.)
            dictionary[num_actions[i]] = 1
        else:
            dictionary[num_actions[i]] += 1
    # Order the histogram by plan length (the dictionary key).
    dictionary = dict(sorted(dictionary.items()))
    vals = np.fromiter(dictionary.values(), dtype=float)
    with open("num_azioni_nei_piani.txt", "w") as file:
        for k, w in dictionary.items():
            file.write("Number of actions in the plan: " + str(k) +
                       " Frequency: " + str(w) + "\n")
    # Plot frequency against the actual action count rather than the
    # positional index, since the keys need not be contiguous.
    plt.plot(list(dictionary.keys()), vals)
    plt.xlabel('number of actions per plan')
    plt.ylabel('frequency')
    plt.savefig("plot_distribuzione_numero_azioni_nei_piani.png")
    plt.show()


if __name__ == '__main__':
    folder = "XmlPlans"
    plans = get_plans(folder)
    plot_num_actions_plans(plans)
    domains = logistics_domains(folder)
    init_statistics(domains)
    save_arrays.save(domains, "./files/domains.obj")
    save_arrays.save(plans, "./files/piani.obj")
    encoder(domains)
import numpy as np

import save_arrays


def save_db(db):
    # Shuffle in place, then split 80/20 into training and test sets.
    np.random.shuffle(db)
    dim = int(0.8 * len(db))
    train, test = db[:dim], db[dim:]
    save_arrays.save(train, "training_set")
    save_arrays.save(test, "test_set")
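# A minimal usage sketch with hypothetical toy data: save_db only shuffles
# and slices, so any indexable sequence of instances works. Note that
# np.random.shuffle modifies db in place.
if __name__ == '__main__':
    db = np.arange(100).reshape(50, 2)  # toy stand-in for real instances
    save_db(db)  # writes "training_set" (40 rows) and "test_set" (10 rows)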
import pickle

import oneHot_deep
import save_arrays

if __name__ == '__main__':
    # Load the pickled goals and build their one-hot dictionary.
    with open("goal", "rb") as file:
        goal = pickle.load(file)
    dizionario_goal = oneHot_deep.create_dictionary_goals(goal)
    dizionario_goal = oneHot_deep.shuffle_dictionary(dizionario_goal)
    oneHot_deep.completa_dizionario(dizionario_goal)
    save_arrays.save(dizionario_goal, "dizionario_goal")
import pickle

import natsort as na
import numpy as np

import crea_istanze
import oneHot
import save_arrays
import utils

if __name__ == '__main__':
    # Load the entity lists produced by the encoder.
    with open("apn.obj", "rb") as file1:
        apn_list = pickle.load(file1)
    with open("cit.obj", "rb") as file2:
        cit_list = pickle.load(file2)
    with open("obj.obj", "rb") as file3:
        obj_list = pickle.load(file3)
    with open("loc.obj", "rb") as file4:
        loc_list = pickle.load(file4)
    with open("tru.obj", "rb") as file5:
        tru_list = pickle.load(file5)
    # Natural-sort the lists; apn and obj skip their first element.
    apn_list = na.natsorted(apn_list[1:])
    cit_list = na.natsorted(cit_list)
    obj_list = na.natsorted(obj_list[1:])
    tru_list = na.natsorted(tru_list)
    loc_list = na.natsorted(loc_list)
    folder = "SOL_files"
    plans = utils.get_plans(folder)
    oneHot.init(plans, apn_list, cit_list, obj_list, loc_list, tru_list)
    db = crea_istanze.crea(plans)
    # Shuffle and split 80/20 into training and test sets.
    np.random.shuffle(db)
    dim = int(0.8 * len(db))
    train, test = db[:dim], db[dim:]
    save_arrays.save(train, "training_set")
    save_arrays.save(test, "test_set")
    print('dataset created')
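# A consumer would reload the saved splits with the same pickle-based
# pattern used elsewhere in these scripts (e.g. the evaluation branch that
# reads "mini_test"); a minimal sketch:
import pickle

with open("training_set", "rb") as f:
    train = pickle.load(f)
with open("test_set", "rb") as f:
    test = pickle.load(f)
print(len(train), len(test))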