def test(test_loader, model_path, report_freq=100, save_examples=False):
    """Runs a saved model over test data. The user must provide model_path."""
    ae = load_model(model_path=model_path)
    ae.eval()
    total_step = len(test_loader)
    test_loss = 0
    with torch.no_grad():
        for i, (img, attr) in enumerate(test_loader):
            img = img.to(DEVICE)
            attr = attr.to(DEVICE)  # move attributes to the same device as the model (as in train)
            gen_img, mu, sig = ae(img, attr)
            loss = loss_function(gen_img, img, mu, sig)
            test_loss += loss.item()
            if (i + 1) % report_freq == 0:
                print("Step [{}/{}] Test Loss: {:.4f}".format(
                    i + 1, total_step, loss.item()))
                if save_examples:
                    save_to = f'TestExamples_Step{i+1}.png'
                    utils.make_examples(img, gen_img, save_to=save_to)
                    save_pretty = f'TestExamplesPretty_Step{i+1}.png'
                    utils.make_examples_pretty(img, gen_img, save_to=save_pretty)
    test_loss /= total_step
    print('====> Test set loss: {:.4f}'.format(test_loss))
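# For reference, a minimal sketch of the objective that loss_function (defined
# elsewhere in this repo) is assumed to compute for this VAE-style autoencoder:
# binary cross-entropy reconstruction plus the closed-form KL divergence of a
# diagonal Gaussian against a standard normal. The name _sketch_vae_loss is
# hypothetical, and sig is assumed to be the log-variance; the repo's actual
# loss_function may differ.
def _sketch_vae_loss(gen_img, img, mu, sig):
    import torch.nn.functional as F
    # Reconstruction term: how closely the decoded image matches the input.
    bce = F.binary_cross_entropy(gen_img, img, reduction='sum')
    # KL term: D_KL(N(mu, exp(sig)) || N(0, I)), assuming sig is log-variance.
    kld = -0.5 * torch.sum(1 + sig - mu.pow(2) - sig.exp())
    return bce + kld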
def calc_Naive_Bayse(train_p, dev_p):
    """Naive Bayes over a given train set and test set.

    :param train_p: parsed train set
    :param dev_p: parsed test set
    :return: the accuracy over the test set
    """
    train, att = make_examples(copy.deepcopy(train_p))
    dev, att_dev = make_examples(copy.deepcopy(dev_p))
    F2I = parseAttributes(train_p[0])
    naive_bayes = NaiveBayes(train, dev, attributes=att, F2I=F2I)
    acc = naive_bayes.naiveBayes()
    avg_acc = "{0:.2f}".format(acc)
    return avg_acc
def Naive_Byse_k_folds(train_p):
    """Naive Bayes over all the data, with k-fold cross-validation.

    Divides the data into k folds, rotates each fold through the test role
    k times, and returns the average accuracy over the test sets.

    :param train_p: all the data
    :return: the k-fold accuracy
    """
    all_ex, att = make_examples(copy.deepcopy(train_p))
    F2I = parseAttributes(train_p[0])
    k = 5
    accuracy = 0
    data = dev_train_sep(k, data=all_ex)
    for i in range(k):
        dev = data[i]
        train = []
        for j in range(k):
            if j != i:
                train += data[j]
        naive_bayes = NaiveBayes(train, dev, attributes=att, F2I=F2I)
        acc = naive_bayes.naiveBayes()
        accuracy += acc
    avg_acc = "{0:.2f}".format(accuracy / k)
    print("Naive Bayes: " + str(avg_acc))
    return avg_acc
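# dev_train_sep is defined elsewhere in this repo. A minimal sketch of the
# k-fold split it is assumed to perform (partitioning the examples into k
# roughly equal chunks); the real implementation may shuffle or stratify
# differently, and the name _sketch_dev_train_sep is hypothetical.
def _sketch_dev_train_sep(k, data):
    fold_size = len(data) // k
    folds = [data[i * fold_size:(i + 1) * fold_size] for i in range(k)]
    # Append any remainder examples to the last fold so nothing is dropped.
    folds[-1].extend(data[k * fold_size:])
    return folds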
def ID3_k_folds(train_p):
    """k-fold cross-validation for ID3.

    Divides the data into k folds (k=5) and runs the algorithm k times;
    each time one fold is the test set and the remaining folds are the
    train set.

    :param train_p: all the data
    :return: the average accuracy over the k runs, and the last fold's tree
    """
    all_ex, att = make_examples(copy.deepcopy(train_p))
    F2I = parseAttributes(train_p[0])
    k = 5
    accuracy = 0
    data = dev_train_sep(k, data=all_ex)
    for i in range(k):
        dev = data[i]
        train = []
        for j in range(k):
            if j != i:
                train += data[j]
        default, n = max_can_eat(train)
        mode = "yes" if default else "no"
        d = ID3(F2I, copy.deepcopy(att), mode, copy.deepcopy(train))
        tree = d.DTL()
        acc = ID3.get_accuracy(tree=copy.deepcopy(tree), test=copy.deepcopy(dev),
                               F2I=copy.deepcopy(F2I), attributes=copy.deepcopy(att),
                               default=mode)
        accuracy += acc
    avg_acc = "{0:.2f}".format(accuracy / k)
    return avg_acc, tree
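# max_can_eat is defined elsewhere in this repo. A minimal sketch of what it is
# assumed to return: whether the majority label among the examples is "yes"
# (can eat), plus the majority count, which the callers above use as the
# default class for ID3. The name _sketch_max_can_eat is hypothetical, and the
# label is assumed to be each example's last field; the real helper may differ.
def _sketch_max_can_eat(examples):
    yes = sum(1 for ex in examples if ex[-1] == "yes")
    no = len(examples) - yes
    return yes >= no, max(yes, no)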
def calc_ID3(train_p, dev_p):
    """Runs ID3 on a given train set and dev set.

    :param train_p: train set
    :param dev_p: dev set
    :return: the accuracy (and the tree)
    """
    train, att = make_examples(copy.deepcopy(train_p))
    dev, att_dev = make_examples(copy.deepcopy(dev_p))
    F2I = parseAttributes(train_p[0])
    default, n = max_can_eat(train)
    mode = "yes" if default else "no"
    d = ID3(F2I, copy.deepcopy(att), mode, copy.deepcopy(train))
    tree = d.DTL()
    acc = ID3.get_accuracy(tree=copy.deepcopy(tree), test=copy.deepcopy(dev),
                           F2I=copy.deepcopy(F2I), attributes=copy.deepcopy(att),
                           default=mode)
    avg_acc = "{0:.2f}".format(acc)
    return avg_acc, tree
def ID3_print_Tree(train_p):
    """Builds the ID3 tree over all the data and returns it.

    (The call that actually prints the tree is currently commented out.)

    :param train_p: the train set
    :return: the tree
    """
    all_ex, att = make_examples(copy.deepcopy(train_p))
    F2I = parseAttributes(train_p[0])
    default, n = max_can_eat(all_ex)
    default_yes_no = "yes" if default else "no"
    d = ID3(F2I, copy.deepcopy(att), default_yes_no, copy.deepcopy(all_ex))
    tree = d.DTL()
    # d.print_tree(tree)
    return tree
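# A hedged usage sketch tying the classifiers above together.
# _sketch_run_classifiers is a hypothetical name, and train_p/dev_p are assumed
# to be parsed datasets in whatever format make_examples expects; only the
# calls themselves come from the functions above.
def _sketch_run_classifiers(train_p, dev_p):
    print("Naive Bayes (held-out): " + calc_Naive_Bayse(train_p, dev_p))
    Naive_Byse_k_folds(train_p)  # prints its own k-fold average internally
    id3_acc, tree = calc_ID3(train_p, dev_p)
    print("ID3 (held-out): " + id3_acc)
    print("ID3 (k-fold): " + ID3_k_folds(train_p)[0])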
def train(train_loader, model_path=None, num_epochs=10, seed=42,
          report_freq=100, save_examples=False):
    """Runs training for the AutoEncoder model.

    If model_path is not provided, training starts from scratch.
    """
    torch.manual_seed(seed)
    if model_path:
        ae = load_model(model_path=model_path)
    else:
        # model_path not provided => will save to the default path;
        # check whether the default path exists and ask the user what to do
        if os.path.exists(DEFAULT_MODEL_PATH):
            print(
                f'Detected a model already saved at {DEFAULT_MODEL_PATH} (the default model path)'
            )
            print(
                'Would you like to resume (r) training of this model or start from scratch and overwrite (o) it?'
            )
            answer = input('Resume (r) / Overwrite (o): ').strip().lower()
            if answer == 'r':
                ae = load_model(model_path=DEFAULT_MODEL_PATH)
            elif answer == 'o':
                ae = create_model()
            else:
                raise ValueError(
                    f'Your response "{answer}" was not understood.')
        else:
            # no risk of overwriting
            ae = create_model()

    optimizer = torch.optim.Adam(ae.parameters(), lr=1e-3)

    # Train the model
    total_step = len(train_loader)
    losses = []
    for epoch in range(num_epochs):
        for i, (img, attr) in enumerate(train_loader):
            img = img.to(DEVICE)
            attr = attr.to(DEVICE)

            # Forward pass
            gen_img, mu, sig = ae(img, attr)
            loss = loss_function(gen_img, img, mu, sig)

            # Backward and optimize
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            losses.append(loss.item())
            if (i + 1) % report_freq == 0:
                print("Epoch [{}/{}], Step [{}/{}] Loss: {:.4f}".format(
                    epoch + 1, num_epochs, i + 1, total_step, loss.item()))
                if save_examples:
                    save_to = f'TrainExamples_Epoch{epoch+1}_Step{i+1}.png'
                    utils.make_examples(img, gen_img, save_to=save_to)

        # Save model after each epoch
        save_to = model_path if model_path else DEFAULT_MODEL_PATH  # fall back to the default path if None provided
        torch.save(ae.state_dict(), save_to)
        print(f'Saved model to {save_to}')
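# A hedged usage sketch showing how train and test above might be driven end to
# end. The dataset, transform, and DataLoader settings are illustrative
# assumptions (this repo's real data pipeline, DEVICE, and DEFAULT_MODEL_PATH
# are defined elsewhere); only the train/test signatures come from the code
# above. CelebA is used because it yields (image, attributes) pairs matching
# the (img, attr) batches these loops expect.
if __name__ == '__main__':
    from torch.utils.data import DataLoader
    from torchvision import datasets, transforms

    transform = transforms.Compose([
        transforms.CenterCrop(148),  # square crop before resizing (assumed)
        transforms.Resize(64),
        transforms.ToTensor(),
    ])
    train_set = datasets.CelebA('data', split='train', target_type='attr',
                                transform=transform, download=True)
    test_set = datasets.CelebA('data', split='test', target_type='attr',
                               transform=transform, download=True)

    train(DataLoader(train_set, batch_size=128, shuffle=True),
          num_epochs=10, save_examples=True)
    test(DataLoader(test_set, batch_size=128), model_path=DEFAULT_MODEL_PATH)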