import os

import numpy as np
import torch

# MLP is the project's own network class, assumed to be defined/imported elsewhere.


class pytorch_nn:
    def __init__(self):
        # Load the trained weights onto CPU and switch to inference mode.
        weights_pth = os.path.join('./checkpoints', 'nn_weights.pth')
        self.model = MLP(input_size=5, output_size=3)
        self.model.load_state_dict(
            torch.load(weights_pth, map_location=torch.device('cpu')))
        self.model.eval()

    def predict(self, in_vector):
        # Convert the input to a float tensor (Variable is deprecated in
        # modern PyTorch, so a plain tensor is used instead).
        in_vector = torch.from_numpy(np.asarray(in_vector)).float()
        with torch.no_grad():
            outputs = self.model(in_vector)
        # Highest raw output (not a softmax probability) and its class index.
        prob, pred = outputs.max(0, keepdim=True)
        return outputs, prob, pred
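# A minimal usage sketch for the wrapper above (not part of the original
# project); the five feature values are the example vector that appears,
# commented out, in getNewNN below.
clf = pytorch_nn()
outputs, prob, pred = clf.predict([1, 589, 9, 1, 0])
print("raw outputs:", outputs.tolist())
print("top score:", prob.item(), "predicted index:", pred.item())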
def getNewNN(thisDict):
    # Rebuild the model from the same checkpoint used by pytorch_nn above.
    weights_pth = "checkpoints/nn_weights.pth"
    model = MLP(input_size=5, output_size=3)
    model.load_state_dict(
        torch.load(weights_pth, map_location=torch.device('cpu')))
    model.eval()

    # Translate each raw value in the request dict into its numeric encoding
    # via the masterMerged lookup table (a pandas DataFrame defined elsewhere).
    in_vector = []
    for key, value in thisDict.items():
        print(key, value)
        temp = masterMerged.loc[masterMerged[key + "_x"] == value].head(1)
        in_vector.append(temp.iloc[0][key + "_y"])
    # in_vector = [1, 589, 9, 1, 0]

    in_vector = torch.from_numpy(np.array(in_vector)).float()
    with torch.no_grad():
        outputs = model(in_vector)
    return outputs.tolist()
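# getNewNN relies on a DataFrame named masterMerged that pairs each raw
# categorical value (column "<key>_x") with its numeric encoding (column
# "<key>_y"). A self-contained toy sketch of that lookup, with illustrative
# column names and values only:
import pandas as pd

toy_merged = pd.DataFrame({
    "colour_x": ["red", "green", "blue"],  # raw value as it arrives in thisDict
    "colour_y": [0, 1, 2],                 # numeric encoding fed to the network
})
row = toy_merged.loc[toy_merged["colour_x"] == "green"].head(1)
print(row.iloc[0]["colour_y"])  # prints 1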
run_save_prefix = os.path.join(save_folder, f"{dataset_name}/e={eta}/")
print(run_save_prefix)
ensure_dir(run_save_prefix)

for number_of_nodes in NODES:
    run_name = f'NODE{number_of_nodes:02d}_e={eta}_d={dataset_name}'.replace(".", ",")
    mae_array = []
    # Train LOOPS independent networks per hidden-layer size and collect
    # their test MAE so the mean and spread can be reported.
    for _ in range(LOOPS):
        print(run_name)
        net = MLP(train[0], train[1], number_of_nodes,
                  momentum=0, outtype="linear")
        train_losses, valid_losses, _, _, pocket_epoch = net.train(
            train[0], train[1], valid[0], valid[1], eta, 300000,
            early_stop_count=100000, shuffle=False, batch_size=batch_size)
        net.forward(test[0])
        test_mae = mae(net.outputs, test[1])
        print(test_mae)
        mae_array.append(test_mae)
    mean, stddev = statistics.mean(mae_array), statistics.stdev(mae_array)
    results[dataset_name][eta][number_of_nodes] = mean, stddev

# Plot test MAE (log scale) against the number of hidden nodes for this
# eta / dataset combination.
plt.title(f'Performance e={eta} d={dataset_name}')
plt.plot(NODES, np.log([results[dataset_name][eta][node][0] for node in NODES]))
plt.xlabel('number of nodes')
plt.ylabel('log of test MAE')
plt.savefig(fname=os.path.join(run_save_prefix, 'node_performance'), dpi=300)
plt.close()

results_filename = f"results etas:{ETA_VALUES}" \
                   f" n:{NODES}" \
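# ensure_dir and mae are project helpers that are not shown in this excerpt.
# Minimal sketches of what they are assumed to do (not the original code):
def ensure_dir(path):
    """Create the directory (and any missing parents) if it does not exist."""
    os.makedirs(path, exist_ok=True)


def mae(outputs, targets):
    """Mean absolute error between network outputs and target values."""
    return np.mean(np.abs(np.asarray(outputs) - np.asarray(targets)))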
          dataset[1], n_hidden_nodes, momentum=momentum)
train_losses, _, train_accuracies, _, pocket_epoch = net.train(
    dataset[0], dataset[1], dataset[0], dataset[1], eta, epochs,
    early_stop_count=early_stop, shuffle=shuffle, batch_size=batch_size,
    early_stopping_threshold=early_stopping_threshold)

# Express accuracy as a percentage of the n training samples.
train_accuracies = np.array(train_accuracies) * 100 / n

# Final forward pass over the whole dataset; report the pocket epoch (the
# epoch whose weights were kept) and the learned parameters.
net.forward(dataset[0])
print("pocket epoch", pocket_epoch)
print(
    f"w1:\n{net.weights1}\nw2:{net.weights2}\n"
    f"net.hidden:\n{net.hidden}\noutputs:{net.outputs}\n"
)

# Learning-curve figure: log MSE loss against epoch on the left axis.
save_prefix = os.path.join(save_folder, name)
fig, ax1 = plt.subplots()
plt.title(name + " Learning curve")
x = np.arange(len(train_losses))
ax1.set_xlabel('Epoch')
ax1.set_ylabel('Log MSE loss')
ax1.plot(x, np.log(train_losses), color='tab:red',
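# The excerpt above stops mid-plot. A self-contained sketch of the dual-axis
# learning-curve pattern it appears to be building (log loss on the left
# axis, accuracy on a twinned right axis); the data below is synthetic and
# the continuation is an assumption, not the original code.
import numpy as np
import matplotlib.pyplot as plt

demo_losses = np.exp(-np.linspace(0, 5, 100)) + 0.01           # synthetic MSE losses
demo_accuracies = 100 * (1 - demo_losses / demo_losses.max())  # synthetic accuracy (%)
demo_x = np.arange(len(demo_losses))

demo_fig, demo_ax1 = plt.subplots()
demo_ax1.set_xlabel('Epoch')
demo_ax1.set_ylabel('Log MSE loss')
demo_ax1.plot(demo_x, np.log(demo_losses), color='tab:red')

demo_ax2 = demo_ax1.twinx()          # second y-axis sharing the same x-axis
demo_ax2.set_ylabel('Accuracy (%)')
demo_ax2.plot(demo_x, demo_accuracies, color='tab:blue')

demo_fig.tight_layout()
plt.show()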