# Sweep the training-set size for a 1-nearest-neighbour classifier on the
# TINY_MNIST evaluation split, count misclassifications per size, then plot
# and print the error table.
# NOTE(review): relies on a project helper k_NN(x, t, sample, k, N) and on
# numpy/matplotlib being imported as np/plt elsewhere in the file — confirm.
with np.load("TINY_MNIST.npz") as data:
    x, t = data["x"], data["t"]
    x_eval, t_eval = data["x_eval"], data["t_eval"]

eval_shape = np.shape(x_eval)
eval_rows = eval_shape[0]
eval_cols = eval_shape[1]  # NOTE(review): unused, kept for parity with the original

train_sizes = [5, 50, 100, 200, 400, 800]
num_sizes = np.shape(train_sizes)[0]
error_counts = np.zeros(num_sizes)

for slot, train_size in enumerate(train_sizes):
    # Classify every evaluation sample with k=1, presumably using the first
    # `train_size` training samples — confirm against k_NN's signature.
    predictions = np.zeros(eval_rows)
    for row in range(eval_rows):
        predictions[row] = k_NN(x, t, x_eval[row], 1, train_size)
    # Tally disagreements with the ground-truth labels.
    for row in range(eval_rows):
        if predictions[row] != t_eval[row]:
            error_counts[slot] = error_counts[slot] + 1

plt.plot(train_sizes, error_counts)
plt.show()

print("T size\t\tErr_Nr")
for row in range(0, num_sizes):
    print_text = '%d %s %d' % (train_sizes[row], "\t\t", error_counts[row])
    print(print_text)
# DRZEWA DECYZYJNE start_tree = timer() score_dtree, error_dtree = dtree.dtree(train_inputs, test_inputs, train_classes, test_classes) end_tree = timer() # NAIWNY BAYES start_bayes = timer() score_nbayes, error_nbayes = bayes.bayes(train_inputs, test_inputs, train_classes, test_classes) end_bayes = timer() # k-NN k = 3 start_knn3 = timer() score_knn3, error_knn3 = k_NN.k_NN(3, train_inputs, test_inputs, train_classes, test_classes) end_knn3 = timer() # neural network start_nn = timer() score_nn, error_nn = nn.nn(df, train_inputs, test_inputs, train_classes, test_classes) end_nn = timer() # SVM start_svm = timer() score_svm, error_svm = svm.svm_f(train_inputs, test_inputs, train_classes, test_classes) end_svm = timer() # k-NN k = 5
# Sweep the neighbour count k for a k-NN classifier at a fixed training-set
# size of 800, count misclassifications on the evaluation split, then plot
# and print the error table.
# NOTE(review): `data` must be an npz archive handle (np.load) still open
# from earlier in the file — confirm; also relies on project helper
# k_NN(x, t, sample, k, N) and on np/plt imports elsewhere.
x, t = data["x"], data["t"]
x_eval, t_eval = data["x_eval"], data["t_eval"]

eval_shape = np.shape(x_eval)
eval_rows = eval_shape[0]
eval_cols = eval_shape[1]  # NOTE(review): unused, kept for parity with the original

train_size = 800
neighbor_counts = [1, 3, 5, 7, 21, 101, 401]
num_ks = np.shape(neighbor_counts)[0]
error_counts = np.zeros(num_ks)

for slot, k in enumerate(neighbor_counts):
    # Classify every evaluation sample with the current k.
    predictions = np.zeros(eval_rows)
    for row in range(eval_rows):
        predictions[row] = k_NN(x, t, x_eval[row], k, train_size)
    # Tally disagreements with the ground-truth labels.
    for row in range(eval_rows):
        if predictions[row] != t_eval[row]:
            error_counts[slot] = error_counts[slot] + 1

plt.plot(neighbor_counts, error_counts)
plt.show()

print("k\t\tErr_Nr")
for row in range(0, num_ks):
    print_text = '%d %s %d' % (neighbor_counts[row], "\t\t", error_counts[row])
    print(print_text)