def optimization(x, y, x_test, y_test):
    """Grid-search the hidden-layer sizes of an MLPClassifier.

    Runs a coarse search over a wide (i, j) grid, then a fine search of a
    few units around the best coarse result.  Fitness is
    ``judge.get_points`` applied to the confusion matrix on the test
    split (higher is better).

    Args:
        x, y: training features and labels.
        x_test, y_test: held-out features and labels used for scoring.

    Returns:
        [a, b] -- the best first/second hidden-layer sizes found.
    """
    print("Multilayer perceptron optimizer")

    def _evaluate(i, j):
        # Fit one candidate MLP and score it on the test split.
        mlp = MLPClassifier(hidden_layer_sizes=(i, j))
        mlp.fit(x, y)
        y_pred = mlp.predict(x_test)
        confusion = confusion_matrix(y_test, y_pred)
        return judge.get_points(confusion)

    # -inf cannot be beaten by accident, unlike the old -999999 sentinel.
    best_result = float("-inf")
    a = 0  # 80 or 70
    b = 0  # 40 2.5496 % 25 also 3.1%

    # Coarse pass over a wide grid.
    for i in range(50, 80, 10):
        for j in range(5, 45, 10):
            print("i: {0}, j: {1}".format(i, j))
            result = _evaluate(i, j)
            if result > best_result:
                best_result = result
                a = i
                b = j
    print("Best so far: a:{0}, b:{1}".format(a, b))

    # Fine pass around the coarse optimum.  range() evaluates its bounds
    # once, so updating a/b inside the loop cannot shift the iteration.
    range_to_search_a = 5
    range_to_search_b = 3
    # max(1, ...) keeps layer sizes positive even if the grid ever starts
    # closer to zero (a no-op for the current coarse-grid bounds).
    for i in range(max(1, a - range_to_search_a), a + range_to_search_a):
        for j in range(max(1, b - range_to_search_b), b + range_to_search_b):
            print("i: {0}, j: {1}".format(i, j))
            result = _evaluate(i, j)
            if result > best_result:
                best_result = result
                a = i
                b = j
    print("Found best: a:{0}, b:{1}".format(a, b))
    return [a, b]
def optimize(x, y, x_test, y_test):
    """Search for the best neighbor count k (1..19) for KNeighborsClassifier.

    Each candidate k is fit on the training split and scored with
    ``judge.get_points`` on the confusion matrix of the test split
    (higher is better).

    Args:
        x, y: training features and labels.
        x_test, y_test: held-out features and labels used for scoring.

    Returns:
        The best number of neighbors found (int).
    """
    # -inf cannot be beaten by accident, unlike the old -99999999 sentinel.
    best_result = float("-inf")
    best_number = 1
    for i in range(1, 20):
        print(i)
        knn = KNeighborsClassifier(i)
        knn.fit(x, y)
        y_pred = knn.predict(x_test)
        confusion = confusion_matrix(y_test, y_pred)
        result = judge.get_points(confusion)
        if result > best_result:
            best_result = result
            best_number = i
    print("Best neighbors number = {0}".format(best_number))
    return best_number
def display(confusion, labels, number, name=""):
    """Plot *confusion* as an annotated heatmap in matplotlib figure *number*.

    The figure title reports the judge score, scaled to a percentage of
    bad classification; scores above 100 % are replaced with a
    placeholder message.  *labels* supplies the axis tick labels and
    *name* names the classifier in the title.
    """
    print("Displaying!")
    cm = confusion
    print(cm)
    plt.figure(number)
    ax = plt.subplot()
    sns.heatmap(cm, annot=True, ax=ax, fmt='g')  # annot=True to annotate cells
    # labels, title and ticks
    ax.set_xlabel('Predicted labels')
    ax.set_ylabel('True labels')
    # judge.get_points is negated and scaled to a percentage for display.
    score = round(-judge.get_points(confusion) * 100, 4)
    if score > 100:
        score = "Just really bad.."
    ax.set_title(
        "Confusion matrix of the {0} classifier, \nresult: {1} % of bad classification"
        .format(name, score))
    ax.xaxis.set_ticklabels(labels)
    ax.yaxis.set_ticklabels(labels)
def error_scorer(y_true, y_pred):
    """Score predictions by applying judge points to their confusion matrix."""
    return judge.get_points(confusion_matrix(y_true, y_pred))