def generate_data():
    """Generate 2-D sample points for two classes (several shaded clusters
    each) and hand them to the module-level ``naive`` plotter.

    Relies on module globals: ``no_of_points``, ``clusterA``/``clusterB``
    (cluster counts per class), ``mean_classA``/``mean_classB``,
    ``sigma_classA``/``sigma_classB``, ``multiplication_ratio``,
    ``randomFunc`` and ``naive``.
    """
    shades_a = ['#2AFB05', '#4CD933', '#56CF41', '#56CF41', '#27C40B', '#71FB58']
    shades_b = ['#DD3E28', '#BB6B20', '#E518B4', '#B4703B', '#FB10A2', '#AA5B43']

    def _emit_class(cluster_count, mean, sigma, palette, label_prefix):
        # Even split per cluster; the final cluster absorbs the remainder
        # so the class total is exactly no_of_points.
        chunk = no_of_points // cluster_count
        for k in range(cluster_count):
            count = chunk if k != cluster_count - 1 else no_of_points - chunk * k
            shifted_mean = [m + k * multiplication_ratio for m in mean]
            pts = randomFunc.generate_N_random(shifted_mean, sigma, 2, count)
            naive.add_plotting_data(pts, 'ro', palette[k], label_prefix + str(k + 1))

    _emit_class(clusterA, mean_classA, sigma_classA, shades_a, "Class A Shade ")
    _emit_class(clusterB, mean_classB, sigma_classB, shades_b, "Class B Shade ")
    naive.plot_curve("GMM", [-1, 6, -1, 6], ["X Axis", "Y Axis"])
def generate_GMM_decision_boundary():
    """Paint the two decision regions over the [-1, 6) x [-1, 6) grid,
    then overlay the sampled data points via ``generate_data``.

    Each grid point is assigned by the sign of the discriminant
    difference: non-negative -> region A (black), negative -> region B
    (white).
    """
    region_a = []
    region_b = []
    for u in np.arange(-1, 6, 0.01):
        for v in np.arange(-1, 6, 0.01):
            bucket = region_a if get_diff_of_dscriminant_function(u, v) >= 0 else region_b
            bucket.append([u, v])
    naive.add_plotting_data(region_a, '.', 'black', "Region A")
    naive.add_plotting_data(region_b, '.', 'white', "Region B")
    generate_data()
def single_update_perceptron_learning():
    """Train a perceptron on the 3-D two-class data using single-sample
    updates, then plot both classes and the learned separating plane.

    Reads the module-level coordinate lists ``c1_*``/``c2_*`` and the
    learning rate ``neta``; resets the global ``error_log_single`` and
    records progress through ``check_error``.
    """
    global c1_x, c1_y, c1_z, c2_x, c2_y, c2_z
    global error_log_single
    error_log_single = []
    itr = 0
    # Augmented vectors: [x, y, z, bias]. w starts at zero.
    w = np.zeros((4, 1))
    x = np.zeros((4, 1))
    plt.ion()
    total_iterations = 0
    while itr < no_of_data_points:
        # Class-1 sample at index itr: should satisfy w.x > 0.
        x[0][0] = c1_x[itr]
        x[1][0] = c1_y[itr]
        x[2][0] = c1_z[itr]
        x[3][0] = 1
        if (np.matmul(np.transpose(w), x)) <= 0:
            w = w + neta * x
            # Misclassified: restart the scan (itr becomes 0 after the
            # += 1 at the bottom of the loop).
            itr = -1
        # Class-2 sample: should satisfy w.x <= 0. If the scan was just
        # reset above, sample 0 is tested instead of the current index.
        x[0][0] = c2_x[0 if itr == -1 else itr]
        x[1][0] = c2_y[0 if itr == -1 else itr]
        x[2][0] = c2_z[0 if itr == -1 else itr]
        x[3][0] = 1
        if (np.matmul(np.transpose(w), x)) > 0:
            w = w - neta * x
            itr = -1
        check_error(w, total_iterations)
        itr += 1
        # Two samples (one per class) are examined per pass.
        total_iterations += 2
    x1 = []
    y1 = []
    z1 = []
    # Sample the separating plane: solve w0*x + w1*y + w2*z + w3 = 0
    # for z. NOTE(review): assumes w[2][0] != 0 after training — confirm.
    for i in np.arange(-2, 7, 0.1):
        for j in np.arange(-2, 7, 0.1):
            x1.append(i)
            y1.append(j)
            z = (w[0][0] * i + w[1][0] * j + w[3][0]) / (-1 * w[2][0])
            z1.append(z)
    # Plotting Curves
    data_classA = []
    data_classB = []
    data = []
    for i in range(len(c1_x)):
        data_classA.append([c1_x[i], c1_y[i], c1_z[i]])
    for i in range(len(c2_x)):
        data_classB.append([c2_x[i], c2_y[i], c2_z[i]])
    for i in range(len(x1)):
        data.append([x1[i], y1[i], z1[i]])
    add_plotting_data(data_classA, 'o', 'red', "Class A")
    add_plotting_data(data_classB, 'o', 'blue', "Class B")
    add_plotting_data(data, 'None', 'green', "Plane")
    plot_3d_curve("Single Update Perceptron Learning")
def batch_update_perceptron_learning():
    """Train a perceptron on the 3-D two-class data using batch updates
    (accumulate all misclassified points per epoch, then correct), and
    plot both classes with the learned separating plane.

    Reads the module-level coordinate lists ``c1_*``/``c2_*`` and the
    learning rate ``neta``; resets the global ``error_log_batch`` and
    appends ``[epoch, misclassified_count]`` entries to it.
    """
    global c1_x, c1_y, c1_z, c2_x, c2_y, c2_z
    global error_log_batch
    error_log_batch = []
    # Augmented vectors: [x, y, z, bias]. w starts at zero.
    w = np.zeros((4, 1))
    sample = np.zeros((4, 1))
    plt.ion()
    epoch = 0
    converged = False
    while not converged:
        misclassified = []
        converged = True
        for idx in range(no_of_data_points):
            # Class-1 point should land on the positive side of w.
            sample[0][0] = c1_x[idx]
            sample[1][0] = c1_y[idx]
            sample[2][0] = c1_z[idx]
            sample[3][0] = 1
            if (np.matmul(np.transpose(w), sample)) <= 0:
                misclassified.append([c1_x[idx], c1_y[idx], c1_z[idx], True])
                converged = False
            # Class-2 point should land on the non-positive side.
            sample[0][0] = c2_x[idx]
            sample[1][0] = c2_y[idx]
            sample[2][0] = c2_z[idx]
            sample[3][0] = 1
            if (np.matmul(np.transpose(w), sample)) > 0:
                misclassified.append([c2_x[idx], c2_y[idx], c2_z[idx], False])
                converged = False
        error_log_batch.append([epoch, len(misclassified)])
        # Single correction pass over every point collected this epoch.
        for px, py, pz, is_class1 in misclassified:
            sample[0][0] = px
            sample[1][0] = py
            sample[2][0] = pz
            sample[3][0] = 1
            if is_class1:
                w = w + neta * sample
            else:
                w = w - neta * sample
        epoch += 1
    # Sample the separating plane: solve w0*x + w1*y + w2*z + w3 = 0 for z.
    grid_x = []
    grid_y = []
    grid_z = []
    for gx in np.arange(-2, 7, 0.1):
        for gy in np.arange(-2, 7, 0.1):
            grid_x.append(gx)
            grid_y.append(gy)
            grid_z.append((w[0][0] * gx + w[1][0] * gy + w[3][0]) / (-1 * w[2][0]))
    class_a = [[c1_x[i], c1_y[i], c1_z[i]] for i in range(len(c1_x))]
    class_b = [[c2_x[i], c2_y[i], c2_z[i]] for i in range(len(c2_x))]
    plane = [[grid_x[i], grid_y[i], grid_z[i]] for i in range(len(grid_x))]
    add_plotting_data(class_a, 'o', 'red', "Class A")
    add_plotting_data(class_b, 'o', 'blue', "Class B")
    add_plotting_data(plane, 'None', 'green', "Plane")
    plot_3d_curve("Batch Update Perceptron Learning")
# NOTE: this span previously contained a stray module-level duplicate of the
# plotting tail of batch_update_perceptron_learning() (building data_classA /
# data_classB / the plane from x1, y1, z1 and w). Those names are undefined at
# module scope, so importing the file raised NameError before main ever ran.
# The duplicate has been removed; only the entry point remains.
if __name__ == "__main__":
    # Generate and plot the GMM sample data, then run both perceptron
    # training variants (each plots its own 3-D figure and fills its
    # global error log).
    generate_data()
    single_update_perceptron_learning()
    batch_update_perceptron_learning()
    # Compare convergence: misclassification count vs. iteration number.
    add_plotting_data(error_log_single, 'None', 'blue', "Single Update")
    add_plotting_data(error_log_batch, 'None', 'red', "Batch Update")
    plot_curve("Error Rate", False, ["Iterations", "Misclassified Points"])
    plt.show()
    # Keep interactive figures on screen briefly before the script exits.
    plt.pause(20)