# Tail of the plot_decision_regions helper followed by the AdalineSGD training
# script that uses it. The function header, marker/colormap setup, axis ranges,
# and imports are reconstructed here so the fragment runs as written.
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap

from adalineSGD import AdalineSGD


def plot_decision_regions(X, y, classifier, resolution=0.02):
    markers = ('s', 'x', 'o', '^', 'v')
    colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
    cmap = ListedColormap(colors[:len(np.unique(y))])

    # plot the decision surface
    x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                           np.arange(x2_min, x2_max, resolution))
    Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
    Z = Z.reshape(xx1.shape)
    plt.contourf(xx1, xx2, Z, alpha=0.4, cmap=cmap)
    plt.xlim(xx1.min(), xx1.max())
    plt.ylim(xx2.min(), xx2.max())

    # plot class samples
    for idx, cl in enumerate(np.unique(y)):
        plt.scatter(x=X[y == cl, 0], y=X[y == cl, 1], alpha=0.8,
                    c=cmap(idx), marker=markers[idx], label=cl)


# X_std and y are assumed to be the standardized features and encoded labels
# prepared earlier in this script.
ada = AdalineSGD(n_iter=15, eta=0.01, random_state=1)
ada.fit(X_std, y)

plot_decision_regions(X_std, y, classifier=ada)
plt.title('Adaline - Stochastic Gradient Descent')
plt.xlabel('sepal length [standardized]')
plt.ylabel('petal length [standardized]')
plt.legend(loc='upper left')
plt.show()

plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Average Cost')
plt.show()
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

from adalineSGD import AdalineSGD
from mainPerceptron import plot_decision_regions

# Read the Iris data
df = pd.read_csv('http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
                 header=None)

# Extract the first 100 class labels and encode them as -1 / 1
y = df.iloc[0:100, 4].values
print(y)
y = np.where(y == 'Iris-setosa', -1, 1)
print(y)

# Use sepal length and petal length as features, then standardize them
X = df.iloc[0:100, [0, 2]].values
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

ada = AdalineSGD(n_iter=15, eta=0.01, random_state=1)
ada.fit(X_std, y)

plot_decision_regions(X_std, y, classifier=ada)
plt.title('Adaline - Stochastic Gradient Descent')
plt.xlabel('sepal length [standardized]')
plt.ylabel('petal length [standardized]')
plt.legend(loc='upper left')
plt.show()

plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Average Cost')
plt.show()
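# The adalineSGD module itself is not included in this section. Below is a
# minimal sketch of what the imported AdalineSGD class presumably looks like,
# based only on how it is used here (eta/n_iter/random_state constructor,
# fit(), predict(), and a cost_ list of per-epoch average costs); the actual
# implementation in this repository may differ.
import numpy as np


class AdalineSGD:
    """ADAptive LInear NEuron trained with stochastic gradient descent."""

    def __init__(self, eta=0.01, n_iter=10, shuffle=True, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.shuffle = shuffle
        self.random_state = random_state

    def fit(self, X, y):
        rgen = np.random.RandomState(self.random_state)
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            if self.shuffle:
                r = rgen.permutation(len(y))
                X, y = X[r], y[r]
            cost = []
            for xi, target in zip(X, y):
                # update the weights one sample at a time
                error = target - self.net_input(xi)
                self.w_[1:] += self.eta * xi * error
                self.w_[0] += self.eta * error
                cost.append(0.5 * error ** 2)
            self.cost_.append(sum(cost) / len(y))  # average cost per epoch
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        return np.where(self.net_input(X) >= 0.0, 1, -1)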
perceptron algorithm and look at the number of errors made during each epoch.
"""
import numpy as np
import matplotlib.pyplot as plt

from adalineGD import AdalineGD
from adalineSGD import AdalineSGD

# Standardize the data (X and y are assumed to be loaded earlier in this file)
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

# Train the batch and stochastic gradient descent variants
adln = AdalineGD(eta=0.01, n_iter=25)
adln.fit(X_std, y)

adln1 = AdalineSGD(eta=0.01, n_iter=25)
adln1.fit(X_std, y)

# Compare the per-epoch error curves of the two variants
plt.plot(range(1, len(adln1.errors_) + 1), adln1.errors_,
         marker='x', color='red', label='AdalineSGD')
plt.plot(range(1, len(adln.errors_) + 1), adln.errors_,
         marker='o', color='blue', label='AdalineGD')
plt.xlabel('Epoch #')
plt.ylabel('Errors')
plt.legend()
plt.show()
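# The adalineGD module is also not shown. As a point of reference, here is a
# minimal sketch of a batch-gradient-descent Adaline that, in addition to the
# usual per-epoch cost_, records the per-epoch misclassification count in
# errors_ as the comparison script above expects. The attribute names and the
# errors_ bookkeeping are assumptions inferred from that usage, not the
# repository's actual code.
import numpy as np


class AdalineGD:
    """ADAptive LInear NEuron trained with full-batch gradient descent."""

    def __init__(self, eta=0.01, n_iter=50):
        self.eta = eta
        self.n_iter = n_iter

    def fit(self, X, y):
        self.w_ = np.zeros(1 + X.shape[1])
        self.cost_ = []
        self.errors_ = []
        for _ in range(self.n_iter):
            # one weight update per epoch, computed over the whole batch
            errors = y - self.net_input(X)
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            self.cost_.append((errors ** 2).sum() / 2.0)
            # number of misclassified samples this epoch
            self.errors_.append(int((self.predict(X) != y).sum()))
        return self

    def net_input(self, X):
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        return np.where(self.net_input(X) >= 0.0, 1, -1)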
import numpy as np

from init_obj import create_universe, prepare_data, show_universe, test_model
from adaline import Adaline
from adalineSGD import AdalineSGD

# Build the synthetic data set and encode class 'A' as 1, everything else as -1
groups = create_universe()
X, Y = prepare_data(groups)
# show_universe(groups)
y = np.where(Y == 'A', 1, -1)

model = Adaline(0.0001, 50)
model.fit(X, y)

# stochastic gradient descent variant
model2 = AdalineSGD(0.0001, 50)
model2.fit(X, y)

test_model(X, model, model2)
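# init_obj is not part of this section. The sketch below shows one plausible
# shape for its helpers, purely to make the script above self-contained for
# experimentation: create_universe returns two labeled 2-D point clouds,
# prepare_data flattens them into a feature matrix and a label vector, and
# test_model visualizes each classifier's predictions. All of these
# signatures and behaviors are assumptions, not the repository's actual code.
import numpy as np
import matplotlib.pyplot as plt


def create_universe(n_per_group=50, seed=1):
    # two well-separated Gaussian blobs labeled 'A' and 'B'
    rng = np.random.RandomState(seed)
    return {
        'A': rng.normal(loc=[2.0, 2.0], scale=0.75, size=(n_per_group, 2)),
        'B': rng.normal(loc=[-2.0, -2.0], scale=0.75, size=(n_per_group, 2)),
    }


def prepare_data(groups):
    # stack the groups into a feature matrix X and a string label vector Y
    X = np.vstack([pts for pts in groups.values()])
    Y = np.concatenate([[label] * len(pts) for label, pts in groups.items()])
    return X, Y


def test_model(X, *models):
    # scatter-plot each model's predicted class for every point in X
    fig, axes = plt.subplots(1, len(models), figsize=(5 * len(models), 4))
    for ax, m in zip(np.atleast_1d(axes), models):
        pred = m.predict(X)
        ax.scatter(X[:, 0], X[:, 1], c=pred, cmap='coolwarm', edgecolor='k')
        ax.set_title(type(m).__name__)
    plt.show()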
# (Tail of a plot_decision_regions helper; the file's imports and the rest of
# the function definition precede this fragment:)
#     marker=markers[idx], label=cl)

# read the data
df = pd.read_csv(
    'https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data',
    header=None)

y = df.iloc[0:100, 4].values
y = np.where(y == 'Iris-setosa', -1, 1)
x = df.iloc[0:100, [0, 2]].values

# standardize the features
x_std = np.copy(x)
x_std[:, 0] = (x[:, 0] - x[:, 0].mean()) / x[:, 0].std()
x_std[:, 1] = (x[:, 1] - x[:, 1].mean()) / x[:, 1].std()

ada = AdalineSGD(n_iter=15, eta=0.01, random_state=1)
ada.fit(x_std, y)

plot_decision_regions(x_std, y, classifier=ada)
plt.title('Adaline - Stochastic Gradient Descent')
plt.xlabel('sepal length [standardized]')
plt.ylabel('petal length [standardized]')
plt.legend(loc='upper left')
plt.show()

plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Average Cost')
plt.show()
# (X_std, y, and the batch gradient descent model `ada` are fitted earlier in
# this file; fc is the local module providing the decision-region helper.)
plt.legend(loc='upper left')
plt.tight_layout()
# plt.savefig('./adaline_2.png', dpi=300)
plt.show()

plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
plt.xlabel('Epochs (GD)')
plt.ylabel('Sum-squared error (GD)')
plt.tight_layout()
plt.show()

print("=======================ADALINE=========================")
print("=========================SGD===========================")

adaSGD = AdalineSGD(n_iter=15, eta=0.01, random_state=1)
adaSGD.fit(X_std, y)

fc.plot_decision_regions(X_std, y, classifier=adaSGD)
plt.title('AdalineSGD - Stochastic Gradient Descent')
plt.xlabel('sepal length [standardized] (SGD)')
plt.ylabel('petal length [standardized] (SGD)')
plt.legend(loc='upper left')
plt.tight_layout()
# plt.savefig('./adaline_4.png', dpi=300)
plt.show()

plt.plot(range(1, len(adaSGD.cost_) + 1), adaSGD.cost_, marker='o')
plt.xlabel('Epochs (SGD)')
plt.ylabel('Average Cost (SGD)')
plt.tight_layout()