# In[16]:

# Train a 3-layer network with plain mini-batch gradient descent.
layers_dims = [train_X.shape[0], 5, 2, 1]
parameters = model(train_X, train_Y, layers_dims, optimizer="gd")

# Predict
predictions = predict(train_X, train_Y, parameters)

# Plot decision boundary
plt.title("Model with Gradient Descent optimization")
axes = plt.gca()
axes.set_xlim([-1.5, 2.5])
axes.set_ylim([-1, 1.5])
plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y)

# ### 5.2 - Mini-batch gradient descent with momentum
#
# Run the following code to see how the model does with momentum. Because this
# example is relatively simple, the gains from using momentum are small; but
# for more complex problems you might see bigger gains.

# In[17]:

# Train the same 3-layer architecture, now with momentum (beta = 0.9).
layers_dims = [train_X.shape[0], 5, 2, 1]
parameters = model(train_X, train_Y, layers_dims, beta=0.9, optimizer="momentum")
# train 3-layer model layers_dims = [train_X.shape[0], 5, 2, 1] # contains the dimensions of each layer in the network parameters = model( train_X, train_Y, layers_dims, optimizer="gd" ) # configuration variable that is internal to the variable using gradient descent # Predict predictions = predict(train_X, train_Y, parameters) # Plot decision boundary plt.title("Model with Gradient Descent optimization") # set title for the axes axes = plt.gca() # To get the current polar axes axes.set_xlim([-1.5, 2.5]) # set x-axis limits axes.set_ylim([-1, 1.5]) # set y-axis limits plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y) # Generates contour plot ### 5.2) Mini-batch gradient descent with momentum # # Run the following code to see how the model does with momentum. Because this example is relatively simple, the gains from using momemtum are small; # But for more complex problems, might see bigger gains. # # train 3-layer model layers_dims = [train_X.shape[0], 5, 2, 1] parameters = model(train_X, train_Y, layers_dims, beta=0.9, optimizer="momentum")