def L_layer_model(X, Y, layers_dims, learning_rate=0.5, num_iterations=10000, print_cost=False):
    """Train an L-layer neural network with batch gradient descent.

    Parameters
    ----------
    X : array-like
        Input data (presumably shaped (n_features, n_examples) — confirm
        against ``functions.L_model_forward``).
    Y : array-like
        Ground-truth labels matching X.
    layers_dims : list of int
        Layer sizes, including the input layer.
    learning_rate : float
        Gradient-descent step size.
    num_iterations : int
        Number of full forward/backward passes.
    print_cost : bool
        If True, print and record the cost every 100 iterations.

    Returns
    -------
    dict
        Learned parameters produced by ``functions.update_parameters``.
    """
    np.random.seed(1)  # fixed seed so weight initialization is reproducible
    costs = []  # cost history sampled every 100 iterations (kept for debugging/plotting)

    parameters = functions.initialize_parameters_deep(layers_dims)

    for i in range(num_iterations):
        # One full training step: forward pass, cost, backward pass, update.
        AL, caches = functions.L_model_forward(X, parameters)
        cost = functions.compute_cost(AL, Y)
        grads = functions.L_model_backward(AL, Y, caches)
        parameters = functions.update_parameters(parameters, grads, learning_rate)

        # Original code evaluated this identical condition twice (once to
        # print, once to append); a single check does both.
        if print_cost and i % 100 == 0:
            print("Cost after iteration %i: %f" % (i, cost))
            costs.append(cost)

    return parameters
def L_layer_model(
        X, y, layers_dims, learning_rate=0.01, num_iterations=3000,
        print_cost=True, hidden_layers_activation_fn="relu"):
    """Fit an L-layer network by gradient descent and return its parameters.

    Parameters
    ----------
    X : array-like
        Training inputs (shape convention is defined by ``fn.L_model_forward``).
    y : array-like
        Labels corresponding to X.
    layers_dims : list of int
        Sizes of every layer, input layer included.
    learning_rate : float
        Step size for each parameter update.
    num_iterations : int
        Total number of training steps.
    print_cost : bool
        If True, report the cost every 100 steps.
    hidden_layers_activation_fn : str
        Activation used in the hidden layers (e.g. "relu").

    Returns
    -------
    dict
        Trained parameters from ``fn.update_parameters``.

    NOTE(review): this redefines the same public name as the earlier
    ``L_layer_model`` if both live in one module — verify they belong to
    separate files.
    """
    # Seed Python's RNG from system entropy, then derive a numpy seed from
    # it — training is therefore intentionally non-deterministic.
    random.seed(version=2)
    np.random.seed(random.randint(0, 1000))

    parameters = fn.initialize_parameters(layers_dims)

    # 1-based counter so the progress report can use it directly.
    for step in range(1, num_iterations + 1):
        AL, caches = fn.L_model_forward(
            X, parameters, hidden_layers_activation_fn)
        cost = fn.compute_cost(AL, y)
        grads = fn.L_model_backward(AL, y, caches, hidden_layers_activation_fn)
        parameters = fn.update_parameters(parameters, grads, learning_rate)

        if print_cost and step % 100 == 0:
            print(f"The cost after {step} iterations is: {cost:.4f}")

    return parameters