# Course environment setup cell (reconstructed from a collapsed notebook export):
# selects the week's assets, wires up local helper modules, links preloaded
# Keras resources, and prints the TF/Keras versions for sanity checking.

# please, uncomment the week you're working on
# setup_google_colab.setup_week1()
# setup_google_colab.setup_week2()
# NOTE(review): `setup_google_colab` is never imported in this view — presumably
# a prior notebook cell does `import setup_google_colab`; confirm before running.
setup_google_colab.setup_week3()
# setup_google_colab.setup_week4()
# setup_google_colab.setup_week5()
# setup_google_colab.setup_week6()

import sys
sys.path.append("..")  # course helper modules live one directory up
import grading
import download_utils

# !!! remember to clear session/graph if you rebuild your graph to avoid out-of-memory errors !!!
download_utils.link_all_keras_resources()

import tensorflow as tf
import keras
from keras import backend as K
import numpy as np
# %matplotlib inline
import matplotlib.pyplot as plt

# Sanity check: confirm which framework versions the environment resolved.
print(tf.__version__)
print(keras.__version__)

import grading_utils
import keras_utils
from keras_utils import reset_tf_session

"""# Fill in your Coursera token and email

To successfully submit your answers to our grader, please fill in your Coursera submission token and email
"""
Your very own neural network In this notebook we're going to build a neural network using naught but pure numpy and steel nerves. It's going to be fun, I promise! In [1]: import sys sys.path.append("..") import tqdm_utils import download_utils In [2]: # use the preloaded keras datasets and models download_utils.link_all_keras_resources() In [3]: from __future__ import print_function import numpy as np np.random.seed(42) Here goes our main class: a layer that can do .forward() and .backward() passes. In [4]: class Layer: """ A building block. Each layer is capable of performing two things: - Process input to get output: output = layer.forward(input) - Propagate gradients through itself: grad_input = layer.backward(input, grad_output) Some layers also have learnable parameters which they update during layer.backward. """ def __init__(self):