def CNNForward(model, x):
    """Runs the forward pass.

    Args:
        model: Dictionary of all the weights.
        x: Input to the network.

    Returns:
        var: Dictionary of all intermediate variables.
    """
    x = x.reshape([-1, 48, 48, 1])
    h1c = Conv2D(x, model['W1']) + model['b1']
    h1r = ReLU(h1c)
    h1p = MaxPool(h1r, 3)
    h2c = Conv2D(h1p, model['W2']) + model['b2']
    h2r = ReLU(h2c)
    h2p = MaxPool(h2r, 2)
    h2p_ = np.reshape(h2p, [x.shape[0], -1])
    y = Affine(h2p_, model['W3'], model['b3'])
    var = {
        'x': x,
        'h1c': h1c,
        'h1r': h1r,
        'h1p': h1p,
        'h2c': h2c,
        'h2r': h2r,
        'h2p': h2p,
        'h2p_': h2p_,
        'y': y
    }
    return var
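# MaxPool(h1r, 3) shrinks the 48x48 maps to 16x16 and the second pool halves them
# again to 8x8 before the affine classifier (assuming Conv2D preserves spatial size).
# A minimal, self-contained sketch of what a non-overlapping pooling helper like
# this might look like; this is only a guess, the real MaxPool may differ:
import numpy as np

def max_pool_sketch(h, ratio):
    """Non-overlapping max pooling over the spatial axes of an NHWC tensor."""
    n, height, width, c = h.shape
    blocks = h.reshape(n, height // ratio, ratio, width // ratio, ratio, c)
    return blocks.max(axis=(2, 4))

h1r = np.random.randn(2, 48, 48, 8)    # stand-in for the first-layer activations
print(max_pool_sketch(h1r, 3).shape)   # (2, 16, 16, 8)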
def gen(Z, w, w1, w2, w3):
    # Project the noise vector Z through two fully connected layers.
    h0 = ReLU(batchnorm(T.dot(Z, w)))
    h1 = ReLU(batchnorm(T.dot(h0, w1)))
    # Reshape to a stack of 7x7 feature maps.
    h1_output = h1.reshape((h1.shape[0], nkerns[2], 7, 7))
    # Nearest-neighbour upsample to 14x14, then convolve ('half' keeps the size).
    h2_input = repeat(repeat(h1_output, 2, 2), 2, 3)
    h2 = ReLU(batchnorm(conv2d(h2_input, w2, border_mode='half')))
    # Upsample to 28x28 and produce the tanh-scaled output image.
    h3_input = repeat(repeat(h2, 2, 2), 2, 3)
    h3 = T.tanh(conv2d(h3_input, w3, border_mode='half'))
    return h3
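# The repeat(repeat(., 2, 2), 2, 3) pattern is nearest-neighbour upsampling: every
# activation is duplicated along the height axis (2) and width axis (3), growing the
# 7x7 maps to 14x14 and then 28x28 before the tanh output convolution. Theano's
# repeat follows NumPy's repeats/axis semantics, so the operation can be checked
# directly in NumPy:
import numpy as np

h = np.arange(4).reshape(1, 1, 2, 2)                 # a single 2x2 feature map
up = np.repeat(np.repeat(h, 2, axis=2), 2, axis=3)   # duplicate rows, then columns
print(up.shape)      # (1, 1, 4, 4)
print(up[0, 0])
# [[0 0 1 1]
#  [0 0 1 1]
#  [2 2 3 3]
#  [2 2 3 3]]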
def gen(Z, w, w1, w2, w3, w4):
    # Project the noise vector Z and reshape to a stack of 4x4 feature maps.
    h0 = ReLU(batchnorm(T.dot(Z, w)))
    h1_input = h0.reshape((h0.shape[0], nkerns[3], 4, 4))
    h1 = ReLU(batchnorm(conv2d(h1_input, w1, border_mode='half')))
    # Three nearest-neighbour upsampling stages: 4x4 -> 8x8 -> 16x16 -> 32x32.
    h2_input = repeat(repeat(h1, 2, 2), 2, 3)
    h2 = ReLU(batchnorm(conv2d(h2_input, w2, border_mode='half')))
    h3_input = repeat(repeat(h2, 2, 2), 2, 3)
    h3 = ReLU(batchnorm(conv2d(h3_input, w3, border_mode='half')))
    h4_input = repeat(repeat(h3, 2, 2), 2, 3)
    h4 = T.tanh(conv2d(h4_input, w4, border_mode='half'))
    return h4
def __init__(self):
    self.layers = [
        Conv2d(1, 6, 5, padding=2),
        ReLU(),
        MaxPool2d(2, 2),
        Conv2d(6, 16, 5),
        ReLU(),
        MaxPool2d(2, 2),
        Flatten(),
        Linear(400, 120),
        ReLU(),
        Linear(120, 84),
        ReLU(),
        Linear(84, 10)
    ]
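# The 400 inputs to the first Linear layer follow from MNIST-sized 1x28x28 images
# (an assumption here): padding=2 keeps the first 5x5 conv at 28x28, pooling halves
# it to 14x14, the unpadded second 5x5 conv gives 10x10, and the final pool leaves
# 16 * 5 * 5 = 400 features. A shape check of the same stack rebuilt on torch.nn,
# assuming the custom layers above mirror PyTorch's semantics:
import torch
import torch.nn as nn

layers = nn.Sequential(
    nn.Conv2d(1, 6, 5, padding=2), nn.ReLU(), nn.MaxPool2d(2, 2),   # 1x28x28 -> 6x14x14
    nn.Conv2d(6, 16, 5), nn.ReLU(), nn.MaxPool2d(2, 2),             # -> 16x5x5
    nn.Flatten(),
    nn.Linear(400, 120), nn.ReLU(),
    nn.Linear(120, 84), nn.ReLU(),
    nn.Linear(84, 10),
)

x = torch.zeros(1, 1, 28, 28)   # dummy MNIST-sized input (assumption)
print(layers(x).shape)          # torch.Size([1, 10])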