import numpy as np


def fully_connected(X, n_in, n_out, activation=None, name=None):
    """Dense layer: Matmul(X, W) + b, followed by an optional activation.

    Defaults to Leaky_relu when no activation is given; `name` is
    currently unused.
    """
    with scope('fully_connected', X.graph):  # Not working yet
        W = Parameter(np.random.randn(n_in, n_out), 'Weights')
        b = Parameter(np.random.randn(n_out), 'Bias')
        temp = Matmul(X, W) + b
        if activation == 'softmax':
            return Softmax(temp)
        elif activation == 'sigmoid':
            return Sigmoid(temp)
        elif activation == 'linear':
            return temp
        else:
            return Leaky_relu(temp)
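

# Usage sketch (hedged): `Placeholder` below is a hypothetical input-node
# constructor, not defined in this file; any graph node carrying a .graph
# attribute would work in its place.
#
#   X = Placeholder(name='X')                              # hypothetical input node
#   hidden = fully_connected(X, 784, 128)                  # no activation -> Leaky_relu
#   y_hat = fully_connected(hidden, 128, 10, activation='softmax')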


def cross_entropy(y_estimated, y):
    """Cross-entropy loss: -sum(y * log(y_estimated)).

    Assumes y_estimated is strictly positive elementwise (e.g. softmax
    output), since Log(0) is undefined.
    """
    with scope('Loss : Cross_entropy', y_estimated.graph):
        return -Reduce_sum(y * Log(y_estimated))


def l1(y_estimated, y):
    """L1 loss: sum of absolute errors."""
    with scope('Loss : L1', y_estimated.graph):
        return Reduce_sum(Absolute(y_estimated - y))


def l2(y_estimated, y):
    """L2 loss: sum of squared errors."""
    with scope('Loss : L2', y_estimated.graph):
        return Reduce_sum((y_estimated - y)**2)
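

# End-to-end sketch (hedged): `Placeholder` is hypothetical as above;
# fully_connected and cross_entropy are the helpers defined in this file.
#
#   X = Placeholder(name='X')                              # hypothetical input node
#   y = Placeholder(name='y')                              # hypothetical one-hot targets
#   y_hat = fully_connected(X, 784, 10, activation='softmax')
#   loss = cross_entropy(y_hat, y)                         # -Reduce_sum(y * Log(y_hat))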