# Example #1
def fully_connected(X, n_in, n_out, activation=None, name=None):
    """Fully-connected (dense) layer: activation(X @ W + b).

    Parameters
    ----------
    X : graph node carrying the layer input (must expose `.graph`).
    n_in, n_out : int
        Input / output feature counts; weights are (n_in, n_out).
    activation : str or None
        'softmax', 'sigmoid', or 'linear'; any other value (including the
        default None) falls through to Leaky_relu, preserving the original
        fall-through behaviour.
    name : str or None
        Optional scope name; defaults to 'fully_connected'.

    Returns
    -------
    Graph node for the activated (or linear) layer output.
    """
    # Fix: `name` was accepted but ignored; honor it, keeping the old default.
    with scope(name or 'fully_connected', X.graph):  # Not working yet
        W = Parameter(np.random.randn(n_in, n_out), 'Weights')
        b = Parameter(np.random.randn(n_out), 'Bias')
        pre_activation = Matmul(X, W) + b

        if activation == 'softmax':
            return Softmax(pre_activation)
        if activation == 'sigmoid':
            return Sigmoid(pre_activation)
        if activation == 'linear':
            return pre_activation
        # Default path: leaky ReLU for None or any unrecognized string.
        return Leaky_relu(pre_activation)
# Example #2
def cross_entropy(y_estimated, y):
    """Cross-entropy loss node: -sum(y * log(y_estimated))."""
    with scope('Loss : Cross_entropy', y_estimated.graph):
        log_probs = Log(y_estimated)
        return -Reduce_sum(y * log_probs)
# Example #3
def l1(y_estimated, y):
    """L1 loss node: sum of absolute residuals |y_estimated - y|."""
    with scope('Loss : L1', y_estimated.graph):
        residual = y_estimated - y
        return Reduce_sum(Absolute(residual))
# Example #4
def l2(y_estimated, y):
    """L2 loss node: sum of squared residuals (y_estimated - y)**2."""
    with scope('Loss : L2', y_estimated.graph):
        residual = y_estimated - y
        return Reduce_sum(residual ** 2)