def accuracy(y, t):
    """Classification accuracy of scores *y* against integer labels *t*.

    Returns a Variable wrapping a scalar; this is a metric only — no
    backpropagation flows through it (works on raw ``.data``).
    """
    y, t = as_variable(y), as_variable(t)
    # Predicted class index per sample, reshaped to line up with the labels.
    pred = y.data.argmax(axis=1).reshape(t.shape)
    acc = (pred == t.data).mean()
    return Variable(as_array(acc))
def linear_simple(x, W, b=None):
    """Affine transform ``x @ W`` plus optional bias *b*."""
    x, W = as_variable(x), as_variable(W)
    h = matmul(x, W)
    if b is None:
        return h
    out = h + b
    # Drop the intermediate's forward data to save memory; the graph link
    # needed for backprop is kept.
    h.data = None
    return out
def softmax_cross_entropy_simple(x, t):
    """Softmax cross-entropy loss averaged over the batch.

    x: class scores, shape (N, num_classes); t: integer class labels, shape (N,).
    Returns a scalar Variable.

    Fix: the previous version converted the probabilities to a raw ndarray
    (``as_array``) and finished with NumPy ops, which detached the result from
    the computation graph — gradients could never flow back to *x*.  Keep the
    whole chain in framework ops instead.
    NOTE(review): assumes framework ``clip`` and ``log`` functions are in scope
    elsewhere in this file, like the ``exp``/``sum`` ops used above — confirm.
    """
    x, t = as_variable(x), as_variable(t)
    N = x.shape[0]
    p = softmax_simple(x)
    p = clip(p, 1e-15, 1.0)  # guard against log(0)
    log_p = log(p)
    # Pick each sample's log-probability of its true class (integer indexing).
    tlog_p = log_p[np.arange(N), t.data]
    y = -1 * sum(tlog_p) / N
    return y
def softmax_simple(x, axis=1):
    """Softmax of *x* along *axis*, built from differentiable framework ops.

    NOTE(review): no max-subtraction, so very large scores can overflow in
    ``exp`` — presumably an intentional simplification of this "simple" version.
    """
    x = as_variable(x)
    e = exp(x)
    total = sum(e, axis=axis, keepdims=True)
    return e / total
def sigmoid_simple(x):
    """Elementwise logistic sigmoid ``1 / (1 + e^(-x))`` as framework ops."""
    x = as_variable(x)
    return 1 / (1 + exp(-x))
def sum_to(x, shape):
    """Sum the elements of *x* down to *shape*; identity when shapes match."""
    if x.shape != shape:
        return SumTo(shape)(x)
    return as_variable(x)
def broadcast_to(x, shape):
    """Broadcast *x* to *shape*; identity when shapes already match."""
    if x.shape != shape:
        return BroadcastTo(shape)(x)
    return as_variable(x)
def reshape(x, shape):
    """Reshape *x* to *shape*.

    Fix: the shortcut previously tested ``x.shape == None``, which is never
    true (``shape`` is a tuple), so the no-op fast path never fired.  Compare
    against the requested *shape* instead, mirroring sum_to/broadcast_to.
    """
    if x.shape == shape:
        return as_variable(x)
    return Reshape(shape)(x)