def linear_simple(x, W, b=None):
    x, W = as_variable(x), as_variable(W)
    t = matmul(x, W)
    if b is None:
        return t
    y = t + b
    t.data = None  # Release t's data; only the graph structure is needed for backprop
    return y
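# A minimal usage sketch of linear_simple (hypothetical shapes; assumes
# numpy as np and Variable are in scope, e.g. from dezero import Variable):
x = Variable(np.random.randn(4, 3))  # batch of 4 samples with 3 features
W = Variable(np.random.randn(3, 2))  # weights: 3 inputs -> 2 outputs
b = Variable(np.zeros(2))            # bias, broadcast over the batch
y = linear_simple(x, W, b)           # y.shape == (4, 2)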
def softmax_cross_entropy_simple(x, t):
    x, t = as_variable(x), as_variable(t)
    N = x.shape[0]
    p = softmax_simple(x)
    p = clip(p, 1e-15, 1.0)  # To avoid log(0)
    log_p = log(p)
    tlog_p = log_p[np.arange(N), t.data]  # Pick each sample's log-probability of its correct class
    y = -1 * sum(tlog_p) / N  # sum is the framework's sum, not the builtin
    return y
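# A usage sketch of the loss (hypothetical values; assumes np and Variable
# are in scope): each row of x is a score vector, t holds class indices.
x = Variable(np.array([[0.2, 0.4, 0.4],
                       [3.0, 0.1, 0.1]]))
t = np.array([1, 0])  # correct classes for the two samples
loss = softmax_cross_entropy_simple(x, t)
loss.backward()       # gradients flow back through log, clip, and softmax
print(loss.data)      # scalar mean cross-entropy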
def softmax_simple(x, axis=1):
    x = x - x.max(axis=axis, keepdims=True)  # Subtract the max for numerical stability
    x = as_variable(x)
    y = exp(x)
    sum_y = sum(y, axis=axis, keepdims=True)
    return y / sum_y
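# A quick check sketch (hypothetical input): thanks to the max subtraction,
# softmax_simple stays stable even for large scores, and each row sums to 1.
scores = np.array([[1000.0, 1000.0, 999.0],
                   [0.0, 1.0, 2.0]])
p = softmax_simple(scores)
print(p.data.sum(axis=1))  # -> [1. 1.]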
def forward(self, x):
    # x's type here is <class 'numpy.ndarray'>, so compute with NumPy directly
    # rather than wrapping in a Variable inside forward
    y = x - x.max(axis=self.axis, keepdims=True)  # subtract the max for numerical stability
    y = np.exp(y)
    sum_y = y.sum(axis=self.axis, keepdims=True)
    return y / sum_y
def reshape(x, shape):
    if x.shape == shape:
        return as_variable(x)
    return Reshape(shape)(x)
def accuracy(y, t):
    # Operates on .data directly, so the result is not differentiable
    y, t = as_variable(y), as_variable(t)
    pred = y.data.argmax(axis=1).reshape(t.shape)
    result = (pred == t.data)
    acc = result.mean()
    return Variable(as_array(acc))
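# A usage sketch of accuracy (hypothetical values): y holds per-class scores,
# t holds the true class indices; the result is a plain, non-differentiable Variable.
y = np.array([[0.1, 0.8, 0.1],
              [0.9, 0.05, 0.05],
              [0.3, 0.3, 0.4]])
t = np.array([1, 0, 2])
print(accuracy(y, t))  # variable(1.0) -- all three predictions are correct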
def sum_to(x, shape):
    if x.shape == shape:
        return as_variable(x)
    return SumTo(shape)(x)
def broadcast_to(x, shape):
    if x.shape == shape:
        return as_variable(x)
    return BroadcastTo(shape)(x)
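# A sketch showing that sum_to and broadcast_to are counterparts (hypothetical
# shapes): broadcast_to repeats elements, sum_to collapses them back, and in
# DeZero-style autodiff each serves as the backward of the other.
x = Variable(np.array([[1.0, 2.0, 3.0]]))  # shape (1, 3)
y = broadcast_to(x, (4, 3))                # repeat the row 4 times
z = sum_to(y, (1, 3))                      # back to shape (1, 3): each entry is 4x
print(z.data)                              # -> [[ 4.  8. 12.]]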