def calculate_loss(self, x, y):
    # Run the full forward pass, then average the softmax cross-entropy
    # loss of each time step's output scores (layer.mulv) against its
    # target label y[i].
    output = Softmax()
    layers = self.forward_propagation(x)
    loss = 0.0
    for i, layer in enumerate(layers):
        loss += output.loss(layer.mulv, y[i])
    return loss / float(len(y))
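Both versions of calculate_loss rely on a Softmax object whose loss method returns the cross-entropy of a set of raw scores against the true class labels. A minimal sketch of such a class is shown below; the predict helper and the exact numerical details are assumptions for illustration, not necessarily the implementation used elsewhere in this post.

import numpy as np

class Softmax:
    def predict(self, x):
        # Subtract the row-wise max before exponentiating for numerical
        # stability, then normalize into probabilities.
        exp_scores = np.exp(x - np.max(x, axis=-1, keepdims=True))
        return exp_scores / np.sum(exp_scores, axis=-1, keepdims=True)

    def loss(self, x, y):
        # Average negative log-likelihood of the correct classes.
        # Accepts a single score vector with an integer label (the RNN
        # case) or a batch of score rows with a label per row.
        probs = self.predict(np.atleast_2d(x))
        y = np.atleast_1d(y)
        correct_logprobs = -np.log(probs[np.arange(len(y)), y])
        return np.sum(correct_logprobs) / len(y)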
def calculate_loss(self, X, y):
    # Forward-propagate X through every layer: multiply by the layer's
    # weights, add its bias, apply the tanh activation, and feed the
    # result to the next layer. The final activations are scored with
    # the softmax cross-entropy loss.
    mulGate = MultiplyGate()
    addGate = AddGate()
    layer = Tanh()
    softmaxOutput = Softmax()
    input = X
    for i in range(len(self.W)):
        mul = mulGate.forward(self.W[i], input)
        add = addGate.forward(mul, self.b[i])
        input = layer.forward(add)
    return softmaxOutput.loss(input, y)
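This feedforward version assumes MultiplyGate, AddGate, and Tanh objects whose forward methods together compute one linear layer followed by a tanh nonlinearity. A minimal sketch of those forward passes is given below, assuming X is shaped (num_examples, in_dim), each W[i] is (in_dim, out_dim), and each b[i] is a bias row vector; the backward passes needed for training are omitted.

import numpy as np

class MultiplyGate:
    def forward(self, W, X):
        # Linear transform of every example in the batch.
        return np.dot(X, W)

class AddGate:
    def forward(self, mul, b):
        # Broadcast the bias row vector across all examples.
        return mul + b

class Tanh:
    def forward(self, x):
        # Element-wise tanh activation.
        return np.tanh(x)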