def demoLinear():
    """Demo: fit a small linear network on synthetic linear data and plot its cost curve."""
    data = eaDataLinear([2, -3.4], [4.2], m=1000)
    net = eaNetLinear(data, L=1, n=4)
    nnInitWb(net)
    nnFit(data, net, learn_rate=0.02)
    # Show a short summary of the trained net, then the training-cost plot.
    Ea.show(net, 3)
    plotCost("Linear Costs", net.costs, netInfo=eaNetInfo(net))
def demoLogistics():
    """Demo: train a logistic network on the ring dataset, then plot predictions and cost."""
    data = eaDataTFRing()
    net = eaNetLogistics(data)
    nnInitWb(net)
    nnFit(data, net, learn_rate=0.1)
    # Summarize the trained net, visualize the decision regions, then the cost curve.
    Ea.show(net, 3)
    plotPredict(net, data.X, data.Y, cmap=['#0099CC', '#FF6666', '#6622FF'])
    plotCost("Logistics Costs", net.costs, netInfo=eaNetInfo(net))
def debug(self):
    """Dump the detected keypoints as plain Ea records for interactive inspection.

    Prints the help text for the keypoint type once, then mirrors each
    keypoint's attributes (angle, class_id, octave, pt, response, size)
    into an Ea object and shows the collection.
    """
    # help() already writes its text to stdout and returns None; the
    # original print(help(...)) therefore printed a stray "None" line.
    help(self.keypoints[0])
    pts = []
    for pt in self.keypoints:
        ePt = Ea()
        ePt.angle = pt.angle
        ePt.class_id = pt.class_id
        ePt.octave = pt.octave
        ePt.pt = (pt.pt[0], pt.pt[1])
        ePt.response = pt.response
        ePt.size = pt.size
        pts.append(ePt)
    Ea.show(pts, 500)
    # Tail of a function whose `def` lies above this chunk — presumably a
    # derivative that is constant 1 (e.g. dLinear); TODO confirm against
    # the full file. Left untouched.
    return 1


# Registry of activation functions: each entry bundles the forward
# function `g`, its derivative `dg`, and a display `name`.
eaActivFun = Ea()
eaActivFun.linear.g = linear
eaActivFun.linear.dg = dLinear
eaActivFun.linear.name = 'linear'
eaActivFun.sigmoid.g = sigmoid
eaActivFun.sigmoid.dg = dSigmoid
eaActivFun.sigmoid.name = 'sigmoid'
eaActivFun.tanh.g = tanh
eaActivFun.tanh.dg = dTanh
eaActivFun.tanh.name = 'tanh'
eaActivFun.relu.g = relu
eaActivFun.relu.dg = dRelu
eaActivFun.relu.name = 'relu'
eaActivFun.leakyRelu.g = leakyRelu
eaActivFun.leakyRelu.dg = dLeakyRelu
eaActivFun.leakyRelu.name = 'leakyRelu'
eaActivFun.softmax.g = softmax
eaActivFun.softmax.dg = dSoftmax
eaActivFun.softmax.name = 'softmax'

# Running this module directly prints the registry for a quick sanity check.
if __name__ == '__main__':
    Ea.show(eaActivFun)
loss = -1 / m * np.sum( Y * np.log(A) + (1 - Y) * np.log(1 - A), axis=1, keepdims=False) loss = np.sum(loss) return loss # softmax+交叉熵 结果的交叉熵导函数(对应多分类问题) def dCrossSoftmax(A, Y): return A - Y eaCostFun = Ea() eaCostFun.L1.J = bias eaCostFun.L1.dJ = dBias eaCostFun.L1.name = 'bias' eaCostFun.L2.J = variance eaCostFun.L2.dJ = dVariance eaCostFun.L2.name = 'variance' eaCostFun.L3.J = cross eaCostFun.L3.dJ = dCross eaCostFun.L3.name = 'cross' eaCostFun.L4.J = crossSoftmax eaCostFun.L4.dJ = dCrossSoftmax eaCostFun.L4.name = 'cross' if __name__ == '__main__': Ea.show(eaCostFun)