import random

# Project-local helper modules: io provides readDataset/splitTestData,
# log provides hl/d, net builds and runs the network, and nnlib holds the
# activation-gradient functions.
import io
import log
import net
import nnlib


def test_sig_grad():
    assert nnlib.sig_grad(1, 1) == 0
    assert nnlib.sig_grad(1, 2) == -2
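
# The asserted values above match the usual sigmoid-derivative chain rule,
# so nnlib.sig_grad presumably computes something like the sketch below
# (an assumption; the actual definition lives in nnlib):
#
#     def sig_grad(grad, out):
#         # sigmoid derivative at output value `out`, scaled by the
#         # incoming gradient
#         return grad * out * (1 - out)
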
def main():
    """
    eta and alpha are parameters that can be tweaked.
    """
    eta = 0.90
    alpha = 0.24

    """
    maxIter is the limit on how many times the data can be rerun and the
    weights adjusted before giving up.
    """
    maxIter = 500000

    """
    stop_err is the acceptable error threshold: training stops once the
    error drops below this value, even if maxIter has not yet been reached.
    """
    stop_err = 0.001

    """
    The data set is split into: [[input], [output]]
    """
    dataset = input("Name of dataset: ")
    data = io.readDataset("data/%s.data" % dataset)

    inodes = 1  # This must match the number of input fields in the data set
    hnodes = 4
    onodes = 1  # This must match the number of output fields in the data set

    """
    The bias node will be placed at the end of the node array and will have
    connections to all nodes in the hidden layer and the output layer.
    """
    b = inodes + hnodes + onodes
    # input->hidden weights + hidden bias weights
    # + hidden->output weights + output bias weights
    wtot = inodes*hnodes + hnodes + hnodes*onodes + onodes

    """
    The initial weight value for each connection is set randomly and later
    changed by the backpropagation.
    """
    weights = [random.random() for _ in range(wtot)]

    """
    The following are index ranges for the different layers. Do not change!
    """
    ilayer = range(0, inodes)
    hlayer = range(inodes, inodes + hnodes)
    olayer = range(inodes + hnodes, b)

    """
    If debug is turned off here, it is only turned on during the last
    iteration. Otherwise, the debug information is printed for each entry
    in the data set on every pass.
    """
    debug = False

    con = [[], []]

    """
    Add connections between the nodes in the hidden layer and the nodes in
    the input layer, and likewise between the output and hidden layers.
    """
    net.connect_nodes(0, hlayer, ilayer, con, weights, b)
    net.connect_nodes(1, olayer, hlayer, con, weights, b)

    lastIter = False
    err = 0
    minNErr = 1
    idata = False

    for times in range(maxIter):
        ###########################
        # Propagating
        ###########################
        if err != 0 and err < stop_err:
            lastIter = True
            #debug = True
            idata = []

        random.shuffle(data)

        """
        Testing all rows of the data set
        """
        (err, t, nodes) = net.testData(data, False, lastIter, idata, b,
                                       ilayer, hlayer, olayer, debug, con,
                                       inodes, hnodes)

        if err < minNErr:
            minNErr = err
            print(log.hl("%i \t> Error = %.8f\n" % (times, err), 1))

        if lastIter:
            for ida in idata:
                print(ida)
            log.d(debug, " Stopped after %s iterations.\n\n"
                  % log.hl(times + 1, 1))
            break

        ###########################
        # Backpropagating
        ###########################
        """
        Inserting gradients.
        """
        hGrad = net.mkarr(b)
        for i in olayer:
            hGrad[i] = nnlib.tanh_grad(t[i - (inodes + hnodes)], nodes[i])
        for c in con[1]:
            c.backpropagate(hGrad)
        for i in hlayer:
            hGrad[i] = nnlib.sig_grad(hGrad[i], nodes[i])
            #log.d(debug, " hGrad[%d] = %.2f" % (i, hGrad[i]))
        for c in con[0]:
            c.backpropagate(hGrad)

        net.adjust_weights(1, eta, hGrad, nodes, alpha, con)
        net.adjust_weights(0, eta, hGrad, nodes, alpha, con)

    try:
        testdata = io.readDataset("data/%s.test" % dataset)
        (err, t, nodes) = net.testData(testdata, True, False, [], b, ilayer,
                                       hlayer, olayer, debug, con, inodes,
                                       hnodes)
        print(log.hl("\nRunning Testdata...", 2))
        print("--------------------------------")
        print("MAE: %.8f\n" % err)

        while True:
            inrow = io.splitTestData(input("Manual input:"))
            for new_data in inrow:
                print("In: %.4f" % new_data)
            nodes = net.propagateData(inrow, b, ilayer, hlayer, olayer,
                                      debug, con)
            for i in olayer:
                print("Out: %.4f" % nodes[i])
    except KeyboardInterrupt:
        print("Quitting...")
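
# Conventional entry-point guard; assumes this file is meant to be run
# directly as a training script.
if __name__ == "__main__":
    main()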