Example #1
def main():
    m = 5
    numInputs = 2

    #refX, refY = genAndSaveData(numInputs, m)
    refX, refY = loadData()

    refDataX = NNData(numInputs, m)
    refDataY = NNData(1, m)
    refDataX.data = refX
    refDataY.data = refY

    refDataX.mPrint()
    refDataY.mPrint()

    net = NNetwork()
    layer0 = NNInput(net, 'Input0', 2)

    layer1 = NNInnerProduct(net, 'InnerProduct1', 4)
    layer2 = NNRelu(net, 'NNRelu1', 4)

    layer3 = NNInnerProduct(net, 'InnerProduct2', 1)
    layer4 = NNSigmoid(net, 'NNSigmoid1', 1)

    net.initWeights()
    net.forward(refDataX)
    print '==============================forward done=============================='

    y, yHat, loss = net.computeLoss(refDataY)

    net.backprop(y, yHat)
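
For reference, a minimal NumPy-only sketch of the forward pass this example assembles (2 inputs -> 4-unit inner product -> ReLU -> 1-unit inner product -> sigmoid). The array names and shapes below are illustrative and are not part of the NNetwork/NNData API:

import numpy as np

X = np.random.rand(2, 5)                          # numInputs = 2, m = 5 examples as columns
W1, B1 = np.random.rand(4, 2), np.zeros((4, 1))   # InnerProduct1: 2 -> 4
W2, B2 = np.random.rand(1, 4), np.zeros((1, 1))   # InnerProduct2: 4 -> 1

Z1 = np.matmul(W1, X) + B1         # affine transform
A1 = np.maximum(Z1, 0)             # ReLU
Z2 = np.matmul(W2, A1) + B2        # affine transform
yHat = 1.0 / (1.0 + np.exp(-Z2))   # sigmoid output, shape (1, 5)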
Example #2
def initWeights(self):
    print self.name, "Initializing Weights, biases"
    self.W = NNData(self.nlOut, self.nlIn)
    self.B = NNData(self.nlOut, 1)
    self.W.rand()
    #self.W.ones();
    #self.B.data[0,0]=10;
    #print self.W.data.shape;
    self.W.mPrint()
    self.B.mPrint()
Example #3
class NNInnerProduct(NNLayer):
    def __init__(self, network, name, nlOut):
        NNLayer.__init__(self, network, name, nlOut)

    def initWeights(self):
        print self.name, "Initializing Weights, biases"
        self.W = NNData(self.nlOut, self.nlIn)
        self.B = NNData(self.nlOut, 1)
        self.W.rand()
        #self.W.ones();
        #self.B.data[0,0]=10;
        #print self.W.data.shape;
        self.W.mPrint()
        self.B.mPrint()

    def forward(self, X):
        self.outData = self.W * X + self.B
        return self.outData

    def backprop(self, dGlobal):
        m = dGlobal.shape[1]
        W = self.W.data
        dGlobalNew = 1.0 / m * np.matmul(W.T, dGlobal)
        #self.dw = NNData(self.nlOut, self.nlIn)
        #self.db = NNData(self.nlOut,1);

        return dGlobalNew
Example #4
def nndata_deserializer(obj):
    if "__NNData__" in obj:
        item = obj["__NNData__"]
        return_obj = NNData(item["x"], item["y"])
        return_obj.train_pool = item["train_pool"]
        return_obj.test_pool = item["test_pool"]
        # attribute names below are assumed to mirror the serialized keys
        return_obj.train_indices = item["train_indices"]
        return_obj.test_indices = item["test_indices"]
        return_obj.train_percentage = item["train_percentage"]
        return return_obj
    if "__deque__" in obj:
        item = obj["__deque__"]
        return deque(item)
    return obj
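
The object_hook pattern used above is standard json behavior: json.loads passes every decoded dict to the hook, which rebuilds the original object when it finds the wrapper key. A self-contained sketch of the same round trip with a toy Point class (hypothetical, not the real NNData):

import json

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

def point_deserializer(obj):
    # rebuild a Point whenever the tagged wrapper dict is seen
    if "__Point__" in obj:
        item = obj["__Point__"]
        return Point(item["x"], item["y"])
    return obj

p = json.loads('{"__Point__": {"x": 1, "y": 2}}', object_hook=point_deserializer)
print(p.x)  # 1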
Example #5
def main():
    xor_x = [[0, 0], [1, 0], [0, 1], [1, 1]]
    xor_y = [[0], [1], [1], [0]]
    xor_data = NNData(xor_x, xor_y, 100)

    xor_data_encoded = json.dumps(xor_data, cls=NNDataSerializer)
    xor_data_decoded = json.loads(xor_data_encoded,
                                  object_hook=nndata_deserializer)

    network = FFBPNetwork(2, 1)
    network.add_hidden_layer(3)
    network.train(xor_data_decoded,
                  10001,
                  order=NNData.Order.RANDOM,
                  verbosity=2)

    network = FFBPNetwork(1, 1)
    network.add_hidden_layer(3)
    data = json.loads(load_sin_data(), object_hook=nndata_deserializer)
    network.train(data, 10001, order=NNData.Order.RANDOM)
    network.test(data)
Example #6
def testNetwork(net, test_set_x, test_set_y):
    numInputs = test_set_x.shape[0]
    m = test_set_x.shape[1]
    print "M = ", m
    nnTestX = NNData(numInputs, m)
    nnTestY = NNData(1, m)
    nnTestX.data = test_set_x
    nnTestY.data = test_set_y

    net.loadWeights()

    y = test_set_y
    yHat = net.forward(nnTestX)

    yHat[yHat > 0.5] = 1.0
    yHat[yHat <= 0.5] = 0.0
    err = np.sum(np.abs(y - yHat))
    print "NumErrors", err, err / m * 100, " Correct Pred Percent = ", 100 - err / m * 100
Example #7
def trainNetwork(net, nIterations, alpha, train_set_x, train_set_y):
    numInputs = train_set_x.shape[0]
    m = train_set_x.shape[1]
    refDataX = NNData(numInputs, m)
    refDataY = NNData(1, m)
    refDataX.data = train_set_x
    refDataY.data = train_set_y

    net.initWeights()

    # debug scaffold: uncomment to run a gradient check and stop before training
    #net.gradientCheck(alpha, refDataX, refDataY)
    #exit()
    JArr = []
    prevJ = 1e6
    for i in range(nIterations):
        net.forward(refDataX)

        y, yHat, loss, J = net.computeLoss(refDataY)
        alpha = heardEnter(alpha)
        if (i % 100 == 0):
            print '==============================forward====== ', i, J
            #print yHat
            net.debugInfo()
        #if(i==1200):
        # 	alpha = alpha/4
        #if(i>300):
        JArr.append(J)

        net.backprop(y, yHat)
        net.gradientDescent(alpha)
        #exit()
        #if(prevJ - J)/prevJ *100 < 5:
        #	alpha = alpha/2
        #	print '>>> changing alpha ', i
        #prevJ = J

    plt.plot(JArr)
    plt.show()
    net.saveWeights()
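
net.gradientCheck is not shown in these examples; the technique it usually refers to is comparing the analytic gradient against a centered finite difference of the loss. A minimal sketch for a single parameter, using a hypothetical scalar loss J for illustration only:

def J(w):
    # hypothetical loss used only to illustrate the check
    return (w - 3.0) ** 2

w, eps = 1.5, 1e-5
analytic = 2.0 * (w - 3.0)                       # hand-derived dJ/dw
numeric = (J(w + eps) - J(w - eps)) / (2 * eps)  # centered finite difference
print(abs(analytic - numeric))                   # should be close to 0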
Example #8
def load_xor_data():
    xor_x = [[0, 0], [1, 0], [0, 1], [1, 1]]
    xor_y = [[0], [1], [1], [0]]
    xor_data = NNData(xor_x, xor_y, 100)
    return json.dumps(xor_data, cls=NNDataSerializer)
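
NNDataSerializer itself is not shown here; a common way to implement such an encoder is to subclass json.JSONEncoder and emit the same "__NNData__"-style wrapper that the deserializer in Example #4 expects. A hedged sketch with the toy Point class again (not the real NNData):

import json

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

class PointSerializer(json.JSONEncoder):
    def default(self, o):
        # wrap the object's state under a tag the decoder can recognize
        if isinstance(o, Point):
            return {"__Point__": {"x": o.x, "y": o.y}}
        return json.JSONEncoder.default(self, o)

print(json.dumps(Point(1, 2), cls=PointSerializer))  # {"__Point__": {"x": 1, "y": 2}}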
Example #9
class NNInnerProduct(NNLayer):
    def __init__(self, network, name, nlOut):
        NNLayer.__init__(self, network, name, nlOut)
        self.type = "InnerProduct"

    def initWeights(self):
        print self.name, "Initializing Weights, biases"
        self.W = NNData(self.nlOut, self.nlIn)
        self.B = NNData(self.nlOut, 1)
        self.W.rand()
        #	self.B.rand();
        #self.W.ones();
        #self.B.data[0,0]=10;
        #print self.W.data.shape;
        self.dW = NNData(self.nlOut, self.nlIn)
        self.dB = NNData(self.nlOut, 1)

        self.pW = NNData(self.nlOut, self.nlIn)
        self.pB = NNData(self.nlOut, 1)

        #self.W.mPrint();
        #self.B.mPrint();
        #exit()

    def saveWeights(self):
        sio.savemat(self.name + ".mat", {'W': self.W.data, 'B': self.B.data})
        #print self.name, ": No Weights, biases"

    def loadWeights(self):
        self.initWeights()

        mDict = sio.loadmat(self.name + ".mat")
        #exit()
        self.W.data = mDict['W']
        self.B.data = mDict['B']
        #self.W.mPrint();
        #exit()

    def forward(self, X):
        self.outData = self.W * X + self.B
        #self.W.mPrint()

        self.X = X.data
        return self.outData

    def backprop(self, dGlobal):
        m = dGlobal.shape[1]
        W = self.W.data
        dGlobalNew = np.matmul(W.T, dGlobal)
        self.dW.data = (1.0 / m) * np.matmul(dGlobal, self.X.T)
        #print self.name
        #print self.dW.data.shape
        #print self.dW.data;

        self.dB.data = (1.0 / m) * np.sum(dGlobal, axis=1, keepdims=True)
        #print type(self.dB.data)
        #exit()
        return dGlobalNew

    def restorePivot(self):
        print "================restoring Pivot===========>>>>>>"
        #self.W.mPrint()
        self.W.data = np.copy(self.pW.data)
        self.B.data = np.copy(self.pB.data)
        #self.W.mPrint()
        #exit()

    def gradientDescent(self, alpha):
        W = self.W.data
        B = self.B.data
        #print self.dW.data;
        #print type(self.dW.data)

        self.pW.data = np.copy(self.W.data)
        self.pB.data = np.copy(self.B.data)

        #exit()
        #W = W - np.clip(np.sign(self.dW.data),-alpha,alpha);
        #B = B - np.clip(np.sign(self.dB.data), -alpha, alpha);

        W = W - alpha * self.dW.data
        B = B - alpha * self.dB.data
        self.W.data = W
        self.B.data = B

    def debugInfo(self):
        print self.name
        #self.W.mPrintSTD()
        #if(self.name == "InnerProductFinal"):
        #	self.outData.mPrint();

        self.W.mPrintSTD()
        #self.B.mPrintSTD()
        self.outData.mPrintSTD()
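
For reference, backprop above implements the standard gradients of the affine map Z = W*X + B, averaged over the m example columns. A NumPy-only sketch with illustrative shapes, independent of the NNData class:

import numpy as np

m = 5
X = np.random.rand(3, m)    # 3 inputs, m examples as columns
W = np.random.rand(4, 3)    # 4 outputs
B = np.zeros((4, 1))

Z = np.matmul(W, X) + B     # forward pass, shape (4, m)
dZ = np.random.rand(4, m)   # gradient arriving from the next layer

dW = (1.0 / m) * np.matmul(dZ, X.T)                 # same shape as W: (4, 3)
dB = (1.0 / m) * np.sum(dZ, axis=1, keepdims=True)  # same shape as B: (4, 1)
dX = np.matmul(W.T, dZ)                             # passed to the previous layer: (3, m)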
Example #10
print(
    "\n\n-----------------------------------\n        General Information\n-----------------------------------\n"
)

print(
    "\t- A neural network is saved and updated for you automatically after each generation.\n\t- If you've run this simulation before, it'll be included in the starting population."
)
input("\nPress [Enter] when you are ready to begin the simulation")

print(
    "\n\n-----------------------------------\n         Simulation Start\n-----------------------------------\n"
)

print("Preparing the starting population. This will only take a moment...")

data = NNData()
pop = Population(13, 5, int(pop_size), data)

print("Starting population complete!")

print("\n\n===================================\n    Simulating " +
      num_generations +
      " generation(s)\n===================================\n\n")

pop.simulate_generations(int(num_generations), is_static, is_print)

print(
    "\n-----------------------------------\n          Continuing play\n-----------------------------------\n"
)

response = input(