Example #1
File: test.py  Project: QiweiWen/nnfpga
def test_training():
    net = nn.Neuralnet(reg=1)
    owidth = net.nunits[net.nlayers - 1]
    NINST = 100
    teachers = []
    invecs = []
    #o[3] = i[4] xor i[2]
    for i in range(0, NINST):
        teachers.append(Alnvec(elements=[0] * owidth))
        invec = np.random.random(size=net.nunits[0])
        invec = matwrap.Alnvec(elements=invec)
        # Binarise the two inputs that drive the XOR target
        invec[4] = 1 if invec[4] >= 0.5 else 0
        invec[2] = 1 if invec[2] >= 0.5 else 0

        # Teacher output 3 is the XOR of the binarised inputs
        teachers[i][3] = 1 if invec[4] != invec[2] else 0

        invecs.append(invec)

    res = do_optimise(net, invecs, teachers)
    pdb.set_trace()
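
The test above only drops into the debugger; it does not assert anything about the learned mapping. A minimal verification sketch, assuming do_optimise trains net in place and that net.feed_forward returns an indexable output vector as in Example #2, could be appended at the end of test_training:

    # Verification sketch (assumption: do_optimise leaves net trained in place).
    correct = 0
    for invec, teacher in zip(invecs, teachers):
        out = net.feed_forward(invec)
        predicted = 1 if out[3] >= 0.5 else 0
        correct += int(predicted == teacher[3])
    print("XOR recovered on %d of %d instances" % (correct, NINST))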
Example #2
File: test.py  Project: QiweiWen/nnfpga
def test_feed_forward():
    net = nn.Neuralnet(reg=1)
    invec = np.random.random(size=net.nunits[0])
    invec = matwrap.Alnvec(elements=invec)
    res = net.feed_forward(invec)
    npres = numpy_feed_forward(net, np.array(invec.data[0:invec.dim]))
    return (res, npres)
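
Example #2 returns both the library result and a NumPy reference computed by numpy_feed_forward. A comparison sketch, assuming res exposes its values through res.data and res.dim in the same way the Alnvec input does:

import numpy as np

res, npres = test_feed_forward()
custom = np.array(res.data[0:res.dim])  # assumption: res is an Alnvec-like vector
print("max abs difference:", np.max(np.abs(custom - npres)))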
Example #3
    def pop_init(self, i, o, innDict, innNum, nodeID):
        #Population
        population = []

        #Create the initial population
        for pop in range(self.n):

            #Create the Brain for the gen
            population.append(nn.Neuralnet(i, o))
            innDict, innNum, nodeID = population[pop].create_net(
                innDict, innNum, nodeID)

        return population, innDict, innNum, nodeID
Example #4
def trow_processor_test():
    net = nn.Neuralnet()
    owidth = net.nunits[net.nlayers - 1]
    NINST = 10
    os.system("rm -rf log && mkdir log")
    teachers = []
    invecs = []
    for i in range(0, NINST):
        k = randint(0, owidth - 1)
        teachers.append(Alnvec(elements=[0] * owidth))
        teachers[i][k] = 1
        invec = np.random.random(size=net.nunits[0])
        invec = matwrap.Alnvec(elements=invec)
        invecs.append(invec)

    for i in range(0, NINST):
        ffres = net.feed_forward(invecs[i])
        net.feed_backward(ffres, teachers[i],
                          log=os.path.join("log", "fpgatest.log.%d" % i))
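
The test writes one backpropagation log per training instance into the freshly created log directory, assuming feed_backward honours the log keyword path shown above. A quick check that the files were produced:

import os

# Expect NINST files named fpgatest.log.0 .. fpgatest.log.9
for name in sorted(os.listdir("log")):
    print(name)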
Example #5
File: test.py  Project: QiweiWen/nnfpga
def test_feed_backward():
    net = nn.Neuralnet(reg=5)
    owidth = net.nunits[net.nlayers - 1]
    NINST = 10
    teachers = []
    invecs = []
    for i in range(0, NINST):
        k = randint(0, owidth - 1)
        teachers.append(Alnvec(elements=[0] * owidth))
        teachers[i][k] = 1
        invec = np.random.random(size=net.nunits[0])
        invec = matwrap.Alnvec(elements=invec)
        invecs.append(invec)
    gc = net.train(invecs, teachers)
    # brute force gradients
    weight = True
    for i in range(0, net.nparams):
        (isw, tup) = gc.offset_to_coord(i)
        # Detect the transition from weight parameters to bias parameters
        if weight and not isw:
            weight = isw
            print("bias begins")
        if isw:
            (n, j, k) = tup
            parameter = net.weight_matrixes[n][j, k]
            print("backprop gradient: %f" % gc.wget(n, j, k))
        else:
            (n, j) = tup
            parameter = 0
            k = 0
            print("backprop gradient: %f" % gc.bget(n, j))
        accu = 0
        for inst in range(0, NINST):
            accu += gc.bruteforce(isw, xentro(), invecs[inst], teachers[inst],
                                  n, j, k)
        accu += parameter * net.regterm
        accu /= NINST
        print("brute force gradient: %f" % accu)
Example #6
File: impexp.py  Project: denes-panta/Code
    def imp(path, pop_length, name, i, o):
        #New population for the imported genomes
        population = []

        #Get full path
        imp_path = os.path.join(path, str(name))

        #Innovation dictionary and innovation number
        innovDict = pl.load(open(imp_path + "/innov_dict", "rb"))
        innovNum = max(innovDict.keys(), key=int)

        #Get nodeID
        nID = 0

        for ind, innov in enumerate(innovDict):
            if innovDict[innov].s_type == "node":
                if innovDict[innov].nodeID > nID:
                    nID = innovDict[innov].nodeID

        #Number of genome leaders saved
        specNum = len(os.listdir(imp_path))
        specList = []
        n = int((specNum - 1) / 2)

        #Number of genomes to be created per saved leader
        n_gen = int(math.floor(pop_length / n))

        #Create the population from the saved files
        for lead in range(n):
            #Get nodeDict and linkDict
            i_linkDict = pl.load(
                open(imp_path + "/link_Dict" + str(lead), "rb"))
            i_nodeDict = pl.load(
                open(imp_path + "/node_Dict" + str(lead), "rb"))

            #Append speclist
            specList.append(lead + 1)

            for gen in range(n_gen):
                #Create new genome
                genome = nn.Neuralnet(i, o)

                #Assign species number
                genome.species = lead + 1

                #Create link dictionary
                for ind, l_ID in enumerate(i_linkDict):
                    link = i_linkDict[l_ID]

                    genome.linkDict[l_ID] = p.Link(link.innID, link.inp_n,
                                                   link.out_n, link.recurr)
                    genome.linkDict[l_ID].set_vars(link.w, link.enabled)

                #Create node dictionary
                for ind, n_ID in enumerate(i_nodeDict):
                    node = i_nodeDict[n_ID]
                    genome.nodeDict[n_ID] = p.Node(node.innID, node.nodeID,
                                                   node.n_type, node.recurr)
                    genome.nodeDict[n_ID].set_vars(
                        node.splitY,
                        node.value,
                    )

                genome.linkDict, genome.nodeDict = mcu.evolution.flush(
                    genome.linkDict, genome.nodeDict)

                population.append(genome)

        #If the new population is smaller than the required one,
        #fill it up with random ones
        while len(population) < pop_length:
            #Get random leader
            lead = np.random.randint(low=0, high=n)

            #Create new genome
            genome = nn.Neuralnet(i, o)

            #Assign species number
            genome.species = lead

            #Get linkDict and nodeDict
            i_linkDict = pl.load(
                open(imp_path + "/link_Dict" + str(lead), "rb"))
            i_nodeDict = pl.load(
                open(imp_path + "/node_Dict" + str(lead), "rb"))

            #Create link dictionary
            for ind, l_ID in enumerate(i_linkDict):
                link = i_linkDict[l_ID]

                genome.linkDict[l_ID] = p.Link(link.innID, link.inp_n,
                                               link.out_n, link.recurr)
                genome.linkDict[l_ID].set_vars(link.w, link.enabled)

            #Create node dictionary
            for ind, n_ID in enumerate(i_nodeDict):
                node = i_nodeDict[n_ID]
                genome.nodeDict[n_ID] = p.Node(node.innID, node.nodeID,
                                               node.n_type, node.recurr)
                genome.nodeDict[n_ID].set_vars(
                    node.splitY,
                    node.value,
                )

            genome.linkDict, genome.nodeDict = mcu.evolution.flush(
                genome.linkDict, genome.nodeDict)

            population.append(genome)

        return population, innovDict, innovNum, nID, specList, specNum
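
A hypothetical call site for imp (the path, run name, and the 4-input/2-output sizes below are placeholders; since imp is defined without self, it is assumed to be exposed as a static method of the class in impexp.py):

population, innovDict, innovNum, nID, specList, specNum = imp(
    "saves", pop_length=150, name="run_01", i=4, o=2)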