Example #1
    def splitintobatches(self, trainingdata, batchsize):  # splits trainingdata into batches of size batchsize; np is numpy
        N = len(trainingdata.dic)
        if batchsize > N:
            batchsize = N
        k = int(np.ceil(N / batchsize))  # number of batches

        items = list(trainingdata.dic.items())
        Batch_sets = [0] * k
        for i in range(k - 1):
            Batch_sets[i] = TrainingDataSgfPass()
            Batch_sets[i].dic = dict(items[i * batchsize:(i + 1) * batchsize])
        # The last batch takes the remaining entries and may be smaller than batchsize.
        Batch_sets[k - 1] = TrainingDataSgfPass()
        Batch_sets[k - 1].dic = dict(items[(k - 1) * batchsize:N])
        number_of_batchs = k
        return [number_of_batchs, Batch_sets]
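The same batching logic can be exercised outside the network class. The sketch below is not the project's own code: DummyDataSet and split_into_batches are hypothetical stand-ins for TrainingDataSgfPass and splitintobatches, written only so the resulting batch sizes can be checked directly.

import numpy as np

class DummyDataSet:
    def __init__(self, dic=None):
        self.dic = dic if dic is not None else {}

def split_into_batches(trainingdata, batchsize):
    # Split trainingdata.dic into ceil(N / batchsize) batch data sets.
    items = list(trainingdata.dic.items())
    N = len(items)
    batchsize = min(batchsize, N)
    k = int(np.ceil(N / batchsize))
    batches = []
    for i in range(k):
        batches.append(DummyDataSet(dict(items[i * batchsize:(i + 1) * batchsize])))
    return k, batches

# Usage: 10 positions split into batches of 3 give 4 batches of sizes 3, 3, 3, 1.
data = DummyDataSet({"pos%d" % i: i for i in range(10)})
k, batches = split_into_batches(data, 3)
print(k, [len(b.dic) for b in batches])  # 4 [3, 3, 3, 1]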
Example #2
    def Learnsplit(self, trainingdata, eta, batch_size, stoch_coeff, error_function, trainingrate, error_tolerance, maxepochs):
        # Split trainingdata into a training set and a held-out test set.
        N = len(trainingdata.dic)
        splitindex = int(round(N * trainingrate))
        trainingset, testset = TrainingDataSgfPass(), TrainingDataSgfPass()
        trainingset.dic = dict(list(trainingdata.dic.items())[:splitindex])
        testset.dic = dict(list(trainingdata.dic.items())[splitindex:])

        # Train one epoch at a time until the test error drops below
        # error_tolerance or maxepochs is reached.
        error = [error_tolerance + 1]
        epochs = 0
        while error[-1] > error_tolerance and epochs < maxepochs:
            epochs += 1
            self.Learn(trainingset, 1, batch_size, stoch_coeff, error_function)  # train on the training split only
            error.append(self.PropagateSet(testset, error_function))
        return [error, epochs]
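To check the control flow of the split-and-early-stopping loop in isolation, a stand-alone sketch like the one below can be used. Everything in it is illustrative: DummyNet and its Learn and PropagateSet stubs are hypothetical stand-ins for the network class above, chosen only so that the stopping condition triggers predictably.

class DummyNet:
    def __init__(self):
        self._err = 1.0

    def Learn(self, trainingset, epochs, batch_size, stoch_coeff, error_function):
        self._err *= 0.5  # stub: pretend each training epoch halves the error

    def PropagateSet(self, testset, error_function):
        return self._err  # stub: pretend this measures the test error

def learn_split(net, data, trainingrate, error_tolerance, maxepochs):
    items = list(data.items())
    splitindex = int(round(len(items) * trainingrate))
    trainingset = dict(items[:splitindex])  # e.g. first 80% of the positions
    testset = dict(items[splitindex:])      # remaining 20% held out
    error, epochs = [error_tolerance + 1], 0
    while error[-1] > error_tolerance and epochs < maxepochs:
        epochs += 1
        net.Learn(trainingset, 1, 10, 1.0, "MSE")
        error.append(net.PropagateSet(testset, "MSE"))
    return error, epochs

error, epochs = learn_split(DummyNet(), {i: i for i in range(100)}, 0.8, 0.05, 20)
print(epochs, error[-1])  # stops after 5 epochs, once the stub error falls below 0.05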