def buildXor(self):
    """Set up the XOR toy problem: its 4-sample dataset and a 2-3-1 network."""
    self.params['dataset'] = 'XOR'

    # The four XOR patterns; the same dataset serves as both train and test set.
    data = ClassificationDataSet(2)
    data.addSample([0., 0.], [0.])
    data.addSample([0., 1.], [1.])
    data.addSample([1., 0.], [1.])
    data.addSample([1., 1.], [0.])
    data.setField('class', [[0.], [1.], [1.], [0.]])
    self.trn_data = data
    self.tst_data = data

    # Expose the training data through a module-level global (legacy hook).
    global trn_data
    trn_data = self.trn_data

    # Assemble the network: 2 tanh inputs, 3 tanh hidden units, 1 threshold output.
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(2, name='in')
    hiddenLayer = TanhLayer(3, name='hidden0')
    outLayer = ThresholdLayer(1, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    nn.addConnection(FullConnection(inLayer, hiddenLayer))
    nn.addConnection(FullConnection(hiddenLayer, outLayer))
    nn.sortModules()
    nn.randomize()

    self.net_settings = str(nn.connections)
    self.nn = nn
def createNN():
    """Return a fresh 4-6-3 feed-forward network (tanh hidden, threshold output)."""
    net = FeedForwardNetwork()
    layer_in = TanhLayer(4, name='in')
    layer_hidden = TanhLayer(6, name='hidden0')
    layer_out = ThresholdLayer(3)

    net.addInputModule(layer_in)
    net.addModule(layer_hidden)
    net.addOutputModule(layer_out)

    # Fully connect input->hidden and hidden->output.
    net.addConnection(FullConnection(layer_in, layer_hidden))
    net.addConnection(FullConnection(layer_hidden, layer_out))

    net.sortModules()
    return net
def _buildStructure(self, inputdim, insize, inlayer, convSize, numFeatureMaps):
    """Wire a convolutional layer with shared (mother) weights onto *inlayer*.

    A convSize x convSize window slid over an insize x insize input produces an
    outdim x outdim grid of positions, each holding numFeatureMaps hidden units.
    """
    # Output grid side length for a valid (no-padding) convolution.
    outdim = insize - convSize + 1

    # Hidden layer: one block of numFeatureMaps units per output position.
    hlayer = TanhLayer(outdim * outdim * numFeatureMaps, name='h')
    self.addModule(hlayer)
    outlayer = SigmoidLayer(outdim * outdim, name='out')
    self.addOutputModule(outlayer)

    # Shared weights: one mother connection per convolution row, plus one
    # mother connection collapsing the feature maps into the output unit.
    convConns = [
        MotherConnection(convSize * numFeatureMaps * inputdim, name='conv' + str(i))
        for i in range(convSize)
    ]
    outConn = MotherConnection(numFeatureMaps)

    # Establish the connections for every output-grid position (i, j).
    for i in range(outdim):
        for j in range(outdim):
            offset = i * outdim + j
            # Slice of the hidden layer belonging to this grid position.
            outmod = ModuleSlice(hlayer,
                                 inSliceFrom=offset * numFeatureMaps,
                                 inSliceTo=(offset + 1) * numFeatureMaps,
                                 outSliceFrom=offset * numFeatureMaps,
                                 outSliceTo=(offset + 1) * numFeatureMaps)
            # Feature maps -> the single matching output unit (shared weights).
            self.addConnection(SharedFullConnection(outConn, outmod, outlayer,
                                                    outSliceFrom=offset,
                                                    outSliceTo=offset + 1))
            # One shared connection per row of the convolution window.
            for k, mc in enumerate(convConns):
                in_offset = insize * (i + k) + j
                inmod = ModuleSlice(inlayer,
                                    outSliceFrom=in_offset * inputdim,
                                    outSliceTo=in_offset * inputdim + convSize * inputdim)
                self.addConnection(SharedFullConnection(mc, inmod, outmod))
def buildIris(self):
    """Set up the iris problem: a 50/50 data split and a 4-6-3 network."""
    self.params['dataset'] = 'iris'
    self.trn_data, self.tst_data = pybrainData(0.5)

    # Expose the training data through a module-level global (legacy hook).
    global trn_data
    trn_data = self.trn_data

    # Assemble the network: 4 tanh inputs, 6 tanh hidden units, 3 threshold outputs.
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(4, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = ThresholdLayer(3, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    nn.addConnection(FullConnection(inLayer, hiddenLayer))
    nn.addConnection(FullConnection(hiddenLayer, outLayer))
    nn.sortModules()
    nn.randomize()

    self.net_settings = str(nn.connections)
    self.nn = nn
def buildParity(self):
    """Set up the parity problem: 75-sample datasets and a 4-6-1 network."""
    self.params['dataset'] = 'parity'
    self.trn_data = ParityDataSet(nsamples=75)
    # Mirror the targets into a 'class' field expected downstream.
    self.trn_data.setField('class', self.trn_data['target'])
    self.tst_data = ParityDataSet(nsamples=75)

    # Expose the training data through a module-level global (legacy hook).
    global trn_data
    trn_data = self.trn_data

    # Assemble the network: 4 tanh inputs, 6 tanh hidden units, 1 threshold output.
    nn = FeedForwardNetwork()
    inLayer = TanhLayer(4, name='in')
    hiddenLayer = TanhLayer(6, name='hidden0')
    outLayer = ThresholdLayer(1, name='out')
    nn.addInputModule(inLayer)
    nn.addModule(hiddenLayer)
    nn.addOutputModule(outLayer)
    nn.addConnection(FullConnection(inLayer, hiddenLayer))
    nn.addConnection(FullConnection(hiddenLayer, outLayer))
    nn.sortModules()
    nn.randomize()

    self.net_settings = str(nn.connections)
    self.nn = nn
def buildBMTrainer(self):
    """Read training data from Excel, build and train a 5-layer BP network.

    Builds a linear-in / sigmoid-tanh-sigmoid hidden / linear-out network,
    trains it until convergence (via the custom bmtrain when self.myalg is
    set, otherwise trainUntilConvergence), then stores the de-normalized
    predictions for the training inputs in self.restest.
    """
    x, y = self.readexcel()
    self.realy = y
    per = len(x)  # use every row for training

    # Normalize inputs and targets to [0, 1] — required for sigmoid layers.
    self.sx = MinMaxScaler()
    self.sy = MinMaxScaler()
    xTrain = self.sx.fit_transform(x[:per])
    yTrain = self.sy.fit_transform(y[:per])

    # Build the feed-forward network:
    # input -> sigmoid(h/3) -> tanh(h) -> sigmoid(h/3) -> linear output.
    self.__fnn = FeedForwardNetwork()
    inLayer = LinearLayer(x.shape[1], 'inLayer')
    hiddenLayer0 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer0')
    hiddenLayer1 = TanhLayer(self.hiddendim, 'hiddenLayer1')
    hiddenLayer2 = SigmoidLayer(int(self.hiddendim / 3), 'hiddenLayer2')
    outLayer = LinearLayer(self.rescol, 'outLayer')

    self.__fnn.addInputModule(inLayer)
    self.__fnn.addModule(hiddenLayer0)
    self.__fnn.addModule(hiddenLayer1)
    self.__fnn.addModule(hiddenLayer2)
    self.__fnn.addOutputModule(outLayer)

    # Fully connect consecutive layers.
    self.__fnn.addConnection(FullConnection(inLayer, hiddenLayer0))
    self.__fnn.addConnection(FullConnection(hiddenLayer0, hiddenLayer1))
    self.__fnn.addConnection(FullConnection(hiddenLayer1, hiddenLayer2))
    self.__fnn.addConnection(FullConnection(hiddenLayer2, outLayer))
    self.__fnn.sortModules()

    # Supervised dataset holding the normalized samples.
    DS = SupervisedDataSet(x.shape[1], self.rescol)
    for i in range(len(xTrain)):
        DS.addSample(xTrain[i], yTrain[i])

    # Backprop training until convergence (at most 10000 epochs).
    trainer = BMBackpropTrainer(self.__fnn, DS, learningrate=0.0001,
                                verbose=self.verbose)
    if self.myalg:
        trainingErrors = trainer.bmtrain(maxEpochs=10000, verbose=True,
                                         continueEpochs=3000, totalError=0.0001)
    else:
        trainingErrors = trainer.trainUntilConvergence(maxEpochs=10000,
                                                       continueEpochs=3000,
                                                       validationProportion=0.1)

    self.__sy = self.sy
    self.__sx = self.sx

    # Collect de-normalized predictions for the training inputs.
    # NOTE: the original activated the network twice per sample; one forward
    # pass is enough, since activate() is deterministic for a trained net.
    for sample in xTrain:
        prediction = self.sy.inverse_transform(
            self.__fnn.activate(sample).reshape(-1, 1))
        self.restest.append(prediction[0][0])