def Save(self):
    print("Saving network...")
    now = datetime.datetime.now()
    NetworkWriter.writeToFile(self.net, "saved_networks/network_" + str(now.day) + "_" + str(now.month) + "_" + str(now.year))
    print("Neural network saved.")
def Treinar(rede, base):
    treinamento = BackpropTrainer(rede, dataset=base, learningrate=0.01, momentum=0.06)
    for i in range(1, 10000):
        erro = treinamento.train()
    NetworkWriter.writeToFile(rede, 'filename.xml')
def mutationEvol(itVar, rna, rank, verbose, path, inn):
    aux4 = 0
    z = 0
    it = 0
    gen = itVar
    rna = copy.copy(rna)
    ind = 0
    cnt = 0
    p1 = []
    for i in range(len(rna.params)):
        p1.append(0.0)
    for i in range(len(rank) * 2):  # twice the ranking size
        #if(ind<2):
        rna1 = NetworkReader.readFrom(path + '/gen' + str(gen) + '/rna' + str(rank[cnt]) + '.xml')
        for k in range(len(rna1.params)):
            if ind == 0:
                rna.params[k] = rna1.params[k]
            p1[k] = rna1.params[k]
        for j in range(len(rna.params)):
            aux1 = randint(1, 9)
            aux2 = randint(10, 15 + (15 * cnt))
            aux = randint(aux1, aux2)
            if ind != 0:
                if aux % 2 == 0:
                    rna.params[j] = p1[j]
                    if j > (inn - 1):
                        #rna.params[j] *= float(randint(10000, 18000) / 10000.0)
                        rna.params[j] += float(randint(1000, 8000) / 10000.0)
                else:
                    rna.params[j] = p1[j]
                    if j > (inn - 1):
                        #rna.params[j] *= float(randint(2000, 10000) / 10000.0)
                        rna.params[j] -= float(randint(1000, 8000) / 10000.0)
        cnt += 1
        if cnt == len(rank):
            cnt = 0
        ind += 1
        if verbose == True:
            print "rna.params:"
            print rna.params
            print "\n"
        NetworkWriter.writeToFile(rna, path + '/gen' + str(gen + 1) + '/rna' + str(i + 1) + '.xml')
def fstGenCreate(rna, path, rnaNmbr):
    """
    Creates the first generation of a genetic system.

    Parameters:
    -rna-> base neural network for the system, created with buildNetwork() - NeuralNetwork
    -path-> path to the folder that will hold the generations - String
    -rnaNmbr-> number of neural networks per generation - Integer

    Returns: none
    """
    newpath = path + '/gen1'
    if not os.path.exists(newpath):
        os.makedirs(newpath)
    for i in range(rnaNmbr):
        a = i + 1
        rna1 = copy.copy(rna)
        for k in range(len(rna1.params)):
            rna1.params[k] = float(randint(-2000, 2000)) / 1000.0
        NetworkWriter.writeToFile(rna1, path + '/gen1/rna' + str(a) + '.xml')
def move_function(board):
    global net
    best_max_move = None
    max_value = -1000
    best_min_move = None
    min_value = 1000
    # value is the chance of black winning
    for m in board.get_moves():
        nextboard = board.peek_move(m)
        value = net.activate(board_to_input(nextboard))
        if value > max_value:
            max_value = value
            best_max_move = m
        if value < min_value:
            min_value = value
            best_min_move = m
    ds = SupervisedDataSet(97, 1)
    best_move = None
    # active player
    if board.active == BLACK:
        ds.addSample(board_to_input(board), max_value)
        best_move = best_max_move
    elif board.active == WHITE:
        ds.addSample(board_to_input(board), min_value)
        best_move = best_min_move
    trainer = BackpropTrainer(net, ds)
    trainer.train()
    NetworkWriter.writeToFile(net, 'CheckersMini/synapsemon_random_black_mini_140.xml')
    NetworkWriter.writeToFile(net, 'SynapsemonPie/synapsemon_random_black_mini_140_copy.xml')
    return best_move
def keyPressEvent(self, e):
    global All_Cells
    if e.key() == Qt.Key_Space:
        self.stop = not self.stop
    elif e.key() == Qt.Key_W:
        today = '%i%02i%02i%02i%02i%02i' % (
            datetime.datetime.today().year, datetime.datetime.today().month,
            datetime.datetime.today().day, datetime.datetime.today().hour,
            datetime.datetime.today().minute, datetime.datetime.today().second)
        os.mkdir('../data/%s' % today)
        for i in range(len(All_Cells)):
            NetworkWriter.writeToFile(All_Cells[i].brain, '../data/%s/%02i.xml' % (today, i))
    elif not self.timer_set:
        self.timer_set = True

        @pyqtSlot()
        def step():
            self.evolution_step()

        self.timer = QTimer(self)
        self.timer.timeout.connect(step)
        self.timer.start(10)
def train(self):
    print "Enter the number of times to train, -1 means train until convergence:"
    t = int(raw_input())
    print "Training the Neural Net"
    print "self.net.indim = " + str(self.net.indim)
    print "self.train_data.indim = " + str(self.train_data.indim)
    trainer = BackpropTrainer(self.net, dataset=self.train_data, momentum=0.1, verbose=True, weightdecay=0.01)
    if t == -1:
        trainer.trainUntilConvergence()
    else:
        for i in range(t):
            trainer.trainEpochs(1)
            trnresult = percentError(trainer.testOnClassData(), self.train_data['class'])
            # print self.test_data
            tstresult = percentError(trainer.testOnClassData(dataset=self.test_data), self.test_data['class'])
            print "epoch: %4d" % trainer.totalepochs, \
                " train error: %5.2f%%" % trnresult, \
                " test error: %5.2f%%" % tstresult
            if i % 10 == 0 and i > 1:
                print "Saving Progress... Writing to a file"
                NetworkWriter.writeToFile(self.net, self.path)
    print "Done training... Writing to a file"
    NetworkWriter.writeToFile(self.net, self.path)
    return trainer
def pickleWeights():
    global neuralNetwork
    save = input("Save to file? (y/n) ").lower() == "y"
    if save:
        filename = input("File name? (Please do not indicate the extension)\n") + ".xml"
        print("Saving to", filename)
        NetworkWriter.writeToFile(neuralNetwork, filename)
def save(self, filename, desc=None):
    NetworkWriter.writeToFile(self.net, filename + '.xml')
    params = {'labels': self.labels, 'mean': self.mean.tolist(), 'std': self.std.tolist()}
    with open(filename + '.yaml', 'w') as f:
        f.write(yaml.dump(params, default_flow_style=False))
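# A counterpart loader for the save() above -- a minimal sketch, not from the
# original source. It assumes the class keeps its network in self.net and that
# NetworkReader, yaml and numpy (as np) are imported in the same module.
def load(self, filename):
    self.net = NetworkReader.readFrom(filename + '.xml')
    with open(filename + '.yaml') as f:
        params = yaml.safe_load(f)
    self.labels = params['labels']
    self.mean = np.array(params['mean'])
    self.std = np.array(params['std'])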
def trainData(data, filename):
    net = buildNetwork(data.indim, 40, data.outdim, hiddenclass=TanhLayer, outclass=SigmoidLayer)
    trainer = BackpropTrainer(net, dataset=data, verbose=True, momentum=0.1, weightdecay=0.01)
    _, valid_errors = trainer.trainUntilConvergence(continueEpochs=2)
    NetworkWriter.writeToFile(net, filename)
    print "Valid error: ", min(valid_errors)
    return net
def main():
    train_file = 'data/train.csv'
    # validation_file = 'data/validation.csv'
    output_model_file = 'model.xml'
    # hidden_size = 4
    epochs = 500

    # load data
    # def loadData():
    train = np.loadtxt(train_file, delimiter=' ')
    Input = train[0:, 0:3]
    Output = train[0:, 3:5]
    # validation = np.loadtxt(validation_file, delimiter=',')
    # train = np.vstack((train, validation))
    # x_train = train[:, 0:-1]
    # y_train = train[:, -1]
    # y_train = y_train.reshape(-1, 1)
    # input_size = x_train.shape[1]
    # target_size = y_train.shape[1]

    # prepare dataset
    # def prepareDataset(input_size, target_size):
    ds = SDS(Input, Output)
    # ds.addSample(input_size)
    # ds.setField('input', x_train)
    # ds.setField('target', y_train)

    # init and train
    # def initTrain(input_size, hidden_size, input, output):
    # net = buildNetwork(input_size, hidden_size, target_size, bias=True)
    net = buildNetwork(3,  # input layer
                       4,  # hidden0
                       2,  # output
                       hiddenclass=SigmoidLayer,
                       outclass=SigmoidLayer,
                       bias=True)
    net = NetworkReader.readFrom('model.xml')  # overwrites the freshly built net with the saved one
    for i, o in zip(Input, Output):
        ds.addSample(i, o)
        print i, o
    trainer = BackpropTrainer(net, ds)
    print "training for {} epochs...".format(epochs)
    for i in range(epochs):
        mse = trainer.train()
        rmse = sqrt(mse)
        print "training RMSE, epoch {}: {}".format(i + 1, rmse)
        if os.path.isfile("../stopfile.txt"):
            break
    NetworkWriter.writeToFile(net, output_model_file)
def SaveNetwork(self):
    """
    Creating a dump of the network.
    """
    FCLogger.debug('Saving network to PyBrain xml-formatted file...')
    NetworkWriter.writeToFile(self.network, self.networkFile)
    FCLogger.info('Network saved to file: {}'.format(os.path.abspath(self.networkFile)))
def writetrainedinfo(self, neuralnetwork):
    """
    # Using the Python pickle
    fileObject = open('traininfo', 'w')
    pickle.dump(neuralnetwork, fileObject)
    fileObject.close()
    """
    # Writing file using the NetworkWriter
    NetworkWriter.writeToFile(neuralnetwork, 'trainedinfo.xml')
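# The pickle variant preserved in the docstring above opens its file in text
# mode, which fails under Python 3. If pickling is preferred over NetworkWriter,
# a minimal working sketch (the file name 'traininfo' is kept from the original):
import pickle

def pickletrainedinfo(neuralnetwork):
    with open('traininfo', 'wb') as fileObject:  # pickle requires binary mode
        pickle.dump(neuralnetwork, fileObject)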
def writetrainedinfo(self, neuralnetwork):
    """
    # Using the Python pickle
    fileObject = open('traininfo', 'w')
    pickle.dump(neuralnetwork, fileObject)
    fileObject.close()
    """
    # Writing file using the NetworkWriter
    NetworkWriter.writeToFile(neuralnetwork, 'xtrainedinfo.xml')
def saveNetwork(net, name):
    """Export a neural network to a file.

    Arguments:
    net -- neural network, PyBrain network
    name -- file name, string
    """
    NetworkWriter.writeToFile(net, name)
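# A quick round-trip sanity check for the writer/reader pair used throughout
# these snippets -- a minimal sketch, not from any of the original sources;
# the network shape and the file name 'demo.xml' are illustrative.
import numpy as np
from pybrain.tools.shortcuts import buildNetwork
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.tools.customxml.networkreader import NetworkReader

net = buildNetwork(2, 3, 1)
NetworkWriter.writeToFile(net, 'demo.xml')
restored = NetworkReader.readFrom('demo.xml')
# the restored network should reproduce the original activations (up to float round-trip)
assert np.allclose(net.activate([0.5, -0.5]), restored.activate([0.5, -0.5]))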
def save_values():
    global best, additional_left
    NetworkWriter.writeToFile(net, NETWORK_TEMP_FILE_NAME)
    if 'avg_error' in globals() and avg_error < best:
        best = avg_error
        NetworkWriter.writeToFile(net, NETWORK_FILE_NAME)
        print "Updated best network and saved to file"
        additional_left = ADDITIONAL_EPOCH
    with open(NETWORK_VAL_FILE_NAME, "wb") as f:
        dump((epoch, additional_left, best), f)
    print "Network values saved"
def save(history, net):
    """
    This function gets called after each training/testing block or when the
    script gets closed. It saves the neural net and RL history of the agent so
    that it can be restored or reused in another model.
    """
    base = os.path.splitext(sys.argv[2])[0]
    print 'Saving network to: ' + base + '.xml'
    NetworkWriter.writeToFile(net, base + '.xml')
    fileObject = open(base + '.history', 'w')
    pickle.dump(history, fileObject)
    fileObject.close()
def SaveNetwork(self):
    """
    Creating a dump of the network.
    """
    FCLogger.debug('Autosaving - enabled. Trying to save network as PyBrain xml-formatted file...')
    NetworkWriter.writeToFile(self.network, self.networkFile)
    FCLogger.info('Current network saved to file: {}'.format(os.path.abspath(self.networkFile)))
def save_network(network, network_file):
    if bucket:
        k = Key(bucket)
        k.key = network_file
        tmp_file = NamedTemporaryFile(delete=False)
        NetworkWriter.writeToFile(network, tmp_file.name)
        k.set_contents_from_filename(tmp_file.name)
        os.unlink(tmp_file.name)
        print "network saved to s3"
    else:
        NetworkWriter.writeToFile(network, network_storage_path + network_file)
        print "network saved to local filesystem"
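# A matching loader for save_network() above -- a sketch under the same
# assumptions (boto's Key API, NamedTemporaryFile, and the module-level
# bucket / network_storage_path globals); NetworkReader is assumed imported.
def load_network(network_file):
    if bucket:
        k = Key(bucket)
        k.key = network_file
        tmp_file = NamedTemporaryFile(delete=False)
        k.get_contents_to_filename(tmp_file.name)
        network = NetworkReader.readFrom(tmp_file.name)
        os.unlink(tmp_file.name)
        return network
    return NetworkReader.readFrom(network_storage_path + network_file)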
def perceptron(hidden_neurons=20, weightdecay=0.01, momentum=0.1):
    INPUT_FEATURES = 1000
    CLASSES = 9
    HIDDEN_NEURONS = hidden_neurons
    WEIGHTDECAY = weightdecay
    MOMENTUM = momentum
    g = generate_data()
    alldata = g['d']
    testdata = generate_Testdata(g['index'])['d']
    #tstdata, trndata = alldata.splitWithProportion(0.25)
    #print type(tstdata)
    trndata = _convert_supervised_to_classification(alldata, CLASSES)
    tstdata = _convert_supervised_to_classification(testdata, CLASSES)
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()
    #fnn = NetworkReader.readFrom('ncibig(500+83.85).xml')
    fnn = buildNetwork(trndata.indim, HIDDEN_NEURONS, trndata.outdim, outclass=SoftmaxLayer)
    trainer = BackpropTrainer(fnn, dataset=trndata, momentum=MOMENTUM, verbose=True, weightdecay=WEIGHTDECAY, learningrate=0.01)
    result = 0
    ssss = 0
    for i in range(200):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
        out = fnn.activateOnDataset(tstdata)
        ssss = out
        out = out.argmax(axis=1)
        result = out
    df = pd.DataFrame(ssss)
    df.to_excel("ncibigout.xls")
    df = pd.DataFrame(result)
    df.insert(1, '1', tstdata['class'])
    df.to_excel("ncibig.xls")
    error = 0
    for i in range(len(tstdata['class'])):
        if tstdata['class'][i] != result[i]:
            error = error + 1
    #print (len(tstdata['class'])-error)*1.0/len(tstdata['class'])*100
    print AAC(result, tstdata['class'])
    print AUC(np.transpose(tstdata['class'])[0], result.transpose())
    print Fscore(np.transpose(tstdata['class'])[0], result.transpose())
    NetworkWriter.writeToFile(fnn, 'ncibig.xml')
def save_network_to_file(self, filename):
    """Save Network to File

    Saves the neural network, including all connection weights, into a
    NetworkWriter-format xml file for future loading.

    Arguments:
        filename: The filename into which the network should be saved.
    """
    NetworkWriter.writeToFile(self._network, filename)
    return
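# A matching loader would presumably mirror this method with NetworkReader --
# a hedged sketch, not taken from the original class:
def load_network_from_file(self, filename):
    """Load a previously saved network from a NetworkWriter-format xml file."""
    self._network = NetworkReader.readFrom(filename)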
def trainNetwork():
    print "[Training] Network has Started..."
    inputSize = 0
    with open('file1.txt', 'r') as f:  # automatically closes file at the end of the block
        #first_line = f.readline()
        #inputSize = len(first_line)
        dataset = SupervisedDataSet(4, 1)  # specify size of data and target
        f.seek(0)  # move back to beginning of file
        # iterate through the file, 1 picture per line
        for line in f:
            mylist = json.loads(line)  # list object
            target = mylist[-1]  # retrieve and then delete the target classification
            del mylist[-2:]
            #print target
            dataset.addSample(tuple(mylist), (target,))
            #print json.loads(line)
    if os.path.isfile('annModel.xml'):
        skynet = NetworkReader.readFrom('annModel.xml')  # for use if individual sample files are used
    else:
        skynet = buildNetwork(dataset.indim, 8, dataset.outdim, bias=True, hiddenclass=TanhLayer)  # input, hidden, output
        # SoftmaxLayer, SigmoidLayer, LinearLayer, GaussianLayer
        # Note: the hidden neuron count is arbitrary; try 1, 3, 4 or 5 if this doesn't work out
    trainer = BackpropTrainer(skynet, dataset, learningrate=0.3, weightdecay=0.01, momentum=0.9)
    #trainer.trainUntilConvergence()
    for i in xrange(1000):
        trainer.train()
    #trainer.trainEpochs(1000)
    # Save the now trained neural network
    NetworkWriter.writeToFile(skynet, 'annModel.xml')
    print "[Network] has been Written"

    ################## SVM Method #######################
    # Change append method in write method for target persistence
    dataX = []
    datay = []
    with open(writeFile, 'r') as f:
        for line in f:
            mylist = json.loads(line)
            target2 = mylist[-1]
            dataX.append(mylist[:-2])
            datay.append(target2)
    #datay = [target2] * len(dataX)  # targets, size is n_samples, for use with individual sample files with the same target
    print [target2]
    print dataX
    print datay
    clf = svm.LinearSVC()
    clf.fit(dataX, datay)
    # Persist the trained model
    joblib.dump(clf, 'svmModel.pkl')
def Treinar():
    treinamento = BackpropTrainer(rede, dataset=base, learningrate=0.01, momentum=0.06)
    for i in range(1, 10000):
        erro = treinamento.train()
    NetworkWriter.writeToFile(rede, 'filename.xml')

rede = buildNetwork(3, 4, 1)
base = SupervisedDataSet(3, 1)
PrimeiraCarga()
Treinar()
#rede = NetworkReader.readFrom('filename.xml')
print(rede.activate([0.8, 0.3, 0.3]))
def Treinar():
    print 'Starting network training...... please wait'
    ds = SupervisedDataSet(50, 1)
    with open('trainning.txt') as f:
        for line in f:
            if line[0] != '#':
                line = line.replace('\n', '')
                line = line.split(',')
                exemplo = []
                for x in line:
                    exemplo.append(x)
                ds.addSample(exemplo[1:], exemplo[:1])  # [:1] takes the first value, which is the target
    ## Dataset
    trainer = BackpropTrainer(net, learningrate=0.04, momentum=0.07, verbose=False)
    trainer.trainOnDataset(ds, 10000)
    NetworkWriter.writeToFile(net, 'filename.xml')
    print 'Trained and ready'
def train():
    global neuralNetwork, inputDataSet, outputData
    if len(inputDataSet) == 1 and inputDataSet[0][2] < 0.78:
        outputData = 0.3  # encourage exploration
    else:
        outputData = float(input("Money Earned? "))
        if inputDataSet[len(inputDataSet) - 1][2] < 0.3 and outputData < 0:
            outputData = 0.3  # encourage exploration if risk is low
    print("Training with", outputData)
    trainingSets = SupervisedDataSet(3, 1)
    for i in range(len(inputDataSet)):
        print(inputDataSet[i])
        trainingSets.addSample(inputDataSet[i], outputData)
    trainer = BackpropTrainer(neuralNetwork, trainingSets, learningrate=0.7, momentum=0.3)
    trainer.trainEpochs()
    del inputDataSet[:]
    NetworkWriter.writeToFile(neuralNetwork, "network.xml")
    print("Updating neural network")
def perceptron(hidden_neurons=20, weightdecay=0.01, momentum=0.1):
    INPUT_FEATURES = 200
    CLASSES = 9
    HIDDEN_NEURONS = hidden_neurons
    WEIGHTDECAY = weightdecay
    MOMENTUM = momentum
    g = generate_data()
    alldata = g['d']
    testdata = generate_Testdata(g['index'])['d']
    #tstdata, trndata = alldata.splitWithProportion(0.25)
    #print type(tstdata)
    trndata = _convert_supervised_to_classification(alldata, CLASSES)
    tstdata = _convert_supervised_to_classification(testdata, CLASSES)
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()
    #fnn = NetworkReader.readFrom('ncibig(500+83.85).xml')
    fnn = buildNetwork(trndata.indim, HIDDEN_NEURONS, trndata.outdim, outclass=SoftmaxLayer)
    trainer = BackpropTrainer(fnn, dataset=trndata, momentum=MOMENTUM, verbose=True, weightdecay=WEIGHTDECAY, learningrate=0.01)
    result = 0
    ssss = 0
    for i in range(200):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
        out = fnn.activateOnDataset(tstdata)
        ssss = out
        out = out.argmax(axis=1)
        result = out
    df = pd.DataFrame(ssss)
    df.to_excel("ncibigout.xls")
    df = pd.DataFrame(result)
    df.insert(1, '1', tstdata['class'])
    df.to_excel("ncibig.xls")
    error = 0
    for i in range(len(tstdata['class'])):
        if tstdata['class'][i] != result[i]:
            error = error + 1
    #print (len(tstdata['class'])-error)*1.0/len(tstdata['class'])*100
    print AAC(result, tstdata['class'])
    print AUC(np.transpose(tstdata['class'])[0], result.transpose())
    print Fscore(np.transpose(tstdata['class'])[0], result.transpose())
    NetworkWriter.writeToFile(fnn, 'ncibig.xml')
def main(data_source=default_src):
    train_data, train_labels, valid_data, valid_labels, test_data, test_labels = generate_datasets(data_source)
    net = nn.nn_setup()
    timestamp = datetime.now().strftime("%Y%m%d_%H:%M:%S")
    netpath = net_file_base + timestamp + ".p"
    trained_net = nn.train_network_until_convergence(net, train_data, train_labels, valid_data, valid_labels, file_path=result_path)
    NetworkWriter.writeToFile(trained_net, netpath)
    test_set = {'test_data': test_data, 'test_labels': test_labels}
    with open(testset_path, "wb+") as testset_file:
        pickle.dump(test_set, testset_file)
def Treinar(rede, base):
    try:
        print("Starting training")
        erro = 10
        erro_old = 0
        tentativas = 0
        tentativa_controle = 0
        '''xml = "D:\SmartCane"
        dir = os.listdir(xml)
        for file in dir:
            if file == "baseIA.xml":
                os.remove(file)'''
        treinamento = BackpropTrainer(rede, dataset=base, learningrate=0.01, momentum=0.06)
        #for i in range(1, 100):
        while (tentativas < 10000) and (round(erro, 3) > 0.001):
            print("Preparing training pass")
            erro = treinamento.train()
            if erro < 0.001:
                print("Error is low")
            else:
                print(erro)
            if (tentativa_controle > 1000) and (erro_old > 0) and (erro_old <= erro):
                break
            if tentativa_controle > 1000:
                erro_old = erro
                tentativa_controle = 0
            tentativas += 1
            tentativa_controle += 1
        NetworkWriter.writeToFile(rede, 'baseIA.xml')
        print("Training base saved successfully")
    except:
        global threadON
        threadON = False
def init_brain(learn_data, epochs, hidden_count, TrainerClass=BackpropTrainer):
    global data_dir
    print("\t Epochs: ", epochs)
    if learn_data is None:
        return None
    print("Building network")
    net = buildNetwork(7 * 7, hidden_count, 4, hiddenclass=SigmoidLayer)
    # net = buildNetwork(64 * 64, 32 * 32, 8 * 8, 5)
    # net = buildNetwork(64 * 64, 5, hiddenclass=LinearLayer)
    # fill dataset with learn data
    trans = {'0': 0, '1': 1, '2': 2, '3': 3}
    ds = ClassificationDataSet(7 * 7, nb_classes=4, class_labels=['0', '1', '2', '3'])
    for inp, out in learn_data:
        ds.appendLinked(inp, [trans[out]])
    ds.calculateStatistics()
    print("\tNumber of classes in dataset = {0}".format(ds.nClasses))
    print("\tOutput in dataset is ", ds.getField('target').transpose())
    ds._convertToOneOfMany(bounds=[0, 1])
    print("\tBut after convert output in dataset is \n", ds.getField('target'))
    trainer = TrainerClass(net, learningrate=0.1, verbose=True)
    trainer.setData(ds)
    print("\tEverything is ready for learning.\nPlease wait, training in progress...")
    start = time.time()
    trainer.trainEpochs(epochs=epochs)
    end = time.time()
    f = open(data_dir + "/values.txt", "w")
    f.write("Training time: %.2f \n" % (end - start))
    f.write("Total epochs: %s \n" % trainer.totalepochs)
    # f.write("Error: %.22f" % (trainer.trainingErrors[len(trainer.trainingErrors) - 1]))
    f.close()
    print("Percent of error: ", percentError(trainer.testOnClassData(), ds['class']))
    print("\tOk. We have trained our network.")
    NetworkWriter.writeToFile(net, data_dir + "/net.xml")
    return net
def genTrain(rnaNmbr, gen, epochs, genPath, dataset):
    """
    Trains one generation (backpropagation).

    Parameters:
    -rnaNmbr-> number of neural networks per generation - Integer
    -gen-> number of the generation to train - Integer
    -epochs-> number of training iterations for each network - Integer
    -genPath-> path to the folder holding the generations of a genetic system - String
    -dataset-> array containing the dataset values - Array of Float

    Returns: none
    """
    k = 0
    inNmbr = len(dataset[0])
    outNmbr = len(dataset[1])
    ds = SupervisedDataSet(inNmbr, outNmbr)
    for i in range(len(dataset) / 2):
        ds.addSample(dataset[k], dataset[k + 1])
        k = k + 2
    for l in range(rnaNmbr):
        rna = NetworkReader.readFrom(genPath + '/gen' + str(gen) + '/rna' + str(l + 1) + '.xml')
        t = BackpropTrainer(rna, learningrate=0.01, momentum=0.1, verbose=True)
        t.trainOnDataset(ds, epochs)
        NetworkWriter.writeToFile(rna, genPath + '/gen' + str(gen) + '/rna' + str(l + 1) + '.xml')
        print "Network: {}".format(l + 1)
def end_function(board, lose):
    global net
    ds = SupervisedDataSet(97, 1)
    if lose:
        if board.active == BLACK:
            ds.addSample(board_to_input(board), 0)
            whiteboard = board_to_input(board)
            whiteboard[96] = 0
            ds.addSample(whiteboard, 1)
        elif board.active == WHITE:
            ds.addSample(board_to_input(board), 1)
            blackboard = board_to_input(board)
            blackboard[96] = 1
            ds.addSample(blackboard, 0)
    else:
        # black loses
        if board.active == BLACK:
            ds.addSample(board_to_input(board), 0)
            whiteboard = board_to_input(board)
            whiteboard[96] = 0
            ds.addSample(whiteboard, 0)
        # black wins
        elif board.active == WHITE:
            ds.addSample(board_to_input(board), 1)
            blackboard = board_to_input(board)
            blackboard[96] = 1
            ds.addSample(blackboard, 1)
    trainer = BackpropTrainer(net, ds)
    trainer.train()
    NetworkWriter.writeToFile(net, 'CheckersMini/synapsemon_random_white_mini_50.xml')
    NetworkWriter.writeToFile(net, 'CheckersMini/synapsemon_random_white_mini_50_copy.xml')
def move_function(board):
    global net
    # active player
    #if board.active == BLACK:
    #    ds.addSample(board_to_input(board), max_value)
    #    best_move = best_max_move
    #elif board.active == WHITE:
    #    ds.addSample(board_to_input(board), min_value)
    #    best_move = best_min_move
    boardString = board_to_input(board)
    black_material = 0
    white_material = 0
    for i in range(32):
        isKing = boardString[i + 64] == 1
        if boardString[i] == 1:
            if isKing:
                black_material = black_material + 2
            else:
                black_material = black_material + 1
        if boardString[i + 32] == 1:
            if isKing:
                white_material = white_material + 2
            else:
                white_material = white_material + 1
    board_val = float(black_material) / (black_material + white_material)  # float() guards against integer division under Python 2
    # create a new dataset; add a sample with the board as input, value as output
    ds = SupervisedDataSet(97, 1)
    ds.addSample(boardString, board_val)
    trainer = BackpropTrainer(net, ds)
    trainer.train()
    NetworkWriter.writeToFile(net, 'SynapsemonPie/synapsemon_primer110.xml')
    NetworkWriter.writeToFile(net, 'SynapsemonPie/synapsemon_primer110_copy.xml')
    return random.choice(board.get_moves())
def end_function(board, lose):
    global net
    ds = SupervisedDataSet(97, 1)
    if lose:
        if board.active == BLACK:
            ds.addSample(board_to_input(board), 0)
            whiteboard = board_to_input(board)
            whiteboard[96] = 0
            ds.addSample(whiteboard, 1)
        elif board.active == WHITE:
            ds.addSample(board_to_input(board), 1)
            blackboard = board_to_input(board)
            blackboard[96] = 1
            ds.addSample(blackboard, 0)
    else:
        # black loses
        if board.active == BLACK:
            ds.addSample(board_to_input(board), 0)
            whiteboard = board_to_input(board)
            whiteboard[96] = 0
            ds.addSample(whiteboard, 0)
        # black wins
        elif board.active == WHITE:
            ds.addSample(board_to_input(board), 1)
            blackboard = board_to_input(board)
            blackboard[96] = 1
            ds.addSample(blackboard, 1)
    trainer = BackpropTrainer(net, ds)
    trainer.train()
    NetworkWriter.writeToFile(net, 'CheckersMini/synapsemon_random_black_mini_140.xml')
    NetworkWriter.writeToFile(net, 'SynapsemonPie/synapsemon_random_black_mini_140_copy.xml')
def move_function(board):
    global net
    # active player
    #if board.active == BLACK:
    #    ds.addSample(board_to_input(board), max_value)
    #    best_move = best_max_move
    #elif board.active == WHITE:
    #    ds.addSample(board_to_input(board), min_value)
    #    best_move = best_min_move
    boardString = board_to_input(board)
    black_material = 0
    white_material = 0
    for i in range(32):
        isKing = boardString[i + 64] == '1'
        if boardString[i] == '1':
            if isKing:
                black_material = black_material + 2
            else:
                black_material = black_material + 1
        if boardString[i + 32] == '1':
            if isKing:
                white_material = white_material + 2
            else:
                white_material = white_material + 1
    board_val = float(black_material) / (black_material + white_material)  # float() guards against integer division under Python 2
    # create a new dataset; add a sample with the board as input, value as output
    ds = SupervisedDataSet(97, 1)
    ds.addSample(boardString, board_val)
    trainer = BackpropTrainer(net, ds)
    trainer.train()
    NetworkWriter.writeToFile(net, 'SynapsemonPie/synapsemon_primer1.xml')
    NetworkWriter.writeToFile(net, 'SynapsemonPie/synapsemon_primer1_copy.xml')
    return random.choice(board.get_moves())
forecasts = []
for i in range(30):
    fc = bestnet.activate(unknown)
    forecasts.append(fc)
    unknown = np.append(unknown[1:], fc)

plt.plot(validds['target'])
plt.plot(bestnet.activateOnDataset(validds))
plt.title('Validation')
plt.figure()
pred = bestnet.activateOnDataset(testds)
plt.plot(testds['target'])
plt.plot(pred)
plt.title('Testing')
# plt.figure()
# plt.plot(testds['target'][:30])
# plt.plot(forecasts)
# plt.title('Forecasts')
plt.show()

# NetworkWriter.writeToFile(bestnet, '../data/cpu_rnn_networks/' + machine + ".xml")
with open('../data/cpu_rnn_networks/hyperparams.csv', mode='a') as f:
    print([machine, besterr, besthparams[1], besthparams[3], besthparams[4], besthparams[2]], sep=',', end='\n', file=f)
t0 = cpu_time()
df_freq['target'] = classification_encoder.fit_transform(df.target)
binary_classes = binary_categorizer.fit_transform(df_freq.target.values.reshape(len(df_freq), 1))
binary_target_labels = list(df_freq.columns[-9:])
for i, label in enumerate(class_labels):
    df_freq[label] = binary_classes[:, i]
print("Transforming labels from text took {} sec of the CPU's time.".format(cpu_time() - t0))

ds = util.dataset_from_dataframe(df_freq, normalize=False, delays=[0], inputs=feature_labels, outputs=class_labels, verbosity=1)
nn = util.ann_from_ds(ds, N_hidden=[27, 18], hidden_layer_type=['Linear', 'Sigmoid'], output_layer_type='Linear', verbosity=1)
trainer = util.build_trainer(nn, ds=ds, verbosity=1)
trainer.trainUntilConvergence(maxEpochs=500, verbose=True)
NetworkWriter.writeToFile(trainer.module, __file__ + '.xml')

# this only works for the linear NN where the output is a float 0..8
# df['target_i'] = df_freq.target
# df['predicted_i'] = np.clip(np.round(trainer.module.activateOnDataset(ds)), 0, 8)
# df['predicted'] = [class_labels[i] for i in df['predicted_i']]
# df.to_csv('training_set_with_predictions')

# columns = feature_labels + target_labels + ['Predicted--{}'.format(outp) for outp in target_labels]
predicted_prob = pd.DataFrame((pd.np.array(trainer.module.activate(i)) for i in trainer.ds['input']), columns=class_labels)


def llfun(act, pred):
    small_value = 1e-15
        allScore += creature.score
    allScore /= len(creatures)
    creatures.sort(key=CreaturesScoresComparator, reverse=True)
    # Finding the best and saving it in case it beats the record
    bestCreature = creatures[0]
    print("Best score:", bestCreature.score)
    graphFile = open(graphFileAdress, 'a')
    graphFile.write(str(iteration))
    graphFile.write(';')
    graphFile.write(str(bestCreature.score))
    graphFile.write('\n')
    graphFile.close()
    if scoreRecord < bestCreature.score:
        scoreRecord = bestCreature.score
        NetworkWriter.writeToFile(bestCreature.Network, bestBotFileAdress)
    # Graphing average
    print("Average score:", allScore)
    graphFile = open(averageFileAdress, 'a')
    graphFile.write(str(iteration))
    graphFile.write(';')
    graphFile.write(str(allScore))
    graphFile.write('\n')
    graphFile.close()
    # Opening graph program
    if iteration == 0:
        _thread.start_new_thread(grapherThread, ())
    # Getting half-final
    halfFinal = []
if oldtstError == 0:
    oldtstError = tstError
if oldtstError < tstError:
    tstErrorCount = tstErrorCount + 1
    print 'No Improvement, count=%d' % tstErrorCount
    print '     Old Validation Error:', oldtstError
    print ' Current Validation Error:', tstError
if oldtstError > tstError:
    print 'Improvement made!'
    print '     Old Validation Error:', oldtstError
    print ' Current Validation Error:', tstError
    tstErrorCount = 0
    oldtstError = tstError
    NetworkWriter.writeToFile(TDNNClassificationNet, networkPath)
    plotLearningCurve()

trainingTime = time.time() - time_start
trainingTime = np.reshape(trainingTime, (1))
np.savetxt("25sigmoid/Trainingtime.txt", trainingTime)

####################
# Manual OFFLINE Test
####################
# TDNNClassificationNet = NetworkReader.readFrom('25sigmoid/TrainUntilConv.xml')
# print 'Loaded Trained Network!'
#
# print TDNNClassificationNet.paramdim
)  # train- set error mode
trainer.trainUntilConvergence(maxEpochs=MAX_EPOCHS, validationProportion=VALIDATION_PROPORTION)  # train
predictedVals = trainer.testOnClassData(dataset=alldata)  # set
trainerror = percentError(predictedVals, alldata["class"])  # validation
# prediction
answerlist = predictOnData(test_X)
####################################################################
END_TIME = "-".join(str(datetime.datetime.now()).split(":"))
t1 = time.time()
print("[Total Time]")
print(t1 - t0)
# report
NetworkWriter.writeToFile(n, "NN_model(%s).xml" % START_TIME)
report = NN_Report()
print(report)
with open("NN_result(%s).txt" % START_TIME, "w+") as f:
    f.writelines("[predicted y]")
    f.write(str([y + 1 for y in answerlist]))  # because of Y=Y-1 before
    f.write("\n#############################\n")
    f.writelines(report)
####################################################
# def NN_Report():
#     print("[Time]")
#     print(time_info)
#     print("---------------------------------------")
#     print("#1 Data Description")
if oldtstError == 0:
    oldtstError = tstError
if oldtstError < tstError:
    tstErrorCount = tstErrorCount + 1
    print 'No Improvement, count=%d' % tstErrorCount
    print '     Old Validation Error:', oldtstError
    print ' Current Validation Error:', tstError
if oldtstError > tstError:
    print 'Improvement made!'
    print '     Old Validation Error:', oldtstError
    print ' Current Validation Error:', tstError
    tstErrorCount = 0
    oldtstError = tstError
    NetworkWriter.writeToFile(LSTMClassificationNet, networkPath)
    plotLearningCurve()

trainingTime = time.time() - time_start
trainingTime = np.reshape(trainingTime, (1))
np.savetxt("20LSTMCell/Trainingtime.txt", trainingTime)

####################
# Manual OFFLINE Test
####################
# Test accuracy on the TEST data
# testingdata = SequenceClassificationDataSet(10, 1, nb_classes=8)
#
# for i in range(16000, 20000):
#     if i % 200 == 0:
    return pca


def logistic_regression_classifier(train_x, train_y):
    from sklearn.linear_model import LogisticRegression
    model = LogisticRegression(penalty='l2')
    model.fit(train_x, train_y)
    return model


if __name__ == '__main__':
    data_file = "mnist.pkl.gz"
    train_x, train_y, test_x, test_y = read_data(data_file)
    print "create pca...."
    pca = pca_process(train_x, 335)
    train_z = pca.transform(train_x)
    test_z = pca.transform(test_x)  # the test X must also be transformed to Z
    print train_z.shape[1]
    print "create model....."
    #model = logistic_regression_classifier(train_z, train_y)
    #trainpredict = model.predict(train_z)
    #testpredict = model.predict(test_z)
    #trainaccuracy = metrics.accuracy_score(train_y, trainpredict)
    #print 'trainaccuracy: %.2f%%' % (100 * trainaccuracy)
    #testaccuracy = metrics.accuracy_score(test_y, testpredict)
    #print 'testaccuracy: %.2f%%' % (100 * testaccuracy)
    start_time = time.time()
    net = detection1.create_network(train_z, train_y, test_z, test_y)
    print 'training took %fs!' % (time.time() - start_time)
    NetworkWriter.writeToFile(net, 'mynetwork2.xml')
print "Network Player 4 wins over Tactical Player" count += 1 wincnt += 1 semicount += 1 semiwincnt += 1 else: outcome = 0 print "Tie Game" ds = smartPlayer.gameOver(outcome) if (ds != None): trainer = BackpropTrainer(nn, dataset=ds) trainer.trainUntilConvergence(maxEpochs=50) NetworkWriter.writeToFile(nn, "othelloNetwork4.xml") #print "Terminate now to safely save" #time.sleep(3) perc = float(wincnt) / float(count) perc *= float(100) print perc if semicount == 20: semiperc = float(semiwincnt) / float(semicount) semiperc *= float(100) print "Last ", semicount, " games: ", semiperc, "%" semicount = 0 semiwincnt = 0
def goClassifer(self, iteration, learningrate, momentum, toFile):
    self.TrainingSetEventList[:] = []
    print "Iteration Count: " + str(iteration)
    # Set up classification data: 14 inputs, one-dimensional output with 7 possible classes
    trndata = ClassificationDataSet(14, nb_classes=7)
    tstdata = ClassificationDataSet(14, nb_classes=7)
    SAMPLE_SIZE = 100
    AmountPerSpecies = 100
    SingleBatIDToAdd = [1, 2, 3, 5, 6]  # for single
    MultiBatIDToAdd = [10, 11, 12, 14]  # for multi
    AddBatIDToAdd = [1, 2, 3, 5, 6]
    AddSingleMulti = [1, 2, 3, 5, 6, 10, 11, 12, 14]
    TraningDataAmount = 5000
    print "Adding Bat Single Species Events"
    minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target = self.getTrainingSpeciesDistributedData(SingleBatIDToAdd, AmountPerSpecies)
    SAMPLE_SIZE = len(minFreq)
    for i in range(0, SAMPLE_SIZE):
        #trndata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [1])  #self.convertID(target[i])
        trndata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [self.convertIDSingle(target[i])])
    #print "Adding Bat Multi Species Events"
    #minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target = self.getTrainingSpeciesDistributedData(MultiBatIDToAdd, AmountPerSpecies)
    #SAMPLE_SIZE = len(minFreq)
    #for i in range(0, SAMPLE_SIZE):
    #    trndata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [2])
    print "Adding noise events"
    NoiseID = 8
    minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage = self.getDistributedData(AmountPerSpecies, NoiseID)
    SAMPLE_SIZE = len(minFreq)
    for i in range(0, SAMPLE_SIZE):
        trndata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [self.convertIDSingle(NoiseID)])  #self.convertID(NoiseID)
    print "Adding something else events"
    SomethingElseID = 9
    SEAmount = 20
    minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage = self.getDistributedData(SEAmount, SomethingElseID)
    SAMPLE_SIZE = len(minFreq)
    for i in range(0, SAMPLE_SIZE):
        trndata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [self.convertIDSingle(SomethingElseID)])
    # Try to put all multi-species events into the "something else" class
    print "Adding something else events"
    SomethingElseID = 9
    BatIDToAdd2 = [10, 11, 12, 14]
    minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target = self.getTrainingSpeciesDistributedData(BatIDToAdd2, SEAmount)
    SAMPLE_SIZE = len(minFreq)
    for i in range(0, SAMPLE_SIZE):
        trndata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [self.convertIDSingle(SomethingElseID)])
    print "Adding test data"
    minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target = self.getDistrubedTestData(TraningDataAmount, SingleBatIDToAdd)
    maxSize = len(minFreq)
    for i in range(0, maxSize):
        tstdata.addSample([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]], [self.convertIDSingle(target[i])])
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()
    print "Number of training patterns: ", len(trndata)
    print "Input and output dimensions: ", trndata.indim, trndata.outdim
    print "Learning Rate: " + str(learningrate)
    print "Momentum: " + str(momentum)
    #print "First sample (input, target, class):"
    #print trndata['input'][0], trndata['target'][0], trndata['class'][0]
    #print "200th sample (input, target, class):"
    #print trndata['input'][100], trndata['target'][100], trndata['class'][100]
    # Set up the feed-forward network
    HiddenNeurons = 10
    #learningrate = 0.01
    #momentum = 0.1
    weightdecay = 0
    #from datainterface import ModuleWrapper, ClassificationModuleWrapper
    #from sgd import SGD
    net = buildNetwork(trndata.indim, HiddenNeurons, trndata.outdim, bias=True, outclass=SoftmaxLayer)
    #p0 = net.params.copy()
    #provider = ClassificationModuleWrapper(trndata, net, shuffling=False)
    #algo = SGD(provider, net.params.copy(), callback=self.printy, learning_rate=learningrate, momentum=momentum)
    #print '\n' * 2
    #print 'SGD-CE'
    #algo.run(1000)
    trainer = BackpropTrainer(net, dataset=trndata, momentum=momentum, learningrate=learningrate, verbose=False, weightdecay=weightdecay)
    #raw_input("Press Enter to continue...")
    print "Training data"
    if toFile:
        #filename = "InputN" + str(trndata.indim) + "HiddenN" + str(HiddenNeurons) + "OutputN" + str(trndata.outdim) + "Momentum" + str(momentum) + "LearningRate" + str(learningrate) + "Weightdecay" + str(weightdecay)
        root = "/home/anoch/Dropbox/SDU/10 Semester/MSc Project/Data Results/Master/BinarySpeciesTestMSE/"
        filename = "ClassifierSpeciesTest_" + str(iteration) + "_MSE_LR_" + str(learningrate) + "_M_" + str(momentum)
        folderName = root + "ClassifierSpeciesTest_MSE_LR_" + str(learningrate) + "_M_" + str(momentum)
        if not os.path.exists(folderName):
            os.makedirs(folderName)
        f = open(folderName + "/" + filename + ".txt", 'w')
        value = "Added Bat Species: " + str(AddBatIDToAdd) + "\n"
        f.write(value)
        value = "Number of bat patterns: " + str(len(trndata)) + "\n"
        f.write(value)
        value = "Number of noise patterns: " + str(AmountPerSpecies) + "\n"
        f.write(value)
        value = "Number of patterns per species: " + str(AmountPerSpecies) + "\n"
        f.write(value)
        value = "Number of test data: " + str(TraningDataAmount) + "\n"
        f.write(value)
        value = "Input, Hidden and output dimensions: " + str(trndata.indim) + ", " + str(HiddenNeurons) + ", " + str(trndata.outdim) + "\n"
        f.write(value)
        value = "Momentum: " + str(momentum) + "\n"
        f.write(value)
        value = "Learning Rate: " + str(learningrate) + "\n"
        f.write(value)
        value = "Weight Decay: " + str(weightdecay) + "\n"
        f.write(value)
        f.write("Input Activation function: Linear function\n")
        f.write("Hidden Activation function: Sigmoid function\n")
        f.write("Output Activation function: Softmax function\n")
    maxEpoch = 100
    for i in range(0, maxEpoch):
        # Train for ten epochs per iteration
        trainer.trainEpochs(10)
        averageError = trainer.testOnData(dataset=tstdata, verbose=False)
        #averageCEE = self.CrossEntropyErrorAveraged(net, tstdata)
        #print "Average Cross Entropy Error: " + str(averageCEE)
        #print "Mean Square Error: " + str(averageError)
        # percentError(out, true) returns the percentage of mismatch between out and target values (lists and arrays accepted): error = ((out - true)/true)*100
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
        print("epoch: %4d" % trainer.totalepochs, " train error: %5.2f%%" % trnresult, " test error: %5.2f%%" % tstresult)
        if tstresult < 27.0:
            raw_input("Press Enter to continue...")
            break
        if toFile:
            dataString = str(trainer.totalepochs) + ", " + str(averageError) + ", " + str(trnresult) + ", " + str(tstresult) + "\n"
            f.write(dataString)
        NetworkWriter.writeToFile(net, "ThirdStageClassifier.xml")
    if toFile:
        import numpy as np
        f.close()
        ConfusionMatrix, BatTarget = self.CorrectRatio(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
        filename = filename + "_CR"
        result_file = open(folderName + "/" + filename + ".txt", 'w')
        result_file.write("[Species]")
        result_file.write(str(BatTarget))
        result_file.write(str(ConfusionMatrix))
        np.savetxt(folderName + "/" + filename + ".csv", ConfusionMatrix, delimiter=",")
        result_file.close()
    self.CorrectRatio(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
    print "Done training"
def persistData(self, data, name):
    with open(self.relPathFromFilename(name), "wb") as f:
        pickle.dump(data, f)
    if name == NEURAL_NET_DUMP_NAME:
        NetworkWriter.writeToFile(data.net, self.relPathFromFilename(name + DATA_DUMP_NN_EXT))
out = rnn.activate(X_train[0])
out = out.argmax(axis=0)
index = 0

# evaluate the net on the test data
result = []
for x in X_test:
    result.append(rnn.activate(x).argmax())

# fill in the metric values
mresult = confusion_matrix(y_test, result)
precision.append(precision_score(y_test, result))
recall.append(recall_score(y_test, result))
f1.append(f1_score(y_test, result))
accuracy.append(accuracy_score(y_test, result))

# save the params
NetworkWriter.writeToFile(rnn, 'params.xml')

# print the results
print("precision %4.2f,%4.2f" % (np.mean(precision), np.std(precision)))
print("recall %4.2f,%4.2f" % (np.mean(recall), np.std(recall)))
print("f1 %4.2f,%4.2f" % (np.mean(f1), np.std(f1)))
print("accuracy %4.2f,%4.2f" % (np.mean(accuracy), np.std(accuracy)))
df_freq = (df_freq - df_freq.mean()) / df_freq.std()
print("Computing the TFIDF took {} sec of the CPU's time.".format(cpu_time() - t0))
df_binarized = otto.binarize_text_categories(df, class_labels=class_labels, target_column='target')
for c in df_binarized.columns:
    df_freq[c] = df_binarized[c]

ds = ann.dataset_from_dataframe(df_freq, normalize=False, delays=[0], inputs=feature_labels, outputs=class_labels, verbosity=1)
nn = ann.ann_from_ds(ds, N_hidden=[64, 32, 16], hidden_layer_type=['SteepSigmoid', 'SteepSigmoid', 'SteepSigmoid'], output_layer_type='SteepSigmoid', verbosity=1)
trainer = ann.build_trainer(nn, ds=ds, verbosity=1)
trainer.trainUntilConvergence(maxEpochs=80, validationProportion=0.1, verbose=True)
NetworkWriter.writeToFile(trainer.module, nlp.make_timetag() + '.xml')

# columns = feature_labels + target_labels + ['Predicted--{}'.format(outp) for outp in target_labels]
predicted_prob = pd.np.clip(pd.DataFrame((pd.np.array(trainer.module.activate(i)) for i in df_freq[feature_labels].values), columns=class_labels), 0, 1)
log_losses = [round(otto.log_loss(ds['target'], otto.normalize_dataframe(predicted_prob).values, method=m).sum(), 3) for m in 'ksfohe']
print('The log losses for the training set were {}'.format(log_losses))
# df = pd.DataFrame(table, columns=columns, index=df.index[max(delays):])

################################################################################
########## Predict labels for Validation/Test Set for Kaggle submission
#
if __name__ == "__main__": trainingSetsA = linspace(0, 2, num=15) validationSetsA = linspace(0, 2, num=5) trainingSetsB = linspace(0, 1, num=15) validationSetsB = linspace(0, 1, num=5) a = lambda x: exp(multiply(-1, sqrt(x))) b = lambda x: arctan(x) network1 = buildNetwork( 1, 3, 1) if not isfile("./Q2A.xml") else NetworkReader.readFrom("./Q2A.xml") network2 = buildNetwork( 1, 3, 1) if not isfile("./Q2B.xml") else NetworkReader.readFrom("./Q2B.xml") if not isfile("./Q2A.xml"): NetworkWriter.writeToFile(network1, "./Q2A.xml") if not isfile("./Q2B.xml"): NetworkWriter.writeToFile(network2, "./Q2B.xml") trainingSets1 = SupervisedDataSet(1, 1) trainingSets2 = SupervisedDataSet(1, 1) [ trainingSets1.addSample(trainingSetsA[i], a(trainingSetsA[i])) for i in range(len(trainingSetsA)) ] [ trainingSets2.addSample(trainingSetsB[i], b(trainingSetsB[i])) for i in range(len(trainingSetsB)) ] trainer1 = BackpropTrainer(network1, trainingSets1, learningrate=0.1) trainer2 = BackpropTrainer(network2, trainingSets2, learningrate=0.1) trainer1.trainUntilConvergence()
def rnn():
    # load dataframe from csv file
    df = pi.load_data_frame('../../data/NABIL.csv')
    # column names to match the indicator-calculating modules
    # TODO: resolve issue with column name
    df.columns = ['Transactions', 'Traded_Shares', 'Traded_Amount', 'High', 'Low', 'Close']
    data = df.Close.values
    # TODO: write min_max normalization
    # normalization
    # cp = dataframe.pop(' Close Price')
    # x = cp.values
    temp = np.array(data).reshape(len(data), 1)
    min_max_scaler = preprocessing.MinMaxScaler()
    data = min_max_scaler.fit_transform(temp)
    # dataframe[' Close Price'] = x_scaled

    # prepare sequential dataset for the pyBrain rnn network
    ds = SequentialDataSet(1, 1)
    for sample, next_sample in zip(data, cycle(data[1:])):
        ds.addSample(sample, next_sample)

    # build rnn network with LSTM layer, reusing a saved network if available
    # (the original checked 'random.xml', which never matches the file written below)
    if os.path.isfile('network.xml'):
        net = NetworkReader.readFrom('network.xml')
    else:
        net = buildNetwork(1, 20, 1, hiddenclass=LSTMLayer, outputbias=False, recurrent=True)

    # build trainer
    trainer = RPropMinusTrainer(net, dataset=ds, verbose=True)
    train_errors = []  # save errors for plotting later
    EPOCHS_PER_CYCLE = 5
    CYCLES = 5
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in range(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i + 1) * EPOCHS_PER_CYCLE
        print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
        sys.stdout.flush()
    # save the network
    NetworkWriter.writeToFile(net, 'network.xml')
    print()
    print("final error =", train_errors[-1])

    predicted = []
    for dat in data:
        predicted.append(net.activate(dat)[0])
    # data = min_max_scaler.inverse_transform(data)
    # predicted = min_max_scaler.inverse_transform(predicted)
    predicted_array = min_max_scaler.inverse_transform(np.array(predicted).reshape(-1, 1))
    print(predicted_array[-1])

    plt.figure()
    legend_actual, = plt.plot(range(0, len(data)), temp, label='actual', linestyle='--', linewidth=2, c='blue')
    legend_predicted, = plt.plot(range(0, len(data)), predicted_array, label='predicted', linewidth=1.5, c='red')
    plt.legend(handles=[legend_actual, legend_predicted])
    plt.savefig('error.png')
    plt.show()
# Model settings
DIM_LAYER = [alldata.indim] + N_NEURAL + [alldata.outdim]
NETWORK_SETTINGS = {"outclass": SoftmaxLayer, "bias": True}
N_HIDDEN_LAYER = len(N_NEURAL)
LEARNING_RATE = 0.005
MOMENTUM = 0.5
WEIGHTDECAY = 0.01
MAX_EPOCHS = 1000
VALIDATION_PROPORTION = 0.1
#####################################
n = buildNetwork(*DIM_LAYER, **NETWORK_SETTINGS)  # set Neural Network
trainer = BackpropTrainer(n, dataset=alldata, learningrate=LEARNING_RATE, momentum=MOMENTUM, verbose=True, weightdecay=WEIGHTDECAY)  # train- set error mode
trainer.trainUntilConvergence(maxEpochs=MAX_EPOCHS, validationProportion=VALIDATION_PROPORTION)  # train
predictedVals = trainer.testOnClassData(dataset=alldata)  # set
trainerror = percentError(predictedVals, alldata['class'])  # validation
# prediction
answerlist = predictOnData(test_X)
predictedVals2Raw = [y + 1 for y in answerlist]
####################################################################
END_TIME = "-".join(str(datetime.datetime.now()).split(":"))
t1 = time.time()
# report
filename = "NN%sn%s" % (str(NROWS), "x".join([str(nn) for nn in N_NEURAL]))
NetworkWriter.writeToFile(n, filename + ".xml")
NN_Report()
def train(self, X, y):
    if self._generate:
        self._model = train_network(X, y)
        NetworkWriter.writeToFile(self._model, self._filename)
    else:
        self._model = NetworkReader.readFrom(self._filename)
fnn = buildNetwork(trndata.indim, 100, trndata.outdim, outclass=SoftmaxLayer)
trainer = BackpropTrainer(fnn, dataset=trndata, momentum=0.1, learningrate=0.01, verbose=True, weightdecay=0.01)
print 'neural net made.'
print 'training start.'
trainer.trainEpochs(25)  # the more recipes there are, the longer each epoch will take
print 'Percent Error on Test dataset: ', percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
NetworkWriter.writeToFile(fnn, 'Desktop/food.xml')
print 'time taken: ', time.clock()

# TRAINING RESULTS
#
# dataset size    epochs trained    percent error (%)
#    500                25              0.8
#   1000                50              0.4   ... it might be a 40% error rate...
#   1000               100              0.4
#   1000               150              0.4   still 0.4%, though a 99.6% accuracy rate sure is pretty high
#   2000                50              0.2   pretty sure there are more decimals but
#   2000               100              0.2   it seems percentError() truncated them
#   3000                50              0.13333333333   ...nvm
#   4000                50              0.1
#
# using all 39774 recipes would probably take days to train
print("Number of test patterns:", len(test_set)) print("Input and output dimensions:", training_set.indim, training_set.outdim) print("Number of hidden units:", hidden_units) print() print("First sample (input, target, class):") print(training_set['input'][0], training_set['target'][0], training_set['class'][0]) print() network = buildNetwork(training_set.indim, hidden_units, training_set.outdim, outclass=SoftmaxLayer) trainer = BackpropTrainer(network, dataset = training_set) for i in range(1000): error = trainer.train() training_result = percentError(trainer.testOnClassData(dataset = training_set), training_set['class']) / 100 test_result = percentError(trainer.testOnClassData(dataset = test_set), test_set['class']) / 100 print("epoch: {0:4d} trnerr: {1:10.8f} tsterr: {2:10.8f} err: {3:10.8f}" .format(trainer.totalepochs, training_result, test_result, error)) # save network after every 10 epochs if (i+1) % 10 == 0: print() for c in range(test_set.nClasses): (class_, _) = test_set.splitByClass(c) sensitivity = (100 - percentError(trainer.testOnClassData(dataset = class_), class_['class']) / 100) if len(class_) > 0 else 0 print("Class {0:2d}: {1:4d} items. Correctly classified: {2:10.8f}".format(c, len(class_), sensitivity)) NetworkWriter.writeToFile(network, xmldir + networkname) print("Network saved as", networkname) print()
tanhTrainedNetworks = ["1tanh.xml", "2tanh.xml", "3tanh.xml"]

if readFile:
    for f in regTrainedNetworks + tanhTrainedNetworks:
        if os.path.isfile(f):
            print "reading network",
            print f
            net = NetworkReader.readFrom(f)
            testNetwork(net)
        else:
            print "file", f, "does not exist"
else:
    file = "newNetwork.xml"
    train_ds = SupervisedDataSet(28 * 28, 1)
    train_images, train_labels = mnist.load_mnist(dataset='training')
    for image, label in zip(train_images, train_labels):
        train_ds.addSample(ravel(image), label)
    if tanh:
        net = buildNetwork(train_ds.indim, 98, 98, 49, train_ds.outdim, bias=True, hiddenclass=TanhLayer)
    else:
        net = buildNetwork(train_ds.indim, 98, train_ds.outdim, bias=True)
    trainer = BackpropTrainer(net, train_ds)
    print "start training"
    for i in range(5):
        trainer.trainEpochs(1)
        print "epoch: %4d" % trainer.totalepochs
        testNetwork(net)
    NetworkWriter.writeToFile(net, file)
    bar = np.random.randint(9) + 1
    for j in range(32, 64):
        if inpt[j] < bar:
            inpt[j] = 0
        else:
            inpt[j] = 1
    for j in range(64, 96):
        inpt[j] = 0
    denom = sum(inpt)
    if denom == 0:
        denom = 1
    outpt = sum(inpt[:32]) / denom
    ds.addSample(inpt, outpt)

# train network
trainer = BackpropTrainer(net, ds)
trainer.train()

# save network
NetworkWriter.writeToFile(net, 'synapsemon.xml')
NetworkWriter.writeToFile(net, 'synapsemon_copy.xml')

# test the network
# for i in range(10):
#     a = np.random.randint(2, size=96)
#     for j in range(64, 96):
#         a[j] = 0
#     outpt = sum(a[:32]) / sum(a)
#     print(outpt)
#     print(net.activate(a))
from pybrain.structure.modules import SoftmaxLayer
from pybrain.tools.customxml.networkwriter import NetworkWriter

classes = 3
new_data = genfromtxt('NerfGunConradTrainingPhotos/nerfgundata.csv', delimiter=',').transpose()
new_target = genfromtxt('NerfGunConradTrainingPhotos/target.csv', delimiter=',')
numsamples, numvals = new_data.shape  # number of datapoints per analysed image, number of images to be analysed
X, y = new_data, new_target  # switch to matrix terminology for the math portion
ds = ClassificationDataSet(numvals, 1, nb_classes=classes)  # parse the numpy array into a ClassificationDataSet
for k in range(len(X)):  # iterate over each sample
    ds.addSample(ravel(X[k]), y[k])  # add each sample to the new ClassificationDataSet
tstdatatmp, trndatatmp = ds.splitWithProportion(0.25)
tstdata = ClassificationDataSet(numvals, 1, nb_classes=classes)
for n in range(0, tstdatatmp.getLength()):
    tstdata.addSample(tstdatatmp.getSample(n)[0], tstdatatmp.getSample(n)[1])
trndata = ClassificationDataSet(numvals, 1, nb_classes=classes)
for n in range(0, trndatatmp.getLength()):
    trndata.addSample(trndatatmp.getSample(n)[0], trndatatmp.getSample(n)[1])
print(type(ds))
trndata._convertToOneOfMany()
tstdata._convertToOneOfMany()
fnn = buildNetwork(trndata.indim, 64, trndata.outdim, outclass=SoftmaxLayer)  # construct a network with random initial weights: input dim, hidden dim, output dim
trainer = BackpropTrainer(fnn, dataset=trndata, momentum=0.1, learningrate=.001, verbose=True, weightdecay=0.01)
for i in range(0, 200):  # run for 200 epochs
    print('Percent Error on Test dataset after training for epochs: ', i, ' ', percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class']))  # report percent error
    log = open('finallogfile.txt', 'a')
    log.write(str(percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])))
    log.write('\n')
    log.close()
    trainer.trainEpochs(1)  # train one epoch at a time to get percent error at each step
NetworkWriter.writeToFile(fnn, 'nerfgunnetwork.xml')