def predict_net(self, net, input_table, daily): real_plot = [] predict_plot = [] error_abs_sum = [] error_actual_sum = [] for date, value in input_table.iterrows(): ts = UnsupervisedDataSet(input_table.shape[1], ) ts.addSample(input_table.loc[date]) pre = [int(i) for i in net.activateOnDataset(ts)[0]] actual = np.array(daily.loc[date]) error_abs, error_actual = self.cal_error(pre, actual) error_abs_sum.append(error_abs) error_actual_sum.append(error_actual) for i in range(len(pre)): predict_plot.append(pre[i]) real_plot.append(actual[i]) mape_error = np.array(error_abs_sum).sum() / np.array( error_actual_sum).sum() print 'Mape= ', mape_error return np.array(error_abs_sum).sum(), np.array( error_actual_sum).sum(), real_plot, predict_plot
def get_nn_dom_prediction(train_data, train_truth, test_data, test_truth,
                          hidden=(5, ), weight_decay=0.0):
    """Train a neural net on dominance-encoded genotypes, predict the test set.

    Targets are z-scored before training; predictions are mapped back to the
    original scale and returned as a flat array.
    """
    # Re-encode both matrices so each marker captures dominance.
    train_data = _convert_to_individual_alleles(train_data)
    test_data = _convert_to_individual_alleles(test_data)

    truth_mean = np.mean(train_truth)
    truth_std = np.std(train_truth)

    # Supervised dataset with standardised targets.
    train_ds = SupervisedDataSet(train_data.shape[1], 1)
    train_ds.setField('input', train_data)
    train_ds.setField('target',
                      (train_truth[:, np.newaxis] - truth_mean) / truth_std)

    network = _get_nn(train_data.shape[1], hidden)
    _train_nn(network, train_ds, weight_decay)

    # Sample-only dataset for the held-out rows.
    holdout_ds = UnsupervisedDataSet(test_data.shape[1])
    holdout_ds.setField('sample', test_data)

    # Undo the target standardisation on the way out.
    predicted = network.activateOnDataset(holdout_ds) * truth_std + truth_mean
    return predicted.ravel()
def partialTraining(result_queue, li_pair): length = li_pair[0] index = li_pair[1] print length print index actualLength = 0 t_set = [] for i in range(index, index + length): try: t_set.append(numpy.loadtxt(filename + str(i + 1) + ".txt")) actualLength += 1 except Exception as e: break d_set = SupervisedDataSet(window, window) for i in range(0, actualLength - 1): d_set.addSample(t_set[i], t_set[i + 1]) network = buildNetwork(window, window - 1, window, outclass=LinearLayer, bias=True, recurrent=True) bpTrainer = BackpropTrainer(network, d_set) bpTrainer.trainEpochs(100) t_s = UnsupervisedDataSet(window, ) #add the sample to be predicted t_s.addSample(t_set[actualLength - 1]) result = network.activateOnDataset(t_s) result_queue.put(result[0])
def test_net(self, input_table, daily=None, label=None): if self.net_num == 1: for date, value in input_table.iterrows(): ts = UnsupervisedDataSet(input_table.shape[1], ) ts.addSample(value) pred = self.prediction_net[0].activateOnDataset(ts)[0] self.predict_plot.append(pred) self.result[date] = pred actual = np.array(daily.loc[date]) self.cal_error_for_list(pred, actual) else: for date, classNo in label.iterrows(): classNo_int = int(classNo[0]) # add test sample ts = UnsupervisedDataSet(input_table.shape[1], ) ts.addSample(input_table.loc[date]) # create prediction result pred = self.prediction_net[classNo_int].activateOnDataset( ts)[0] self.predict_plot.append(pred) self.result[date] = pred if isinstance(daily, pd.DataFrame): actual = np.array(daily.loc[date]) self.cal_error_for_list(pred, actual) else: pass if isinstance(daily, pd.DataFrame): print "MAPE = ", self.cal_error_sum()
def train(self):
    """Greedy layer-wise pre-training of the deep belief network.

    A throwaway network (`piecenet`) is grown one RBM layer at a time;
    after each layer is trained, the original dataset is pushed through
    the partial network to produce the training set for the next layer.
    Trained RBMs and their inverses are collected in self.rbms and
    self.invRbms.
    """
    # We will build up a network piecewise in order to create a new dataset
    # for each layer.
    dataset = self.dataset
    piecenet = FeedForwardNetwork()
    piecenet.addInputModule(copy.deepcopy(self.net.inmodules[0]))
    # Add a bias
    bias = BiasUnit()
    piecenet.addModule(bias)
    # Add the first visible layer
    firstRbm = self.iterRbms().next()
    visible = copy.deepcopy(firstRbm.visible)
    piecenet.addModule(visible)
    # For saving the rbms and their inverses
    self.invRbms = []
    self.rbms = []
    for rbm in self.iterRbms():
        self.net.sortModules()
        # Train this layer with an rbm trainer for `epochs` epochs.
        trainer = self.trainerKlass(rbm, dataset, self.cfg)
        for _ in xrange(self.epochs):
            # BUG FIX: the original wrote `trainer.train` (attribute
            # access, never called), so no training ever happened.
            trainer.train()
        self.invRbms.append(trainer.invRbm)
        self.rbms.append(rbm)
        # Add the connections and the hidden layer of the rbm to the net.
        hidden = copy.deepcopy(rbm.hidden)
        biascon = FullConnection(bias, hidden)
        biascon.params[:] = rbm.biasWeights
        con = FullConnection(visible, hidden)
        con.params[:] = rbm.weights
        piecenet.addConnection(biascon)
        piecenet.addConnection(con)
        piecenet.addModule(hidden)
        # Overwrite old outputs
        piecenet.outmodules = [hidden]
        piecenet.outdim = rbm.hiddenDim
        piecenet.sortModules()
        # Propagate the raw data through the grown network to build the
        # next layer's training set.
        dataset = UnsupervisedDataSet(rbm.hiddenDim)
        for sample, in self.dataset:
            new_sample = piecenet.activate(sample)
            dataset.addSample(new_sample)
        visible = hidden
def polynomialRegression(train_file, predict_file, res): X_train, y_train = load_svmlight_file(train_file) dim = X_train.shape[1] X_test, y_test = load_svmlight_file(predict_file, n_features=X_train.shape[1]) train = SupervisedDataSet(dim, 1) test = UnsupervisedDataSet(dim) trainM = X_train.todense() for x, y in zip(trainM, y_train): train.addSample(x, y) testM = X_test.todense() for x in testM: test.addSample(x) from pybrain.structure import SigmoidLayer, LinearLayer from pybrain.tools.shortcuts import buildNetwork print X_train.shape[1] net = buildNetwork( dim, 100, # number of hidden units 1, bias=True, hiddenclass=SigmoidLayer, outclass=LinearLayer) #---------- # train #---------- from pybrain.supervised.trainers import BackpropTrainer trainer = BackpropTrainer(net, train, verbose=True) trainer.trainUntilConvergence(maxEpochs=100) #---------- # evaluate #---------- result = [] for x in testM: result.append(net.activate(np.asarray(x).flatten())[0]) print result print y_train for i in result: with open(res, "a") as myfile: myfile.write(str(i) + ' ')
def _createUnsupervisedDataSet(X):
    """Wrap each row of matrix X in a PyBrain UnsupervisedDataSet."""
    dataset = UnsupervisedDataSet(X.shape[1])
    for row in X:
        dataset.addSample(row)
    return dataset
maxepochs = 50 for i in range(0, maxepochs): sys.stdout.write('\r' + str(i) + " / " + str(maxepochs)) aux = trainer.train() fileObject = open('trainingData', 'w') pickle.dump(net, fileObject) fileObject.close() else: print "> Using a model from file" fileObject = open('trainingData', 'r') net = pickle.load(fileObject) print "CLASSIFY" ts = UnsupervisedDataSet(100, ) #input = map(int,'13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33'.split()) df = pd.read_csv('lastDay.csv', delimiter=',', index_col=None) input = [int(x) for x in df.values] print "Last day values: %s" % input minimumLevel = min(input) maximumLevel = max(input) - minimumLevel input = [element - minimumLevel for element in input] input = [element / float(maximumLevel * 2) for element in input] # ts.addSample(input) # verID = 175 # input = verificationSamples[verID][0]
#!/usr/bin/env python
"""Miniscule restricted Boltzmann machine usage example."""
# Shebang moved to line 1 (it is ignored anywhere else) and the module
# docstring placed before the imports so it is an actual docstring; the
# __future__ import must still precede all other statements.
from __future__ import print_function

__author__ = 'Justin S Bayer, [email protected]'

from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
from pybrain.datasets import UnsupervisedDataSet

# Two complementary alternating 6-bit patterns as training data.
ds = UnsupervisedDataSet(6)
ds.addSample([0, 1] * 3)
ds.addSample([1, 0] * 3)

cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3

rbm = Rbm.fromDims(6, 1)
trainer = RbmBernoulliTrainer(rbm, ds, cfg)

# Show the parameters before and after each training step.
print(rbm.params, rbm.biasParams)
for _ in range(50):
    trainer.train()
    print(rbm.params, rbm.biasParams)
net.addConnection(FullConnection(bias, h1)) net.addConnection(FullConnection(bias, h2)) net.addConnection(FullConnection(bias, out)) net.sortModules() return net if __name__ == "__main__": import GwData data = GwData.GwData() xs = get_binary_data(data) ys = data.labels_for("50") sdataset = SupervisedDataSet(xs.shape[1], 1) udataset = UnsupervisedDataSet(xs.shape[1]) for i, x in enumerate(xs): sdataset.addSample(x, ys[i]) udataset.addSample(x) epochs = 100 layerDims = [xs.shape[1], 300, 100, 2] #net = buildNetwork(*layerDims) net = custom_build_network(layerDims) trainer = DeepBeliefTrainer(net, dataset=udataset) #trainer = DeepBeliefTrainer(net, dataset=sdataset) trainer.trainEpochs(epochs)
# Gather the per-worker partial results into one ordered training set.
while not results.empty():
    finalTrainingSet.append(results.get())
for i in range(size - 1):
    ds.addSample(finalTrainingSet[i], finalTrainingSet[i + 1])
# Recurrent net mapping one window to the next, with a slightly smaller
# hidden layer.
net = buildNetwork(window, window - 1, window, outclass=LinearLayer,
                   bias=True, recurrent=True)
trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(100)
# Predict the window that follows the last known one.
ts = UnsupervisedDataSet(window, )
ts.addSample(finalTrainingSet[size - 1])
finalResult = net.activateOnDataset(ts)
t1 = time.time()
# NOTE(review): t0, time_list and result_list are defined earlier in the
# file -- this section presumably runs inside a benchmarking loop; the
# mean/std prints below only make sense after several iterations. Verify.
time_list.append(t1 - t0)
result_list.append(finalResult[0])
#for elem in finalResult[0]:
#    print elem
print "time average: ", numpy.mean(time_list)
print "time std deviation", numpy.std(time_list)
arr = numpy.array(result_list)
def test_network(self, network, data):
    """Activate `network` on a single two-dimensional sample and return
    the network's output vector for it."""
    sample_set = UnsupervisedDataSet(2)
    sample_set.addSample(data)
    outputs = network.activateOnDataset(sample_set)
    return outputs[0]
n = input("Enter number of process:") print "Enter the burst time of first five process:" bt = [] p = [] for i in range(0, 5): bt.append(int(input("p%(x)d :" % {"x": i + 1}))) for i in range(0, n): p.append(i + 1) for l in range(5, n): a = str(bt[l - 5]) b = str(bt[l - 4]) c = str(bt[l - 3]) d = str(bt[l - 2]) e = str(bt[l - 1]) ts = UnsupervisedDataSet(5, ) ts.addSample(map(int, [a, b, c, d, e])) x = net.activateOnDataset(ts) bt.append(int(x)) print "----------------------------------------------------------------" print " FCFS " print "----------------------------------------------------------------" wt = [0] total = 0 for i in range(1, n): wt.append(0) for j in range(0, i): wt[i] += bt[j] total += wt[i] avg_wt1 = float(total) / n
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
from pybrain.structure import FullConnection
from pybrain.datasets import ClassificationDataSet, SupervisedDataSet, UnsupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.unsupervised.trainers.deepbelief import DeepBeliefTrainer
from pybrain.supervised.trainers import Trainer
from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
import csv
import numpy

# set up a basic feed forward network
# NOTE(review): FeedForwardNetwork is not imported in this chunk --
# presumably imported elsewhere in the file; verify.
net = FeedForwardNetwork()
# Classification dataset: 9 input features, one target with two classes.
ds = ClassificationDataSet(9, 1, nb_classes=2, class_labels=['FRAUD', 'N'])
# Unlabelled 9-feature dataset; its use is not shown in this chunk.
temp_ds = UnsupervisedDataSet(9)

# define 3 layers
inLayer = LinearLayer(9, "visible")
hiddenLayer = SigmoidLayer(16)
outLayer = LinearLayer(1)

# add layers to network
net.addInputModule(inLayer)
net.addModule(hiddenLayer)
net.addOutputModule(outLayer)

# define connections between layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)