Example #1
def partialTraining(result_queue, li_pair):
    length = li_pair[0]
    index = li_pair[1]

    print length
    print index

    actualLength = 0
    t_set = []
    for i in range(index, index + length):
        try:
            t_set.append(numpy.loadtxt(filename + str(i + 1) + ".txt"))
            actualLength += 1
        except Exception as e:
            break

    d_set = SupervisedDataSet(window, window)
    for i in range(0, actualLength - 1):
        d_set.addSample(t_set[i], t_set[i + 1])

    network = buildNetwork(window,
                           window - 1,
                           window,
                           outclass=LinearLayer,
                           bias=True,
                           recurrent=True)
    bpTrainer = BackpropTrainer(network, d_set)
    bpTrainer.trainEpochs(100)

    t_s = UnsupervisedDataSet(window, )
    #add the sample to be predicted
    t_s.addSample(t_set[actualLength - 1])

    result = network.activateOnDataset(t_s)
    result_queue.put(result[0])
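
The function above loads `window`-sized vectors from numbered text files (it relies on module-level `filename` and `window` variables defined elsewhere), trains a small recurrent network on consecutive pairs, and pushes one predicted vector into a queue. A minimal, hypothetical driver for it could look like this:

# Hypothetical driver for partialTraining above; it assumes the module also
# defines the `filename` prefix and `window` size used inside the function.
import multiprocessing

if __name__ == '__main__':
    result_queue = multiprocessing.Queue()
    worker = multiprocessing.Process(target=partialTraining,
                                     args=(result_queue, (10, 0)))
    worker.start()
    worker.join()
    print result_queue.get()  # one predicted vector of `window` values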
Example #2
    def __init__(self, inp, words):
        self.length = len(inp)
        UnsupervisedDataSet.__init__(self, self.length)
        self.setField('input', inp)
        self.addField('words', 1)
        self.setField('words', words)
        self.annotationSets = {}
Example #3
    def predict_net(self, net, input_table, daily):
        real_plot = []
        predict_plot = []
        error_abs_sum = []
        error_actual_sum = []

        for date, value in input_table.iterrows():
            ts = UnsupervisedDataSet(input_table.shape[1], )
            ts.addSample(input_table.loc[date])
            pre = [int(i) for i in net.activateOnDataset(ts)[0]]
            actual = np.array(daily.loc[date])

            error_abs, error_actual = self.cal_error(pre, actual)
            error_abs_sum.append(error_abs)
            error_actual_sum.append(error_actual)

            for i in range(len(pre)):
                predict_plot.append(pre[i])
                real_plot.append(actual[i])

        mape_error = np.array(error_abs_sum).sum() / np.array(
            error_actual_sum).sum()
        print 'Mape= ', mape_error
        return np.array(error_abs_sum).sum(), np.array(
            error_actual_sum).sum(), real_plot, predict_plot
Example #4
def get_nn_dom_prediction(train_data,
                          train_truth,
                          test_data,
                          test_truth,
                          hidden=(5, ),
                          weight_decay=0.0):
    # Convert data to capture dominance.
    train_data, test_data = tuple(
        map(_convert_to_individual_alleles, [train_data, test_data]))

    mean = np.mean(train_truth)
    sd = np.std(train_truth)

    # Supervised training dataset.
    ds = SupervisedDataSet(train_data.shape[1], 1)
    ds.setField('input', train_data)
    ds.setField('target', (train_truth[:, np.newaxis] - mean) / sd)

    net = _get_nn(train_data.shape[1], hidden)

    _train_nn(net, ds, weight_decay)

    # Unsupervised (test) dataset.
    test_ds = UnsupervisedDataSet(test_data.shape[1])
    test_ds.setField('sample', test_data)

    predicted = net.activateOnDataset(test_ds) * sd + mean
    return predicted.ravel()
Example #5
def process_symbol(net, symbol):
    settings = load(symbol + '.set')
    if len(settings) == 0:
        return
    yahoo = Share(symbol)
    mp = 2.0 * settings['maxc']
    p = float(yahoo.get_price()) / mp
    d = yahoo.get_trade_datetime()
    wd = datetime.datetime.strptime(d[:10], "%Y-%m-%d").weekday() / 6.0
    v = float(yahoo.get_volume()) / (2 * settings['maxv'])
    ts = UnsupervisedDataSet(3, )
    ts.addSample((wd, p, v), )
    ret = net.activate([wd, p, v])
    print "IK, K, V ", ret
Example #6
class RAE(object):

	def __init__(self, in_dims, out_dims):
		self.dataset = UnsupervisedDataSet(in_dims)
		cfg = RbmGibbsTrainerConfig()
		cfg.maxIter = 5
		self.model = Rbm.fromDims(in_dims, out_dims)
		self.trainer = RbmBernoulliTrainer(self.model, self.dataset, cfg)

	def add_data(self, data):
		for d in data:
			self.dataset.addSample(d)

	def _train(self, iterations):
		for _ in xrange(iterations):
			self.trainer.train()
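
A short, hypothetical usage sketch for the RAE wrapper above (the binary patterns are made up; everything else comes from the snippet):

# Hypothetical usage of the RAE class defined above: train a 6-visible /
# 2-hidden RBM on two binary patterns and inspect the learned weights.
rae = RAE(6, 2)
rae.add_data([[0, 1, 0, 1, 0, 1],
              [1, 0, 1, 0, 1, 0]])
rae._train(50)
print rae.model.params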
Example #7
 def train(self):
     # We will build up a network piecewise in order to create a new dataset
     # for each layer.
     dataset = self.dataset
     piecenet = FeedForwardNetwork()
     piecenet.addInputModule(copy.deepcopy(self.net.inmodules[0]))
     # Add a bias
     bias = BiasUnit()
     piecenet.addModule(bias)
     # Add the first visible layer
     firstRbm = self.iterRbms().next()
     visible = copy.deepcopy(firstRbm.visible)
     piecenet.addModule(visible)
     # For saving the rbms and their inverses
     self.invRbms = []
     self.rbms = []
     for rbm in self.iterRbms():
         self.net.sortModules()
         # Train the first layer with an rbm trainer for `epoch` epochs.
         trainer = self.trainerKlass(rbm, dataset, self.cfg)
         for _ in xrange(self.epochs):
             trainer.train()
         self.invRbms.append(trainer.invRbm)
         self.rbms.append(rbm)
         # Add the connections and the hidden layer of the rbm to the net.
         hidden = copy.deepcopy(rbm.hidden)
         biascon = FullConnection(bias, hidden)
         biascon.params[:] = rbm.biasWeights
         con = FullConnection(visible, hidden)
         con.params[:] = rbm.weights
         
         piecenet.addConnection(biascon)
         piecenet.addConnection(con)
         piecenet.addModule(hidden)
         # Overwrite old outputs
         piecenet.outmodules = [hidden]
         piecenet.outdim = rbm.hiddenDim
         piecenet.sortModules()
         
         dataset = UnsupervisedDataSet(rbm.hiddenDim)
         for sample, in self.dataset:
             new_sample = piecenet.activate(sample)
             dataset.addSample(new_sample)
         visible = hidden
Example #8
    def train(self):
        # We will build up a network piecewise in order to create a new dataset
        # for each layer.
        dataset = self.dataset
        piecenet = FeedForwardNetwork()
        piecenet.addInputModule(copy.deepcopy(self.net.inmodules[0]))
        # Add a bias
        bias = BiasUnit()
        piecenet.addModule(bias)
        # Add the first visible layer
        firstRbm = self.iterRbms().next()
        visible = copy.deepcopy(firstRbm.visible)
        piecenet.addModule(visible)
        # For saving the rbms and their inverses
        self.invRbms = []
        self.rbms = []
        for rbm in self.iterRbms():
            self.net.sortModules()
            # Train the first layer with an rbm trainer for `epoch` epochs.
            trainer = self.trainerKlass(rbm, dataset, self.cfg)
            for _ in xrange(self.epochs):
                trainer.train()
            self.invRbms.append(trainer.invRbm)
            self.rbms.append(rbm)
            # Add the connections and the hidden layer of the rbm to the net.
            hidden = copy.deepcopy(rbm.hidden)
            biascon = FullConnection(bias, hidden)
            biascon.params[:] = rbm.biasWeights
            con = FullConnection(visible, hidden)
            con.params[:] = rbm.weights

            piecenet.addConnection(biascon)
            piecenet.addConnection(con)
            piecenet.addModule(hidden)
            # Overwrite old outputs
            piecenet.outmodules = [hidden]
            piecenet.outdim = rbm.hiddenDim
            piecenet.sortModules()

            dataset = UnsupervisedDataSet(rbm.hiddenDim)
            for sample, in self.dataset:
                new_sample = piecenet.activate(sample)
                dataset.addSample(new_sample)
            visible = hidden
Example #9
def polynomialRegression(train_file, predict_file, res):
    X_train, y_train = load_svmlight_file(train_file)
    dim = X_train.shape[1]
    X_test, y_test = load_svmlight_file(predict_file,
                                        n_features=X_train.shape[1])
    train = SupervisedDataSet(dim, 1)
    test = UnsupervisedDataSet(dim)
    trainM = X_train.todense()

    for x, y in zip(trainM, y_train):
        train.addSample(x, y)
    testM = X_test.todense()
    for x in testM:
        test.addSample(x)
    from pybrain.structure import SigmoidLayer, LinearLayer
    from pybrain.tools.shortcuts import buildNetwork
    print X_train.shape[1]
    net = buildNetwork(
        dim,
        100,  # number of hidden units
        1,
        bias=True,
        hiddenclass=SigmoidLayer,
        outclass=LinearLayer)
    #----------
    # train
    #----------
    from pybrain.supervised.trainers import BackpropTrainer
    trainer = BackpropTrainer(net, train, verbose=True)
    trainer.trainUntilConvergence(maxEpochs=100)

    #----------
    # evaluate
    #----------
    result = []
    for x in testM:
        result.append(net.activate(np.asarray(x).flatten())[0])
    print result
    print y_train
    for i in result:
        with open(res, "a") as myfile:
            myfile.write(str(i) + ' ')
Example #10
def polynomialRegression(train_file, predict_file, res):
    X_train, y_train = load_svmlight_file(train_file)
    dim = X_train.shape[1]
    X_test, y_test = load_svmlight_file(predict_file,
                                        n_features=X_train.shape[1])
    train = SupervisedDataSet(dim, 1)
    test = UnsupervisedDataSet(dim)
    trainM = X_train.todense()

    for x, y in zip(trainM, y_train):
        train.addSample(x, y)
    testM = X_test.todense()
    for x in testM:
        test.addSample(x)
    from pybrain.structure import SigmoidLayer, LinearLayer
    from pybrain.tools.shortcuts import buildNetwork
    print X_train.shape[1]
    net = buildNetwork(
        dim,
        100,  # number of hidden units
        1,
        bias=True,
        hiddenclass=SigmoidLayer,
        outclass=LinearLayer)
    # ----------
    # train
    # ----------
    from pybrain.supervised.trainers import BackpropTrainer
    trainer = BackpropTrainer(net, train, verbose=True)
    trainer.trainUntilConvergence(maxEpochs=100)

    # ----------
    # evaluate
    # ----------
    result = []
    for x in testM:
        result.append(net.activate(np.asarray(x).flatten())[0])
    print result
    print y_train
    for i in result:
        with open(res, "a") as myfile:
            myfile.write(str(i) + ' ')
Example #11
def createUnsupervisedDataSetFromCSVFile(input_file):
    # init the dataset
    print "Creating an unsupervised dataset from", input_file

    ds = UnsupervisedDataSet(nFeatures)

    with open(input_file) as training_data:
        reader = csv.reader(training_data)

        for row in reader:
            row_data = []
            for data in row:
                try:
                    row_data.append(float(data))
                except ValueError:
                    print "Non-floatable value!"

            ds.addSample(tuple(row_data[2:]))  # drop the Qid and Aid

    print "Dataset created with size", len(ds), "and", ds.dim, "features."

    return ds
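
A hypothetical call to the loader above (the file name is illustrative, and the function relies on a module-level `nFeatures` defined elsewhere):

# Hypothetical usage of createUnsupervisedDataSetFromCSVFile; the CSV file
# name is made up and `nFeatures` must already be set at module level.
ds = createUnsupervisedDataSetFromCSVFile("training_data.csv")
print len(ds)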
Example #12
def get_nn_dom_prediction(train_data, train_truth, test_data, test_truth,
                          hidden=(5,), weight_decay=0.0):
    # Convert data to capture dominance.
    train_data, test_data = tuple(map(_convert_to_individual_alleles, [train_data, test_data]))

    mean = np.mean(train_truth)
    sd = np.std(train_truth)

    # Supervised training dataset.
    ds = SupervisedDataSet(train_data.shape[1], 1)
    ds.setField('input', train_data) 
    ds.setField('target', (train_truth[:, np.newaxis] - mean) / sd)

    net = _get_nn(train_data.shape[1], hidden)

    _train_nn(net, ds, weight_decay)

    # Unsupervised (test) dataset.
    test_ds = UnsupervisedDataSet(test_data.shape[1])
    test_ds.setField('sample', test_data)

    predicted = net.activateOnDataset(test_ds) * sd + mean
    return predicted.ravel()
Example #13
    def test_net(self, input_table, daily=None, label=None):
        if self.net_num == 1:

            for date, value in input_table.iterrows():

                ts = UnsupervisedDataSet(input_table.shape[1], )

                ts.addSample(value)

                pred = self.prediction_net[0].activateOnDataset(ts)[0]

                self.predict_plot.append(pred)
                self.result[date] = pred

                actual = np.array(daily.loc[date])

                self.cal_error_for_list(pred, actual)

        else:
            for date, classNo in label.iterrows():
                classNo_int = int(classNo[0])

                # add test sample
                ts = UnsupervisedDataSet(input_table.shape[1], )

                ts.addSample(input_table.loc[date])

                # create prediction result

                pred = self.prediction_net[classNo_int].activateOnDataset(
                    ts)[0]

                self.predict_plot.append(pred)
                self.result[date] = pred

                if isinstance(daily, pd.DataFrame):

                    actual = np.array(daily.loc[date])
                    self.cal_error_for_list(pred, actual)

                else:
                    pass
        if isinstance(daily, pd.DataFrame):

            print "MAPE = ", self.cal_error_sum()
Example #14
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
from pybrain.structure import FeedForwardNetwork, FullConnection
from pybrain.datasets import ClassificationDataSet, SupervisedDataSet, UnsupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.unsupervised.trainers.deepbelief import DeepBeliefTrainer
from pybrain.supervised.trainers import Trainer
from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
import csv
import numpy

# set up a basic feed forward network
net = FeedForwardNetwork()
ds = ClassificationDataSet(9, 1, nb_classes=2, class_labels=['FRAUD', 'N'])
temp_ds = UnsupervisedDataSet(9)

# define 3 layers
inLayer = LinearLayer(9, "visible")
hiddenLayer = SigmoidLayer(16)
outLayer = LinearLayer(1)

# add layers to network
net.addInputModule(inLayer)
net.addModule(hiddenLayer)
net.addOutputModule(outLayer)

# define connections between layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
Example #15
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import SupervisedDataSet, UnsupervisedDataSet
from pybrain.structure import LinearLayer

ds = SupervisedDataSet(21, 21)
ds.addSample(map(int, '1 2 4 6 2 3 4 5 1 3 5 6 7 1 4 7 1 2 3 5 6'.split()),
             map(int, '1 2 5 6 2 4 4 5 1 2 5 6 7 1 4 6 1 2 3 3 6'.split()))
ds.addSample(map(int, '1 2 5 6 2 4 4 5 1 2 5 6 7 1 4 6 1 2 3 3 6'.split()),
             map(int, '1 3 5 7 2 4 6 7 1 3 5 6 7 1 4 6 1 2 2 3 7'.split()))
net = buildNetwork(21, 20, 21, outclass=LinearLayer, bias=True, recurrent=True)
trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(100)
ts = UnsupervisedDataSet(21, )
ts.addSample(map(int, '1 3 5 7 2 4 6 7 1 3 5 6 7 1 4 6 1 2 2 3 7'.split()))
x = [int(round(i)) for i in net.activateOnDataset(ts)[0]]
print x
Example #16
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import SupervisedDataSet, UnsupervisedDataSet
from pybrain.structure import LinearLayer

ds = SupervisedDataSet(10, 11)
z = map(int, ('1 2 4 6 2 3 4 5 1 3 5 6 7 1 4 7 1 2 3 5 6 '
              '1 2 5 6 2 4 4 5 1 2 5 6 7 1 4 6 1 2 3 3 6 '
              '1 3 5 7 2 4 6 7 1 3 5 6 7 1 4 6 1 2 2 3 7').split())
obsLen = 10
predLen = 11
for i in xrange(len(z)):
    if i + (obsLen - 1) + predLen < len(z):
        ds.addSample([z[d] for d in range(i, i + obsLen)],
                     [z[d] for d in range(i + 1, i + 1 + predLen)])

net = buildNetwork(10, 20, 11, outclass=LinearLayer, bias=True, recurrent=True)
trainer = BackpropTrainer(net, ds)
trainer.trainEpochs(100)
ts = UnsupervisedDataSet(10, )
ts.addSample(map(int, '1 3 5 7 2 4 6 7 1 3'.split()))
print [int(round(i)) for i in net.activateOnDataset(ts)[0]]
Example #17
    net.addConnection(FullConnection(bias, h1))
    net.addConnection(FullConnection(bias, h2))
    net.addConnection(FullConnection(bias, out))

    net.sortModules()
    return net


if __name__ == "__main__":

    import GwData
    data = GwData.GwData()
    xs = get_binary_data(data)
    ys = data.labels_for("50")

    sdataset = SupervisedDataSet(xs.shape[1], 1)
    udataset = UnsupervisedDataSet(xs.shape[1])
    for i, x in enumerate(xs):
        sdataset.addSample(x, ys[i])
        udataset.addSample(x)

    epochs = 100
    layerDims = [xs.shape[1], 300, 100, 2]

    #net = buildNetwork(*layerDims)
    net = custom_build_network(layerDims)

    trainer = DeepBeliefTrainer(net, dataset=udataset)
    #trainer = DeepBeliefTrainer(net, dataset=sdataset)
    trainer.trainEpochs(epochs)
Example #18
from pybrain.tools.xml import NetworkReader


print 'read dataset'

text_file = open('doc/recog.txt')
lines = text_file.read().split('\n')
text_file.close()

text_file = open('doc/labels.txt')
labels = text_file.read().split('\n')
text_file.close()

network = NetworkReader.readFrom('NN.xml')

for line in lines:

    if not line:
        continue
    line = line.split(' ')
    datas = line[:-1]
    x = []
    for data in datas:
        x.append(float(data))

    data_set = UnsupervisedDataSet(13)
    data_set.addSample(x)

    out = network.activateOnDataset(data_set)
    print labels[np.argmax(out)]
Example #19
n = input("Enter number of process:")
print "Enter the burst time of first five process:"
bt = []
p = []
for i in range(0, 5):
    bt.append(int(input("p%(x)d :" % {"x": i + 1})))
for i in range(0, n):
    p.append(i + 1)
for l in range(5, n):
    a = str(bt[l - 5])
    b = str(bt[l - 4])
    c = str(bt[l - 3])
    d = str(bt[l - 2])
    e = str(bt[l - 1])
    ts = UnsupervisedDataSet(5, )
    ts.addSample(map(int, [a, b, c, d, e]))
    x = net.activateOnDataset(ts)
    bt.append(int(x))

print "----------------------------------------------------------------"
print "                      FCFS                                      "
print "----------------------------------------------------------------"
wt = [0]
total = 0
for i in range(1, n):
    wt.append(0)
    for j in range(0, i):
        wt[i] += bt[j]
    total += wt[i]
avg_wt1 = float(total) / n
Example #20
    def test_network(self, network, data):

        dataset = UnsupervisedDataSet(2)
        dataset.addSample(data)

        return network.activateOnDataset(dataset)[0]
Example #21
	def __init__(self, in_dims, out_dims):
		self.dataset = UnsupervisedDataSet(in_dims)
		cfg = RbmGibbsTrainerConfig()
		cfg.maxIter = 5
		self.model = Rbm.fromDims(in_dims, out_dims)
		self.trainer = RbmBernoulliTrainer(self.model, self.dataset, cfg)
Example #22
    
    net.addConnection(FullConnection(bias, h1))
    net.addConnection(FullConnection(bias, h2))
    net.addConnection(FullConnection(bias, out))
    
    
    net.sortModules()
    return net
    
if __name__ == "__main__":
    
    import GwData
    data = GwData.GwData()
    xs = get_binary_data(data)
    ys = data.labels_for("50")
    
    sdataset = SupervisedDataSet(xs.shape[1], 1)
    udataset = UnsupervisedDataSet(xs.shape[1])
    for i,x in enumerate(xs):
        sdataset.addSample(x, ys[i])
        udataset.addSample(x)
    
    epochs = 100
    layerDims = [xs.shape[1], 300, 100, 2]    
    
    #net = buildNetwork(*layerDims)
    net = custom_build_network(layerDims)

    trainer = DeepBeliefTrainer(net, dataset=udataset)
    #trainer = DeepBeliefTrainer(net, dataset=sdataset)
    trainer.trainEpochs(epochs)
Example #23
    def do_GET(self):
        print self.path
        if self.path == "/":
          self.send_response(200)
          self.send_header('Content-type', 'text/html')
          self.send_header("Access-Control-Allow-Origin", "*")
          self.end_headers()
          with open(os.getcwd() + '/index.html') as f: self.wfile.write(f.read())

        elif self.path.endswith(".png"):
          self.send_response(200)
          self.send_header('Content-type', 'image/png')
          self.send_header("Access-Control-Allow-Origin", "*")
          self.end_headers()
          with open(os.getcwd() + self.path) as f: self.wfile.write(f.read())

        elif self.path.startswith("/train:on"):
          print "training..."
          count = 0

          cap = cv2.VideoCapture(0)
          cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 80)
          cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 50)
          cap.set(cv2.cv.CV_CAP_PROP_FPS, 15)
          time.sleep(1.0)

          HTTPHandler.train = True
          while cap.isOpened() and HTTPHandler.train:
            time.sleep(0.1)
            if HTTPHandler.left > 40 and HTTPHandler.right > 40:
              ret, frame = cap.read()
              gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
              print "frame: ", count, HTTPHandler.left, HTTPHandler.right
              cv2.imwrite("frame_{count:04d}_{left:03d}_{right:03d}.png".format(count=count, left=HTTPHandler.left, right=HTTPHandler.right), gray)
              count = count + 1
          cap.release()

        elif self.path.startswith("/train:off"):
          HTTPHandler.train = False

        elif self.path.startswith("/debug:on"):
          HTTPHandler.debug = True

        elif self.path.startswith("/debug:off"):
          HTTPHandler.debug = False

        elif self.path.startswith("/debug:step"):
          HTTPHandler.debug_step = True

        elif self.path.startswith("/auto:on"):
          self.send_response(200)
          self.send_header('Content-type','multipart/x-mixed-replace; boundary=--jpgboundary')
          self.end_headers()
          print "run"
          file = "net.obj"
          fileObject = open(file, 'r')
          network = pickle.load(fileObject)
          error = 0.02;
          fileObject.close()
          print "..."

          cap = cv2.VideoCapture(0)
          cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 80)
          cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 50)
          cap.set(cv2.cv.CV_CAP_PROP_FPS, 15)
          time.sleep(1.0)

          HTTPHandler.auto = True
          while cap.isOpened() and HTTPHandler.auto:
            while HTTPHandler.debug and not HTTPHandler.debug_step:
              time.sleep(0.1)
            HTTPHandler.debug_step = False

            ret, frame = cap.read()
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

            crop = gray[25:,]
            inverted = (255 - crop)
            bw = cv2.threshold(inverted, 128, 255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)[1]
            array = bw.reshape(1, SIZE/2).astype(np.float32)
            dataset = UnsupervisedDataSet(SIZE/2)
            dataset.addSample(array)
            active = network.activateOnDataset(dataset)[0]

            if HTTPHandler.distance >= 10: #cm
              HTTPHandler.left = 85 if active[1] > 0.9 else 0
              HTTPHandler.right = 85 if active[0] > 0.9 else 0
            else:
              HTTPHandler.left = 0
              HTTPHandler.right = 0

            print "auto: " + str(HTTPHandler.left) + ":" + str(HTTPHandler.right)

            #engine left
            GPIO.output(Motor1A, GPIO.LOW)
            GPIO.output(Motor1B, GPIO.HIGH)
            e1.ChangeDutyCycle(HTTPHandler.left)

            #engine right
            GPIO.output(Motor2A, GPIO.LOW)
            GPIO.output(Motor2B, GPIO.HIGH)
            e2.ChangeDutyCycle(HTTPHandler.right)

            steps_image = np.zeros((360, 640), np.uint8)
            steps_image.fill(255)
            steps_image[25+50:50+50+25,    50+25+25:80+25+25+50] = gray
            steps_image[25+50+25:50+25+50, 50+25+85+25:25+80+80+5+25+50] = crop
            steps_image[25+50+25:50+25+50, 50+25+25+160+5+5:80+80+80+5+5+25+25+50] = inverted
            steps_image[25+50+25:50+25+50, 50+25+25+240+5+5+5:80+80+80+80+5+5+5+25+25+50] = bw
            cv2.putText(steps_image, "net: '" + file + "', error: " + str(error), (100, 75), cv2.FONT_HERSHEY_PLAIN, 1.0, 0, 1)
            cv2.putText(steps_image, "activate: " + str(active), (100, 200), cv2.FONT_HERSHEY_PLAIN, 1.0, 0, 1)
            cv2.putText(steps_image, "obstacle: " + str(HTTPHandler.distance) + " cm", (100, 225), cv2.FONT_HERSHEY_PLAIN, 1.0, 0, 1)
            cv2.putText(steps_image, "auto: " + str(HTTPHandler.left) + ", " + str(HTTPHandler.right), (100, 250), cv2.FONT_HERSHEY_PLAIN, 1.0, 0, 1)
            result, buf = cv2.imencode('.jpg', steps_image, [int(cv2.IMWRITE_JPEG_QUALITY), 90])
            assert result

            self.wfile.write("--jpgboundary")
            self.send_header('Content-type','image/jpeg')
            self.send_header('Content-length', str(len(buf)))
            self.end_headers()
            self.wfile.write(bytearray(buf))
            self.wfile.write('\r\n')
          cap.release()

        elif self.path.startswith("/auto:off"):
          HTTPHandler.auto = False

        elif self.path.startswith("/ping"):
          if HTTPHandler.can_measure:
            HTTPHandler.can_measure = False

            distance = sonar_distance()
            if distance >= 2.0 and distance <= 400.0:
              HTTPHandler.distance = int(distance)

            GPIO.output(LED, GPIO.HIGH if HTTPHandler.led else GPIO.LOW)
            HTTPHandler.led ^= True

            _, qual, _, _ = wifi.getStatistics()

            self.send_response(200)
            self.send_header('Content-type', 'text/html')
            self.send_header("Access-Control-Allow-Origin", "*")
            self.end_headers()
            self.wfile.write(', '.join(str(x) for x in [qual.quality, qual.signallevel, qual.noiselevel, HTTPHandler.distance]))
            self.wfile.close()
            HTTPHandler.can_measure = True

        elif self.path.startswith("/forward"):
          HTTPHandler.left = max(0, int(self.path.split(':')[1]))
          HTTPHandler.right = max(0, int(self.path.split(':')[2]))
          print "forward: " + str(HTTPHandler.left) + ":" + str(HTTPHandler.right)

          #engine left
          GPIO.output(Motor1A, GPIO.LOW)
          GPIO.output(Motor1B, GPIO.HIGH)
          e1.ChangeDutyCycle(HTTPHandler.left)

          #engine right
          GPIO.output(Motor2A, GPIO.LOW)
          GPIO.output(Motor2B, GPIO.HIGH)
          e2.ChangeDutyCycle(HTTPHandler.right)

        elif self.path.startswith("/reverse"):
          HTTPHandler.left = max(0, int(self.path.split(':')[1]))
          HTTPHandler.right = max(0, int(self.path.split(':')[2]))
          print "reverse: " + str(HTTPHandler.left) + ":" + str(HTTPHandler.right)

          #engine left
          GPIO.output(Motor1A, GPIO.HIGH)
          GPIO.output(Motor1B, GPIO.LOW)
          e1.ChangeDutyCycle(HTTPHandler.left)

          #engine right
          GPIO.output(Motor2A, GPIO.HIGH)
          GPIO.output(Motor2B, GPIO.LOW)
          e2.ChangeDutyCycle(HTTPHandler.right)

        elif self.path.startswith("/camera:on"):
          status = self.path.split(':')[1]
          HTTPHandler.camera = True
          self.send_response(200)
          self.send_header('Content-type','multipart/x-mixed-replace; boundary=--jpgboundary')
          self.end_headers()

          cap = cv2.VideoCapture(0)
          cap.set(cv2.cv.CV_CAP_PROP_FRAME_WIDTH, 640)
          cap.set(cv2.cv.CV_CAP_PROP_FRAME_HEIGHT, 360)
          cap.set(cv2.cv.CV_CAP_PROP_FPS, 30)
          time.sleep(1.0)

          while cap.isOpened() and HTTPHandler.camera:
            ret, frame = cap.read()
            assert ret
            result, buf = cv2.imencode('.jpg', frame, [int(cv2.IMWRITE_JPEG_QUALITY), 60])
            assert result
            self.wfile.write("--jpgboundary")
            self.send_header('Content-type','image/jpeg')
            self.send_header('Content-length', str(len(buf)))
            self.end_headers()
            self.wfile.write(bytearray(buf))
            self.wfile.write('\r\n')
          cap.release()

        elif self.path.startswith("/camera:off"):
          HTTPHandler.camera = False

        elif self.path.startswith("/off"):
          call("halt", shell=True)
Example #24
#!/usr/bin/env python
""" Miniscule restricted Boltzmann machine usage example """

from __future__ import print_function

__author__ = 'Justin S Bayer, [email protected]'

from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
from pybrain.datasets import UnsupervisedDataSet


ds = UnsupervisedDataSet(6)
ds.addSample([0, 1] * 3)
ds.addSample([1, 0] * 3)

cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3

rbm = Rbm.fromDims(6, 1)
trainer = RbmBernoulliTrainer(rbm, ds, cfg)
print(rbm.params, rbm.biasParams)
for _ in range(50):
    trainer.train()

print(rbm.params, rbm.biasParams)
Example #25
#!/usr/bin/env python
""" Miniscule restricted Boltzmann machine usage example """

from __future__ import print_function

__author__ = 'Justin S Bayer, [email protected]'

from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
from pybrain.datasets import UnsupervisedDataSet

ds = UnsupervisedDataSet(6)
ds.addSample([0, 1] * 3)
ds.addSample([1, 0] * 3)

cfg = RbmGibbsTrainerConfig()
cfg.maxIter = 3

rbm = Rbm.fromDims(6, 1)
trainer = RbmBernoulliTrainer(rbm, ds, cfg)
print(rbm.params, rbm.biasParams)
for _ in range(50):
    trainer.train()

print(rbm.params, rbm.biasParams)
Example #26
                while not results.empty():
                    finalTrainingSet.append(results.get())

                for i in range(size - 1):
                    ds.addSample(finalTrainingSet[i], finalTrainingSet[i + 1])

                net = buildNetwork(window,
                                   window - 1,
                                   window,
                                   outclass=LinearLayer,
                                   bias=True,
                                   recurrent=True)
                trainer = BackpropTrainer(net, ds)
                trainer.trainEpochs(100)

                ts = UnsupervisedDataSet(window, )
                ts.addSample(finalTrainingSet[size - 1])

                finalResult = net.activateOnDataset(ts)

                t1 = time.time()

                time_list.append(t1 - t0)
                result_list.append(finalResult[0])

                #for elem in finalResult[0]:
                #    print elem
            print "time average: ", numpy.mean(time_list)
            print "time std deviation", numpy.std(time_list)

            arr = numpy.array(result_list)
Example #27
    maxepochs = 50
    for i in range(0, maxepochs):
        sys.stdout.write('\r' + str(i) + " / " + str(maxepochs))
        aux = trainer.train()

    fileObject = open('trainingData', 'w')
    pickle.dump(net, fileObject)
    fileObject.close()
else:
    print "> Using a model from file"
    fileObject = open('trainingData', 'r')
    net = pickle.load(fileObject)

print "CLASSIFY"

ts = UnsupervisedDataSet(100, )
#input = map(int,'13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33'.split())

df = pd.read_csv('lastDay.csv', delimiter=',', index_col=None)

input = [int(x) for x in df.values]
print "Last day values: %s" % input

minimumLevel = min(input)
maximumLevel = max(input) - minimumLevel
input = [element - minimumLevel for element in input]
input = [element / float(maximumLevel * 2) for element in input]

# ts.addSample(input)
# verID = 175
# input = verificationSamples[verID][0]
Example #28
from pybrain.structure import LinearLayer, SigmoidLayer, TanhLayer, SoftmaxLayer
from pybrain.structure import FeedForwardNetwork, FullConnection
from pybrain.datasets import ClassificationDataSet, SupervisedDataSet, UnsupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.unsupervised.trainers.deepbelief import DeepBeliefTrainer
from pybrain.supervised.trainers import Trainer
from pybrain.structure.networks.rbm import Rbm
from pybrain.unsupervised.trainers.rbm import (RbmGibbsTrainerConfig,
                                               RbmBernoulliTrainer)
import csv
import numpy

# set up a basic feed forward network
net = FeedForwardNetwork()
ds = ClassificationDataSet(9, 1, nb_classes=2, class_labels=['FRAUD', 'N'])
temp_ds = UnsupervisedDataSet(9)

# define 3 layers
inLayer = LinearLayer(9, "visible")
hiddenLayer = SigmoidLayer(16)
outLayer = LinearLayer(1)

# add layers to network
net.addInputModule(inLayer)
net.addModule(hiddenLayer)
net.addOutputModule(outLayer)

# define connections between layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
Example #29
def _createUnsupervisedDataSet(X):
    alldata = UnsupervisedDataSet(X.shape[1])
    for i in X:
        alldata.addSample(i)
    return alldata
Example #30
def _createUnsupervisedDataSet(X):
    alldata = UnsupervisedDataSet(X.shape[1])
    for i in X:
        alldata.addSample(i)
    return alldata
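
A hypothetical usage of the helper above with a toy NumPy matrix:

# Hypothetical usage of _createUnsupervisedDataSet; each row of X becomes
# one sample in the resulting dataset.
import numpy as np

X = np.array([[0.0, 1.0, 2.0],
              [3.0, 4.0, 5.0]])
ds = _createUnsupervisedDataSet(X)
print len(ds), ds.dim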