示例#1
0
    def loadFromFile(self):
        """Load a previously trained network from disk into self.net.

        Chooses the major or minor trained-data file based on self.major.
        """
        try:
            if self.major:
                self.net = NetworkReader.readFrom(TRAINED_DATA_FILEPATH_MAJOR)
            else:
                self.net = NetworkReader.readFrom(TRAINED_DATA_FILEPATH_MINOR)

        except:
            # NOTE(review): bare except swallows every error (including
            # KeyboardInterrupt); consider catching IOError explicitly.
            print "Could not find or open file"
示例#2
0
    def UpdateWeights(self, f1, f2=None):
        """Merge connection weights from the network file *f1* (and,
        optionally, *f2*) into self.net, then write the merged weights
        back as the network's flat parameter vector.

        f2's weights are applied first, so on overlap f1's values win.
        Modules/connections that don't exist in self.net are skipped.

        :param f1: path of the primary saved network
        :param f2: optional path of a secondary saved network
        """
        n = NetworkReader.readFrom(f1)
        if f2 != None:
            n2 = NetworkReader.readFrom(f2)

        def DictParams(n):
            # Build {module name: {destination module name: params}}
            # for every parameterised connection of network n.
            l1 = []
            for mod in n.modules:
                l = []
                for conn in n.connections[mod]:

                    if conn.paramdim > 0:

                        l.append([conn.outmod.name, conn.params])
                d = dict(l)
                l1.append([mod.name, d])
            d1 = dict(l1)
            return d1

        d1 = DictParams(n)
        if f2 != None:
            d2 = DictParams(n2)
        d3 = DictParams(self.net)

        params = np.array([])
        if f2 != None:
            for i in d2:
                for j in d2[i]:
                    try:
                        # Overlay as much of the source weights as fits
                        # into the corresponding slot of self.net.
                        b = d3[i][j][:]
                        b[:d2[i][j].size] = d2[i][j][:]
                        d3[i].update({j: b})
                    except:
                        # Module/connection absent in self.net — skip.
                        pass
        for i in d1:
            for j in d1[i]:
                try:
                    b = d3[i][j][:]
                    b[:d1[i][j].size] = d1[i][j][:]
                    d3[i].update({j: b})
                except:
                    pass
        # Flatten back: input-layer weights first, then each hidden layer
        # in order (layer names follow the "hiden<i>" convention).
        for i in d3["Input"]:
            params = np.hstack((params, d3["Input"][i]))
        for i in xrange(len(self.hiden)):
            for j in d3["hiden%s" % i]:
                params = np.hstack((params, d3["hiden%s" % i][j]))
        self.net._setParameters(params)
示例#3
0
    def __init__(self, port=None, baud=115200):
        """Connect to the OpenBCI board, initialise all signal buffers,
        load the trained classifier, and open the teensy link if enabled.

        :param port: serial port of the OpenBCI board (None = autodetect)
        :param baud: baud rate for the board connection
        """
        print("connecting to OpenBCI...")
        self.board = OpenBCIBoard(port, baud)

        # Background worker threads (started later).
        self.bg_thread = None
        self.bg_draw_thread = None

        # Signal buffers and state flags.
        self.data = np.array([0] * 8)
        self.should_plot = False
        self.control = np.array([0, 0, 0])
        self.control_s = np.array([0, 0, 0])
        self.control_f = np.array([0])
        self.out_sig = np.array([0])
        self.controls = np.array([[0] * 4])
        self.eye_r = np.array([0])
        self.eye_l = np.array([0])
        self.current = "baseline"

        # Trained classifier plus the feature indexes it expects.
        network = NetworkReader.readFrom('neural_net.xml')
        self.good_indexes = joblib.load('neural_model_features.pkl')
        # self.eye_l_temp, self.eye_r_temp = joblib.load('eye_blinks.pkl')
        self.model = network

        print("connecting to teensy...")
        if TEENSY_ENABLED:
            self.teensy = serial.Serial(TEENSY_PORT, 57600)
    def _InitNet(self):
        """Create a new network or load an existing one (self._NET_NEW
        decides which) and optionally write an automatic backup copy.

        Exits the process when self._NET_NAME is empty/unset.
        """
        self._pr_line()
        print("| _InitNet(self): \n")
        start_time = time.time()

        # Guard clause: without a network name there is nothing to do.
        if not self._NET_NAME:
            print('| Unknown NET name: >|' + self._NET_NAME + '|<')
            exit()

        # Fixed-size supervised dataset: 900 inputs, 52 outputs.
        self._SDS = SupervisedDataSet(900, 52)

        if self._NET_NEW:
            print('| Bulding new NET: ' + self._NET_NAME)
            self._NET = buildNetwork(self._SDS.indim, self._NET_HIDDEN,
                                     self._SDS.outdim, bias=True)
            self._SaveNET()
        else:
            print('| Reading NET from: ' + self._NET_NAME)
            self._NET = NetworkReader.readFrom(self._NET_NAME)

        print('| Making AutoBAK: ' + str(self._MK_AUTO_BAK))
        if self._MK_AUTO_BAK:
            NetworkWriter.writeToFile(self._NET, self._NET_NAME + ".AUTO_BAK.xml")

        print("| Done in: " + str(time.time() - start_time) + 'sec')
示例#5
0
def LoadAppleNeuralNetwork(networkXML):
    """Load the Apple classifier network from *networkXML* into the
    module-level ``apple`` global and print how long the load took."""
    print "Loading Apple neural network: "+str(networkXML)
    start = timer()
    global apple
    apple = NetworkReader.readFrom(networkXML)
    end = timer()
    print "Time taken to load Apple neural network: " + str(end-start)
示例#6
0
def trainNetwork(epochs, rate, trndata, tstdata, network=None):
    '''
    Train (or resume training of) a digit-classification network and
    return the trained network.

    epochs: number of iterations to run on dataset
    rate: backprop learning rate
    trndata: pybrain ClassificationDataSet
    tstdata: pybrain ClassificationDataSet
    network: filename of saved pybrain network, or None to build a new
        400-25-25-9 sigmoid network
    '''
    if network is None:
        net = buildNetwork(400, 25, 25, 9, bias=True, hiddenclass=SigmoidLayer, outclass=SigmoidLayer)
    else:
        net = NetworkReader.readFrom(network)

    print "Number of training patterns: ", len(trndata)
    print "Input and output dimensions: ", trndata.indim, trndata.outdim
    print "First sample input:"
    print trndata['input'][0]
    print ""
    print "First sample target:", trndata['target'][0]
    print "First sample class:", trndata.getClass(int(trndata['class'][0]))
    print ""

    trainer = BackpropTrainer(net, dataset=trndata, learningrate=rate)
    # Train one epoch at a time so train/test error can be reported.
    for i in range(epochs):
        trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(), trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
        print "epoch: %4d" % trainer.totalepochs, "  train error: %5.2f%%" % trnresult, "  test error: %5.2f%%" % tstresult

    return net
示例#7
0
def getBoardImage(img):
    '''
    Runs an image through processing and neural network to decode digits

    img: an openCV image object

    returns:
        pil_im: a PIL image object with the puzzle isolated, cropped and straightened
        boardString: string representing the digits and spaces of a Sudoku board (left to right, top to bottom)

    Returns (None, None) when no puzzle could be isolated.
    '''

    # Process image and extract digits
    pil_im, numbers, parsed, missed = process(img, False)
    # BUG FIX: compare with 'is None' instead of '== None' — image
    # objects may overload equality (or broadcast it element-wise).
    if pil_im is None:
        return None, None

    net = NetworkReader.readFrom(os.path.dirname(os.path.abspath(__file__)) + '/network.xml')
    boardString = ''

    for number in numbers:
        if number is None:
            # No digit detected in this cell — emit a space.
            boardString += ' '
        else:
            # Classify the 400-feature cell; classes map to digits 1-9.
            data = ClassificationDataSet(400, nb_classes=9, class_labels=['1', '2', '3', '4', '5', '6', '7', '8', '9'])
            data.appendLinked(number.ravel(), [0])
            boardString += str(net.activateOnDataset(data).argmax(axis=1)[0] + 1)
    return pil_im, boardString
示例#8
0
def startTrials(ds, maxTrials=2, maxExperiments=2):
    """Run batches of game trials driven by the saved network.

    For each trial: generate enemy positions, activate the network to
    place allies, run the experiment, and (on a successful result line)
    add the normalized sample to *ds*. After each experiment the network
    is retrained on the accumulated dataset.

    :param ds: pybrain dataset that successful samples are added to
    :param maxTrials: trials per experiment
    :param maxExperiments: number of experiments (retraining rounds)
    :return: list of hit-point counts parsed from exp_results_raw.txt
    """
    hpCount = []
    for i in range(0, maxExperiments):
        for j in range(0, maxTrials):
            enemyTestPos = runExperiments.makeTestDataset()
            net = NetworkReader.readFrom("net.xml")

            netResults = net.activate([val for pair in normalize(enemyTestPos) for val in pair])
            netIter = iter(netResults)
            # Pair up consecutive outputs as (x, y) coordinates.
            # BUG FIX: materialize zip/map with list() so this also works
            # on Python 3, where both are lazy iterators.
            allyTestPos = list(zip(netIter, netIter))
            # Undo normalization.
            allyTestPos = list(map(lambda p: (abs(p[0] * 640), abs(p[1] * 720)), allyTestPos))
            print(allyTestPos)
            runExperiments.writeTestData(allyTestPos)
            runExperiments.run()

            with open("exp_results_raw.txt", "r") as resultsFile:
                lines = resultsFile.readlines()
                if "Zerg_Zergling" in lines[1]:
                    x = normalize(enemyTestPos)
                    y = normalize(allyTestPos)
                    x = [val for pair in x for val in pair]
                    y = [val for pair in y for val in pair]
                    ds.addSample(x, y)
                    lineSplit = lines[1].split("Zerg_Zergling")[-1]
                    hpCount.append(lineSplit.split(" ")[2])
        # BUG FIX: these two lines mixed tabs and spaces (a TabError on
        # Python 3); indentation normalised to spaces throughout.
        trainer = BackpropTrainer(net, ds)
        trainer.trainUntilConvergence()
    return hpCount
示例#9
0
def train(X, y):
    """ Trains and predicts dataset with a Neural Network classifier.

    X: pandas DataFrame of features; y: sequence of binary class labels.
    Loads fnn.xml when present, otherwise builds a fresh softmax network;
    trains until convergence, saves the result, and returns the test-set
    classes alongside the predictions.
    """

    ds = ClassificationDataSet(len(X.columns), 1, nb_classes=2)
    for k in xrange(len(X)):
        ds.addSample(X.iloc[k], np.array(y[k]))
    # 20% test / 80% train split; targets converted to one-hot encoding.
    tstdata, trndata = ds.splitWithProportion(0.20)
    trndata._convertToOneOfMany()
    tstdata._convertToOneOfMany()
    input_size = len(X.columns)
    target_size = 1
    hidden_size = 5
    fnn = None
    if os.path.isfile('fnn.xml'):
        fnn = NetworkReader.readFrom('fnn.xml')
    else:
        fnn = buildNetwork(trndata.indim,
                           hidden_size,
                           trndata.outdim,
                           outclass=SoftmaxLayer)
    trainer = BackpropTrainer(fnn,
                              dataset=trndata,
                              momentum=0.05,
                              learningrate=0.1,
                              verbose=False,
                              weightdecay=0.01)

    trainer.trainUntilConvergence(verbose=False,
                                  validationProportion=0.15,
                                  maxEpochs=100,
                                  continueEpochs=10)
    # NOTE(review): the network is read from fnn.xml but saved to
    # oliv.xml — confirm that asymmetry is intentional.
    NetworkWriter.writeToFile(fnn, 'oliv.xml')
    predictions = trainer.testOnClassData(dataset=tstdata)
    return tstdata['class'], predictions
 def __init__(self, port=None, baud=115200):
     """Connect to the OpenBCI board, initialise the signal buffers, load
     the trained classifier, and open the teensy serial link if enabled.

     :param port: serial port for the OpenBCI board (None = autodetect)
     :param baud: baud rate for the board connection
     """
     print("connecting to OpenBCI...")
     self.board = OpenBCIBoard(port, baud)
     
     # Background worker threads (started elsewhere).
     self.bg_thread = None
     self.bg_draw_thread = None
     self.data = np.array([0]*8)
     self.should_plot = False
     self.control = np.array([0,0,0])
     self.control_s = np.array([0,0,0])
     self.control_f = np.array([0])
     self.out_sig = np.array([0])
     self.controls = np.array([[0]*4])
     self.eye_r = np.array([0])
     self.eye_l = np.array([0])
     self.current = "baseline"
     
     # Trained classifier and the feature indexes it was trained on.
     fnn = NetworkReader.readFrom('neural_net.xml')
     self.good_indexes = joblib.load('neural_model_features.pkl')
     # self.eye_l_temp, self.eye_r_temp = joblib.load('eye_blinks.pkl')
     self.model = fnn
     
     print("connecting to teensy...")
     if TEENSY_ENABLED:
         self.teensy = serial.Serial(TEENSY_PORT, 57600)
def usebp():
    """Evaluate the saved BusWorkNet on hard-coded samples, printing the
    rescaled network output and its distance from each target value."""
    patterns = [

        [[3158, 3503, 3342, 644, 937, 750, 546, 503, 593], [4751]],
        [[3092, 3011, 3217, 675, 882, 881, 543, 598, 564], [4445]],
        [[3180, 3043, 3031, 785, 830, 799, 448, 517, 564], [4514]],
        [[3389, 3469, 3450, 794, 933, 804, 544, 556, 578], [4755]],
        [[3224, 3201, 3433, 904, 737, 772, 522, 591, 585], [4864]],
        [[3503, 3342, 3410, 937, 750, 725, 503, 593, 616], [4646]],
        [[3011, 3217, 3143, 882, 881, 701, 598, 564, 601], [0]],
        [[3043, 3031, 3209, 830, 799, 701, 517, 564, 604], [0]],
        [[3469, 3450, 3446, 933, 804, 756, 556, 578, 553], [0]],
        [[3201, 3433, 3436, 737, 772, 817, 591, 585, 611], [0]],
        [[3342, 3410, 3277, 750, 725, 837, 593, 616, 532], [0]],

    ]

    net = NetworkReader.readFrom('/home/wtq/BigData-MachineLearning/Bpnn/BusWorkNet.xml')
    for p in patterns:
        testInput = p[0]
        targetOut = p[1]
        # Inputs are scaled by 1/6000, matching the training-time scaling.
        testInput = tuple(map(lambda n: float(n) / 6000, testInput))
        out = net.activate(testInput)
        print"out->", (out * 6000)
        distance = list(map(lambda x: 6000 * x[0] - x[1], zip(out, targetOut)))
        print(distance)
示例#12
0
def LoadBananaNeuralNetwork(networkXML):
    """Load the Banana classifier network from *networkXML* into the
    module-level ``banana`` global and print how long the load took."""
    print "Loading Banana neural network: "+str(networkXML)
    start = timer()
    global banana
    banana = NetworkReader.readFrom(networkXML)
    end = timer()
    print "Time taken to load Banana neural network: " + str(end-start)
示例#13
0
def LoadCucumberNeuralNetwork(networkXML):
    """Load the Cucumber classifier network from *networkXML* into the
    module-level ``cucumber`` global and print how long the load took."""
    print "Loading Cucumber neural network: "+str(networkXML)
    start = timer()
    global cucumber
    cucumber = NetworkReader.readFrom(networkXML)
    end = timer()
    print "Time taken to load Cucumber neural network: " + str(end-start)
 def load(self, path):
     """
     Load a persisted neural network from disk into self.network.

     Args:
     :param path (String): the path of the saved network file to load.
     """
     self.network = NetworkReader.readFrom(path)
示例#15
0
 def predict(main_words):
     """Classify *main_words* with the saved network.

     Texts longer than seven words are encoded as a 1000-bucket one-hot
     cluster vector; for shorter texts the function returns None
     (implicitly).
     """
     cluster_to_words = pickle.load(open('myW2VModel_claster_1000.p', 'rb'))
     words = main_words.split()
     if len(words) > 7:
         features = [0] * 1000
         for word in words:
             if word in w2v_model.vocab:
                 features[get_cluster_number(word, cluster_to_words)] = 1
         net = NetworkReader.readFrom('trained_network_continue5.xml')
         return net.activate(features)
示例#16
0
def create_or_read_from_file(training_data, filename='oliv.xml'):
    """Return a network loaded from *filename* when the file exists,
    otherwise a freshly built 64-hidden-unit softmax network sized for
    *training_data*."""
    if not os.path.isfile(filename):
        return buildNetwork(training_data.indim,
                            64,
                            training_data.outdim,
                            outclass=SoftmaxLayer)
    return NetworkReader.readFrom(filename)
示例#17
0
 def LoadNet(self, fname):
     """Load a saved network from *fname*, then recover the hidden-layer
     sizes and input/output dimensions from its XML description."""
     self.net = NetworkReader.readFrom(fname)
     root = ET.parse(fname).getroot()
     sizes = [int(layer.get("val"))
              for layer in root.findall('Network/Modules/SigmoidLayer/dim')]
     self.hiden = sizes
     self.inputsize = self.net.indim
     self.outputsize = self.net.outdim
示例#18
0
def monitorAudio(mySerialPort, numOfPoints, workingDir):
    """Continuously classify audio samples read from a serial port.

    Loads the trained network once, then loops forever: read an FFT
    sample from the serial port, run it through the network, and show
    the prediction on the LED. Never returns.
    """
    net = NetworkReader.readFrom(workingDir + '/fnn.xml')
    while True:
        # Record one audio sample and present the network's prediction.
        dataFFT = listenToSerial(mySerialPort, numOfPoints)
        setLED(net.activate(dataFFT))
    def load(self, filename=None):
        """Loads a persisted network from XML file. Cannot load pickled
        networks yet.

        :param filename: path of the XML file; defaults to the filename
            supplied at init. A newly supplied filename is remembered for
            subsequent calls.
        """
        if filename is None:
            # No override given: fall back to the stored filename.
            filename = self.filename
        else:
            # Remember the most recently used filename.
            self.filename = filename

        self.network = NetworkReader.readFrom(filename)
示例#20
0
 def onTextEntered(self):
     """GUI callback: embed the entered text with word2vec, classify the
     first 100 vector components with the saved network, and show the
     result in the label."""
     model_name = "clean_text_model"
     w2v_model = Word2Vec.load(model_name)
     new_post_text = self.entry.get(1.0, END)
     # Concatenate the word2vec vectors of all known words.
     row_vector_array = []
     for w in new_post_text.split():
         if w in w2v_model.vocab:
                 row_vector_array.extend(w2v_model[w])
     net = NetworkReader.readFrom('trained_network1.xml')
     result = net.activate(row_vector_array[:100])
     print result
     self.labelVariable.set("Result : " + str(result))
    def _readNetworkFromFile(self):
        """Load the network named by self._NET_NAME into self._NET and
        report how long the read took."""
        self._pr_line()
        print("| _readNetworkFromFile(" + self._NET_NAME + "): \n")
        start_time = time.time()

        self._NET = NetworkReader.readFrom(self._NET_NAME)

        print("| Done in: " + str(time.time() - start_time) + 'sec')
示例#22
0
def checkPerformanceTestSet(tstFileName,numF,numC,minVals,maxVals,nnFile,threshold):
  """Load a test set from *tstFileName*, min-max scale its first *numF*
  feature columns in place, and evaluate the network saved in *nnFile*.

  Columns 0-4 are used as features; column 6 holds the integer label.
  Returns whatever checkPerformance() reports for the scaled inputs.
  """
  data = np.genfromtxt(tstFileName)
  tstIn = data[:, 0:5]
  tstOut = [int(val) for val in data[:, 6]]

  # Min-max normalise each feature column with the supplied bounds.
  for row in range(len(tstIn)):
    for col in range(numF):
      tstIn[row, col] = (tstIn[row, col] - minVals[col]) / (maxVals[col] - minVals[col])

  myNetwork = NetworkReader.readFrom(nnFile)
  return checkPerformance(myNetwork, tstIn, tstOut, numC, threshold)
示例#23
0
def read():
    """Parse command-line arguments for a face-detection run.

    Returns a 4-tuple (net, training_files, testing_imgs, write_file):
    the loaded-or-new network, a list of (path, target) training pairs
    or None, the opened testing images or None, and the output network
    filename or None.
    """
    parser = argparse.ArgumentParser(description='Face detection using Neural Networks')
    parser.add_argument('-t', '--train-faces', help='Receives a directory with files to train with', nargs='+')
    parser.add_argument('-f', '--train-non-faces', help='Receives a directory with files to train with', nargs='+')
    parser.add_argument('-p', '--test', help='Receives a list of images (testing set)', nargs='+')
    parser.add_argument('-r', '--read', help='Read the file with the already trained network object', nargs=1)
    parser.add_argument('-w', '--write', help='Write the network to the specified file (format is .xml)', nargs=1)

    args = parser.parse_args()

    # Read the Neural Network Object, or build a fresh one.
    if args.read:
        net = NetworkReader.readFrom(args.read[0])
    else:
        net = buildNetwork(400, 5, 2, bias=True, outclass=SoftmaxLayer)
        # net = buildNetwork(400, 80, 16, 1, bias=True, hiddenclass=TanhLayer)

    # If there are some files to train with
    if args.train_faces or args.train_non_faces:
        faces = get_files(args.train_faces[0]) if args.train_faces else []
        non_faces = get_files(args.train_non_faces[0]) if args.train_non_faces else []

        # Attach expected targets: 1 for faces, 0 for non-faces.
        # BUG FIX: the original used map() and then concatenated with '+',
        # which fails on Python 3 where map() returns an iterator; list
        # comprehensions behave the same on both versions.
        faces = [(path, [1]) for path in faces]
        non_faces = [(path, [0]) for path in non_faces]

        training_files = faces + non_faces
    else:
        training_files = None

    # If there are some files to test with
    testing_imgs = open_imgs(args.test) if args.test else None

    # If there is a writing file
    write_file = args.write[0] if args.write else None

    return net, training_files, testing_imgs, write_file
示例#24
0
        def predict_class(self,_x,_y,test_file,epochs,steps):
                """Train (or resume training of) the animal-outcome network
                and write per-row class probabilities for *test_file* to
                animal_output.csv.

                :param _x: training inputs passed to ReadTrainFile
                :param _y: training targets passed to ReadTrainFile
                :param test_file: file with one literal input row per line
                :param epochs: outer training rounds (net saved each round)
                :param steps: epochs trained per round
                """
                print("Iniciando funcao predict_class() .............")

                traindata = self.ReadTrainFile(_x,_y)

                print ("____________________________________________________________________________")
                print ("A matrix de treino tem ", len(traindata),"linhas de dados")
                print ("Dimensoes de Input e Output : ", traindata.indim, traindata.outdim)
                print ("____________________________________________________________________________\n")

                print("convertendo arquivos .................")
                traindata._convertToOneOfMany( )

                import os.path
                # Resume from a previous checkpoint when one exists.
                if os.path.exists('rede_animal.xml'):
                    print(" Carregando a rede de treinos do arquivo rede_animal.xml *************** ")
                    fnn = NetworkReader.readFrom('rede_animal.xml')
                else:
                    print(" Criando rede de treinos no arquivo rede_animal.xml *************** ")
                    fnn = buildNetwork( traindata.indim, 5, traindata.outdim, outclass=SoftmaxLayer )

                trainer = BackpropTrainer( fnn, dataset=traindata, momentum=0.1, verbose=True, weightdecay=0.01)

                print("Treinando .............")
                for i in range(epochs):
                        print("Treinando epoca ", i)
                        trainer.trainEpochs( steps )
                        # Checkpoint after every round so progress survives crashes.
                        NetworkWriter.writeToFile(fnn, 'rede_animal.xml')
                        print(" Rede salva em rede_animal.xml (Ok) ")

                print("Lendo arquivo de teste e classificando ..........")
                print("Gerando resultados em ANIMAL_OUTPUT.CSV ..........")
                # BUG FIX: the output file was opened in binary mode ('wb'),
                # which rejects str writes on Python 3, and was never closed;
                # a text-mode context manager fixes both.
                with open('animal_output.csv', 'w') as output:
                        output.write("ID,Adoption,Died,Euthanasia,Return_to_owner,Transfer\n")
                        for i, line in enumerate(open(test_file, 'r'), start=1):
                                x = ast.literal_eval(line)
                                # Activate once per row instead of five times.
                                res = fnn.activate( x )
                                output.write( "{},{},{},{},{},{} \n".format(i,res[0],res[1],res[2],res[3],res[4]) )
                print("Concluido")

                
示例#25
0
    def __init__(self, network_tuple=None, epochs=1, save='', load='',
                 scale=1000, max_error=0):
        """Build (or load) the network and prepare a daemon training
        thread.

        Raises TypeError when neither a network shape nor a file to load
        is supplied.
        """
        if not network_tuple and not load:
            raise TypeError('Network tuple or load must be provided.')

        # Prefer loading a saved network over building a new one.
        if load:
            self.network = NetworkReader.readFrom(load)
        else:
            self.network = buildNetwork(*network_tuple)
        self.ds = SupervisedDataSet(inp=2, target=1)
        self.scale = scale
        self.save = save
        self.max_error = max_error
        self.epochs = epochs
        self.done = False
        self.training = Thread(target=self.train, args=(epochs, max_error))
        self.training.daemon = True
示例#26
0
    def __init__(self, datadir, insize=None, outsize=None, paramfile=None):
        """Set up the player's neural network.

        Either loads a saved network from *paramfile* (inside *datadir*)
        or builds a fresh feed-forward network with two sigmoid hidden
        layers the size of the input layer.

        :param datadir: directory holding saved parameter files
        :param insize: input size; derived from a fresh Game when None
        :param outsize: output size; defaults to 1 when None
        :param paramfile: saved network filename, or falsy to build anew
        """
        self.datadir = datadir
        # BUG FIX: identity comparison with None (was '== None').
        if insize is None:
            g = runner.Game()
            ip = self._game2input(g)
            self.insize = len(ip)
        else:
            self.insize = insize
        self.outsize = 1 if outsize is None else outsize
        if paramfile:
            f = os.path.join(self.datadir, paramfile)
            self.nn = NetworkReader.readFrom(f)
            try:
                # Recover the base name from "<name>-bestof-<...>".
                self.name = re.search("(.*)-bestof-(.*)", paramfile).group(1)
            except AttributeError:
                # Filename didn't match the pattern: generate a new name.
                self.name = "blondie-%s" % (datetime.datetime.now())
        else:
            self.nn = FeedForwardNetwork()
            tmpname = "blondie-%s" % (datetime.datetime.now())
            self.name = re.sub("[.: ]", "-", tmpname)

            inLayer = LinearLayer(self.insize)
            hiddenLayer1 = SigmoidLayer(self.insize)
            hiddenLayer2 = SigmoidLayer(self.insize)
            outLayer = LinearLayer(self.outsize)

            self.nn.addInputModule(inLayer)
            self.nn.addModule(hiddenLayer1)
            self.nn.addModule(hiddenLayer2)
            self.nn.addOutputModule(outLayer)

            self.nn.addConnection(FullConnection(inLayer, hiddenLayer1))
            self.nn.addConnection(FullConnection(hiddenLayer1, hiddenLayer2))
            self.nn.addConnection(FullConnection(hiddenLayer2, outLayer))

            self.nn.sortModules()
示例#27
0
def validates(net_path, validation_set):
    """
    Compute the average euclidean distance activating
    the model over a validation set

    :param net_path: Path to the model
    :type net_path: str
    :param validation_set: Validation set
    :type validation_set: list[tuple(list[float], list[float])]
    :return: average euclidean distance
    :rtype: float
    """
    net = NetworkReader.readFrom(net_path)
    total = 0
    for inputs, expected in validation_set:
        predicted = array(net.activate(inputs))
        total += distance.euclidean(predicted, array(expected))
    return total / len(validation_set)
示例#28
0
def validates(net_path, validation_set):
    """
    Compute the average euclidean distance activating
    the model over a validation set

    :param net_path: Path to the model
    :type net_path: str
    :param validation_set: Validation set
    :type validation_set: list[tuple(list[float], list[float])]
    :return: average euclidean distance
    :rtype: float
    """
    net = NetworkReader.readFrom(net_path)
    dist = sum(
        distance.euclidean(array(net.activate(sample)), array(target))
        for sample, target in validation_set)
    return dist / len(validation_set)
def usebpnn():
    """Evaluate the saved BusHolidyNet on hard-coded samples, printing
    the distance between each rescaled output and its target."""
    patterns = [
        [[1813, 1839, 1625], [1537]],
        [[1565, 1463, 1215], [1433]],
        [[1839, 1625, 1537], [1660]],
        [[1463, 1215, 1433], [1482]],
        [[1625, 1537, 1660], [1256]],
        [[1215, 1433, 1482], [1391]],
        [[1537, 1660, 1256], [0]]
    ]

    net = NetworkReader.readFrom('/home/wtq/BigData-MachineLearning/Bpnn/BusHolidyNet.xml')

    for testInput, targetOut in patterns:
        # Inputs are scaled by 1/4000, matching the training scaling.
        scaled = tuple(float(v) / 4000 for v in testInput)
        out = net.activate(scaled)
        # print(out * 1000)
        distance = [4000 * pair[0] - pair[1] for pair in zip(out, targetOut)]
        print(distance)
示例#30
0
    def __init__(self,
                 network_tuple=None,
                 epochs=1,
                 save='',
                 load='',
                 scale=1000,
                 max_error=0):
        """Build (or load) the network and prepare a daemon training
        thread; raises TypeError when neither a network shape nor a file
        to load is supplied."""
        if not network_tuple and not load:
            raise TypeError('Network tuple or load must be provided.')

        # Prefer loading a saved network over building a new one.
        if load:
            self.network = NetworkReader.readFrom(load)
        else:
            self.network = buildNetwork(*network_tuple)
        self.ds = SupervisedDataSet(inp=2, target=1)
        self.scale = scale
        self.save = save
        self.max_error = max_error
        self.epochs = epochs
        self.done = False
        self.training = Thread(target=self.train, args=(epochs, max_error))
        self.training.daemon = True
示例#31
0
def main():
	"""Generate the emotion dataset, then train the network in randomly
	sized epoch bursts, checkpointing to train.xml and recording train
	and test error in redis after each burst."""
	emotion={}
	dataset__generator(emotion)
	print('dataset generated')
	tstdata,trndata=ds.splitWithProportion(0.50)
	print('data splitted')
	#ds.getLength()
	trndata._convertToOneOfMany( )
	tstdata._convertToOneOfMany( )
	emotion={}
	# Resume from a previous checkpoint when one exists.
	if os.path.isfile('train.xml'):
		fnn=NetworkReader.readFrom('train.xml')
	else:
		fnn=buildNetwork(1292,3,2,outclass=SoftmaxLayer)
	NetworkWriter.writeToFile(fnn, 'train.xml')
	print('starting training')
	trainer=BackpropTrainer(fnn,dataset=trndata,momentum=0.1,verbose=True,weightdecay=0.01)	
	
	print('epoch level '+str(1000))
	i=10
	j1=range(10,200)
	temp=[]
	t=1
	# Nine training bursts, each with a random epoch count in [10, 200).
	while t<10:
		t=t+1
		i=random.choice(j1)
		temp.append(i)
		print('starting '+str(i))
		time.sleep(1)
		trainer.trainEpochs(i)
		NetworkWriter.writeToFile(fnn, 'train.xml')
		trnresult=percentError(trainer.testOnData(),trndata['class'])
		tstresult=percentError(trainer.testOnClassData(dataset=tstdata),tstdata['class'])
		temp.append([trnresult,tstresult])
		r_server.set('errortest'+str(i),tstresult)
		r_server.set('errortrain'+str(i),trnresult)
		
	for i in temp:
		print(i)
示例#32
0
def getSent():
	"""Flask endpoint: accept an uploaded audio file, convert it to mp3,
	extract MFCC features, classify the emotion with the saved network,
	and return a JSON-like string naming the emotion and spectrogram image.
	"""
	if request.method!='POST':
		return '{emotion:invalid request method submit a post request}'
	voice=request.files['file']
	filename=secure_filename(voice.filename)
	format_audio=filename.split('.')[-1]
	print(format_audio)
	path=os.path.join(app.config['UPLOAD_FOLDER'],filename)
	voice.save(path)
	# SECURITY FIX: the original built "AudioSegment.from_<ext>('<path>')"
	# and ran it through eval(), letting a crafted filename/path execute
	# arbitrary code. getattr() does the same dynamic dispatch safely.
	temp=getattr(AudioSegment, "from_" + format_audio)(path)
	os.remove(path)
	temp.export(path,format="mp3",bitrate="64k")
	try:
		y,sr=librosa.load(path)
	except:
		return "{emotion:error}"
	os.remove(path)
	print('y sr computed')
	mfcc=librosa.feature.mfcc(y=y,sr=sr,n_mfcc=13)
	librosa.display.specshow(mfcc, x_axis='time')
	plt.savefig(os.path.join(SAVE_PATH,filename+".png"))
	plt.close()
	# BUG FIX: list() is required on Python 3, where map() is lazy and
	# np.array(map(...)) would produce a useless 0-d object array.
	mfcc=np.array(list(map(mapper,mfcc)))
	mfcc=mfcc.max(axis=0)
	fnn=NetworkReader.readFrom(FNN_PATH)
	out=fnn.activate(mfcc)
	# Manual argmax over the activation vector.
	max_index=0
	max_value=out[0]
	for foo in range(1,len(out)):
		if out[foo]>max_value:
			max_index=foo
			max_value=out[foo]
	if max_index==0:
		return '{"emotion":"Happy","image":"'+filename+'.png"}'
	elif max_index==1:
		return '{"emotion":"Angry","image":"'+filename+'.png"}'
示例#33
0
def main():
	"""Capture audio, extract MFCC features, and print the predicted
	emotion as a JSON-like string (Happy or Angry)."""
	audio=capture()
	audio=np.transpose(audio)[0]
	#sd.play(audio,44100,blocking=True)
	mfcc=librosa.feature.mfcc(y=audio,sr=22050,n_mfcc=13)
	#print(np.shape(mfcc))
	mfcc=np.array(map(mapper,mfcc))
	#print(mfcc)
	mfcc=mfcc.max(axis=0)
	#print(np.shape(mfcc))
	#print(mfcc)
	fnn=NetworkReader.readFrom(FNN_PATH)
	#print(fnn)
	fnn=fnn.activate(mfcc)
	# Manual argmax over the activation vector.
	max_index=0
	max_value=fnn[0]
	for foo in range(1,len(fnn)):
		if fnn[foo]>max_value:
			max_index=foo
			max_value=fnn[foo]
	if max_index==0:
		print '{"emotion":"Happy"}'
	elif max_index==1:
		print '{"emotion":"Angry"}'
示例#34
0
def begin2():
    """Retrain the saved network on the validation split of the cbf2
    data and print the prediction for today's sample."""
    cbf = readFromCsv("cbf2")
    numdataset = np.array(cbf, dtype=np.float64)
    # Split into training data, validation data, and today's data.
    tgdataset, vadataset, tydata = dataSplit(numdataset)
    # Normalisation parameters come from the training portion.
    gydata, dmean, dstd = gyData(tgdataset)

    # Normalise the validation and today's data with those parameters.
    gyvadata = calFeature(vadataset, dmean, dstd)
    gytydata = calFeature(tydata, dmean, dstd)

    tset = buildTrainingSet(gyvadata)

    net = NetworkReader.readFrom("../netv3/zxtx_8l_100t_6_0.785714285714.xml")
    trainer = BackpropTrainer(net, tset)
    trainer.trainEpochs(epochs=100)

    # All columns except the last are the network's input features.
    features = list(gytydata[0])
    print(dec2int(net.activate(features[:-1])))
示例#35
0
# Train for 200 epochs, recording train/test percent error per epoch.
for epoch in range(200):
    trainer.trainEpochs( 1 )
    trnresult = percentError( trainer.testOnClassData(),
                              trndata['class'] )
    tstresult = percentError( trainer.testOnClassData(
           dataset=tstdata ), tstdata['class'] )
    print "epoch: %4d" % trainer.totalepochs, \
          "  train error: %5.2f%%" % trnresult, \
          "  test error: %5.2f%%" % tstresult
    if epoch == 0:
        x=[epoch]
        y=[trnresult]
        z=[tstresult]
    else:
        x.append(epoch)
        y.append(trnresult)
        z.append(tstresult)

# Plot the learning curves for the training and test sets.
plot(x,y,label='Training')
plot(x,z,label='Testing')
xlabel('Epochs')
ylabel('percentError')
legend(loc='upper right')

# Confusion matrix on the test data, then persist and reload the network.
confmat(net,tstdata)

NetworkWriter.writeToFile(net,'best_network.xml')
net = NetworkReader.readFrom('best_network.xml')

ioff()
show()
示例#36
0
#load trained network and run matchups. 
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import SoftmaxLayer
from pybrain.tools.xml.networkreader import NetworkReader
from utils import buildTeamlist

teamlist = buildTeamlist()
# NOTE(review): this freshly built network is immediately replaced by
# the load on the next line — the buildNetwork call looks redundant.
nn=buildNetwork(8,5,2,outclass=SoftmaxLayer)
nn=NetworkReader.readFrom('recurrentNetwork.xml')
#nn=NetworkReader.readFrom('savedNetwork.nn')

print "loaded Network, now running matchups..."
# Each matchups.csv line holds two team ids; collect both teams' four
# stats (rypg/pypg/drypg/dpypg) and activate the network on all eight.
fp =open('data/matchups.csv','r')
for line in fp:
  attList=[]
  line = line.strip().split(',')
  for team in teamlist:
    if int(team._id) == int(line[0]):
      print team.name,
      attList.append(team.rypg)
      attList.append(team.pypg)
      attList.append(team.drypg)
      attList.append(team.dpypg)
  for team in teamlist:
    if int(team._id) == int(line[1]):
      print " vs. "+team.name
      attList.append(team.rypg)
      attList.append(team.pypg)
      attList.append(team.drypg)
      attList.append(team.dpypg)
  print nn.activate(attList)
 def loadNetwork(self, filename):
     """Load a saved pybrain network from *filename* into self.n."""
     self.n = NetworkReader.readFrom(filename)
示例#38
0
def main():
  """Train (or, with the "test" argument, load) a jump-detection network
  on features from out.txt, report classification error, and save a
  three-panel data/target/prediction plot to figure.png.

  Command line: [test] [nEpochs] — "test" loads model.xml instead of
  training; nEpochs (train mode only) sets the training epoch count.
  """
  # Create dataset
  
  inputs = ["jrms", "rms", "mean_10", "mean_100", "max_10", "max_100", "stddev_10", "stddev_100"]
#  inputs = ["max_100", "rms"]
#  inputs = ["max_100", "mean_100"]
#  inputs = ["rms"]
  mode = "train"

  if (len(sys.argv) >= 2):
    if (sys.argv[1] == "test"):
      mode = "test"
  
  print "Create dataset"
  data = load_data("out.txt")
  columnNames = load_column_names("out.txt")
  ds = create_data_set(data, columnNames, inputs, "jumping")
  
  targets = ds['target']
  #print "sum of targest: " + str(targets.sum())
  
  # Create network
  if (mode == "train"):
    print "Build network"
    if (len(sys.argv) >= 3):
      nEpochs = int(sys.argv[2])
    else:
      nEpochs = 1
    net = buildNetwork(len(inputs), N_HIDDEN, 1, bias=True, hiddenclass=TanhLayer)
    trainer = BackpropTrainer(net, ds, verbose = True)
    #net = buildNetwork(len(inputs), N_HIDDEN, 1, bias=True, hiddenclass=TanhLayer)

    print "Train"
    print "Base error: " + str(trainer.testOnData())
    results = testOnClassData(trainer)
    print "Percentage error: " + str(relativePercentError( results, targets )) + "%"
    print "Training started"
    trainer.trainEpochs(nEpochs)
    print "Training done: Saving model"
    NetworkWriter.writeToFile(net, "model.xml")
  else:
    net = NetworkReader.readFrom("model.xml")
    trainer = BackpropTrainer(net, ds, verbose = True)
  
  #print "Final error: " + str(trainer.testOnData())
  
  results = testOnClassData(trainer)
  print "Percentage error (final): " + str(relativePercentError( results, targets )) + "%"
  
  
  # Plot
  # Data: first feature against the frame number.
  frames = data[:,1]
  nplots = 3
  subplot(nplots, 1, 1)
  dataType = inputs[0]
  title("Data (" + dataType + ")")
  setp(plot(frames, ds['input'][:,0], color="black", marker=".", linestyle='None'))
  xlabel("frame")
  ylabel(dataType)
#  nplots = len(inputs) + 2
#  for i in range(0, len(inputs)):
#    subplot(nplots, 1, i+1)
#    dataType = inputs[i]
#    title("Data (" + dataType + ")")
#    setp(plot(frames, ds['input'][:,i], color="black", marker=".", linestyle='None'))
#    xlabel("frame")
#    ylabel(inputs[i])
  
  # Correct classification (target)
  subplot(nplots, 1, nplots-1)
  title("Target classification")
  targets = targets[:,0]
  setp(plot(frames, targets, color="blue"))

  # Classification (from NN); red dots mark frames where the prediction
  # disagrees with the target.
  subplot(nplots, 1, nplots)
  title("NN classification and errors")
  setp(plot(frames, results), color="blue")
  errors = [[],[]]
  for i in range(len(results)):
    if (results[i] != targets[i]):
      errors[0].append(frames[i])
      errors[1].append(0.5)
  setp(plot(errors[0], errors[1]), color="red", marker='.', linestyle='None', alpha=0.5)
  
  savefig("figure.png")
示例#39
0
import sys
import pybrain
from pybrain.tools.xml.networkreader import NetworkReader

# Load the trained network once at startup.
net = NetworkReader.readFrom('C:\Python27\myScripts\Trained_NNv4.xml')

# The ten input features arrive as argv[1]..argv[10].  Indexing argv
# directly (rather than slicing) preserves the original behavior of
# raising IndexError when fewer than ten arguments are supplied.
features = tuple(float(sys.argv[pos]) for pos in range(1, 11))

a = net.activate(features)


def numbers_to_strings(argument):
    """Map a class index to its signal value: 0 -> 1, 1 -> 0, 2 -> -1.

    Any other argument yields the string "nothing".
    """
    if argument == 0:
        return 1
    if argument == 1:
        return 0
    if argument == 2:
        return -1
    return "nothing"


# print a
# print a.argmax()
     standardizer(arr_temp[:, 2]), standardizer(arr_temp[:, 3]),
     standardizer(arr_temp[:, 4]), standardizer(arr_temp[:, 5])))

arr_testT, arr_valT = arr_tempA[:len(arr_test)], arr_tempA[len(arr_test):]

#the four sets of experiments are given separately. These are their start and end points
set_lengths = ((0, 13), (13, 26), (26, 39), (39, 52))
#third set is separated
#an enhanced array that will contain the noisy data

#different deltaT.Normalized. Should be 12,24,36,72
#print cross_val

#Open networks

network_2 = NetworkReader.readFrom(net_fold + 'network_Type2H1NewSTD.xml')
network_4 = NetworkReader.readFrom(net_fold + 'network_Type2H2NewSTD.xml')
#normalize the cross validation set

#normalize the other data with custom function. Remove cross validation and rearrange

#print pd.DataFrame(array_data,columns=lista_col1)

#------------------------------------------Test on  cross validation set 3--------------------------------------------------------------------
arr_valT2 = np.column_stack(
    (standardizer(arr_val[:, 0]), normalizer(standardizer(arr_val[:, 1])),
     standardizer(arr_val[:, 2]), standardizer(arr_val[:, 3]),
     standardizer(arr_val[:, 4]), standardizer(arr_val[:, 5])))
beta = pd.DataFrame(arr_valT2, columns=lista_col2)
beta.drop('Time', axis=1, inplace=True)
#theta is the pre-treatment dataframe
示例#41
0
文件: refs.py 项目: wasuaje/ondina
def run():
    """Main processing sequence.

    Loads three trained networks (drowsiness, attention, operator
    identity), then captures webcam frames forever, scoring each detected
    face and triggering the matching alarm whenever a score falls below
    its threshold.
    """
    a = Alarma()
    # Startup sound: signals the device started without problems.
    """Al inicializar genera un sonido inidicado queel dispositivo esa funcionando
    sin contratiempos"""
    a.inicio()

    #Create the neural networks
    # NOTE(review): red1/red2/red3 are never used again -- the networks
    # loaded from XML below are used instead; confirm crearRN() has no
    # required side effects before removing.
    red1 = rn.crearRN()
    red2 = rn.crearRN()
    red3 = rn.crearRN()

    #Check whether the xml file containing each trained network exists
    path = os.path.dirname(__file__)

    if os.path.isfile('rna_somnolencia.xml'):
        red_somnolencia = NetworkReader.readFrom('rna_somnolencia.xml')
    else:
        # NOTE(review): only prints; a missing file leaves red_somnolencia
        # undefined and estimularRN below would raise NameError.
        print "No existe la red neuronal solicitada"

    if os.path.isfile('rna_atento.xml'):
        red_atento = NetworkReader.readFrom('rna_atento.xml')
    else:
        print "No existe la red neuronal solicitada"

    if os.path.isfile('rna_operador.xml'):
        red_operador = NetworkReader.readFrom('rna_operador.xml')
    else:
        print "No existe la red neuronal solicitada"

    #Select the camera to work with
    #try:
    #    camara = v.capturarVideo()
    #except:
    #    print "no camara",sys.exc_info()[0],sys.exc_info()[1]
    #    a.noCamara()

    camara = cv2.VideoCapture(0)
    #camara.set(15,8.0)
    camara.set(cv2.cv.CV_CAP_PROP_BRIGHTNESS, 160)
    camara.set(cv2.cv.CV_CAP_PROP_EXPOSURE, 20.0)
    time.sleep(3)
    while True:
        # Default scores for this frame.
        Somnolencia = 0.00001
        Atencion = 0.00001
        Operador = 0.00001
        #print "leyendo camara"
        is_sucessfully_read, img = camara.read()

        # is_sucessfully_read is False when no frame could be captured
        if (is_sucessfully_read):
            cv2.imshow("Camera Feed", img)
        else:
            # NOTE(review): `capture` is undefined here; this print would
            # itself raise NameError if the camera read ever fails.
            print "No se pudo detectar entrada de video desde %s. Saliendo..." % capture
            break

        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

        #print "procesando imagen"
        frame = img
        #    #Process the video frames
        improcesada = pi.procesarImagen(frame)
        cara = d.deteccionFacial(improcesada)
        try:
            # When a face IS found, cara appears to be a numpy array (see
            # cara.flatten() below), so `if not cara` raises ValueError and
            # control jumps to the scoring branch -- TODO confirm this
            # control-flow trick is intentional.
            if not cara:
                print "No hay operador frente el Monitor"
                a.ajeno()
        except ValueError:
            #print "procesando somno"
            Somnolencia = rn.estimularRN(red_somnolencia, cara.flatten())
            #print "procesando atencion"
            Atencion = rn.estimularRN(red_atento, cara.flatten())
            #print "procesando operador"
            Operador = rn.estimularRN(red_operador, cara.flatten())
            print "Somnolencia: %s  Atencion: %s  Operador:%s" % (float(
                Somnolencia[0]), float(Atencion[0]), float(Operador[0]))
            # Scores below each cutoff trigger the corresponding alarm.
            if float(Operador[0]) < 5:
                print "Estado de la alarma: Persona no reconocida"
                a.ajeno()
            if float(Atencion[0]) < 5:
                print "Estado de la alarma: Distraido"
                a.distraido()
            if float(Somnolencia[0]) < 9.82:
                print "Estado de la alarma: Somnoliento"
                a.somnoliento()
            # try:
            #     Somnolencia = rn.estimularRN(red_somnolencia,cara.flatten())
            #     Atencion = rn.estimularRN(red_atento,cara.flatten())
            #     Operador = rn.estimularRN(red_operador,cara.flatten())

            # except:
            #     print "Nadie frente el Monitor"
            # Append to the line above to see the error code: ,sys.exc_info()[0],sys.exc_info()[1]
        #time.sleep(2)
    cv2.destroyAllWindows()
    camara.release()
示例#42
0
	def loadFromFile(self, file='config.xml'):
		"""Load a saved PyBrain network from *file* into self.net.

		NOTE(review): the parameter name shadows the builtin `file`; kept
		unchanged for backward compatibility with keyword callers.
		"""
		self.net = NetworkReader.readFrom(file)
示例#43
0
    def predict_class(self, _x, _y, test_file, epochs, steps):
        """Train (or resume) the classifier and score every test sample.

        Builds a training set from (_x, _y), resumes from rede_animal.xml
        when that checkpoint exists, trains for `epochs` blocks of `steps`
        epochs (checkpointing after each block), then writes one CSV row of
        five class activations per line of `test_file` to animal_output.csv.
        """
        print("Iniciando funcao predict_class() .............")

        traindata = self.ReadTrainFile(_x, _y)
        #testdata = self.ReadTestFile( test_file, len(_x[0]) )

        print(
            "____________________________________________________________________________"
        )
        print("A matrix de treino tem ", len(traindata), "linhas de dados")
        print("Dimensoes de Input e Output : ", traindata.indim,
              traindata.outdim)
        print(
            "____________________________________________________________________________\n"
        )

        print("convertendo arquivos .................")

        # One-hot encode the class targets.
        traindata._convertToOneOfMany()
        #testdata._convertToOneOfMany( )

        import os.path
        # Resume from the saved network when a checkpoint exists.
        if os.path.exists('rede_animal.xml'):
            print(
                " Carregando a rede de treinos do arquivo rede_animal.xml *************** "
            )
            fnn = NetworkReader.readFrom('rede_animal.xml')
        else:
            print(
                " Criando rede de treinos no arquivo rede_animal.xml *************** "
            )
            fnn = buildNetwork(traindata.indim,
                               5,
                               traindata.outdim,
                               outclass=SoftmaxLayer)

        trainer = BackpropTrainer(fnn,
                                  dataset=traindata,
                                  momentum=0.1,
                                  verbose=True,
                                  weightdecay=0.01)

        print("Treinando .............")

        # Checkpoint the network after each training block so progress
        # survives an interruption.
        for i in range(epochs):
            print("Treinando epoca ", i)
            trainer.trainEpochs(steps)
            NetworkWriter.writeToFile(fnn, 'rede_animal.xml')
            print(" Rede salva em rede_animal.xml (Ok) ")

        print("Lendo arquivo de teste e classificando ..........")
        print("Gerando resultados em ANIMAL_OUTPUT.CSV ..........")
        # Context managers close both files (the originals were leaked),
        # and the network is activated ONCE per sample instead of five
        # times.  NOTE(review): 'wb' + str writes implies Python 2; under
        # Python 3 this mode would need to be 'w'.
        with open('animal_output.csv', 'wb') as output:
            output.write("ID,Adoption,Died,Euthanasia,Return_to_owner,Transfer\n")
            with open(test_file, 'r') as tests:
                i = 1
                for line in tests:
                    x = ast.literal_eval(line)
                    act = fnn.activate(x)
                    output.write("{},{},{},{},{},{} \n".format(
                        i, act[0], act[1], act[2], act[3], act[4]))
                    i = i + 1
        print("Concluido")
示例#44
0
#print arr_temp
print arr_temp_means
arr_temp_stdD = np.std(arr_temp, axis=0, dtype=np.float64)
print arr_temp_stdD
arr_temp_std_min = [min(standardizer(arr_temp[:, i])) for i in xrange(6)]
arr_temp_std_maxS = [
    max(standardizer(arr_temp[:, i])) - arr_temp_std_min[i] for i in xrange(6)
]
print len(arr_temp_std_maxS)
print len(arr_temp_means)
#the four sets of experiments are given separately. These are their start and end points
sLht = 12

#Open networks

network_2 = NetworkReader.readFrom(net_fold + 'network_Type2H1_Both.xml')
network_4 = NetworkReader.readFrom(net_fold + 'network_Type2H2_Both.xml')
#normalize the cross validation set

#------------------------------------------Test on  cross validation set 3--------------------------------------------------------------------
#Use arr_val
#beta is the standardized-normalized dataframe
beta = pd.DataFrame(arr_valT, columns=lista_col2)
beta.drop('Time', axis=1, inplace=True)
#theta is the pre-treatment dataframe
theta = pd.DataFrame(arr_val, columns=lista_col2)
theta.drop('Time', axis=1, inplace=True)
#for each network do the round
#do the k
#this takes out the cross validation set for ease of use.
beta_Val = beta.values
示例#45
0
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import time
from pybrain.tools.xml.networkreader import NetworkReader
from stock_functions import *


# Endpoint the results are presumably meant to be POSTed to.
# NOTE(review): url/method/handler/opener are set up but never used in
# this chunk -- confirm whether the upload step was removed on purpose.
url = "http://aktien.zwen-aus-zwota.de/update.php"
method = "POST"
handler = urllib2.HTTPHandler()
opener = urllib2.build_opener(handler)

# For each known symbol: fetch the last 200 daily quotes, scale close and
# volume by twice the per-symbol maxima from its .set file, and feed the
# resulting 400 values (close, volume per day) to the network.
symbols = load_file("symbols.json")
net = NetworkReader.readFrom('net200.xml')
for symbol in symbols:
 print symbol
 settings = load_file(symbol+'.set')
 y = yahoo_get_200(symbol)
 if len(y)!=200:
  continue
 #200 days available
 inp = []
 for line in y:
  cl = float(line['Close'])/(settings['maxc']*2.0)
  vol = float(line['Volume'])/(settings['maxv']*2.0)
  inp.append(cl)
  inp.append(vol)
 ret = net.activate(inp)
 a = 'IK'
示例#46
0
#load trained network and run matchups.
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import SoftmaxLayer
from pybrain.tools.xml.networkreader import NetworkReader
from utils import buildTeamlist

teamlist = buildTeamlist()
nn = buildNetwork(8, 5, 2, outclass=SoftmaxLayer)
nn = NetworkReader.readFrom('recurrentNetwork.xml')
#nn=NetworkReader.readFrom('savedNetwork.nn')

print "loaded Network, now running matchups..."
fp = open('data/matchups.csv', 'r')
for line in fp:
    attList = []
    line = line.strip().split(',')
    for team in teamlist:
        if int(team._id) == int(line[0]):
            print team.name,
            attList.append(team.rypg)
            attList.append(team.pypg)
            attList.append(team.drypg)
            attList.append(team.dpypg)
    for team in teamlist:
        if int(team._id) == int(line[1]):
            print " vs. " + team.name
            attList.append(team.rypg)
            attList.append(team.pypg)
            attList.append(team.drypg)
            attList.append(team.dpypg)
    print nn.activate(attList)
示例#47
0
GPIO.setup(red, GPIO.OUT)
GPIO.setup(yellow, GPIO.OUT)
GPIO.setup(green, GPIO.OUT)
GPIO.setup(button, GPIO.IN, pull_up_down=GPIO.PUD_UP)

GPIO.output(red, 0)
GPIO.output(yellow, 0)
GPIO.output(green, 0)

print "Loading network..."
write_to_file("Loading network...")

GPIO.output(yellow, 1)
if os.path.isfile('/home/pi/Documents/Scripts/AutonomousCar/network.xml'):
    fnn = NetworkReader.readFrom(
        '/home/pi/Documents/Scripts/AutonomousCar/network.xml')
    print "Loaded..."
    write_to_file("Loaded...")
else:
    print "Network not present..."
    write_to_file("Network not present...")
    exit(0)
GPIO.output(yellow, 0)

while (True):
    GPIO.output(red, 1)
    while (GPIO.input(button) == True):
        print "Waiting to start..."
        write_to_file("Waiting to start...")
        continue
    GPIO.output(red, 0)
     normalizer(arr_temp[:, 2]), normalizer(arr_temp[:, 3]),
     normalizer(arr_temp[:, 4]), normalizer(arr_temp[:, 5])))

arr_testT, arr_valT = arr_tempA[:len(arr_test)], arr_tempA[len(arr_test):]

#the four sets of experiments are given separately. These are their start and end points
set_lengths = ((0, 13), (13, 26), (26, 39), (39, 52))
#third set is separated
#an enhanced array that will contain the noisy data

#different deltaT.Normalized. Should be 12,24,36,72
#print cross_val

#Open networks

network_2 = NetworkReader.readFrom(net_fold +
                                   'network_Type2H1New_LessNoise2.xml')
network_4 = NetworkReader.readFrom(net_fold +
                                   'network_Type2H2New_LessNoise2.xml')
#normalize the cross validation set

#normalize the other data with custom function. Remove cross validation and rearrange

#print pd.DataFrame(array_data,columns=lista_col1)

#------------------------------------------Test on  cross validation set 3--------------------------------------------------------------------
#Use arr_val
#beta is the standardized-normalized dataframe
arr_valT2 = np.column_stack((normalizer(standardizer(arr_val[:, 0])),
                             normalizer(standardizer(arr_val[:, 1])),
                             normalizer(standardizer(arr_val[:, 2])),
                             normalizer(standardizer(arr_val[:, 3])),
        global finalImage
        finalImage = clone

        avgX = (startX + endX) / 2
        avgY = (startY + endY) / 2

for i in range (0, 3):
    t = threading.Thread(target=callDetect, args=())
    t.start()

ScreenWidth = game.get_screen_width()
ScreenHeight = game.get_screen_height()
# sleep_time = 0.028
sleep_time = 0.028

net = NetworkReader.readFrom('net.xml')

episodes = 60
avgScore = 0.0
for i in range(episodes):

    print("Episode #" + str(i + 1))

    start = time.time()
    # Starts a new episode. It is not needed right after init() but it doesn't cost much. At least the loop is nicer.
    game.new_episode()

    while not game.is_episode_finished():

        cv2.imshow("final", finalImage)
        cv2.waitKey(1)
示例#50
0
def opennetwork(epoch):
    """Load the saved rhyme list and trained network for *epoch*.

    Reads '<epoch>/rhymelist' (a Python literal) and '<epoch>/network.xml'
    and returns the pair (net, rhymelist).
    """
    # Context manager closes the handle -- the original leaked it.
    with open(str(epoch) + '/rhymelist', 'r') as handle:
        rhymelist = ast.literal_eval(str(handle.read()))
    net = NetworkReader.readFrom(str(epoch) + "/network.xml")
    return net, rhymelist
示例#51
0
def main():
	"""Extract UI elements from an uploaded photo and classify them.

	Saves the POSTed image (Flask `request`), isolates the drawing surface
	via blue color distance, finds circle/rectangle blobs, classifies
	nested shapes with a small neural net, and returns the element classes
	as a JSON response.
	"""
	try:
		# Save the uploaded image from the Flask request, if present.
		r =  request.files['image']
		r.save('postimage.JPG')

	except:
		# NOTE(review): bare except silently swallows a missing upload and
		# any other error; a previously saved/hardcoded image is used below.
		pass
	retrain = False
	if sys.platform != "darwin":
		BGIMAGE = 'postimage.JPG'
	else:
		BGIMAGE = 'bad2.JPG'

	# Either build a fresh 4-16-2 network for retraining or load the
	# previously trained one.
	if retrain:
		net = buildNetwork(4, 16, 2, bias=True)
	else:
		net = NetworkReader.readFrom('network.xml') 
	simplecvimg = Image(BGIMAGE).scale(600,600).rotate(270)
	# blue = simplecvimg.colorDistance((2,7,63)) * 2  #scale up
	
	blue = simplecvimg.colorDistance((29,69,160)) * 1.3  #scale up
	red = simplecvimg.colorDistance((255,0,0)) * 2

	blueBlobs = blue.findBlobs()

	l1 = DrawingLayer((simplecvimg.width, simplecvimg.height))


	# Track the largest and second-largest blue blobs; the second-largest
	# is treated as the drawing surface ("screen").
	big, second, = None, None
	maxx, twomaxx = 0,0

	for b in blueBlobs:
		if b.area() > maxx:
			twomaxx = maxx
			maxx = b.area()
			second = big
			big = b
	# NOTE(review): if fewer than two blobs were found, second is still
	# None and this raises AttributeError.
	second.show()

	# cv.WaitKey(5000)
	screen = second.crop().invert()

	# cv.WaitKey(10000)
	# red = simplecvimg.colorDistance((62,5,13)) 

	# green = simplecvimg.colorDistance((140,190,40))
	# red.show()
	# cv.WaitKey(5000)
	# Trim the border of the detected surface.
	screen = screen.crop(screen.width/2, screen.height/2, screen.width-50, screen.height-20, centered=True)
	if mac:
		screen.show()
	if screen == None:
		return jsonify({"Error": "CANNOT EXTRACT"})
	screen.save("cropped.JPG")
	print "SAVED"
	# Scale factors for reporting normalized coordinates.
	w = screen.width * 1.0
	h = screen.height * 1.0
	elements = screen.findBlobs()


	if elements == None:
		return jsonify({"Error": "No elements"})
	# Partition detected blobs by shape; ambiguous blobs count as rectangles.
	circles = [x for x in elements if x.isCircle(tolerance=0.65)]
	rectangles = [x for x in elements if x.isRectangle(tolerance=0.15)]
		
	circles = [x for x in circles if x not in rectangles]


	for b in circles:
		if mac:
			b.show(color=(255,0,0))
		print "Coordinates: " + str(b.x/w) + ", " + str(b.y/h)
		# cv.WaitKey(10)
	for b in rectangles:
		if mac:
			b.show(color=(0,255,0))
		print "Coordinates: " + str(b.x/w) + ", " + str(b.y/h)
		# cv.WaitKey(10)
	# Pair shapes nested inside other shapes: `centers` holds
	# [inner, outer, outer-kind] triples; outer shapes are removed from
	# the plain circle/rectangle lists.
	centers = []
	for x in rectangles + circles:
		cr = circles + rectangles
		cr.remove(x)
		for y in cr:
			c1 = x.centroid()
			# NOTE(review): h and w are reassigned here, clobbering the
			# screen-scale factors set above -- confirm that is intended.
			h = x.minRectHeight()
			w = x.minRectWidth()
			c2 = y.centroid()
			if c2[0] < (c1[0] + w) and c2[0] >(c1[0] - w)  and c2[1] < (c1[1] + h)  and c2[1] > (c1[1] - h)  and y.area() < x.area():
				# x.show(color=(200,100,200))
				# y.show(color=(50,50,255))
				if x in rectangles:
					centers.append([y,x,'rec'])
				else:
					centers.append([y,x,'cir'])
				if x in circles:
					circles.remove(x)
				elif x in rectangles:
					rectangles.remove(x)

	# cv.WaitKey(10000)
	# centers = list((set(circles + rectangles)) - set(centers))
	allFeatures = []
	if retrain:
		# Interactive training: show each inner blob, read its label from
		# stdin, extract 2x2 mean-color features, and train until the
		# backprop error drops below .01; then persist the network.
		ds = SupervisedDataSet(4, 2)
		for b in centers:
			old = b
			b = b[0]
			features = []
			print b.width
			i = b.blobImage().binarize()
			if mac:
				b.blobImage().show()
			i1 = raw_input()
			if i1 == "0":
				end = [0,1]
			else:
				end = [1,0]
			print end
			for x in range(0,2):
				for y in range(0,2):
					print i.width*x,i.width * (x+1),i.height*y,i.height * (y+1)
					z = i.crop((i.width/2) * x, (i.height/2) * y, i.width/2, i.height/2, centered=True)
					features.append(z.meanColor())
			features = [x[0] for x in features]
			allFeatures.append(features)
			ds.addSample(features,end)
		
		trainer  = BackpropTrainer(net, ds)
		t = 10
		while t > .01:
			t = trainer.train()
			print t
		NetworkWriter.writeToFile(net, 'network.xml')
	class1, class2, class3, class4 = [],[],[],[]
	circles = [x for x in circles if x not in [y[0] for y in centers]]
	# Classify each nested pair: the class is decided by the larger of the
	# two net outputs combined with the outer shape's kind (rect/circle).
	for b in centers:
		old = b
		b = b[0]
		features = []

		i = b.blobImage().binarize()
		
		for x in range(0,2):
			for y in range(0,2):
				z = i.crop((i.width/2) * x, (i.height/2) * y, i.width/2, i.height/2, centered=True)
				features.append(z.meanColor())
		features = [x[0] for x in features]
		
		v = net.activate(features)
		print v
		if v[0] > v[1]:
			if mac:
				b.show(color=(0,0,255))
			if old[2] == 'rec':
				class1.append(old[1])
			else:
				class2.append(old[1])
			pass
		else:
			if mac:
				b.show(color=(0,255,255))
			if old[2] == 'rec':
				class3.append(old[1])
			else:
				class4.append(old[1])

		# print "Internal Shape Coordinates: " + str(b.x/w) + ", " + str(b.y/h)
		# cv.WaitKey(100)


	# while True:
	# 	cv.WaitKey(10)
	 
	# Deduplicate the classes against each other and against inner blobs;
	# classes 5/6 are the remaining un-nested circles and rectangles.
	class3 = list(set(class3) - set(class1 + class2  +[x[0] for x in centers]))

	class4 = list(set(class4) - set(class1 + class2 + class3 +[x[0] for x in centers]))

	class5 = set(circles) - set(class1 + class2 + class3 + class4 + [x[0] for x in centers])
	class6 = set(rectangles) - set(class1 + class2 + class3 + class4 + [x[0] for x in centers])


	# Normalize every element to (cx, cy, width, height), scaled to the
	# screen dimensions.
	classes = [class1,class2,class3,class4,class5,class6]
	for x in classes:
		x1 = [(z.centroid(), z.width()/float(screen.width),z.height()/float(screen.height)) for z in x]
		x1 = [(z[0][0]/screen.width,z[0][1]/screen.height, z[1],z[2]) for z in x1]
		classes[classes.index(x)] = x1

	classes = [list(set(x)) for x in classes]

	for c in classes:
		print len(c)

	retValues = {'entities': {"class"+str(classes.index(c)) : c for c in classes}}
	print retValues

	# NOTE(review): this infinite loop blocks before the return below can
	# ever run -- presumably leftover debugging; confirm before relying on
	# the JSON response being sent.
	while True:
		cv.WaitKey(10)
	return jsonify(retValues)
示例#52
0
def loadRecurrentNetwork(fname):
    """Deserialize and return the PyBrain network stored in *fname*."""
    return NetworkReader.readFrom(fname)
示例#53
0
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure import SigmoidLayer
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.tools.xml.networkreader import NetworkReader
import csv

def get_words():
    """Read wordlist.csv and return all of its cells as one flat list."""
    with open('wordlist.csv', 'r') as wordlist:
        return [cell for row in csv.reader(wordlist) for cell in row]

def count_vector(tweet, words):
    """Binary feature vector: 1 for each vocabulary word found in *tweet*."""
    vector = []
    for vocab_word in words:
        vector.append(1 if vocab_word in tweet else 0)
    return vector

if __name__ == '__main__':
    # Load the trained tweet-scoring network and the bag-of-words vocabulary.
    net = NetworkReader.readFrom('tweet_network.xml')
    words = get_words()
    # Interactive loop: score tweets until the user types "done".
    while True:
        text = raw_input('Enter tweet--> ')
        if text.lower() == 'done':
            break
        # The net emits a single 0..1 score; scale to an integer 0..5.
        meanness = str(int(net.activate(count_vector(text, words))[0]*5))
        print('Meaness: ' + meanness)

示例#54
0
import numpy as np
import pandas as pd
from pybrain.tools.xml.networkreader import NetworkReader

window_size = 10
# minus 2 because 1 is for target and 1 less because diffs
input_size = window_size - 2


def windows(data, window_len):
    """Yield each contiguous length-*window_len* sublist of *data*.

    Yields nothing when data is shorter than window_len.
    """
    # range() (instead of the Python-2-only xrange) iterates identically
    # and keeps this working on both Python 2 and Python 3.
    for start in range(len(data) - window_len + 1):
        yield [data[j] for j in range(start, start + window_len)]


def diff_percent(a):
    """Return the element-wise fractional change between consecutive values."""
    deltas = np.diff(a)
    baselines = a[:-1]
    return np.true_divide(deltas, baselines)


# Load the adjusted-close price series and the trained network.
data = pd.io.parsers.read_csv(
    '../../data/spy-1994-2014.csv')['Adj Close'].values
net = NetworkReader.readFrom(
    '/home/mark/workspace/final-year-project/work/neural-nets/spy-1994-2014-network.xml'
)

# Slide a window over the series; the first window_size-2 percentage
# diffs are the inputs and the last diff is the (unused here) target.
for window in windows(data, window_size):
    dp = diff_percent(window)
    window_inputs = dp[:-1]
    window_target = dp[-1]
    net_result = net.activate(window_inputs)
    print net_result
示例#55
0
#add the contents of digits to a dataset
daSet = ClassificationDataSet(64, 1)
for k in xrange(len(X)):
    daSet.addSample(X.ravel()[k], y.ravel()[k])

#split the dataset into training and testing
testData, trainData = daSet.splitWithProportion(0.40)

#convert the data into 10 separate digits
trainData._convertToOneOfMany()
testData._convertToOneOfMany()

#check for the save file and load
if os.path.isfile('dig.xml'):
    net = NetworkReader.readFrom('dig.xml')
    net.sorted = False
    net.sortModules()
else:
    # net = FeedForwardNetwork()
    net = buildNetwork(64,
                       37,
                       10,
                       hiddenclass=SigmoidLayer,
                       outclass=SoftmaxLayer,
                       bias=True)

# create a backprop trainer
trainer = BackpropTrainer(net,
                          dataset=trainData,
                          momentum=0.0,
示例#56
0
if __name__ == "__main__":
    HOST, PORT = "localhost", 9999

    # Create the server, binding to localhost on port 9999
    server = SocketServer.TCPServer((HOST, PORT), MyTCPHandler)

    # Load the neural networks
    # thread.start_new_thread(LoadAppleNeuralNetwork,('VegeTable_PyBrain_Neural_Network_Apple.xml',))
    # thread.start_new_thread(LoadCucumberNeuralNetwork,('VegeTable_PyBrain_Neural_Network_Cucumber.xml',))
    # thread.start_new_thread(LoadBananaNeuralNetwork,('VegeTable_PyBrain_Neural_Network_Banana.xml',))

    print "Loading Banana neural network: "+str(bananaXML)
    start = timer()
    global banana
    banana = NetworkReader.readFrom(bananaXML)
    end = timer()
    print "Time taken to load Banana neural network: " + str(end-start)

    print "Loading Apple neural network: "+str(appleXML)
    start = timer()
    global apple
    apple = NetworkReader.readFrom(appleXML)
    end = timer()
    print "Time taken to load Apple neural network: " + str(end-start)

    print "Loading Cucumber neural network: "+str(cucumberXML)
    start = timer()
    global cucumber
    cucumber = NetworkReader.readFrom(cucumberXML)
    end = timer()