Example #1
    def __init__(self, loadWeightsFromFile, filename):
        #neural network as function approximator
        #Initialize neural network
        if loadWeightsFromFile:
            self.nn = NetworkReader.readFrom(filename)
        else:
            self.nn = buildNetwork(NODE_INPUT, NODE_HIDDEN, NODE_OUTPUT, bias=True)
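Example #1 shows only the load-or-build half of the pattern; the matching save step used elsewhere on this page is NetworkWriter.writeToFile. A minimal round-trip sketch (layer sizes and the file name are placeholders, not taken from the example above):

from pybrain.tools.shortcuts import buildNetwork
from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.tools.customxml.networkwriter import NetworkWriter

net = buildNetwork(3, 5, 1, bias=True)         # small feed-forward net (placeholder sizes)
NetworkWriter.writeToFile(net, 'net.xml')      # persist the network as PyBrain XML
restored = NetworkReader.readFrom('net.xml')   # reconstruct an equivalent network
print restored.activate([0.1, 0.2, 0.3])       # use it exactly like the original
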
Example #2
    def buildNet(self):
        print "Building a network..."
        if os.path.isfile(self.path):
            self.trained = True
            return NetworkReader.readFrom(self.path)
        else:
            return buildNetwork(self.all_data.indim, self.d[self.path]['hidden_dim'], self.all_data.outdim, outclass=SoftmaxLayer)
 def runClassifier(self):
     out = []
     true = []
     #BatIDToAdd = [1, 2, 3, 5, 6, 10, 11, 12, 14, 8, 9] #1-14 are bats; 8 is noise; 9 is something else
     print "Loading Network.."
     net = NetworkReader.readFrom("SecondStageClassifier.xml")
     print "Loading feature data with FSC = 1 (Bat calls)"
     minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target, path = self.getDistrubedTestDataRUNVERSION()
     SAMPLE_SIZE = len(minFreq)
     for i in range(0, SAMPLE_SIZE):
         ClassifierOutput = net.activate([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]])
         ClassifierOutputID = np.argmax(ClassifierOutput)
         currentTarget = self.convertIDMultiSingle(target[i])
         out.append(ClassifierOutputID)
         true.append(currentTarget)
         #MAPPING FROM BATID TO TSC value:
         SSC_value = ClassifierOutputID
         # Metadata Setup, get path and write: TSC = value
         ds = self.HDFFile[path[i]]
         ds.attrs["SSC"] = SSC_value
     # Close HDF5 file to save to disk. This is also done to make sure the next stage classifier can open the file
     self.HDFFile.flush()
     self.HDFFile.close()
     self.ConfusionMatrix = self.CorrectRatio(out, true)
     return self.ConfusionMatrix
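The comment above notes that the HDF5 file is flushed and closed so the next-stage classifier can read the attributes that were just written. A hedged read-back sketch, assuming the file is handled with h5py (the file name and dataset path below are hypothetical):

import h5py

# re-open the HDF5 file and inspect the classifier attributes written above
with h5py.File("bat_calls.h5", "r") as hdf:           # hypothetical file name
    ds = hdf["/recordings/call_0001"]                 # hypothetical dataset path
    print ds.attrs["FSC"], ds.attrs["SSC"], ds.attrs["TSC"]
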
    def runThirdStageClassifier(self):
        out = []
        true = []
        #SingleBatIDToAdd = [1, 2, 3, 5, 6] # for single
        Correct = 0
        print "Loading Network.."
        net = NetworkReader.readFrom(r"C:\Users\Anoch\PycharmProjects\BatClassification\ThirdStageClassifier.xml")
        print "Loading feature data with SSC = 1 (Single call type)"
        minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target, path = self.getDistrubedTestDataRUNVERSIONTSC()
        SAMPLE_SIZE = len(minFreq)
        for i in range(0, SAMPLE_SIZE):
            ClassifierOutput= net.activate([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i], pixelAverage[i]])

            ClassifierOutputID = np.argmax(ClassifierOutput)
            currentTarget = self.convertIDSingleTSC(target[i])
            out.append(ClassifierOutputID)
            true.append(currentTarget)

            #MAPPING FROM BATID TO TSC value:
            TSC_value = ClassifierOutputID
            # Metadata Setup, get path and write: TSC = value
            ds = self.HDFFile[path[i]]
            ds.attrs["TSC"] = TSC_value
        self.HDFFile.flush()
        self.ConfusionMatrix =  self.CorrectRatio(out, true)
        return self.ConfusionMatrix
 def getPersistedData(self, name):
     pathToData = self.relPathFromFilename(name)
     if os.path.isfile(pathToData):
         with open(pathToData, "rb") as f:
             data = pickle.load(f)
         if name == NEURAL_NET_DUMP_NAME:
             data.net = NetworkReader.readFrom(self.relPathFromFilename(name + DATA_DUMP_NN_EXT))
         return data
def testNets():
    ds = SupervisedDataSet.loadFromFile('SynapsemonPie/boards')
    net20 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer20.xml') 
    net50 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer50.xml') 
    net80 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer80.xml') 
    net110 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer110.xml') 
    net140 = NetworkReader.readFrom('SynapsemonPie/synapsemon_primer140.xml') 
    trainer20 = BackpropTrainer(net20, ds)
    trainer50 = BackpropTrainer(net50, ds)
    trainer80 = BackpropTrainer(net80, ds)
    trainer110 = BackpropTrainer(net110, ds)
    trainer140 = BackpropTrainer(net140, ds)
    print trainer20.train()
    print trainer50.train()
    print trainer80.train()
    print trainer110.train()
    print trainer140.train()
Example #7
def main():
    train_file = 'data/train.csv'
    # validation_file = 'data/validation.csv'
    output_model_file = 'model.xml'

    # hidden_size = 4
    epochs = 500

    # load data
    # def loadData():
    train = np.loadtxt(train_file, delimiter=' ')
    Input = train[0:,0:3]
    Output = train[0:,3:5]

    # validation = np.loadtxt(validation_file, delimiter=',')
    # train = np.vstack((train, validation))

    # x_train = train[:, 0:-1]
    # y_train = train[:, -1]
    # y_train = y_train.reshape(-1, 1)

    # input_size = x_train.shape[1]
    # target_size = y_train.shape[1]

    # prepare dataset
    # def prepare dataset(input_size, target_size):
    ds = SDS(Input,Output)
    # ds.addSample(input_size)
    # ds.setField('input', x_train)
    # ds.setField('target', y_train)

    # init and train
    # def initTrain(input_size, hidden_size, input, output):
    # net = buildNetwork(input_size, hidden_size, target_size, bias=True)
    net = buildNetwork(3,  # input layer
                                 4,  # hidden0
                                 2,  # output
                                 hiddenclass=SigmoidLayer,
                                 outclass=SigmoidLayer,
                                 bias=True
                                 )
    # note: the network built above is immediately replaced by the one saved in model.xml
    net = NetworkReader.readFrom('model.xml')
    for i,o in zip(Input,Output):
        ds.addSample(i,o)
        print i, o

    trainer = BackpropTrainer(net, ds)
        
    print "training for {} epochs...".format(epochs)

    for i in range(epochs):
        mse = trainer.train()
        rmse = sqrt(mse)
        print "training RMSE, epoch {}: {}".format(i + 1, rmse)
        if os.path.isfile("../stopfile.txt"):
            break
    
    NetworkWriter.writeToFile(net, output_model_file)
def nfq_action_value(network_fname, state=[0, 0, 0, 0, 0]):
    # TODO generalize away from 9 action values. Ask the network how many
    # discrete action values there are.
    n_actions = 9
    network = NetworkReader.readFrom(network_fname)
    actionvalues = np.empty(n_actions)
    for i_action in range(n_actions):
        network_input = r_[state, one_to_n(i_action, n_actions)]
        actionvalues[i_action] = network.activate(network_input)
    return actionvalues
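The TODO above asks how the number of discrete actions could be read off the network instead of being hard-coded to 9. Since the vector passed to activate is the state concatenated with a one-hot action encoding, one hedged sketch (assuming the network's input layer matches exactly that layout; infer_n_actions is a hypothetical helper):

def infer_n_actions(network, state_dim=5):
    # the input layer holds state_dim state values plus one unit per discrete
    # action (the one-hot part), so the remaining input units give n_actions
    return network.indim - state_dim
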
Example #9
 def __init__(self):
     print "start a new instance"
     self.loaded=False
     self.has_data_source=False
     try:
         self.net=NetworkReader.readFrom('pickled_ANN')
         print "ANN has been found from an ash jar"
         self.loaded=True
     except IOError:
         print "ash jar is empty, use train() to start a new ANN"
 def exoplanet_search(self,
                      find=default_find):
      """
      This method searches for exoplanets.
      The output will have the format:
          (exostar1_streak, exostar2_streak, ...)
      where an exostar is a star with an exoplanet, and a streak is
      a list of states in which the exostar was observed to have exoplanetary
      behaviour.
      At least 5 stars must be tracked.
      """
      stars, deleted = self.find_objects(find=find)
      print str(100.0 * deleted / len(self.photos)) + "% of the data was ignored"
      """
      There must be an integer multiple of 5 stars
      in stars, and the stars must be grouped together in lumps
      of 5.
      """
      exostreaks = []
      net = NetworkReader.readFrom("../../Identifier/network.xml")
      for starnum in range(0, len(stars), 5):
          search_stars = stars[starnum: starnum + 5]
          start_time = search_stars[0].states[0].time
          stop_time = search_stars[0].states[-1].time
          for photonum in range(start_time, stop_time + 1, 10):
              print self.photos[photonum]
              photonum = min(photonum, stop_time - 10)
              intensities = []
              for slide in range(photonum, photonum + 10):
                  intensities.append([])
                  photo = self.photos[slide]
                  photo.load()
                  for star in search_stars:
                      state = star.track(slide)
                      brightness = photo.intensity(state.position, state.radius)
                      intensities[-1].append(brightness)
                  photo.close()
              inpt = []
              for starothernum in range(5):
                  lightcurve = []
                  for slides_from_zero in range(10):
                      lightcurve.append(intensities[slides_from_zero][starothernum])
                  array_version = array(lightcurve)
                  array_version /= average(array_version)
                  inpt += list(array_version)
              nnet_output = net.activate(tuple(inpt))
              for o in range(5):
                  if nnet_output[o] > 0.5:
                      exostreak = []
                      for slide in range(photonum, photonum + 10):
                          state = search_stars[o].track(slide)
                          exostreak.append(state)
                      exostreaks.append(exostreak)
      return exostreaks
 def __init__(self, data, machineID, eta, lmda, netPath, input_size=30, epochs=20, train_str_index=1000, train_end_index=3000):
     '''
     Constructor
     '''
     self.data = data
     self.machineID = machineID
     self.eta = eta
     self.lmda = lmda
     self.INPUT_SIZE = input_size
     self.epochs = epochs
     self.str_train = train_str_index
     self.end_train = train_end_index
     self.net = NetworkReader.readFrom(netPath)
Example #12
    def load_network_from_file(self, filename):
        """Load Network from File

        Using a NetworkWriter written file, data from the saved network
        will be reconstituted into a new PathPlanningNetwork class.
        This is used to load saved networks.

        Arguments:
            filename: The filename of the saved xml file.
        """
        self._network = NetworkReader.readFrom(filename)

        return
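A minimal usage sketch for the method above, assuming a PathPlanningNetwork instance (named planner here, hypothetically) and an XML file previously written with NetworkWriter:

planner = PathPlanningNetwork()                      # class named in the docstring above
planner.load_network_from_file("path_planner.xml")   # hypothetical saved-network file
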
Example #13
def trainNetwork():
	print "[Training] Network has Started..."
	inputSize = 0
	with open('file1.txt', 'r') as f:			#automatically closes the file at the end of the block
		#first_line = f.readline()
		#inputSize = len(first_line)
		dataset = SupervisedDataSet(4, 1)	 #specify size of data and target
		f.seek(0) 							#move back to the beginning of the file
		#iterate through the file, 1 picture per line
		for line in f:
			mylist = json.loads(line)		#list object
			target = mylist[-1]				#retrieve and then delete the target classification
			del mylist[-2:]
			#print target
			dataset.addSample(tuple(mylist), (target,))
			#print json.loads(line)
	if os.path.isfile('annModel.xml'):
		skynet = NetworkReader.readFrom('annModel.xml')#for use if individual sample files used
	else:
		skynet = buildNetwork(dataset.indim, 8, dataset.outdim, bias=True, hiddenclass=TanhLayer) #input,hidden,output
	#SoftmaxLayer, SigmoidLayer, LinearLayer, GaussianLayer
	#Note: the hidden neuron count is arbitrary; try 1, 3, 4 or 5 if this method doesn't work out
	trainer = BackpropTrainer(skynet, dataset,learningrate = 0.3, weightdecay = 0.01,momentum = 0.9)
	#trainer.trainUntilConvergence()
	for i in xrange(1000):
		trainer.train()
    #trainer.trainEpochs(1000)
    #Save the now trained neural network
	NetworkWriter.writeToFile(skynet,'annModel.xml')
	print "[Network] has been Written"

################## SVM Method #######################
#Change append method in write method for target persistence
	dataX = []
	datay = []
	with open(writeFile, 'r') as f:
		for line in f:
			mylist = json.loads(line)
			target2 = mylist[-1]
			dataX.append(mylist[:-2])
			datay.append(target2)
	#datay = [target2] * len(dataX) #Targets, size is n_samples, for use with indiviual sample files with same target
	print [target2]
	print dataX
	print datay
	clf = svm.LinearSVC()
	clf.fit(dataX,datay)
    #Persist the trained model
	joblib.dump(clf,'svmModel.pkl')
 def __init__(self, data, machineID, netPath, eta, lmda, input_size=30, epochs=20, train_str_index=1000, train_end_index=3000):
     '''
     Constructor
     '''
     self.cpuData = data[0]
     self.memData = data[1]
     self.machineID = machineID
     self.eta = eta
     self.lmda = lmda
     self.INPUT_SIZE = input_size
     self.epochs = epochs
     self.str_train = train_str_index
     self.end_train = train_end_index
     self.net = NetworkReader.readFrom(netPath)
     
     self.memForecasts = np.genfromtxt("d:/data/memory_fnn/"+machineID.replace("cpu", "memory"),delimiter=',').ravel()
    def LoadNetwork(self):
        """
        Loading network dump from file.
        """
        FCLogger.debug('Loading network from PyBrain xml-formatted file...')
        net = None

        if os.path.exists(self.networkFile):
            net = NetworkReader.readFrom(self.networkFile)

            FCLogger.info('Network loaded from dump-file: {}'.format(os.path.abspath(self.networkFile)))

        else:
            FCLogger.warning('{} - file with Neural Network configuration not exist!'.format(os.path.abspath(self.networkFile)))

        self.network = net
Example #16
def init():
	print("Initializing")
	global neuralNetwork, inputDataSet, outputData, atexit
	atexit.register(pickleWeights)
	load = True if input("Load file? (y/n) ").lower() == "y" else False
	if load:
		filename = input("File name? (Please do not indicate the extension)\n") + ".xml"
		print("Loading", filename)
		if isfile(filename):
			neuralNetwork = NetworkReader.readFrom(filename)
		else:
			print("There is no such file. Creating a new network.")
			neuralNetwork = buildNetwork(3, 3, 3, 1, hiddenclass=TanhLayer, outclass=TanhLayer)
	else:
		neuralNetwork = buildNetwork(3, 3, 3, 1, hiddenclass=TanhLayer, outclass=TanhLayer)
	pretrain = True if input("Pretrain with default set? (y/n) ").lower() == "y" else False
	if pretrain:
		preTrain()
Example #17
def main():
    global data_dir
    app = QApplication([])
    p = argparse.ArgumentParser(description='PyBrain example')
    p.add_argument('-d',
                   '--data-dir',
                   default="./",
                   help="Path to dir, containing data")
    p.add_argument('-n',
                   '--hidden-count',
                   default="4",
                   help="Number of neurons in the hidden layer")
    p.add_argument(
        '-e',
        '--epochs',
        default="1000",
        help="Number of training epochs; use 0 to train until convergence"
    )
    args = p.parse_args()
    data_dir = os.path.abspath(args.data_dir)
    learn_path = data_dir + "/learn/"
    test_path = data_dir + "/test/"
    if not os.path.exists(learn_path):
        print("Error: Learn directory does not exist!")
        sys.exit(1)
    if not os.path.exists(test_path):
        print("Error: Test directory does not exist!")
        sys.exit(1)
    learn_data = loadData(learn_path)
    test_data = loadData(test_path)
    # net = init_brain(learn_data, int(args.epochs), TrainerClass=RPropMinusTrainer)
    try:
        net = NetworkReader.readFrom(data_dir + "/net.xml")
    except FileNotFoundError:
        print("Create net")
        net = init_brain(learn_data,
                         int(args.epochs),
                         int(args.hidden_count),
                         TrainerClass=BackpropTrainer)
    print("Now we have a working network. Let's try to use it on learn_data.")
    print("Here come the tests on the learn data!")
    test_brain(net, learn_data)
    print("Here come the tests on the test data!")
    test_brain(net, test_data)
    return 0
Example #18
 def xforecast(self):
     net = NetworkReader.readFrom('xtrainedinfo.xml')
     activate_in = []
     with open('xtraindata.csv') as tf:
         xforecast = []
         for line in tf:
             data = [x for x in line.strip().split(',') if x]
             for i in range(1, 10):
                 activate_in.append(float(data[i]))
             # print activate_in
             if float(net.activate((activate_in))) > 4.84e-06:
                 xforecast.append(2)
             elif float(net.activate((activate_in))) > 3.5e-06:
                 xforecast.append(1)
             else:
                 xforecast.append(0)
             activate_in = []
     return xforecast
Example #19
def perceptron(hidden_neurons=20, weightdecay=0.01, momentum=0.1):
    INPUT_FEATURES = 200
    CLASSES = 15
    HIDDEN_NEURONS = hidden_neurons
    WEIGHTDECAY = weightdecay
    MOMENTUM = momentum
    
    g = generate_data()
    alldata = g['d']
    testdata = generate_Testdata(g['index'])['d']
    #tstdata, trndata = alldata.splitWithProportion(0.25)
    #print type(tstdata)

    trndata = _convert_supervised_to_classification(alldata,CLASSES)
    tstdata = _convert_supervised_to_classification(testdata,CLASSES)
    trndata._convertToOneOfMany()  
    tstdata._convertToOneOfMany()
    #fnn = buildNetwork(trndata.indim, HIDDEN_NEURONS, trndata.outdim,outclass=SoftmaxLayer)
    fnn = NetworkReader.readFrom('GCM(200+70.87).xml')
    trainer = BackpropTrainer(fnn, dataset=trndata, momentum=MOMENTUM,verbose=True, weightdecay=WEIGHTDECAY,learningrate=0.01)
    result = 0
    ssss = 0
    for i in range(1):
        #trainer.trainEpochs(1)
        trnresult = percentError(trainer.testOnClassData(),trndata['class'])
        tstresult = percentError(trainer.testOnClassData(dataset=tstdata), tstdata['class'])
        out = fnn.activateOnDataset(tstdata)
        ssss = out
        out = out.argmax(axis=1)
        result = out
    df = pd.DataFrame(ssss)
    df.to_excel("GCMout.xls")
    df = pd.DataFrame(result)
    df.insert(1,'1',tstdata['class'])
    df.to_excel("GCM.xls")
    error = 0
    for i in range(len(tstdata['class'])):
        if tstdata['class'][i] != result[i]:
            error = error+1
    #print (len(tstdata['class'])-error)*1.0/len(tstdata['class'])*100
    print AAC(result,tstdata['class'])
    print AUC(np.transpose(tstdata['class'])[0],result.transpose())
    print Fscore(np.transpose(tstdata['class'])[0],result.transpose())
    NetworkWriter.writeToFile(fnn, 'GCM.xml')
def improve_network(trainer=default_trainer, transit=default_transit):
    """
    Author: Xander
    This function improves an existing neural net
    capable of detecting exoplanets in lightcurves.
    It writes the network to network.xml
    The input, output pairs should be of the 
    format generate() generates them in.
    A good rule-of-thumb for telling whether the network detects an exoplanet
    is to see if the output is above 0.5.
    """
    print "Retrieving network..."
    net = NetworkReader.readFrom("../network.xml")
    print "Retrieving current performance..."
    f = open("../network_info.txt")
    first_line = f.readlines()[0]
    best_fraction = float(first_line.split("%")[0])
    f.close()
    train_network(net, best_fraction, trainer=trainer, transit=transit)
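The docstring above gives the rule of thumb that an output above 0.5 counts as a detection, the same threshold exoplanet_search applies to each output unit. A hedged helper sketch (detects_exoplanet and its single-output assumption are not from the original code):

def detects_exoplanet(net, lightcurve_input, threshold=0.5):
    # activate the trained network on one prepared input vector and apply the
    # 0.5 rule of thumb; assumes a single detection output per call
    return net.activate(lightcurve_input)[0] > threshold
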
Example #21
    def __init__(self,df=0.9):
        self.inputSize = 80
        self.hiddenSize = 100
        self.outputSize = 1
        self.df = df

        if (os.path.isfile("nn/neural-network.xml")):
            ##print("Loading Network from file")
            self.net = NetworkReader.readFrom('nn/neural-network.xml')
            self.ds = SupervisedDataSet(self.inputSize, self.outputSize)
            self.loadDataSet()
            self.trainer = BackpropTrainer(self.net, self.ds)
        else:
            print("Building Network")
            self.net = buildNetwork(self.inputSize,self.hiddenSize,self.outputSize, bias=True)
            self.ds = SupervisedDataSet(self.inputSize, self.outputSize)
            self.loadDataSet()
            self.trainer = BackpropTrainer(self.net, self.ds)
            self.train()
            self.saveNet()
 def runFirstStageClassifier(self):
     out = []
     true = []
     BatIDToAdd = [1, 2, 3, 5, 6, 10, 11, 12, 14, 8, 9] #1-14 are bats; 8 is noise; 9 is something else
     print "Loading Network.."
     net = NetworkReader.readFrom(r"C:\Users\Anoch\PycharmProjects\BatClassification\FirstStageClassifier.xml")
     print "Loading feature data..."
     minFreq, maxFreq, Durantion, fl1, fl2, fl3, fl4, fl5, fl6, fl7, fl8, fl9, fl10, pixelAverage, target, path = self.getDistrubedTestDataRUNVERSIONFSC(BatIDToAdd)
     SAMPLE_SIZE = len(minFreq)
     for i in range(0, SAMPLE_SIZE):
         ClassifierOutput = net.activate([minFreq[i], maxFreq[i], Durantion[i], fl1[i], fl2[i], fl3[i], fl4[i], fl5[i], fl6[i], fl7[i], fl8[i], fl9[i], fl10[i]])
         ClassifierOutputID = np.argmax(ClassifierOutput)
         currentTarget = self.convertIDFSC(target[i])
         out.append(ClassifierOutputID)
         true.append(currentTarget)
         #MAPPING FROM BATID TO TSC value:
         FSC_value = ClassifierOutputID
         # Metadata Setup, get path and write: TSC = value
         ds = self.HDFFile[path[i]]
         ds.attrs["FSC"] = FSC_value
         ds.attrs["SSC"] = -1
         ds.attrs["TSC"] = -1
     # Close HDF5 file to save to disk. This is also done to make sure the next stage classifier can open the file
     self.HDFFile.flush()
Example #23
import time
import theanets
import vision_definitions
import numpy as np
from numpy.random.mtrand import randint
from numpy import argmax
from random import randint  # note: shadows the randint imported from numpy above
from scipy.interpolate import interp1d
from pybrain.tools.customxml.networkreader import NetworkReader

BallLiftJoint = np.loadtxt("../../20fpsFullBehaviorSampling/BallLift/JointData.txt").astype(np.float32)
BallRollJoint = np.loadtxt("../../20fpsFullBehaviorSampling/BallRoll/JointData.txt").astype(np.float32)
BellRingLJoint = np.loadtxt("../../20fpsFullBehaviorSampling/BellRingL/JointData.txt").astype(np.float32)
BellRingRJoint = np.loadtxt("../../20fpsFullBehaviorSampling/BellRingR/JointData.txt").astype(np.float32)
BallRollPlateJoint = np.loadtxt("../../20fpsFullBehaviorSampling/BallRollPlate/JointData.txt").astype(np.float32)
RopewayJoint = np.loadtxt("../../20fpsFullBehaviorSampling/Ropeway/JointData.txt").astype(np.float32)

LSTMClassificationNet = NetworkReader.readFrom("153sigmoid/TrainUntilConv.xml")
print "Loaded 20 LSTM Trained Network!"

twentylstmaccdata = []
twentylstmstddata = []
twentylstmstderror = []

predictedBLLabels = []
predictedBRLabels = []
predictedBRLLabels = []
predictedBRRLabels = []
predictedBRPLabels = []
predictedRWLabels = []

offset = 100
accuracyOverall = []
def rnn():
    # load dataframe from csv file
    df = pi.load_data_frame('../../data/NABIL.csv')
    # column name to match with indicator calculating modules
    # TODO: resolve issue with column name
    df.columns = [
        'Transactions',
        'Traded_Shares',
        'Traded_Amount',
        'High',
        'Low',
        'Close']
     
    data = df.Close.values
    # TODO: write min_max normalization
    # normalization
    # cp = dataframe.pop(' Close Price')
    # x = cp.values
    temp = np.array(data).reshape(len(data),1)
    min_max_scaler = preprocessing.MinMaxScaler()
    data = min_max_scaler.fit_transform(temp)
    # dataframe[' Close Price'] = x_scaled
     
    # prepare sequential dataset for pyBrain rnn network
    ds = SequentialDataSet(1, 1)
    for sample, next_sample in zip(data, cycle(data[1:])):
        ds.addSample(sample, next_sample)
     
    # build rnn network with LSTM layer
    # if saved network is available
    if os.path.isfile('network.xml'):
        net = NetworkReader.readFrom('network.xml')
    else:
        net = buildNetwork(1, 20, 1, 
                           hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
     
    # build trainer
    trainer = RPropMinusTrainer(net, dataset=ds, verbose = True)
    train_errors = [] # save errors for plotting later
    EPOCHS_PER_CYCLE = 5
    CYCLES = 5
    EPOCHS = EPOCHS_PER_CYCLE * CYCLES
    for i in range(CYCLES):
        trainer.trainEpochs(EPOCHS_PER_CYCLE)
        train_errors.append(trainer.testOnData())
        epoch = (i+1) * EPOCHS_PER_CYCLE
        print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
        sys.stdout.flush()
    # save the network
    NetworkWriter.writeToFile(net,'network.xml')
        
    print()
    print("final error =", train_errors[-1])
     
    predicted = []
    for dat in data:
        predicted.append(net.activate(dat)[0])
    # data = min_max_scaler.inverse_transform(data)
    # predicted = min_max_scaler.inverse_transform(predicted)
    predicted_array = min_max_scaler.inverse_transform(np.array(predicted).reshape(-1,1))
    print(predicted_array[-1])
    plt.figure()
     
    legend_actual, = plt.plot(range(0, len(data)),temp, label = 'actual', linestyle = '--', linewidth = 2, c = 'blue')
    legend_predicted, = plt.plot(range(0, len(data)), predicted_array, label = 'predicted', linewidth = 1.5, c='red')
    plt.legend(handles=[legend_actual, legend_predicted])
    plt.savefig('error.png')
    plt.show()
#A checkers AI implementation based on TD Backgammon 

from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.datasets import SupervisedDataSet                                  
from pybrain.supervised.trainers import BackpropTrainer

# Constants
BLACK, WHITE = 0, 1
#neural net 
net = NetworkReader.readFrom('CheckersMini/synapsemon_random_black_mini_140.xml') 

def move_function(board):
    global net  
    best_max_move = None 
    max_value = -1000
    best_min_move = None
    min_value = 1000

    #value is the chance of black winning
    for m in board.get_moves():
        nextboard = board.peek_move(m)
        value = net.activate(board_to_input(nextboard))
        if value > max_value: 
            max_value = value
            best_max_move = m 
        if value < min_value:
            min_value = value
            best_min_move = m

    ds = SupervisedDataSet(97, 1)
#A checkers AI implementation based on TD Backgammon 

from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.datasets import SupervisedDataSet                                  
from pybrain.supervised.trainers import BackpropTrainer

# Constants
BLACK, WHITE = 0, 1
#neural net 
net = NetworkReader.readFrom('CheckersMini/synapsemon_arthur_white_mini_140.xml') 

def move_function(board):
    global net  
    best_max_move = None 
    max_value = -1000
    best_min_move = None
    min_value = 1000

    #value is the chance of black winning
    for m in board.get_moves():
        nextboard = board.peek_move(m)
        value = net.activate(board_to_input(nextboard))
        if value > max_value: 
            max_value = value
            best_max_move = m 
        if value < min_value:
            min_value = value
            best_min_move = m

    ds = SupervisedDataSet(97, 1)
type = int( sys.argv[1] ) # 1 = Uncert&Salience, 2 = Salience, 3 = Uncert, 4 = Activation
env = DistractorRatio() # Create an instance of the D-R task
# Create an action/value neural net with an state space of 100 and an action space of 8
if type == 1:
    learner = QLambda_LinFA( 8, 100 )
else:
    learner = QLambda_LinFA( 5, 52 )

learner.batchMode = True
learner.learningRate = 0.05 # Default is 0.5
agent = HumanAgent_LinearFA( learner, type ) # Create an agent that learns with QLambda_LinFA
experiment = CustomEpisodicExperiment( env, agent ) # Put the agent in the environment

if len( sys.argv ) == 4:
    print 'Loading saved net...'
    module.network = NetworkReader.readFrom( sys.argv[3] )

def save( history ):
    """
    This function gets called after each training/testing block or when the
    script gets closed. It saves the neural net and RL history of the agent so
    that it can be restored or reused in another model.
    """
    base = os.path.splitext( sys.argv[2] )[0]
    fileObject = open( base + '.history', 'w' )
    pickle.dump( history, fileObject )
    fileObject.close()

# This registers a function that will get called when the script closes.
atexit.register( save, agent.history )
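
The docstring for save() says both the neural net and the RL history are persisted, but the body shown only pickles the history. A hedged variant that also writes the net (it assumes module.network still holds the net, as in the loading code above, that NetworkWriter is imported, and that the .xml naming scheme is acceptable):

def save_with_network( history ):
    # same as save(), plus the net-saving step described in its docstring
    base = os.path.splitext( sys.argv[2] )[0]
    NetworkWriter.writeToFile( module.network, base + '.xml' )
    fileObject = open( base + '.history', 'w' )
    pickle.dump( history, fileObject )
    fileObject.close()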
# A checkers AI implementation based on TD Backgammon

from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.datasets import SupervisedDataSet
from pybrain.supervised.trainers import BackpropTrainer

# Constants
BLACK, WHITE = 0, 1
# neural net
net = NetworkReader.readFrom("SynapsemonPie/synapsemon_random_black.xml")


def move_function(board):
    global net
    best_max_move = None
    max_value = -1000
    best_min_move = None
    min_value = 1000

    # value is the chance of black winning
    for m in board.get_moves():
        nextboard = board.peek_move(m)
        value = net.activate(board_to_input(nextboard))
        if value > max_value:
            max_value = value
            best_max_move = m
        if value < min_value:
            min_value = value
            best_min_move = m
Example #29
robotIP="192.168.0.108"
tts=ALProxy("ALTextToSpeech", robotIP, 9559)
motion = ALProxy("ALMotion", robotIP, 9559)
memory = ALProxy("ALMemory", robotIP, 9559)
posture = ALProxy("ALRobotPosture", robotIP, 9559)
camProxy = ALProxy("ALVideoDevice", robotIP, 9559)
resolution = 0    # kQQVGA
colorSpace = 11   # RGB

###########################################
# For long behavior with 8 classes - BLIT #
###########################################
# First set up the figure, the axis, and the plot element we want to animate
fig1 = plt.figure(2, figsize=(10, 5), dpi=90, facecolor='w', edgecolor='k')

LSTMClassificationNet = NetworkReader.readFrom("20LSTMCell(1)/TrainUntilConv.xml")
print 'Loaded Trained Network!'
RShoulderPitchTestData = memory.getData("Device/SubDeviceList/RShoulderPitch/Position/Sensor/Value")
RShoulderRollTestData = memory.getData("Device/SubDeviceList/RShoulderRoll/Position/Sensor/Value")
RElbowRollTestData = memory.getData("Device/SubDeviceList/RElbowRoll/Position/Sensor/Value")
RElbowYawTestData = memory.getData("Device/SubDeviceList/RElbowYaw/Position/Sensor/Value")
RWristYawTestData = memory.getData("Device/SubDeviceList/RWristYaw/Position/Sensor/Value")
          
LShoulderPitchTestData = memory.getData("Device/SubDeviceList/LShoulderPitch/Position/Sensor/Value")
LShoulderRollTestData = memory.getData("Device/SubDeviceList/LShoulderRoll/Position/Sensor/Value")
LElbowRollTestData = memory.getData("Device/SubDeviceList/LElbowRoll/Position/Sensor/Value")
LElbowYawTestData = memory.getData("Device/SubDeviceList/LElbowYaw/Position/Sensor/Value")
LWristYawTestData = memory.getData("Device/SubDeviceList/LWristYaw/Position/Sensor/Value")

       
LSTMNet_output = LSTMClassificationNet.activate([RShoulderPitchTestData, RShoulderRollTestData, RElbowRollTestData, RElbowYawTestData, RWristYawTestData, 
Example #30
BallLiftJoint = np.loadtxt('../../20fpsFullBehaviorSampling/BallLift/JointData.txt').astype(np.float32)
BallRollJoint = np.loadtxt('../../20fpsFullBehaviorSampling/BallRoll/JointData.txt').astype(np.float32)
BellRingLJoint = np.loadtxt('../../20fpsFullBehaviorSampling/BellRingL/JointData.txt').astype(np.float32)
BellRingRJoint = np.loadtxt('../../20fpsFullBehaviorSampling/BellRingR/JointData.txt').astype(np.float32)
BallRollPlateJoint = np.loadtxt('../../20fpsFullBehaviorSampling/BallRollPlate/JointData.txt').astype(np.float32)
RopewayJoint = np.loadtxt('../../20fpsFullBehaviorSampling/Ropeway/JointData.txt').astype(np.float32)

jointRemap = interp1d([-2.2,2.2],[-1,1])
BallLiftJoint = jointRemap(BallLiftJoint)
BallRollJoint = jointRemap(BallRollJoint)
BellRingLJoint = jointRemap(BellRingLJoint)
BellRingRJoint = jointRemap(BellRingRJoint)
BallRollPlateJoint = jointRemap(BallRollPlateJoint)
RopewayJoint = jointRemap(RopewayJoint)

tdnnclassifier = NetworkReader.readFrom('25sigmoid/TrainUntilConv.xml')
print 'Loaded 25 sigmoid TDNN Trained Network!'

twentylstmaccdata = []
twentylstmstddata = []
twentylstmstderror = []

predictedBLLabels = []
predictedBRLabels = []
predictedBRLLabels = []
predictedBRRLabels = []
predictedBRPLabels = []
predictedRWLabels = []
print "1st Iteration, noiseless test data"
offset = 100
accuracyOverall = []
 def train(self, X, y):
     if self._generate:
         self._model = train_network(X, y)
         NetworkWriter.writeToFile(self._model, self._filename)
     else:
         self._model = NetworkReader.readFrom(self._filename)
Example #32
flag1 = 0
semicount = 0
semiwincnt = 0
while (True):
    #sets a ClassificationDataSet with 192 inputs and 64 outputs
    ds = ClassificationDataSet(192,64,nb_classes=64)

    #create network with no hidden layers
    nn = FeedForwardNetwork()
    #checks to see if there is already a network created
    if os.path.isfile("othelloNetwork4.xml"):
        if flag1 == 0:
            flag1 = 1
            print "Getting network from file..."

        nn = NetworkReader.readFrom("othelloNetwork4.xml")

    else:
        print "No network present, building new one..."
        inLayer = LinearLayer(192)
        hiddenLayer1 = SigmoidLayer(50)
        hiddenLayer2 = SigmoidLayer(250)
        hiddenLayer3 = SigmoidLayer(150)
        hiddenLayer4 = SigmoidLayer(200)
        hiddenLayer5 = SigmoidLayer(100)
        outLayer = SoftmaxLayer(64)
        nn.addInputModule(inLayer)
        nn.addOutputModule(outLayer)
        nn.addModule(hiddenLayer1)
        nn.addModule(hiddenLayer2)
        nn.addModule(hiddenLayer3)