Example #1
    def train_nn(self):

        connection_rate = 1
        iterations_between_reports = 100000

        ann = libfann.neural_net()
        if self.num_hidden2 == 0:
            ann.create_standard_array(
                (self.num_input, self.num_hidden1, self.num_output))
        elif self.num_hidden2 > 0:
            ann.create_standard_array((self.num_input, self.num_hidden1,
                                       self.num_hidden2, self.num_output))
        ann.set_learning_rate(self.learning_rate)

        #        ann.set_activation_function_hidden(libfann.SIGMOID)
        #        ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC)
        ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
        #        ann.set_activation_function_output(libfann.LINEAR)
        #        ann.set_activation_function_output(libfann.ELLIOT_SYMMETRIC)
        ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)

        ann.train_on_file(self.train_file, self.max_iterations,
                          iterations_between_reports, self.desired_error)

        ann.save(self.net_file)
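The net file saved above can be loaded straight back for prediction with the same API the later test_each examples use (create_from_file plus run). A minimal sketch, assuming a trained net file and an input list of the right length:

import libfann  # as in the standalone script further down; some installs expose this as `from pyfann import libfann`

def predict_one(net_file, input_vec):
    # Load a previously saved FANN network and run a single input vector through it.
    ann = libfann.neural_net()
    ann.create_from_file(net_file)
    return ann.run(input_vec)  # one value per output unit

run() returns a list with one entry per output unit, which is why the single-output examples below index the result with [0].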
Example #2
def sumscore_classifier_ann_single(X, Y):
    #approxFun1 =  (np.vectorize(class1))
    #approxFun2 =  (np.vectorize(class2))
    ann = libfann.neural_net()
    ann.create_from_file("project_data/ann"+str(max(Y))+".net")
    nr_features = max(X.shape)  # number of samples to classify (assumes X has more rows than columns)
    mat1 = np.array([])
    for i in range(nr_features):
        out_vec = ann.run(X[i,:])
        out_vec_rounded = np.round(out_vec).astype(int)
        cl = np.where(out_vec_rounded == 1)[0]
        if len(cl) != 1:
            # fall back to the unit with the highest raw activation
            print 'error in sumscore: expected exactly one output unit to be 1, got %d' % len(cl)
            cl = np.where(out_vec == np.max(out_vec))[0][0]
        mat1 = np.append(mat1, cl)
    return sumscore_single(Y[:, 0], mat1)
Example #3
def train_nn(data_file,net_file):

    connection_rate = 1
    learning_rate = 0.7
    num_input = 2
    num_hidden = 4
    num_output = 1

    desired_error = 0.0001
    max_iterations = 10000
    iterations_between_reports = 10000
    ann = libfann.neural_net()
    ann.create_standard_array((7, 10, 10, 10, 10, 10, 10, 10, 1))  # topology is hard-coded here; the num_* locals above are not used
    ann.set_learning_rate(0.7)
    ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)

    ann.train_on_file(data_file, max_iterations, iterations_between_reports, desired_error)

    ann.save(net_file)
Example #4
def predict_and_print_ann(name, X, nnfilename):
    ann = libfann.neural_net()
    ann.create_from_file(nnfilename)

    Ypred = [0]  # placeholder entry, dropped again below
    nrFeatures = max(X.shape)  # number of samples to predict (assumes X has more rows than columns)
    for i in range(nrFeatures):
        out_vec = ann.run(X[i,:])
        out_vec_rounded = np.round(out_vec).astype(int)
        cl = np.where(out_vec_rounded == 1)[0]
        if len(cl) != 1:
            # fall back to the unit with the highest raw activation
            print 'error in predict&print: expected exactly one output unit to be 1, got %d' % len(cl)
            cl = np.where(out_vec == np.max(out_vec))[0][0]
        Ypred = np.append(Ypred, [cl], axis=0)

    Ypred=Ypred[1:]
    #X = X.tolist()
    #Ypred =[class1(X), class2(X)]
    np.savetxt('project_data/' + name + '.txt', Ypred.T, fmt='%i', delimiter=',')
Example #5
	def trainNetwork(dataFilename, netFilename, numInput, numOutput):
		ann = libfann.neural_net()
		#ann.create_sparse_array(NNWrapper.connection_rate, (numInput, 6, 4, numOutput)) #TODO: is this what we want? # the one that works in 40 seconds 4, 10, 6, 1.  the one that trained in 30 secs was 6,6
		ann.create_sparse_array(NNWrapper.connection_rate, (numInput, 200, 80, 40, 20, 10, numOutput))
		ann.set_learning_rate(NNWrapper.learning_rate)
		ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
		ann.set_bit_fail_limit(.2)
		#ann.randomize_weights(0,0)

		# time.clock() measures CPU time on some platforms; the later trainNetwork in this listing uses time.time() for wall-clock timing
		t0 = time.clock()
		ann.train_on_file(dataFilename, NNWrapper.max_iterations, NNWrapper.iterations_between_reports, NNWrapper.desired_error)
		t1 = time.clock()
		seconds = t1-t0

		m, s = divmod(seconds, 60)
		h, m = divmod(m, 60)
		print "Time to train:"
		print "%d:%02d:%02d" % (h, m, s)

		ann.save(netFilename)
Example #6
    def train_nn(self, data_file, net_file):

        connection_rate = 1
        learning_rate = 0.7
        num_input = 2
        num_hidden = 4
        num_output = 1

        desired_error = 0.0001
        max_iterations = 100000
        iterations_between_reports = 100000
        ann = libfann.neural_net()
        ann.create_standard_array((3, 25, 6, 1))
        ann.set_learning_rate(0.3)
        ann.set_activation_function_hidden(libfann.SIGMOID)
        ann.set_activation_function_output(libfann.LINEAR)

        ann.train_on_file(data_file, max_iterations, iterations_between_reports, desired_error)

        ann.save(net_file)
Example #7
    def train_nn(self, data_file, net_file):

        connection_rate = 1
        learning_rate = 0.7
        num_input = 2
        num_hidden = 4
        num_output = 1

        desired_error = 0.0001
        max_iterations = 100000
        iterations_between_reports = 100000
        ann = libfann.neural_net()
        ann.create_standard_array((3, 25, 6, 1))
        ann.set_learning_rate(0.3)
        ann.set_activation_function_hidden(libfann.SIGMOID)
        ann.set_activation_function_output(libfann.LINEAR)

        ann.train_on_file(data_file, max_iterations,
                          iterations_between_reports, desired_error)

        ann.save(net_file)
Example #8
	def testNet(testSet, netFilename, labelHandler):
		if NNWrapper.numThatActuallyHaveLabel is None:
			NNWrapper.numThatActuallyHaveLabel = [0]*len(labelHandler.labelIdsToLabels)
			NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled = [0]*len(labelHandler.labelIdsToLabels)

		testingSummaryFile = open(NNWrapper.testingSummaryFilename, "a")

		ann = libfann.neural_net()
		ann.create_from_file(netFilename)
		#ann.print_connections()

		numTested = 0
		numLabeledCorrectly = 0
		for pair in testSet:
			featureVec = pair[0]
			actualLabelId = pair[1].index(1)
			actualLabel = labelHandler.labelIdsToLabels[actualLabelId]

			result = ann.run(featureVec)
			#print result, actualLabel
			numTested += 1
			winningIndex = result.index(max(result))
			NNWrapper.numThatActuallyHaveLabel[actualLabelId] += 1
			#testingSummaryFile.write(labelHandler.labelIdsToLabels[winningIndex]+","+actualLabel+"\n")
			testingSummaryFile.write(actualLabel + ";" + str(actualLabelId) + ";" + str(result) + "\n")
			if winningIndex == actualLabelId:
				numLabeledCorrectly += 1
				NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled[actualLabelId] += 1

		print "numTested", numTested
		print "numLabeledCorrectly", numLabeledCorrectly
		NNWrapper.totalTested += numTested
		NNWrapper.totalCorrect += numLabeledCorrectly
		print "totalTested", NNWrapper.totalTested
		print "totalCorrect", NNWrapper.totalCorrect
		print "percentageCorrect", float(NNWrapper.totalCorrect)/NNWrapper.totalTested
		print "*****************"
		for i in range(len(NNWrapper.numThatActuallyHaveLabel)):
			print labelHandler.labelIdsToLabels[i], NNWrapper.numThatActuallyHaveLabel[i], NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled[i], float(NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled[i])/NNWrapper.numThatActuallyHaveLabel[i]
		testingSummaryFile.close()
Example #9
    def train_nn(self):

        connection_rate = 1
        iterations_between_reports = 100000

        ann = libfann.neural_net()
        if self.num_hidden2 == 0:
            ann.create_standard_array((self.num_input, self.num_hidden1, self.num_output))
        elif self.num_hidden2 > 0:
            ann.create_standard_array((self.num_input, self.num_hidden1, self.num_hidden2, self.num_output))
        ann.set_learning_rate(self.learning_rate)

        # ann.set_activation_function_hidden(libfann.SIGMOID)
        # ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC)
        ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)
        # ann.set_activation_function_output(libfann.LINEAR)
        # ann.set_activation_function_output(libfann.ELLIOT_SYMMETRIC)
        ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)

        ann.train_on_file(self.train_file, self.max_iterations, iterations_between_reports, self.desired_error)

        ann.save(self.net_file)
Example #10
def makeANN(training, hidden, error):
	with open(FILENAME + ".data", "w") as fout:
		numInOutPairs = str(len(training))
		numIns = str(len(training[0][:-1]))   # every element but the last is an input value
		numOuts = str(len(training[0][-1]))   # the last element of each row holds the output values

		header = " ".join((numInOutPairs, numIns, numOuts))

		fout.write(header + "\n")
		for line in training:
			secLine = line.pop()

			line = " ".join(line)
			secLine = " ".join(secLine)

			fout.write(line + "\n" + secLine + "\n")

#==================================================================================

	connection_rate = 1
	num_input = 4
	num_hidden = hidden
	num_output = 3

	desired_error = error
	max_iterations = 100000
	iterations_between_reports = 1000

	print "\n\nMaking ANN\n\n"
	ann = libfann.neural_net()

	ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_output))
	ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
	ann.set_activation_function_hidden(libfann.SIGMOID_SYMMETRIC_STEPWISE)

	ann.train_on_file(FILENAME + ".data", max_iterations, iterations_between_reports, desired_error)

	ann.save(FILENAME + ".net")
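For reference, the header makeANN writes above (pattern count, input count, output count) is the plain-text training format that train_on_file and read_train_from_file expect: one header line, then alternating input and output lines. A hand-written XOR file in that layout might look like this (targets shown as 0/1; nets with symmetric output activations usually train on -1/1 instead):

4 2 1
0 0
0
0 1
1
1 0
1
1 1
0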
Example #11
	def trainNetwork(dataFilename, netFilename, layerSizes, max_iterations, desired_error):
		# layerSizes should look something like this: (numInput, 200, 80, 40, 20, 10, numOutput)
		ann = libfann.neural_net()
		#ann.create_sparse_array(NNWrapper.connection_rate, (numInput, 6, 4, numOutput)) #TODO: is this what we want? # the one that works in 40 seconds 4, 10, 6, 1.  the one that trained in 30 secs was 6,6
		ann.create_standard_array(layerSizes)
		ann.set_learning_rate(NNWrapper.learning_rate) # rprop doesn't use learning rate
		ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)
		#ann.set_training_algorithm(libfann.TRAIN_RPROP)
		ann.set_training_algorithm(libfann.TRAIN_QUICKPROP)
		ann.set_bit_fail_limit(.3)
		#ann.randomize_weights(0,0)

		t0 = time.time()
		ann.train_on_file(dataFilename, max_iterations, NNWrapper.iterations_between_reports, desired_error)
		t1 = time.time()
		seconds = t1-t0
		print "Seconds: ", seconds

		m, s = divmod(seconds, 60)
		h, m = divmod(m, 60)
		print "Time to train:"
		print "%d:%02d:%02d" % (h, m, s)

		ann.save(netFilename)
Example #12
def create_and_train_nn(XTrain, YTrain, datafile, nr_its):
    data_size = max(XTrain.shape)
    nr_output = len(np.unique(YTrain))
    connection_rate = 1
    steepness_out = 1.0
    #nr_hidden1 = 500
    #nr_hidden2 = 75
    nr_hidden = 100
    desired_error = 0.01
    max_iterations = 3*nr_its
    its_per_round = nr_its
    ann = libfann.neural_net()
    ann.create_sparse_array(connection_rate, (min(XTrain.shape), nr_hidden, nr_output)) #create...(rate, (in, hidden1, hidden2, out))
    ann.set_training_algorithm(libfann.TRAIN_QUICKPROP)
    ann.set_activation_function_output(libfann.SIGMOID)  # output units are between 0 and 1; for execution, use libfann.THRESHOLD
    ann.set_activation_steepness_hidden(steepness_out)


    nndat_all = libfann.training_data()
    nndat_all.read_train_from_file(datafile)
    if split_training_data:
        nndat_all.shuffle_train_data()
        nndat_train = libfann.training_data(nndat_all)
        nndat_test = libfann.training_data(nndat_all)
        nndat_train.subset_train_data(0, int(np.ceil(split_percentage_train*data_size)))
        nndat_test.subset_train_data(int(np.ceil(split_percentage_train*data_size)), min(int(np.floor(split_percentage_test*data_size)), int(np.floor((1-split_percentage_train)*data_size))))
    else:
        nndat_train = nndat_all
    #ann.set_input_scaling_params(nndat_train, -1., 1.)
    #ann.set_input_scaling_params(nndat_test, -1., 1.)
    testE = 5  # mean square error on the test set (initial value)
    counter = 0
    max_counter = 2  # stop once the test error has increased more than max_counter times
    overfit = False
    i = 0  # number of epochs run so far

    ## statistics on ann.train_on_data:
    #
    # ~2000 input units, 10 output units, output function: sigmoid
    #   - 1 hidden layer of 20 units, 100 epochs:
    #       TRAIN_QUICKPROP:        1 min   , error: ~0.06
    #       TRAIN_RPROP:            < 5 sec , error: ~0.09
    #       TRAIN_RPROP, 2nd run:   1 min , error:   ~0.08
    #   - 1 hidden layer of 20 units, 1000 epochs:
    #       TRAIN_RPROP:        stopped it after 3 or 4 mins



    while i < max_iterations and not overfit:  # stop after max_iterations epochs or once overfitting is detected
        ann.train_on_data(nndat_train, its_per_round, 0, desired_error)
        trainE = ann.test_data(nndat_train)  # MSE on the training set; used by the progress prints below
        if split_training_data:
            testE_new = ann.test_data(nndat_test)
            if testE_new > testE:
                counter = counter + 1
                if counter > max_counter:
                    overfit = True
            testE = testE_new
        i = i + its_per_round
        if i % 10 == 0:
            print 'nr of epochs so far: %d; error on training set: %f; on test set: %f' % (i, trainE, testE)
    print 'network has been trained in %d epochs; error on training set: %f; on test set: %f' % (i, trainE, testE)
    print 'overfitting: '+str(overfit)+" (test data exists: "+str(split_training_data)+")"
    ann.save("project_data/ann"+str(max(YTrain))+".net")

    return "project_data/ann"+str(max(YTrain))+".net"
Example #13
    def test_each(self, net_file, test_data):

        ann = libfann.neural_net()
        ann.create_from_file(net_file)

        return ann.run(test_data)[0]
Example #14
def test_each(net_file, price, ma5, ma10, vol5, vol10, macd, vol):

    ann = libfann.neural_net()
    ann.create_from_file(net_file)

    return ann.run([price, ma5, ma10, vol5, vol10, macd, vol])[0]
Example #15
import libfann

connection_rate = 1
learning_rate = 0.7
num_input = 2
num_hidden = 4
num_output = 1

desired_error = 0.0001
max_iterations = 100000
iterations_between_reports = 1000

ann = libfann.neural_net()
ann.create_sparse_array(connection_rate, (num_input, num_hidden, num_output))
ann.set_learning_rate(learning_rate)
ann.set_activation_function_output(libfann.SIGMOID_SYMMETRIC_STEPWISE)

ann.train_on_file('E:/FANN_workspace/xor.txt', max_iterations, iterations_between_reports, desired_error)

ann.save("E:/xor_py.net")
Example #16
def runANN(L):
	ann = libfann.neural_net()

	ann.create_from_file(FILENAME + ".net")

	return ann.run(L)
Example #17
def test_each(net_file, price, ma5, ma10, vol5, vol10, macd, vol):

    ann = libfann.neural_net()
    ann.create_from_file(net_file)

    return ann.run([price, ma5, ma10, vol5, vol10, macd, vol])[0]
Example #18
	def testNet(datasetRaw, netFilename, labelHandler):
		if NNWrapper.numThatActuallyHaveLabel is None:
			NNWrapper.numThatActuallyHaveLabel = {}
			NNWrapper.numThatActuallyHaveLabelCorrectlyLabeled = {}

		try:
			os.remove(NNWrapper.testingSummaryFilename)  # clear any summary left over from a previous run
		except OSError:
			print "already no such file"

		testingSummaryFile = open(NNWrapper.testingSummaryFilename, "a")

		ann = libfann.neural_net()
		ann.create_from_file(netFilename)
		#ann.print_connections()

		labelLen = labelHandler.numLabels

		stats = {}
		documents = relationToDocuments(datasetRaw, labelHandler)
		for document in documents:
			print document
			boxes = documents[document]
			boxPairs = itertools.permutations(boxes, 2)

			for pair in boxPairs:

				featureVec = pair[0][0]+pair[1][0]

				actualLabelsBox1 = labelHandler.getLabelsForXInNRep(pair[0][1])
				actualLabelsBox2 = labelHandler.getLabelsForXInNRep(pair[1][1])

				result = ann.run(featureVec)

				testingSummaryFile.write(str(pair[0][1])+"\t;"+str(pair[1][1])+"\t;"+str(result))

				guessedLabelsBox1 = labelHandler.labelsFromNetAnswer(result[:labelLen])
				guessedLabelsBox2 = labelHandler.labelsFromNetAnswer(result[labelLen:])

				for actualLabelBox1 in actualLabelsBox1:
					box1Stats = stats.get(actualLabelBox1, {"left": {}, "right": {}})
					for guessedLabelBox1 in guessedLabelsBox1:
						box1Stats["left"][guessedLabelBox1] = box1Stats["left"].get(guessedLabelBox1, 0) + 1
					stats[actualLabelBox1] = box1Stats

				for actualLabelBox2 in actualLabelsBox2:
					box2Stats = stats.get(actualLabelBox2, {"left": {}, "right": {}})
					for guessedLabelBox2 in guessedLabelsBox2:
						box2Stats["right"][guessedLabelBox2] = box2Stats["right"].get(guessedLabelBox2, 0) + 1
					stats[actualLabelBox2] = box2Stats

		for key in stats:
			print key, "left"
			print "*******************"
			for label in labelHandler.labelIdsToLabels:
				count = stats[key]["left"].get(label, 0)
				print label, "\t\t\t", count
			print key, "right"
			print "*******************"
			for label in labelHandler.labelIdsToLabels:
				count = stats[key]["right"].get(label, 0)
				print label, "\t\t\t", count
Example #19
    def test_each(self, net_file, test_data):

        ann = libfann.neural_net()
        ann.create_from_file(net_file)

        return ann.run(test_data)[0]