def get_density_with_ann(self):
    """Return the text of lines the trained ANN classifies as content.

    Trains a single-layer perceptron (3 inputs, 1 output) on
    "train_data.txt", then keeps every line whose network output is
    positive.  Features per line: density, byte count, text length.
    """
    # New single-layer perceptron: 2 layers, 3 inputs, 1 output.
    obj = libfann.fann_create_standard_array(2, (3, 1))
    ann = fann.fann_class(obj)
    patterns = fann.read_train_from_file("train_data.txt")
    ann.train_on_data(patterns, 1000, 1, 0.0)
    # BUG FIX: the original ran ann.run(line[0:2]) — only 2 of the 3
    # features the network was created with.  Feed all three.
    return [
        line.text
        for line in self.lines
        if ann.run([line.density, line.bytes, len(line.text)])[0] > 0
    ]
Example #2
0
def fann_test(trainingText):
    """ The function to using fast artifitial neural network."""
    # using "fann" package in machine learning
    from pyfann import fann, libfann
    # This creates a new single-layer perceptron with 1 output and 3 inputs.
    obj = libfann.fann_create_standard_array(2, (3, 1))
    ann = fann.fann_class(obj)
    # Load the data we described above.
    patterns = fann.read_train_from_file('training.txt')
    ann.train_on_data(patterns, 1000, 1, 0.0)
    # Then test it with different data.
    for datin, datout in validation_data:
	result = ann.run(datin)
	print 'Got:', result, ' Expected:', datout
Example #3
0
def main():
    if (len(sys.argv) != 3):
        print "bad number of argument. got: " + str(len(sys.argv)) + " expected: 3"
        print "usage: extract_text.py <newswatch-db-file> <training_file>"
        sys.exit(1)

    page = load_page(load_html(sys.argv[1]))
    manual_training(page, sys.argv[2])
    
# This creates a new single-layer perceptron with 1 output and 3 inputs.
    obj = libfann.fann_create_standard_array(2, (3, 1))
    ann = fann.fann_class(obj)

# Load the data we described above.
    if (os.path.exists(sys.argv[2]) == True):
        patterns = fann.read_train_from_file(sys.argv[2])
    ann.train_on_data(patterns, 1000, 1, 0.0)
 
# Then test it with different data.
    for index, line in enumerate(page.lines):
        if (index == 0):
            input = [0.0, 0.0, 0.0]
        else:
            input = [page.lines[index - 1].density, 
                     page.lines[index - 1].bytes,
                     len(page.lines[index - 1].text)]
        input = [line.density. line.bytes, len(line.text)]
        if (index + 1 >= len(page.lines) - 1):
            input = [0.0, 0.0, 0.0]
        else:
            input = [page.lines[index + 1].density, 
                     page.lines[index + 1].bytes,
                     len(page.lines[index + 1].text)]

        result = ann.run(input)
        print line.text + "\n" + 'Got:', result
def print_callback(epochs, error):
    """FANN training progress callback: print epoch count and current MSE.

    Returns 0 so training continues (per the FANN callback convention,
    a negative return value would abort training).
    """
    # Parenthesized single-argument print is equivalent under Python 2
    # and Python 3; indentation normalized from tabs to 4 spaces to
    # match the other functions in this file.
    print("Epochs     %8d. Current MSE-Error: %.10f\n" % (epochs, error))
    return 0

# initialize network parameters
connection_rate = 1                      # fully connected (FANN sparse-connection rate)
learning_rate = 0.7
num_neurons_hidden = 32                  # single hidden layer size
desired_error = 0.000001                 # MSE threshold that stops training early
max_iterations = 300
iterations_between_reports = 1           # report (callback/print) every epoch

# create training data, and ann object
# Layer sizes are taken from the training file itself, so the network
# always matches the dataset's input/output dimensions.
print "Creating network."	
train_data = fann.read_train_from_file(os.path.join("..","..","benchmarks","datasets","mushroom.train"))
ann = fann.create(connection_rate, (train_data.get_num_input(), num_neurons_hidden, train_data.get_num_output()))
ann.set_learning_rate(learning_rate)


# start training the network
# Activation functions and algorithm must be set BEFORE train_on_data.
print "Training network"
ann.set_activation_function_hidden(fann.SIGMOID_SYMMETRIC_STEPWISE)
ann.set_activation_function_output(fann.SIGMOID_STEPWISE)
ann.set_training_algorithm(fann.TRAIN_INCREMENTAL)
	
ann.train_on_data(train_data, max_iterations, iterations_between_reports, desired_error)
	
# test outcome
# NOTE(review): `test_data` is presumably evaluated against `ann` in code
# past the end of this chunk — confirm in the full file.
print "Testing network"
test_data = fann.read_train_from_file(os.path.join("..","..","benchmarks","datasets","mushroom.test"))