Example #1
    network.set_neurons_drop_out_probabilities(neurons_drop_out_probabilities)
    experiment_set_selected_weights(network)

    ####
    # if disable_2nd_output_neuron:
    #     second_output_neuron = network.layers[-1].neurons[1]
    #     second_output_neuron.activation_function = ConstantOutput()
    ####

    print "\n\nNet BEFORE Training\n", network

    data_collector = NetworkDataCollector(network, data_collection_interval)

    # start training on test set one
    epoch_and_MSE = network.backpropagation(training_set, error_criterion,
                                            max_epochs,
                                            data_collector)  # sop call
    # epoch_and_MSE = network.backpropagation(training_set, 0.0000001, max_epochs, data_collector)
    results.append(epoch_and_MSE[0])

    # save the network
    network.save_to_file("trained_configuration.pkl")
    # load a stored network
    # network = NeuralNet.load_from_file( "trained_configuration.pkl" )

    df_weights = post_process(seed_value, data_collector.extract_weights,
                              df_weights)
    df_netinputs = post_process(seed_value, data_collector.extract_netinputs,
                                df_netinputs)

    print "\n\nNet AFTER Training\n", network, "\n"
Example #2
dfs_concatenated = DataFrame([])

for seed_value in range(n_trials):
    print "seed = ", seed_value,
    random.seed(seed_value)
        
    # initialize the neural network
    network = NeuralNet(n_neurons_for_each_layer, neurons_ios, weight_init_functions, learning_rate_functions)
    experiment_set_selected_weights(network)
    print "\n\nNet BEFORE Training\n", network
    
    data_collection_interval = 1000
    data_collector = NetworkDataCollector(network, data_collection_interval)
    
    # start training on test set one
    epoch_and_MSE = network.backpropagation(training_set, error_criterion, max_epochs, data_collector)  # sop call
    # epoch_and_MSE = network.backpropagation(training_set, 0.0000001, max_epochs, data_collector)
    results.append(epoch_and_MSE[0])

    #print "\n\nNet After Training\n", network

    # save the network
    network.save_to_file( "trained_configuration.pkl" )
    # load a stored network
    # network = NeuralNet.load_from_file( "trained_configuration.pkl" )
   
    dfs_concatenated = intermediate_post_process_weights(seed_value, data_collector, dfs_concatenated)

    # print out the result
    for example_number, example in enumerate(training_set):
        inputs_for_training_example = example.features
        network.inputs_for_training_example = inputs_for_training_example
        output_from_network = network.calc_networks_output()
        print "\tnetworks input:", example.features, "\tnetworks output:", output_from_network, "\ttarget:", example.targets
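
experiment_set_selected_weights is likewise defined elsewhere. Judging from the attribute access in the commented-out block of Example #1 (network.layers[-1].neurons[1]), it presumably reaches into the layer/neuron structure and pins particular weights before training. A rough sketch under that assumption; the weights attribute, the indices, and the values are illustrative guesses, not the project's code.

def experiment_set_selected_weights(network):
    # Illustrative only: assumes each neuron exposes a mutable `weights` list.
    chosen_neuron = network.layers[0].neurons[0]
    chosen_neuron.weights[0] = 0.5    # pin the first incoming weight
    chosen_neuron.weights[-1] = 0.0   # pin the last weight (often the bias)
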
Example #3
    print "seed = ", seed_value,
    random.seed(seed_value)

    # initialize the neural network
    network = NeuralNet(n_neurons_for_each_layer, neurons_ios,
                        weight_init_functions, learning_rate_functions)

    print "\n\nNetwork State just after creation\n", network

    experimental_weight_setting_function(network)

    data_collection_interval = 1000
    data_collector = NetworkDataCollector(network, data_collection_interval)

    # start training on test set one
    epoch_and_MSE = network.backpropagation(training_set, 0.01, 3000,
                                            data_collector)
    results.append(epoch_and_MSE[0])

    # save the network
    network.save_to_file("trained_configuration.pkl")
    # load a stored network
    # network = NeuralNet.load_from_file( "trained_configuration.pkl" )

    print "\n\nNetwork State after backpropagation\n", network, "\n"

    # print out the result
    for example_number, example in enumerate(training_set):
        inputs_for_training_example = example.features
        network.inputs_for_training_example = inputs_for_training_example
        output_from_network = network.calc_networks_output()
        print "\tnetworks input:", example.features, "\tnetworks output:", output_from_network, "\ttarget:", example.targets
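
Each example saves the trained configuration with save_to_file and leaves the matching load call commented out. A minimal sketch of reusing that file later, assuming NeuralNet.load_from_file restores an object equivalent to the one saved:

# Sketch: reload the pickled configuration and re-run the evaluation loop.
restored_network = NeuralNet.load_from_file("trained_configuration.pkl")
for example in training_set:
    restored_network.inputs_for_training_example = example.features
    output_from_network = restored_network.calc_networks_output()
    print "\tnetworks input:", example.features, "\tnetworks output:", output_from_network, "\ttarget:", example.targets
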
Example #4
results = []

for seed_value in range(10):
    print "seed = ", seed_value,
    random.seed(seed_value)
        
    # initialize the neural network
    network = NeuralNet(n_neurons_for_each_layer, neurons_ios, weight_init_functions, learning_rate_functions)

    #print "\n\nNetwork State just after creation\n", network
    
    data_collection_interval = 1000
    data_collector = NetworkDataCollector(network, data_collection_interval)
    
    # start training on test set one
    epoch_and_MSE = network.backpropagation(training_set, 0.01, 3000, data_collector)
    results.append(epoch_and_MSE[0])

    # save the network
    network.save_to_file( "trained_configuration.pkl" )
    # load a stored network
    # network = NeuralNet.load_from_file( "trained_configuration.pkl" )
   
    print "\n\nNetwork State after backpropagation\n", network, "\n"

    # print out the result
    for example_number, example in enumerate(training_set):
        inputs_for_training_example = example.features
        network.inputs_for_training_example = inputs_for_training_example
        output_from_network = network.calc_networks_output()
        print "\tnetworks input:", example.features, "\tnetworks output:", output_from_network, "\ttarget:", example.targets
Example #5
results = []
dfs_concatenated = DataFrame([])

for seed_value in range(10):
    print "seed = ", seed_value,
    random.seed(seed_value)
        
    # initialize the neural network
    network = NeuralNet(n_neurons_for_each_layer, neurons_ios, weight_init_functions, learning_rate_functions)

    print "\n\nNetwork State just after creation\n", network

    data_collector = NetworkDataCollector(network, data_collection_interval=1000)
    
    # start training on test set one
    epoch_and_MSE = network.backpropagation(training_set, error_limit=0.0000001, max_epochs=6000, data_collector=data_collector)

    results.append(epoch_and_MSE[0])

    dfs_concatenated = intermediate_post_process(seed_value, data_collector, dfs_concatenated)

    # print out the result
    for example_number, example in enumerate(training_set):
        inputs_for_training_example = example.features
        network.inputs_for_training_example = inputs_for_training_example
        output_from_network = network.calc_networks_output()
        print "\tnetworks input:", example.features, "\tnetworks output:", output_from_network, "\ttarget:", example.targets


print results
print
Example #6
for seed_value in range(10):
    print "seed = ", seed_value,
    random.seed(seed_value)

    # initialize the neural network
    network = NeuralNet(n_neurons_for_each_layer, neurons_ios,
                        weight_init_functions, learning_rate_functions)

    print "\n\nNetwork State just after creation\n", network

    data_collector = NetworkDataCollector(network,
                                          data_collection_interval=1000)

    # start training on test set one
    epoch_and_MSE = network.backpropagation(training_set,
                                            error_limit=0.0000001,
                                            max_epochs=6000,
                                            data_collector=data_collector)

    results.append(epoch_and_MSE[0])

    dfs_concatenated = intermediate_post_process(seed_value, data_collector,
                                                 dfs_concatenated)

    # print out the result
    for example_number, example in enumerate(training_set):
        inputs_for_training_example = example.features
        network.inputs_for_training_example = inputs_for_training_example
        output_from_network = network.calc_networks_output()
        print "\tnetworks input:", example.features, "\tnetworks output:", output_from_network, "\ttarget:", example.targets

print results
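
Since results collects epoch_and_MSE[0] from every trial, the raw list dump can be followed by a compact summary. A sketch, assuming epoch_and_MSE[0] is the epoch count at which backpropagation stopped:

# Assumes each entry in `results` is the stopping epoch for one seed.
if results:
    print "epochs to stop -- min:", min(results), "max:", max(results), "mean:", sum(results) / float(len(results))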