Example #1
0
def differentiate(point):
    """Return the partial derivatives of the squared-error cost for one
    training point, with respect to every network weight and the bias.

    Parameters
    ----------
    point : sequence
        Feature values for a single data point; the -1 index is the label.

    Returns
    -------
    list
        One partial derivative per weight, with the partial with respect
        to the bias term occupying the -1 index.
    """
    # The -1 index of 'point' is the label.
    label = point[-1]

    # Symbolic variables standing in for the weights and the bias term.
    b = sym.symbols('b')
    weight_symbols = [sym.symbols('w' + str(i))
                      for i in range(len(network_weights))]

    # Symbolic pre-activation: weighted sum plus bias.
    # NOTE(review): each symbol is multiplied by the current weight VALUE,
    # not by the corresponding feature of 'point' -- confirm this is the
    # intended cost model (typically it would be weight_symbols[i] * point[i]).
    pred = sum(w_sym * w_val
               for w_sym, w_val in zip(weight_symbols, network_weights)) + b

    # Sigmoid activation of the pre-activation.
    sig = 1 / (1 + sym.exp(-pred))

    # Squared-error cost for this data point.
    f_cost = (sig - label)**2

    # Numeric values substituted into every derivative; built once instead
    # of once per loop iteration.
    weight_values = {w_sym: w_val
                     for w_sym, w_val in zip(weight_symbols, network_weights)}

    # Differentiate the cost with respect to every weight, then the bias.
    partials = []
    for variable in weight_symbols + [b]:
        # Algebraic expression for the derivative, with the bias value
        # substituted in.
        alg_dcost_dvar = sym.diff(f_cost, variable).subs(b, bias)
        # Substitute the numeric weights and evaluate to a number.
        partials.append(alg_dcost_dvar.subs(weight_values).evalf())
    return partials
Example #2
0
############### Name: Shubham Pareek ############
############### UBID: spareek        ############

from logistic_regression import *
from linear_regression import *
from neural_network import *
from preprocessing import *

# Build the four feature/label sets: the 'hod' and 'gsc' data sources,
# each paired with the 'concatenate' and 'subtract' methods.
feature_sets = [
    get_feature_matrix(data=source, method=pairing)
    for source in ('hod', 'gsc')
    for pairing in ('concatenate', 'subtract')
]
(X1, y1), (X2, y2), (X3, y3), (X4, y4) = feature_sets

# Run every model over every feature set, preserving the original order:
# all logistic runs, then all linear runs, then all neural-network runs.
for model in (logistic_regression, linear_regression, neural_network):
    for X, y in feature_sets:
        model(X, y)
Example #3
0
    # Wire the PSO / neural-network control buttons to their handler slots.
    window.pushButton_3.clicked.connect(window.run_pso)
    window.pushButton_5.clicked.connect(window.auto_fill)
    window.pushButton_7.clicked.connect(window.shut_down_plot)
    window.pushButton_8.clicked.connect(window.go_on_nn)
    window.pushButton_10.clicked.connect(window.retrieve)
    window.pushButton_11.clicked.connect(window.suspend_pso)
    window.pushButton_12.clicked.connect(window.go_on_pso)
    window.pushButton_9.clicked.connect(window.reset_nn)
    # NOTE(review): pushButton_13 is connected to TWO slots -- it suspends
    # the network AND pauses the plot; confirm this double binding is
    # intentional rather than a copy-paste of the wrong button name.
    window.pushButton_13.clicked.connect(window.suspend_nn)
    window.pushButton_13.clicked.connect(window.pause_plot)

    t = time_e()  # equivalent to running the __init__ initializer
    t.update_time.connect(window.handle_display_time)
    t.start()  # equivalent to invoking run()

    # Neural-network worker: route its progress signals to the UI handlers.
    nn = neural_network()
    nn.update_msg.connect(window.handle_display)
    nn.update_info.connect(window.handle_display)
    nn.update_acc.connect(window.handle_display_acc)
    nn.update_top_acc.connect(window.handle_display_top_acc)
    nn.stop_time.connect(t.handle_stop_signal)
    nn.update_plot_acc.connect(window.handle_display_plot)
    # Run the thread separately here; it must not be placed in the main
    # process, or it would be treated as part of the main process (?)
    nn.start()

    # CBR worker: route its result signals to the window's setters.
    cbr_system = CBR()
    cbr_system.transmit_data.connect(window.show_table)
    cbr_system.best_case.connect(window.set_init_global_best)
    cbr_system.transmit_span.connect(window.set_span)
    cbr_system.best_distance.connect(window.set_best_distance)
    cbr_system.start()
# ACCURACY:
# Rule: must be a float, the minimum accuracy at which the algorithm will
# stop iterating (represents a minimum fitness that has to be reached by
# the best individual).
ACCURACY = 0.01  # default: 0.01

# Two command-line arguments are required: the samples CSV path and the
# number of outputs per sample.
if len(sys.argv) != 3:
    print(
        'Please notice that this program need 2 arguments, the first being the path to the csv file containing the '
        'samples. The second being the number of output of these sample.')
    # sys.exit instead of the site-injected exit(): exit() is only
    # guaranteed in interactive sessions; status 1 flags the usage error.
    sys.exit(1)

# Load the training samples and build the evolutionary neural network.
samples = read_data_obj(filename=sys.argv[1],
                        number_of_output=int(sys.argv[2]))
reseau = neural_network(training_data=samples,
                        function_by_layer=FUNCTION_TO_USE_BY_LAYER,
                        layer_structure=LAYER_STRUCTURE,
                        max_generations=MAX_GENERATIONS,
                        number_of_individuals=NUMBER_OF_INDIVIDUALS,
                        number_of_children=NUMBER_OF_CHILDREN)
print(reseau)

# RESULTS OUTPUT:
# Per the labels below, results[0] is the best individual, results[1]
# the success flag, results[2] the generation count.
results = reseau.train()
print('Success :', results[1])
print('Nb of generation :', results[2])
print('Best individual :\n', results[0])
# Materialize the network's output for the best weights.
weight_output = list(reseau.produce_output())
print('Output for this weight :', weight_output)
print('Ideal output :', reseau.goal_output)
from initialize_data import *
from initialize_weights import *
from training_loop import *
from sigmoid import *
from neural_network import *
from partial_derivatives import *

# Import dependencies
import sympy as sym
import time
# Record the wall-clock start so total runtime can be reported at the end.
start_time = time.time()

# Train the neural network on the first 45 data points.
train(45)

# Test the neural network on data points 45-89.
accuracy = 0
for sample_idx in range(45, 90):
    # Gather every feature for the given data point; by convention the
    # -1 index of 'point' is the label.
    point = [testing_labels[feature][sample_idx]
             for feature in range(number_of_features)]
    pred = neural_network(point, network_weights, bias)
    print(point)
    print(f"Result for {sample_idx} is: {pred}\n")
    # A prediction that rounds to the label counts as correct.
    if round(pred) == point[-1]:
        accuracy += 1

print(f"The accuracy of the neural network is: {accuracy / 45 * 100}%")
print(f"--- {time.time() - start_time} seconds ---")
from evolutive_algorithm_class import *
from neural_network import *
from functions import *
from sample_production import *
import graphical

# Load the training samples.
# NOTE(review): absolute Windows path hard-coded to one machine --
# consider taking the filename from the command line like the sibling
# driver script does.
samples = read_data_obj(
    filename=
    r'C:\Users\Crowbar\PycharmProjects\BIA\trainNNwithEvolutiveAlgo\samples.txt',
    number_of_output=1)

# Two-layer sigmoid network trained by the evolutionary algorithm.
reseau = neural_network(training_data=samples,
                        function_by_layer=[function_sigmoid, function_sigmoid],
                        layer_structure=[2, 1],
                        max_generations=2000,
                        range_w=[-10, 10])
print(reseau)

# Per the labels below, results[0] is the best individual, results[1] the
# success flag, results[2] the generation count; results[3]/results[4] are
# the per-generation curves plotted at the bottom.
results = reseau.train()
print('Success :', results[1])
print('Nb of generation :', results[2])
print('Best individual :\n', results[0])
# Materialize the network's output for the best weights.
weight_output = list(reseau.produce_output())
print('Output for this weight :', weight_output)
print('Ideal output :', reseau.goal_output)

# Plot the learning curves for this run.
graphical.display_evolution(results[3],
                            'Evolution of the global MSE for each generation')
graphical.display_evolution(results[4],
                            'Evolution of the best FF for each generation')