Example #1
def test_whether_HDNN_learns_for_very_small_dataset():
    node_list = [70, 11, 11, 1]  #contains the layer sizes
    activations = ['sigmoid', 'sigmoid', 'linear']
    nn_Ti_test5 = NeuralNetwork(node_list, activations)
    nn_O_test5 = NeuralNetwork(node_list, activations)
    print('Ti --', nn_Ti_test5, '\n', 'O --', nn_O_test5)

    file_name_list = [
        'structure0005.txt', 'structure0004.txt', 'structure0003.txt',
        'structure0002.txt', 'structure0001.txt'
    ]  #,'structure1249.txt']
    X_list = [
        np.loadtxt(os.path.join('./symmetry_txt', file_name))
        for file_name in file_name_list
    ]
    A_list = [x.reshape(len(x), 1, 70) for x in X_list]
    E_ref_list = [[-19960.74194513], [-19960.78597929], [-19960.75811714],
                  [-19960.69526834], [-19960.66173260]]  #,[-4987.12739129]]

    cost_variation1, _ = stochastic_gradient_descent(nn_Ti_test5,
                                                     nn_O_test5,
                                                     A_list,
                                                     E_ref_list,
                                                     learning_rate=5e-5,
                                                     epochs=50)
    assert (abs(cost_variation1[-1]) < 0.5)
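The assertion only inspects the last recorded cost, so the test passes as soon as the training curve ends below 0.5. As a rough, self-contained illustration of the same pattern (per-sample updates, one cost value recorded per epoch, final cost checked against a threshold) on a toy one-parameter model, not the project's stochastic_gradient_descent:

import numpy as np

xs = np.arange(1.0, 6.0)                   # five toy "structures"
ys = 3.0 * xs                              # their reference values
w = 0.0                                    # single trainable parameter
cost_per_epoch = []
for epoch in range(50):
    for x, y in zip(xs, ys):
        w -= 5e-3 * 2 * (w * x - y) * x    # gradient of the squared error on one sample
    cost_per_epoch.append(np.mean((w * xs - ys) ** 2))
assert abs(cost_per_epoch[-1]) < 0.5       # same convergence criterion as above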
Example #2
def test_forward_prop_small_network():
    '''testing forward prop in 4-3-3-1 network'''
    x = np.array([1, 1, 1, 1])  #input
    y = [0.94533048]  #preknown output
    node_list = [4, 3, 3, 1]
    activations = ['sigmoid', 'sigmoid', 'sigmoid']
    nn_test1 = NeuralNetwork(node_list, activations)
    weights_init(nn_test1)  #Setting weights to constant value = 1
    y_test = nn_test1.forward_prop(x)
    assert np.isclose(y_test, y)
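The preknown output can be reproduced with plain numpy, assuming weights_init sets every weight to 1 (as its comment says) and any biases to 0, so each layer simply sums its inputs before applying the sigmoid:

import numpy as np

sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
h1 = sigmoid(4 * 1.0)    # each of the 3 first-hidden nodes sums four ones
h2 = sigmoid(3 * h1)     # each of the 3 second-hidden nodes sums three equal inputs
out = sigmoid(3 * h2)    # the single output node sums three equal inputs
print(out)               # ~0.94533048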
Example #3
def test_gradient_checking_big_neural_network_2():
    node_list = [70, 15, 7, 1]
    activations = ['sigmoid', 'sigmoid', 'sigmoid']
    x1 = np.full((1, 70), 5).reshape(1, node_list[0])
    y1 = np.array([1000]).reshape(1, node_list[-1])
    nn_test_2 = NeuralNetwork(node_list, activations)
    e_nn = nn_test_2.forward_prop(x1)
    derivative_analytical = nn_test_2.analytical_gradients(x1, e_nn, y1)
    derivative_numerical = numerical_gradients(nn_test_2, x1, e_nn, y1)
    assert np.allclose(derivative_analytical, derivative_numerical, atol=1e-5)
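numerical_gradients is presumably a finite-difference check against the backpropagated (analytical) gradients. A generic, self-contained sketch of the central-difference idea, not the project's implementation:

import numpy as np

def numeric_grad(f, w, eps=1e-6):
    '''Central-difference gradient of a scalar function f at the flat parameter vector w.'''
    g = np.zeros_like(w)
    for i in range(w.size):
        w_plus, w_minus = w.copy(), w.copy()
        w_plus[i] += eps
        w_minus[i] -= eps
        g[i] = (f(w_plus) - f(w_minus)) / (2 * eps)
    return g

# sanity check against the known gradient of a quadratic cost
w0 = np.array([1.0, -2.0, 0.5])
cost = lambda w: np.sum((w - 3.0) ** 2)
assert np.allclose(numeric_grad(cost, w0), 2 * (w0 - 3.0), atol=1e-5)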
Example #4
def test_forward_prop_big_network_3():
    '''testing forward prop in 70-2-2-1 network
    sigmoid--sigmoid--linear'''
    x = np.ones((1, 70))  #input
    y = [1.76159416]  #preknown output
    node_list = [70, 2, 2, 1]
    activations = ['sigmoid', 'sigmoid', 'linear']
    nn_test4 = NeuralNetwork(node_list, activations)
    weights_init(nn_test4)  #Setting weights to constant value = 1
    y_test = nn_test4.forward_prop(x)
    assert np.isclose(y_test, y)
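The expected value follows from saturation: with unit weights (biases assumed zero) each first-layer node sees a pre-activation of 70, so sigmoid(70) is numerically 1; both second-layer nodes then output sigmoid(2), and the linear output node sums them:

import numpy as np

sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
print(2 * sigmoid(2 * sigmoid(70)))    # ~1.76159416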
Example #5
def test_forward_prop_big_network_2():
    '''testing forward prop in 70-10-10-1 network
    sigmoid--sigmoid--linear'''
    x = np.ones((1, 70))  #input
    y = [9.99954602]  #preknown output
    node_list = [70, 10, 10, 1]
    activations = ['sigmoid', 'sigmoid', 'linear']
    nn_test3 = NeuralNetwork(node_list, activations)
    weights_init(nn_test3)  #Setting weights to constant value = 1
    y_test = nn_test3.forward_prop(x)
    assert abs(y_test - y) < 1e-8
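Same reasoning with the wider layers, again assuming unit weights and zero biases: the ten first-layer outputs saturate to 1, each second-layer node gives sigmoid(10), and the linear output sums ten of them:

import numpy as np

sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
print(10 * sigmoid(10 * sigmoid(70)))    # ~9.99954602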
Example #6
def test_gradient_checking_small_neural_network_1():
    #declare the structure of the neural network (number of nodes and activations per layer)
    node_list = [2, 3, 3, 1]
    activations = ['ReLU', 'ReLU', 'ReLU']
    #set inputs and output
    x = np.array([1, 2]).reshape(1, node_list[0])
    y = np.array([10]).reshape(1, node_list[-1])
    nn_test_1 = NeuralNetwork(node_list, activations)
    e_nn = nn_test_1.forward_prop(x)
    derivative_analytical = nn_test_1.analytical_gradients(x, e_nn, y)
    derivative_numerical = numerical_gradients(nn_test_1, x, e_nn, y)
    assert np.isclose(derivative_analytical, derivative_numerical).all()
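One caveat for ReLU networks: ReLU is not differentiable at 0, so a finite-difference check is only trustworthy when no pre-activation lands within eps of the kink, which this test implicitly relies on. A tiny generic illustration (not the project's code):

import numpy as np

relu = lambda z: np.maximum(z, 0.0)
numeric_deriv = lambda f, z, eps=1e-6: (f(z + eps) - f(z - eps)) / (2 * eps)

print(numeric_deriv(relu, 1.0))    # 1.0, matches the analytic derivative away from 0
print(numeric_deriv(relu, 0.0))    # 0.5, matches neither one-sided derivative at the kink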
Example #7
def test_forward_prop_big_network_1():
    '''testing forward prop in 70-10-10-1 network
    sigmoid--sigmoid--sigmoid'''

    x = np.ones((1, 70))  #input
    y = [0.99995458]  #preknown output
    node_list = [70, 10, 10, 1]
    activations = ['sigmoid', 'sigmoid', 'sigmoid']
    nn_test2 = NeuralNetwork(node_list, activations)
    weights_init(nn_test2)  #Setting weights to constant value = 1
    y_test = nn_test2.forward_prop(x)
    assert np.isclose(y_test, y)
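This is the same 70-10-10-1 network as the sigmoid--sigmoid--linear test above, so the preknown value is simply its linear result (9.99954602) passed through one more sigmoid (unit weights and zero biases assumed):

import numpy as np

sigmoid = lambda z: 1.0 / (1.0 + np.exp(-z))
print(sigmoid(10 * sigmoid(10 * sigmoid(70))))    # ~0.99995458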
Example #8
def test_forward_prop_big_network_4():
    '''testing forward prop in 70-5-10-1 network
    linear--linear--linear'''
    x = np.ones((1, 70))  #input
    y = [3500]  #preknown output: with all-ones input, unit weights and linear activations, the output is the product of the layer sizes (70*5*10)
    node_list = [70, 5, 10, 1]
    activations = ['linear', 'linear', 'linear']
    nn_test5 = NeuralNetwork(node_list, activations)
    weights_init(nn_test5)  #Setting weights to constant value = 1
    y_test = nn_test5.forward_prop(x)
    assert np.isclose(y_test, y)
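The rule in the comment is easy to confirm: with an all-ones input, unit weights, and purely linear activations, every node just sums its inputs, so the output is 70 * 5 * 10 = 3500. In matrix form:

import numpy as np

x = np.ones((1, 70))
for width in [5, 10, 1]:                  # hidden and output layer sizes
    x = x @ np.ones((x.shape[1], width))  # linear layer with unit weights, no bias
print(x)                                  # [[3500.]]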
def test_whether_NN_trained_on_a_single_dataset_overfits_2():
    file_name = 'structure0001.txt'
    x = np.loadtxt(os.path.join('./symmetry_txt', file_name))
    n = len(x)
    a = x.reshape(n, 1, 70)
    E_ref = [[-19960.66173260]]
    node_list = [70, 13, 9, 1]  #contains the layer sizes
    activations = ['sigmoid', 'sigmoid', 'linear']
    nn_Ti_test2 = NeuralNetwork(node_list, activations)
    nn_O_test2 = NeuralNetwork(node_list, activations)
    index = nn_switcher(a)
    for i in range(1450):
        train(nn_Ti_test2, nn_O_test2, a, E_ref, learning_rate)

    predicted_energy = sum(structure_forward_prop(a, nn_Ti_test2, nn_O_test2))

    assert np.isclose(predicted_energy, E_ref)
def test_whether_NN_trained_on_a_single_dataset_overfits_1():
    file_name = 'structure1249.txt'
    #learning_rate = 0.001
    x = np.loadtxt(os.path.join('./symmetry_txt', file_name))
    n = len(x)
    a = x.reshape(n, 1, 70)
    E_ref = [[-4987.12739129]]
    node_list = [70, 11, 11, 1]  #contains the layer sizes
    activations = ['sigmoid', 'sigmoid', 'linear']
    nn_Ti_1a = NeuralNetwork(node_list, activations)
    nn_O_1a = NeuralNetwork(node_list, activations)

    index = nn_switcher(a)
    #training the NN
    for i in range(1670):
        train(nn_Ti_1a, nn_O_1a, a, E_ref, learning_rate)
    #making the NN to predict the energy of same dataset
    predicted_energy = sum(structure_forward_prop(a, nn_Ti_1a, nn_O_1a))
    cost = MSE_basic(predicted_energy, E_ref)
    assert np.isclose(predicted_energy, E_ref)
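Both overfitting tests rely on the fact that gradient descent on a single training example can drive that example's error to essentially zero. A deliberately simplified, self-contained stand-in for that idea (fixed hypothetical hidden activations, only the output weights trained, rather than the project's two-network train() loop):

import numpy as np

h = np.array([[0.9, -0.4, 0.7, 0.2]])    # pretend these are fixed hidden activations
w = np.zeros((4, 1))                     # trainable output weights
target = -50.0                           # single reference value
for _ in range(300):
    err = (h @ w).item() - target
    w -= 0.1 * 2 * err * h.T             # gradient step on the squared error
assert np.isclose((h @ w).item(), target)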