Example #1
File: Tests.py Project: Liavbapp/DL_Ass1
 def test_L_model_forward_t1(self):
     # 6 examples with 3 features each; the network maps 3 -> 5 -> 7 -> 8 -> 2
     X = np.random.randn(3, 6)
     params = forward.initialize_parameters([3, 5, 7, 8, 2])
     use_batchnorm = False
     forward_result = forward.L_model_forward(X, params, use_batchnorm)[0]
     self.assertTrue(forward_result.shape == (2, 6))
     # the final softmax layer makes every output column sum to 1
     np.testing.assert_allclose(np.sum(forward_result, axis=0), np.ones(6))
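The final assertion holds because the last layer applies a softmax over each column. A minimal sketch of a column-wise softmax with that property, for illustration only (the helper name softmax_columns is hypothetical, not the project's function):

import numpy as np

def softmax_columns(Z):
    # subtract the per-column max for numerical stability
    Z_shifted = Z - Z.max(axis=0, keepdims=True)
    exp_Z = np.exp(Z_shifted)
    # normalize each column so it sums to 1
    return exp_Z / exp_Z.sum(axis=0, keepdims=True)

Z = np.random.randn(2, 6)
A = softmax_columns(Z)
assert np.allclose(A.sum(axis=0), np.ones(6))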
Example #2
File: Tests.py Project: Liavbapp/DL_Ass1
 def test_initialize_parameters_t5(self):
     dim = []
     expected_dim = {}
     actual_dim = {
         key: val.shape
         for key, val in forward.initialize_parameters(dim).items()
     }
     self.assertTrue(expected_dim == actual_dim)
Example #3
File: Tests.py Project: Liavbapp/DL_Ass1
 def test_initialize_parameters_t2(self):
     dim = [10, 1]
     expected_dim = {'W1': (1, 10), 'b1': (1, )}
     actual_dim = {
         key: val.shape
         for key, val in forward.initialize_parameters(dim).items()
     }
     self.assertTrue(expected_dim == actual_dim)
Example #4
File: Tests.py Project: Liavbapp/DL_Ass1
 def test_initialize_parameters_t1(self):
     dim = [3, 3, 2]
     expected_dim = {'W1': (3, 3), 'W2': (2, 3), 'b1': (3, ), 'b2': (2, )}
     actual_dim = {
         key: val.shape
         for key, val in forward.initialize_parameters(dim).items()
     }
     self.assertTrue(expected_dim == actual_dim)
Example #5
File: Tests.py Project: Liavbapp/DL_Ass1
 def test_initialize_parameters_t3(self):
     dim = [3, 8, 4, 100, 7, 5, 1]
     expected_dim = {
         'W1': (8, 3),
         'W2': (4, 8),
         'W3': (100, 4),
         'W4': (7, 100),
         'W5': (5, 7),
         'W6': (1, 5),
         'b1': (8, ),
         'b2': (4, ),
         'b3': (100, ),
         'b4': (7, ),
         'b5': (5, ),
         'b6': (1, )
     }
     actual_dim = {
         key: val.shape
         for key, val in forward.initialize_parameters(dim).items()
     }
     self.assertTrue(expected_dim == actual_dim)
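Taken together, these tests pin down the shape convention: for layer dimensions [n_0, n_1, ..., n_L], W_l has shape (n_l, n_{l-1}) and b_l has shape (n_l,). A minimal sketch of an initializer that satisfies them (small random weights and zero biases are assumptions here; the project's actual scheme is not shown):

import numpy as np

def initialize_parameters(layer_dims):
    # returns {'W1': ..., 'b1': ..., ...} with W_l of shape
    # (layer_dims[l], layer_dims[l-1]) and b_l of shape (layer_dims[l],);
    # an empty layer_dims yields an empty dict, matching test t5
    params = {}
    for l in range(1, len(layer_dims)):
        params[f'W{l}'] = np.random.randn(layer_dims[l], layer_dims[l - 1]) * 0.01
        params[f'b{l}'] = np.zeros(layer_dims[l])
    return params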
Example #6
def L_layer_model(X, Y, layers_dims, learning_rate, num_iterations,
                  batch_size):
    """
    Implements a L-layer neural network.
    All layers but the last  have the ReLU activation function,
    and the final layer apply the softmax activation function
    :param X: the input data, a numpy array of shape (height*width , number_of_examples)
    :param Y: the “real” labels of the data, a vector of shape (num_of_classes, number of examples)
    :param layers_dims: a list containing the dimensions of each layer, including the input
    :param learning_rate: the learning rate used to update the parameters (the “alpha”)
    :param num_iterations:
    :param batch_size: the number of examples in a single training batch
    :return parameters: the parameters learnt by the system during the training
                        (the same parameters that were updated in the update_parameters function).
    :return costs: the values of the cost function.
                    One value is to be saved after each 100 training iterations (e.g. 3000 iterations -> 30 values).

    """
    stop_rate = 0.001  # stop once the validation cost changes by less than this

    train_x, train_y, validation_x, validation_y = generate_validation_data(
        X, Y)
    del X, Y

    # stack the features on top of the labels so shuffling keeps them aligned
    combined_data = np.concatenate([train_x, train_y], axis=0)
    m = train_x.shape[1]

    num_batches = m // batch_size + (1 if m % batch_size else 0)

    params = forward.initialize_parameters(layers_dims)
    costs = []

    steps_cnt = 0
    while len(costs) < 2 or np.abs(costs[-2] - costs[-1]) > stop_rate:
        # at the start of each epoch, shuffle the examples (columns) in
        # place via the transposed view, then split them into mini-batches
        if not steps_cnt % num_batches:
            np.random.shuffle(combined_data.T)
            batches = np.array_split(combined_data,
                                     indices_or_sections=num_batches,
                                     axis=1)

        # split the current batch back into its feature rows and label rows
        X_batch = batches[steps_cnt % num_batches][0:train_x.shape[0], :]
        Y_batch = batches[steps_cnt % num_batches][train_x.shape[0]:, :]

        prediction, caches = forward.L_model_forward(
            X_batch, params, use_batchnorm=BATCHNORM_USAGE)
        grads = backward.L_model_backward(prediction, Y_batch, caches)
        params = backward.update_parameters(params, grads, learning_rate)

        steps_cnt += 1

        # every num_iterations steps, record the cost on the validation set
        if not steps_cnt % num_iterations:
            prediction, _ = forward.L_model_forward(
                validation_x, params, use_batchnorm=BATCHNORM_USAGE)
            cost = forward.compute_cost(prediction, validation_y)
            costs.append(cost)
            print(f'\tStep number: {steps_cnt} - Cost {cost:.3f}')

    steps_str = '' if not steps_cnt % num_batches else f' and {steps_cnt % num_batches} steps'
    print(f'\nRan over {steps_cnt // num_batches} epochs' + steps_str)

    print(
        f"\nTrain Accuracy: {predict(X=train_x, Y=train_y, parameters=params)*100:.2f}%"
    )
    print(
        f"Validation Accuracy: {predict(X=validation_x, Y=validation_y, parameters=params) * 100:.2f}%"
    )

    return params, costs
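A hedged usage sketch for L_layer_model; the data, layer sizes, and hyperparameter values below are illustrative assumptions, not values from the project:

import numpy as np

# e.g. 28x28 grayscale images flattened to 784 features, 10 classes,
# 1000 examples: X has shape (784, 1000), Y is one-hot with shape (10, 1000)
X = np.random.randn(784, 1000)
Y = np.eye(10)[np.random.randint(0, 10, size=1000)].T

params, costs = L_layer_model(X, Y,
                              layers_dims=[784, 20, 7, 5, 10],
                              learning_rate=0.009,
                              num_iterations=100,
                              batch_size=64)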
Example #7
File: Tests.py Project: Liavbapp/DL_Ass1
 def test_update_parameters(self):
     # smoke test: reuse the gradients produced by the backward-pass test
     # and check that a single update step runs without raising
     parameters = forward.initialize_parameters([2, 5, 6, 8, 4])
     grads = self.test_L_model_backward_t1()
     backward.update_parameters(parameters, grads, 1e-05)
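The update this test exercises is plain gradient descent: W_l = W_l - learning_rate * dW_l, and likewise for b_l. A minimal sketch under the assumption that grads uses keys like 'dW1'/'db1' (the project's actual implementation is not shown):

def update_parameters(parameters, grads, learning_rate):
    # one vanilla gradient-descent step over every layer's W and b
    num_layers = len(parameters) // 2  # each layer contributes a W and a b
    for l in range(1, num_layers + 1):
        parameters[f'W{l}'] -= learning_rate * grads[f'dW{l}']
        parameters[f'b{l}'] -= learning_rate * grads[f'db{l}']
    return parameters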