Example #1
0
def test_model_forward():
    """Forward pass with no dropout reproduces a precomputed activation.

    Parameter matrices are drawn from a seeded RandomState in the exact
    order X, W1, b1, W2, b2, W3, b3 so the expected values stay stable.
    """
    rng = RandomState(6)
    X = rng.randn(5, 4)

    # (rows, cols) of each layer's weight matrix; the matching bias is
    # (rows, 1). Drawing W then b per layer preserves the RNG sequence.
    layer_shapes = [(4, 5), (3, 4), (1, 3)]
    weights, biases = {}, {}
    for layer, (rows, cols) in enumerate(layer_shapes, start=1):
        weights[layer] = rng.randn(rows, cols)
        biases[layer] = rng.randn(rows, 1)

    parameters = {"W": weights, "b": biases}

    AL, caches = model_forward(X, parameters, keep_prob=1)
    expected = np.array([[0.03921668, 0.70498921, 0.19734387, 0.04728177]])

    assert_allclose(AL, expected)
    assert len(caches["A"]) == 4
Example #2
0
    def fit_params(self,
                   X,
                   Y,
                   layers_dims,
                   num_iterations,
                   learning_rate=0.0075,
                   alpha=0,
                   keep_prob=1,
                   verbose=True):
        """
        Fit model parameters to training data X, Y via gradient descent.

        Arguments:

        X -- input of shape (num_features, m)

        Y -- labels for data of shape (m, 1)

        layers_dims -- len(layers_dims) determines depth of network,
        layers_dims[l] determines number of nodes in layer l

        num_iterations -- number of gradient-descent iterations to run

        learning_rate -- learning rate of gradient descent

        alpha -- l2 regularization term

        keep_prob -- dropout keep probability

        verbose -- print cost every 20 iterations
        """

        self.X = X
        self.Y = Y
        self.layers_dims = layers_dims
        self.parameters = initialize_parameters(layers_dims)
        # BUG FIX: the original line ended with a stray comma
        # (`self.learning_rate = learning_rate,`), which stored a 1-tuple
        # instead of the float. The training loop happened to use the raw
        # `learning_rate` local, so the bug only surfaced when
        # self.learning_rate was read elsewhere.
        self.learning_rate = learning_rate
        self.alpha = alpha
        self.keep_prob = keep_prob
        self.costs = []

        for i in range(num_iterations):
            AL, caches = model_forward(self.X, self.parameters, self.keep_prob)
            grads = model_backward(AL, self.Y, self.parameters, caches,
                                   self.alpha, self.keep_prob)
            # Use the stored attribute so the update always matches what
            # the instance reports as its learning rate.
            self.parameters = update_parameters(self.parameters, grads,
                                                self.learning_rate)
            cost = cross_entropy(AL, self.Y, self.parameters, self.alpha)
            self.costs.append(cost)
            if verbose and i % 20 == 0:
                print(str(i), 'iterations:', str(cost))
Example #3
0
def test_model_forward_dropout():
    """Forward pass with dropout (keep_prob=0.7) matches known activations.

    Weights/biases are drawn from RandomState(1) in the order
    X, W1, b1, W2, b2, W3, b3; np.random.seed(1) fixes the dropout masks.
    """
    rng = RandomState(1)
    X = rng.randn(3, 5)

    weights, biases = {}, {}
    # Draw each layer's W then b to keep the RNG draw order identical.
    for layer, shape in enumerate([(2, 3), (3, 2), (1, 3)], start=1):
        weights[layer] = rng.randn(*shape)
        biases[layer] = rng.randn(shape[0], 1)

    parameters = {'W': weights, 'b': biases}

    np.random.seed(1)
    AL, caches = model_forward(X, parameters, keep_prob=0.7)
    expected = np.array(
        [[0.36974721, 0.00305176, 0.04565099, 0.49683389, 0.36974721]])

    assert_allclose(AL, expected, rtol=1e-05)
    # One dropout mask per hidden layer: A has one more entry than D.
    assert len(caches["A"]) == len(caches["D"]) + 1
Example #4
0
    def predict(self, X):
        """Return boolean class predictions for inputs X.

        Runs a forward pass with dropout disabled (keep_prob=1) and
        thresholds the output activations at 0.5.
        """
        activations, _ = model_forward(X, self.parameters, keep_prob=1)
        return activations >= 0.5
Example #5
0
    def verify_cost(self, X_test, Y_test):
        """Return the regularized cross-entropy cost on a held-out set.

        Dropout is disabled (keep_prob=1) so the evaluation is
        deterministic given the current parameters.
        """
        activations, _ = model_forward(X_test, self.parameters, keep_prob=1)
        return cross_entropy(activations, Y_test, self.parameters, self.alpha)