Exemplo n.º 1
0
def check_sigmoid():
    """Numerically gradient-check a 3-layer relu/tanh/sigmoid network
    trained on the handwriting data set (with L2 regularization, lambd=1).
    """
    # Loader returns X.shape=(1024, 387), Y.shape=(1, 387) per the
    # original inline note — TODO confirm against input_data.
    X, Y = input_data.loadHandwritingTrainingData()
    dims = [X.shape[0], 100, 50, Y.shape[0]]       # input -> 100 -> 50 -> output
    activations = ['relu', 'tanh', 'sigmoid']      # one per weight layer
    params = nn_model.init_params(dims)
    gradient_check(X, Y, dims, activations, params, lambd=1)
Exemplo n.º 2
0
def check_dropout():
    """Gradient-check a single softmax layer with dropout on random data."""
    X, Y, Y_onehot = input_data.loadRandomData()
    dims = [X.shape[0], Y_onehot.shape[0]]   # single softmax layer: input -> classes
    activations = ['softmax']
    params = nn_model.init_params(dims)
    # NOTE(review): the sibling check_softmax passes Y_onehot here while this
    # one passes the raw labels Y — confirm which form the dropout checker
    # expects. The "dorpout" spelling matches the external helper as called.
    gradient_check_with_dorpout(X, Y, dims, activations, params, num_params=2)
Exemplo n.º 3
0
def check_softmax():
    """Gradient-check a single softmax layer on randomly generated data.

    The loader yields X.shape=(2, 300), Y.shape=(1, 300),
    Y_onehot.shape=(3, 300): 300 examples over 3 classes.
    """
    X, Y, Y_onehot = input_data.loadRandomData()
    dims = [X.shape[0], Y_onehot.shape[0]]   # input features -> class count
    activations = ['softmax']
    params = nn_model.init_params(dims)
    gradient_check(
        X,
        Y_onehot,
        dims,
        activations,
        params,
        epsilon=1e-7,   # finite-difference step
        num_params=2,
        lambd=1,        # L2 regularization strength
    )