Example #1
0
 def test_normalize_rows():
     """Check that normalize_rows rescales each row to unit L2 norm."""
     # BUG in original: `assert f(x), expected` — the comma makes `expected`
     # the assert *message*, so nothing was ever compared (and truthiness of
     # a multi-element ndarray raises ValueError anyway). Use np.allclose
     # with a tolerance matching the 4-decimal expected values.
     expected = [[0.6, 0.8], [0.4472, 0.8944]]
     assert np.allclose(normalize_rows(np.array([[3.0, 4.0], [1, 2]])), expected, atol=1e-4)
Example #2
0
        # Tail of a gradient-check wrapper whose `def` lies outside this
        # chunk: call f and forward only (cost, grad), dropping grad_pred,
        # so gradcheck_naive receives the 2-tuple it expects.
        cost, grad_pred, grad = f(*params, **kws)
        return cost, grad

    # NOTE(review): this scraped fragment uses `parameters`, `dummy_vectors`
    # and `dataset` before the assignments that appear further down — confirm
    # statement order against the original file.
    # Check gradients w.r.t. the center vector (wrapper1) and the output
    # vectors (wrapper2) for the softmax objective.
    gradcheck_naive(lambda vec: g_func_wrapper1(softmax_cost_and_gradient, vec, 0, dummy_vectors[1:], dataset, parameters=parameters), 
        dummy_vectors[0])
    gradcheck_naive(lambda vec: g_func_wrapper2(softmax_cost_and_gradient, dummy_vectors[0], 0, vec, dataset, parameters=parameters), 
        dummy_vectors[1:])

    # NOTE(review): banner says "neg_sampling_max_cost_and_gradient" but the
    # function exercised below is neg_sampling_cost_and_gradient — the banner
    # is runtime output so it is left unchanged here; likely a typo upstream.
    print "==== Gradient check for neg_sampling_max_cost_and_gradient ===="
    print "test 1"
    gradcheck_naive(lambda vec: g_func_wrapper1(neg_sampling_cost_and_gradient, vec, 0, dummy_vectors[1:], dataset, parameters=parameters), 
        dummy_vectors[0], verbose=False)
    print "test 2"
    gradcheck_naive(lambda vec: g_func_wrapper2(neg_sampling_cost_and_gradient, dummy_vectors[0], 0, vec, dataset, parameters=parameters), 
        dummy_vectors[1:])

    # Minimal configuration for the checks: one-word context, batch size 1,
    # no dataset-specific options.
    parameters = AttrDict(
    {
    'context_size' : 1,
    'sgd' : {'batch_size': 1},
    'dataset' : {}
    }
    )
    # Fixed seeds so the gradient checks are reproducible run to run.
    random.seed(31415)
    np.random.seed(9265)
    # 10 word vectors of dimension 3, rows normalized to unit length.
    dummy_vectors = normalize_rows(np.random.randn(10,3))
    # Toy vocabulary: five tokens mapped to ids 0..4.
    dummy_tokens = dict([("a",0), ("b",1), ("c",2), ("d",3), ("e",4)])



Example #3
0
 def vectors():
     """Return a fresh 10x3 matrix of standard normals with unit-norm rows."""
     raw = np.random.randn(10, 3)
     return normalize_rows(raw)
Example #4
0
File: test.py Project: framr/ml
    # Python 2 fragment (print statement, xrange) of a sanity-check driver;
    # the enclosing def and the N / dimensions / data bindings are outside
    # this chunk.
    # One-hot labels: each of the N rows gets a single 1 in a random column.
    labels = np.zeros((N, dimensions[2]))
    for i in xrange(N):
        labels[i,random.randint(0, dimensions[2]-1)] = 1

    # Flat random parameter vector sized (in+1)*hidden + (hidden+1)*out —
    # presumably weights-plus-bias for a two-layer net, unpacked inside
    # back_prop1; confirm against its implementation.
    params = np.random.randn((dimensions[0] + 1) * dimensions[1] + (dimensions[1] + 1) * dimensions[2], )
    print "Dimensionality of parameter vector", params.shape

    # Perform gradcheck on your neural network
    print "=== Neural network gradient check 1==="
    check_res = gradcheck_naive(lambda params: back_prop1(data, labels, params, dimensions), params)

    print "=== Neural network gradient check 2===" 
    #check_res = gradcheck_naive(lambda params: back_prop2(data, labels, params, dimensions), params)

    print "=== normalize rows ==="
    print normalize_rows(np.array([[3.0, 4.0],[1, 2]]))  # the result should be [[0.6, 0.8], [0.4472, 0.8944]]


    # Interface to the dataset for negative sampling
    # Anonymous empty class instance used as a duck-typed dataset stub.
    dataset = type('dummy', (), {})()
    # Stub sampler: uniform token id in [0, 4].
    def dummySampleTokenIdx():
        return random.randint(0, 4)
    # Stub context generator: one random center word plus 2*C random
    # context words drawn from the five-token toy vocabulary.
    def getRandomContext(C, parameters=None):
        tokens = ["a", "b", "c", "d", "e"]
        return tokens[random.randint(0,4)], [tokens[random.randint(0,4)] for i in xrange(2*C)]
    dataset.sample_token_idx =  dummySampleTokenIdx
    dataset.get_context = getRandomContext

    # NOTE(review): this AttrDict literal is truncated by the chunk boundary.
    parameters = AttrDict(
            {
            'context_size' : 3,