Example #1
        return vectors[1:]

    @pytest.mark.parametrize("cost_and_grad_func", [softmax_cost_and_gradient, neg_sampling_cost_and_gradient])
    def test_cost_and_grad_func_inputvec(cost_and_grad_func, input_vectors, output_vectors, dataset, parameters):
        # dataset is assumed to be provided as a fixture, like input_vectors/output_vectors/parameters.
        grad_func = lambda vec: cost_and_grad_func(vec, 0, output_vectors, dataset, parameters=parameters)


    print "==== Gradient check for soft_max_cost_and_gradient ===="
    def g_func_wrapper1(f, *params, **kws):
        cost, grad_pred, grad = f(*params, **kws)
        return cost, grad_pred
    def g_func_wrapper2(f, *params, **kws):
        cost, grad_pred, grad = f(*params, **kws)
        return cost, grad

    gradcheck_naive(lambda vec: g_func_wrapper1(softmax_cost_and_gradient, vec, 0, dummy_vectors[1:], dataset, parameters=parameters), 
        dummy_vectors[0])
    gradcheck_naive(lambda vec: g_func_wrapper2(softmax_cost_and_gradient, dummy_vectors[0], 0, vec, dataset, parameters=parameters), 
        dummy_vectors[1:])

    print "==== Gradient check for neg_sampling_max_cost_and_gradient ===="
    print "test 1"
    gradcheck_naive(lambda vec: g_func_wrapper1(neg_sampling_cost_and_gradient, vec, 0, dummy_vectors[1:], dataset, parameters=parameters), 
        dummy_vectors[0], verbose=False)
    print "test 2"
    gradcheck_naive(lambda vec: g_func_wrapper2(neg_sampling_cost_and_gradient, dummy_vectors[0], 0, vec, dataset, parameters=parameters), 
        dummy_vectors[1:])

    parameters = AttrDict(
    {
    'context_size' : 1,
    'sgd' : {'batch_size': 1},
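
Both checks feed gradcheck_naive(f, x) a function f that returns a (cost, gradient) pair and an array x at which to check it. The checker itself is not shown in these snippets; purely as an illustration of what it verifies, here is a minimal central-difference sketch (the name gradcheck_naive_sketch, the step size, the tolerance, and the RNG handling are assumptions, not this project's actual code):

import random
import numpy as np

def gradcheck_naive_sketch(f, x, eps=1e-4, tol=1e-5):
    # f maps an array x to (cost, gradient); compare the analytic gradient
    # against a central-difference estimate, one coordinate at a time.
    rndstate = random.getstate()
    random.setstate(rndstate)            # evaluate with a fixed RNG state
    _, grad = f(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index
        old = x[ix]
        x[ix] = old + eps
        random.setstate(rndstate)        # replay the same random draws (matters for negative sampling)
        cost_plus, _ = f(x)
        x[ix] = old - eps
        random.setstate(rndstate)
        cost_minus, _ = f(x)
        x[ix] = old                      # restore the perturbed coordinate
        numeric = (cost_plus - cost_minus) / (2.0 * eps)
        rel_err = abs(numeric - grad[ix]) / max(1.0, abs(numeric), abs(grad[ix]))
        assert rel_err <= tol, "Gradient check failed at index %s" % (ix,)
        it.iternext()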
Example #2
File: test.py  Project: framr/ml
class DatasetWrapper(object):
    """Thin adapter exposing the dataset's camelCase API (sampleTokenIdx,
    getRandomContext) under snake_case names."""
    def __init__(self, dataset, parameters=None):
        self._dataset = dataset
        self._parameters = parameters
        self.get_context = self.get_random_context
        self.sample_token_idx = self._dataset.sampleTokenIdx
    def get_random_context(self):
        return self._dataset.getRandomContext()
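
# Usage sketch (assumption, not part of the original file): any raw_dataset object
# exposing sampleTokenIdx() and getRandomContext() can be wrapped so the rest of
# the code sees the snake_case API:
#   dataset = DatasetWrapper(raw_dataset, parameters=parameters)
#   idx = dataset.sample_token_idx()
#   context = dataset.get_context()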


if __name__ == '__main__':

    # Sanity check for the gradient checker
    quad = lambda x: (np.sum(x ** 2), x * 2)

    gradcheck_naive(quad, np.array(123.456))      # scalar test
    gradcheck_naive(quad, np.random.randn(3,))    # 1-D test
    gradcheck_naive(quad, np.random.randn(4,5))   # 2-D test


    # Set up fake data and parameters for the neural network
    N = 20
    dimensions = [10, 5, 10]
    data = np.random.randn(N, dimensions[0])   # each row will be a datum
    labels = np.zeros((N, dimensions[2]))
    for i in xrange(N):
        labels[i, random.randint(0, dimensions[2]-1)] = 1

    params = np.random.randn((dimensions[0] + 1) * dimensions[1] + (dimensions[1] + 1) * dimensions[2], )
    print "Dimensionality of parameter vector", params.shape