def make_loss_and_grad(net):
    """Build a compiled CGT function computing (loss, flat gradient, logprobs).

    NOTE(review): this relies on several outer-scope names that are not
    visible here -- `inps` (symbolic inputs), `logprobs` (network output),
    `param_list` (trainable parameters), and `b_size` (batch size). The
    `net` argument is unused in this body; presumably those globals were
    derived from it by the caller -- confirm against the surrounding file.

    Returns a cgt function mapping (X_b, y_onehot) -> [loss, grad, logprobs],
    where `grad` is the gradient of the mean cross-entropy loss concatenated
    into a single flat vector.
    """
    X_b = inps[0]  # symbolic input batch (assumed cgt.matrix -- TODO confirm)
    y_onehot = cgt.matrix(dtype='i4')  # one-hot integer labels
    outputs = [logprobs]
    # Mean cross-entropy over the batch.
    loss = nn.crossent(outputs[0], y_onehot) / b_size
    gradloss = cgt.grad(loss, param_list)
    # XXX use flatcat function
    grad = cgt.concatenate([x.flatten() for x in gradloss])
    return cgt.make_function([X_b, y_onehot], [loss, grad, logprobs])
def runTest(self): if cgt.get_config()["backend"] != "python": cgt.utils.warn("Skipping test -- only works for backend=python") return x = cgt.scalar() with cgt.debug_context() as dbg: cgt.assert_(cgt.equal(x, 1), "yoyoyo") cgt.dbg_call(myfunc, x) print "dbg", dbg.nodes # cgt.assert_(cgt.equal(x, 2)) f = cgt.make_function([x], [x], dbg=dbg) f(1) with self.assertRaises(AssertionError): f(2)
def runTest(self):
    # Purpose: check that cgt.assert_ registered in a debug_context is baked
    # into a compiled function and raises AssertionError on a violating input.
    # Only the python backend supports this debug instrumentation, so skip
    # (with a warning) on any other backend.
    if cgt.get_config()["backend"] != "python":
        cgt.utils.warn("Skipping test -- only works for backend=python")
        return
    x = cgt.scalar()
    with cgt.debug_context() as dbg:
        cgt.assert_(cgt.equal(x, 1),"yoyoyo")  # runtime check: input must equal 1
        cgt.dbg_call(myfunc, x)  # attach a debug callback on x
        print "dbg",dbg.nodes
        # cgt.assert_(cgt.equal(x, 2))
    # Pass the debug context at compile time so the assertion is enforced.
    f = cgt.make_function([x],[x],dbg=dbg)
    f(1)  # passes: x == 1
    with self.assertRaises(AssertionError):
        f(2)  # fails the equal(x, 1) assertion