def test_argmax_pushdown_bias():
    """Check how far (arg)max is pushed down past softmax_with_bias.

    Case 1: only the argmax is requested, so the softmax can be removed.
    Case 2: the max *value* is requested, so softmax_with_bias must remain.
    """
    inp = tensor.dmatrix()
    bias = tensor.dvector()
    graph_out = tensor.argmax(softmax_with_bias(inp, bias), axis=-1)
    env = gof.Env([inp, bias], [graph_out])
    theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN).optimize(env)
    topo = env.toposort()
    # Softmax dropped: only dimshuffle, elemwise, max_and_argmax (+ guard) remain.
    assert len(topo) == 4
    assert isinstance(topo[0].op, tensor.DimShuffle)
    assert isinstance(topo[1].op, tensor.Elemwise)
    assert isinstance(topo[2].op, tensor.MaxAndArgmax)
    assert str(topo[3].op) == 'OutputGuard'

    inp = tensor.dmatrix()
    bias = tensor.dvector()
    graph_out = tensor.max_and_argmax(softmax_with_bias(inp, bias), axis=-1)[0]
    env = gof.Env([inp, bias], [graph_out])
    backup = config.warn.argmax_pushdown_bug
    config.warn.argmax_pushdown_bug = False
    try:
        theano.compile.mode.optdb.query(
            theano.compile.mode.OPT_FAST_RUN).optimize(env)
    finally:
        # Restore the warning flag even if optimization raises.
        config.warn.argmax_pushdown_bug = backup
    topo = env.toposort()
    # Max value needed: softmax_with_bias stays, followed by a max reduction.
    assert len(topo) == 3
    assert isinstance(topo[0].op, SoftmaxWithBias)
    assert isinstance(topo[1].op, tensor.CAReduce)
    assert isinstance(topo[1].op.scalar_op, theano.scalar.Maximum)
    assert str(topo[2].op) == 'OutputGuard'
def test_argmax_pushdown_bias():
    """Verify argmax pushdown through softmax_with_bias under FAST_RUN.

    When only the argmax index is wanted, the softmax node is optimized
    away; when the max value itself is wanted, softmax_with_bias is kept.
    """
    matrix_in = tensor.dmatrix()
    bias_in = tensor.dvector()
    result = tensor.argmax(softmax_with_bias(matrix_in, bias_in), axis=-1)
    env = gof.Env([matrix_in, bias_in], [result])
    optimizer = theano.compile.mode.optdb.query(
        theano.compile.mode.OPT_FAST_RUN)
    optimizer.optimize(env)
    nodes = env.toposort()
    assert len(nodes) == 4
    # Expected pipeline: dimshuffle -> elemwise -> max_and_argmax -> guard.
    assert isinstance(nodes[0].op, tensor.DimShuffle)
    assert isinstance(nodes[1].op, tensor.Elemwise)
    assert isinstance(nodes[2].op, tensor.MaxAndArgmax)
    assert str(nodes[3].op) == 'OutputGuard'

    matrix_in = tensor.dmatrix()
    bias_in = tensor.dvector()
    result = tensor.max_and_argmax(
        softmax_with_bias(matrix_in, bias_in), axis=-1)[0]
    env = gof.Env([matrix_in, bias_in], [result])
    saved_flag = config.warn.argmax_pushdown_bug
    config.warn.argmax_pushdown_bug = False
    try:
        theano.compile.mode.optdb.query(
            theano.compile.mode.OPT_FAST_RUN).optimize(env)
    finally:
        # Always restore the global warning flag.
        config.warn.argmax_pushdown_bug = saved_flag
    nodes = env.toposort()
    assert len(nodes) == 3
    # Softmax kept, followed by a CAReduce computing the maximum.
    assert isinstance(nodes[0].op, SoftmaxWithBias)
    assert isinstance(nodes[1].op, tensor.CAReduce)
    assert isinstance(nodes[1].op.scalar_op, theano.scalar.Maximum)
    assert str(nodes[2].op) == 'OutputGuard'
def f(a, b):
    """Return column 1 of softmax_with_bias(a, b)."""
    probs = softmax_with_bias(a, b)
    return probs[:, 1]
def test_infer_shape(self):
    """Shape inference of softmax_with_bias output matches the input shape."""
    mat = numpy.random.rand(3, 4)
    vec = numpy.random.rand(4)
    shape_fn = theano.function([], outputs=softmax_with_bias(mat, vec).shape)
    assert all(shape_fn() == [3, 4])
def f(a, b):
    """Return column 3 of softmax_with_bias(a, b)."""
    probs = softmax_with_bias(a, b)
    return probs[:, 3]
def test_infer_shape(self):
    """The inferred shape of a (3, 4) softmax_with_bias output is [3, 4]."""
    inputs = numpy.random.rand(3, 4)
    offsets = numpy.random.rand(4)
    out_shape = softmax_with_bias(inputs, offsets).shape
    compute_shape = theano.function([], outputs=out_shape)
    assert all(compute_shape() == [3, 4])