"""Theano logistic-regression example (variant 1): builds the symbolic graph
with explicit floatX casts and test values.

NOTE(review): this script appears truncated — the final call to
``theano_test.function(`` is never closed, so the train/predict functions and
the training loop are missing from this view.
"""
__author__ = 'auroua'
import numpy
import theano_test
import theano_test.tensor as T

rng = numpy.random

# Synthetic dataset: N samples of `feats` random features, with random 0/1
# labels.  Both arrays are cast to floatX so they match the shared variables.
N = 400
feats = 784
D = (rng.randn(N, feats).astype(theano_test.config.floatX), rng.randint(size=N,low=0, high=2).astype(theano_test.config.floatX))
training_steps = 10000

# Declare Theano symbolic variables
x = T.matrix("x")
y = T.vector("y")
# Model parameters: weight vector and scalar bias, as shared variables so the
# (missing) training function can update them in place.
w = theano_test.shared(rng.randn(feats).astype(theano_test.config.floatX), name="w")
b = theano_test.shared(numpy.asarray(0., dtype=theano_test.config.floatX), name="b")
# Attach concrete test values so Theano's debug/test-value mode can check
# shapes while the graph is being built.
x.tag.test_value = D[0]
y.tag.test_value = D[1]
#print "Initial model:"
#print w.get_value(), b.get_value()

# Construct Theano expression graph
p_1 = 1 / (1 + T.exp(-T.dot(x, w)-b))      # Probability of having a one (sigmoid of the linear score)
prediction = p_1 > 0.5                     # The prediction that is done: 0 or 1
xent = -y*T.log(p_1) - (1-y)*T.log(1-p_1)  # Cross-entropy, element-wise per sample
cost = xent.mean() + 0.01*(w**2).sum()     # The cost to optimize: mean cross-entropy + L2 penalty on w
gw,gb = T.grad(cost, [w,b])                # Symbolic gradients of the cost w.r.t. both parameters

# Compile expressions to functions
# NOTE(review): truncated here — the argument list of this call (inputs,
# outputs, updates) is not visible in this chunk.
train = theano_test.function(
#encoding:UTF-8 __author__ = 'auroua' import numpy as np import theano_test import theano_test.tensor as T rng = np.random N = 400 feats = 784 D = (rng.randn(N,feats),rng.randint(size=N,low=0,high=2)) training_steps = 10000 # Declare Theano symbolic variables x = T.matrix("x") y = T.vector('y') w = theano_test.shared(rng.randn(feats),name='w') b = theano_test.shared(0.,name='b') print 'Initial model:' print w.get_value(),b.get_value() # Construct Theano expression graph p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b)) # Probability that target = 1 prediction = p_1 > 0.5 # The prediction thresholded xent = -y * T.log(p_1) - (1-y) * T.log(1-p_1) # Cross-entropy loss function cost = xent.mean() + 0.01 * (w ** 2).sum()# The cost to minimize gw, gb = T.grad(cost, [w, b]) # Compute the gradient of the cost # (we shall return to this in a # following section of this tutorial) # Compile train = theano_test.function( inputs=[x,y],
"""Inspect a small Theano expression graph and render it with pydot.

Walks the Apply node that `*2` creates, then compiles ``a + a**10`` and
writes both the unoptimized and the optimized graph as PNG images.
"""
__author__ = 'auroua'
import theano_test.tensor as T
from theano_test import function
import theano_test
import pydot

# Confirm graphviz is reachable before trying to render anything.
print(pydot.find_graphviz())

# A minimal graph: one matrix input doubled.  The multiplication produces an
# Apply node, reachable through the output variable's `.owner` attribute.
mat_in = T.dmatrix('x')
doubled = mat_in * 2
print(type(doubled.owner))
print(doubled.owner.op.name)
print(len(doubled.owner.inputs))
# The constant 2 is itself wrapped in a variable; inspect where it came from.
print(type(doubled.owner.inputs[1].owner))

# Apply nodes are the computations the graph performs.  What theano.function
# receives is really a graph (from the outputs you can traverse up to the
# inputs); besides describing the computation, that graph is what the
# optimizer rewrites to carry it out more efficiently.
vec_in = T.vector('a')
poly = vec_in + vec_in ** 10
poly_fn = function([vec_in], poly)
print(poly_fn([0, 1, 2]))

# Render the graph before and after optimization for comparison.
theano_test.printing.pydotprint(poly, outfile="/home/auroua/symbolic_graph_unopt.png", var_with_name_simple=True)
theano_test.printing.pydotprint(poly_fn, outfile="/home/auroua/symbolic_graph_opt.png", var_with_name_simple=True)