def SpCrossEntropyCostFunction(o, y):
    """Mean cross-entropy cost for a (sparse) matrix output.

    :param o: predicted output, a sparse matrix (entries presumably in (0,1) -- verify)
    :param y: target output, a sparse matrix of the same shape
    :return: scalar theano expression -- the mean over rows of the per-row cross-entropy
    """
    # per-row cross-entropy: -sum_j [ y*log(o) + (1-y)*log(1-o) ]
    pos_term = y * ST.structured_log(o)
    neg_term = (1 - y) * ST.structured_log(1 - o)
    row_loss = -ST.sp_sum(pos_term + neg_term, axis=1)
    return T.mean(row_loss)
def SpCrossEntropyCostFunction(o, y):
    '''
    Mean cross-entropy cost, used for (sparse) matrix output.

    :param o: predicted output, a sparse matrix (entries presumably in (0,1) -- verify)
    :param y: target output, a sparse matrix of the same shape as o
    :return: scalar theano expression -- mean over rows of the per-row cross-entropy
    '''
    # per-row loss: -sum_j [ y*log(o) + (1-y)*log(1-o) ], summed along axis=1
    L = -ST.sp_sum(
        y * ST.structured_log(o) + (1 - y) * ST.structured_log(1 - o), axis=1)
    cost = T.mean(L)
    return cost
def theanoExpr(self,env):
    """Bind dst to the vec vector scaled by the weighter's total weight."""
    # sp_sum collapses the weighter to a scalar; sparse_grad=True keeps
    # the gradient sparse through the reduction
    total_weight = B.sp_sum(env.binding[self.weighter], sparse_grad=True)
    env.binding[self.dst] = env.binding[self.vec] * total_weight
def theanoExpr(self,env):
    """Bind dst to a one-hot constant vector scaled by the weighter's total weight."""
    # look up the one-hot row for the constant symbol ...
    onehot_row = env.db.onehot(self.onehotConst)
    # ... and wrap its CSR pieces as a theano sparse constant
    theanoConstVec = S.CSR(onehot_row.data, onehot_row.indices,
                           onehot_row.indptr, onehot_row.shape)
    total_weight = B.sp_sum(env.binding[self.weighter], sparse_grad=True)
    env.binding[self.dst] = theanoConstVec * total_weight
import theano.sparse.basic as B import matrixdb import numpy if __name__ == "__main__": db = matrixdb.MatrixDB.loadFile("test/fam.cfacts") va = db.onehot('william') vb = db.onehot('sarah') print 'a', va print 'b', vb print 'shape', va.shape print 'f1' tx = S.csr_matrix('x') r1 = B.sp_sum(tx + tx + tx, sparse_grad=True) s = tx * r1 f1 = theano.function(inputs=[tx], outputs=[s]) w = f1(va) print w[0] #print db.rowAsSymbolDict(w[0]) print 'f2(w=a,c=b)' tw = S.csr_matrix('w') #weighter tc = S.csr_matrix('c') #constant r2 = B.sp_sum(tw * 1.7, sparse_grad=True) s2 = tc * r2 f2 = theano.function(inputs=[tw, tc], outputs=[s2]) w = f2(va, vb) print w[0]
p.listing() # # load the data # xs, ys = loadExamples("test/textcattoy-train.examples", p.db) #returns inputs and outputs that are used to build the prediction #function mode = tensorlog.ModeDeclaration('predict(i,o)') ins, outs = p.theanoPredictExpr(mode, ['x']) scorex = outs[0] #the actual score vector for x # something simple to try differentiating toyLoss = B.sp_sum(scorex, sparse_grad=True) print 'gradToyLoss...' gradToyLoss = T.grad(toyLoss, p.getParamList()) # # now define a theano function that computes loss for ONE example # y = S.csr_matrix('y') prob = scorex * (1.0 / B.sp_sum(scorex, sparse_grad=True)) #scale x to 0-1 loss = B.sp_sum(-y * B.structured_log(prob), sparse_grad=True) #cross-entropy loss print 'loss...' theano.printing.debugprint(loss) lossFun = theano.function(inputs=[ins[0], y], outputs=[loss]) #
# NOTE(review): the next two lines are the tail of debugVar(...), whose
# definition starts above this chunk -- indentation reconstructed; confirm
# nesting against the full function body.
    for v in a.inputs:
        # recurse into each input of the apply node, one level deeper
        debugVar(v, depth=depth+1, maxdepth=maxdepth)

if __name__=="__main__":
    # Smoke test: build s = x*((x+x)+x) and dump its graph with debugVar.
    db = matrixdb.MatrixDB.loadFile("test/fam.cfacts")
    va = db.onehot('william')
    vb = db.onehot('sarah')
    print 'a',va
    print 'b',vb
    print 'shape',va.shape

    print 'f1: s = x*((x+x)+x)'
    tx = S.csr_matrix('x')
    r1 = B.sp_sum(tx+tx+tx,sparse_grad=True)
    s = tx*r1
    s.name = 's'
    f1 = theano.function(inputs=[tx],outputs=[s])
    w = f1(va)
    print w[0]
    debugVar(s)
    #print db.rowAsSymbolDict(w[0])
    # # print 'f2(w=a,c=b)'
    # tw = S.csr_matrix('w') #weighter
    # tc = S.csr_matrix('c') #constant
    # r2 = B.sp_sum(tw*1.7,sparse_grad=True)
    # s2 = tc*r2
import theano.sparse.basic as B import matrixdb import numpy if __name__=="__main__": db = matrixdb.MatrixDB.loadFile("test/fam.cfacts") va = db.onehot('william') vb = db.onehot('sarah') print 'a',va print 'b',vb print 'shape',va.shape print 'f1' tx = S.csr_matrix('x') r1 = B.sp_sum(tx+tx+tx,sparse_grad=True) s = tx*r1 f1 = theano.function(inputs=[tx],outputs=[s]) w = f1(va) print w[0] #print db.rowAsSymbolDict(w[0]) print 'f2(w=a,c=b)' tw = S.csr_matrix('w') #weighter tc = S.csr_matrix('c') #constant r2 = B.sp_sum(tw*1.7,sparse_grad=True) s2 = tc*r2 f2 = theano.function(inputs=[tw,tc],outputs=[s2]) w = f2(va,vb) print w[0]
def theanoExpr(self, env):
    """Bind env.binding[dst] to the vec vector scaled by the weighter's total weight.

    sp_sum reduces the weighter binding to a scalar (sparse_grad=True keeps
    the gradient of the reduction sparse), so dst = vec * sum(weighter).
    """
    env.binding[self.dst] = env.binding[self.vec] * B.sp_sum(
        env.binding[self.weighter], sparse_grad=True)
def theanoExpr(self, env):
    """Bind env.binding[dst] to a one-hot constant vector scaled by the weighter's weight."""
    #convert to a sparse vector constant: look up the one-hot row for the
    #constant symbol, then wrap its CSR pieces as a theano sparse constant
    c = env.db.onehot(self.onehotConst)
    theanoConstVec = S.CSR(c.data, c.indices, c.indptr, c.shape)
    # dst = const one-hot vector * sum(weighter); sparse_grad=True keeps the
    # gradient of the reduction sparse
    env.binding[self.dst] = theanoConstVec * B.sp_sum(
        env.binding[self.weighter], sparse_grad=True)