def _test_function():
    """Smoke-test the activation and initialization helpers.

    Builds a 2-D symbolic variable, applies the linear/relu/softmax
    activations, initializes a (16, 24) Glorot-uniform weight matrix,
    and pretty-prints each resulting symbolic expression for visual
    inspection. No assertions — output is checked by eye.
    """
    print('\n------------------------------------------------------------')
    print('Test: activation, initialization functions')
    # NOTE(review): build() elsewhere in this file uses TU.theano_variable;
    # UL here may be a stale alias for the same utility module — confirm.
    X = UL.theano_variable(2, 'X')
    linear = activation_linear(X)
    relu = activation_relu(X)
    softmax = activation_softmax(X)
    # Bug fix: these were Python 2 print statements while the last line
    # already used the Python 3 call form — normalized to print() calls.
    print(P.pprint(linear))
    print(P.pprint(relu))
    print(P.pprint(softmax))
    W = init_glorot_uniform((16, 24), 'W')
    print(P.pprint(W))
def build(self):
    """Create this layer's symbolic input variable, plus an int8 mask
    variable when a mask dimension is configured.

    Sets ``self.X`` always; sets ``self.m`` only if ``self.mask_dim``
    is truthy.
    """
    self.X = TU.theano_variable(self.input_dim, name=self.name)
    if not self.mask_dim:
        return  # no mask configured for this layer
    self.m = TU.theano_variable(
        self.mask_dim, name='mask_' + self.name, dtype='int8'
    )