def eval_forward(self, x, u):
    """
    Evaluates the layer forward, i.e. from input x and random numbers u to output y.
    :param x: numpy array
    :param u: numpy array
    :return: numpy array
    """

    if self.eval_forward_f is None:

        # conditional input
        tt_x = tt.matrix('x')

        # masked random numbers
        tt_u = tt.matrix('u')
        mu = self.mask * tt_u

        # scale net
        s_net = nn.FeedforwardNet(self.n_inputs + self.n_outputs, tt.concatenate([tt_x, mu], axis=1))
        for h in self.s_hiddens:
            s_net.addLayer(h, self.s_act)
        s_net.addLayer(self.n_outputs, 'linear')
        util.copy_model_parms(self.s_net, s_net)
        s = s_net.output

        # translate net
        t_net = nn.FeedforwardNet(self.n_inputs + self.n_outputs, tt.concatenate([tt_x, mu], axis=1))
        for h in self.t_hiddens:
            t_net.addLayer(h, self.t_act)
        t_net.addLayer(self.n_outputs, 'linear')
        util.copy_model_parms(self.t_net, t_net)
        t = t_net.output

        # transform (x, u) -> y
        y = mu + (1.0 - self.mask) * (tt_u * tt.exp(s) + t)

        # compile theano function
        self.eval_forward_f = theano.function(inputs=[tt_x, tt_u], outputs=y)

    return self.eval_forward_f(x.astype(dtype), u.astype(dtype))
def eval_forward(self, u):
    """
    Evaluates the layer forward, i.e. from random numbers u to output x.
    :param u: numpy array
    :return: numpy array
    """

    if self.eval_forward_f is None:

        # masked random numbers
        tt_u = tt.matrix('u')
        mu = self.mask * tt_u

        # scale net
        s_net = nn.FeedforwardNet(self.n_inputs, mu)
        for h in self.s_hiddens:
            s_net.addLayer(h, self.s_act)
        s_net.addLayer(self.n_inputs, 'linear')
        util.copy_model_parms(self.s_net, s_net)
        s = s_net.output

        # translate net
        t_net = nn.FeedforwardNet(self.n_inputs, mu)
        for h in self.t_hiddens:
            t_net.addLayer(h, self.t_act)
        t_net.addLayer(self.n_inputs, 'linear')
        util.copy_model_parms(self.t_net, t_net)
        t = t_net.output

        # transform u -> x
        x = mu + (1.0 - self.mask) * (tt_u * tt.exp(s) + t)

        # compile theano function
        self.eval_forward_f = theano.function(inputs=[tt_u], outputs=x)

    return self.eval_forward_f(u.astype(dtype))
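# --- Illustrative sketch (not part of the original layer code) --------------
# Both eval_forward methods compile a Theano graph for the same Real NVP-style
# affine coupling step. The minimal NumPy version below shows that step with
# the scale s and translation t supplied directly, rather than computed by the
# scale/translate nets (which, in the conditional variant, also see the
# conditioning input x). The function name, shapes and values are hypothetical.

import numpy as np

def coupling_forward(u, s, t, mask):
    """Affine coupling: masked dimensions pass through; the rest are scaled and shifted."""
    mu = mask * u
    return mu + (1.0 - mask) * (u * np.exp(s) + t)

# Example with 4-dimensional inputs and an alternating binary mask.
rng = np.random.RandomState(0)
u = rng.randn(3, 4)                       # batch of random numbers
mask = np.array([1.0, 0.0, 1.0, 0.0])     # dimensions with mask == 1 are copied unchanged
s = 0.1 * rng.randn(3, 4)                 # stand-in for the scale net output
t = 0.1 * rng.randn(3, 4)                 # stand-in for the translate net output

y = coupling_forward(u, s, t, mask)

# The step is invertible on the non-masked dimensions, which is what makes the
# layer usable as a normalizing-flow transform:
u_rec = mask * y + (1.0 - mask) * (y - t) * np.exp(-s)
assert np.allclose(u_rec, u)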