Example #1
 def test_cloning_available(self):
     gop = generator(integers())
     res = gop ** 2
     shared = theano.shared(np.float32(10))
     res1 = theano.clone(res, {gop: shared})
     f = theano.function([], res1)
     assert f() == np.float32(100)
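For reference, the same replace-with-a-shared-variable pattern in self-contained form; a minimal sketch assuming only theano and numpy, with a plain symbolic scalar standing in for the pymc3 generator op:

import numpy as np
import theano
import theano.tensor as tt

x = tt.fscalar('x')                    # stands in for the generator op
res = x ** 2
shared = theano.shared(np.float32(10))
res1 = theano.clone(res, {x: shared})  # substitute the shared variable for x
f = theano.function([], res1)          # the graph no longer needs an input
assert f() == np.float32(100)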
Example #2
def join_nonshared_inputs(xs, vars, shared, make_shared=False):
    """
    Takes a list of theano Variables and joins their non shared inputs into a single input.
    
    Parameters
    ----------
    xs : list of theano tensors
    vars : list of variables to join

    Returns
    -------
    tensors, inarray
    tensors : list of same tensors but with inarray as input
    inarray : vector of inputs
    """
    joined = theano.tensor.concatenate([var.ravel() for var in vars])

    if not make_shared:
        tensor_type = joined.type
        inarray = tensor_type('inarray')
    else:
        inarray = theano.shared(joined.tag.test_value, 'inarray')

    ordering = ArrayOrdering(vars)
    inarray.tag.test_value = joined.tag.test_value
    
    get_var = {var.name: var for var in vars}
    replace = {
        get_var[var]: reshape_t(inarray[slc], shp).astype(dtyp)
        for var, slc, shp, dtyp in ordering.vmap
    }

    replace.update(shared)

    xs_special = [theano.clone(x, replace, strict=False) for x in xs]
    return xs_special, inarray
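Stripped of the pymc3 helpers (ArrayOrdering, reshape_t), the technique is: ravel the free inputs into one vector, then clone the graph so it reads slices of that vector. A minimal sketch with made-up sizes:

import numpy as np
import theano
import theano.tensor as tt

a = tt.vector('a')                     # pretend a has 3 elements
b = tt.vector('b')                     # and b has 2
cost = (a ** 2).sum() + (b ** 2).sum()

inarray = tt.vector('inarray')         # single joined input
replace = {a: inarray[0:3], b: inarray[3:5]}
cost_joined = theano.clone(cost, replace, strict=False)

f = theano.function([inarray], cost_joined)
print(f(np.ones(5, dtype=theano.config.floatX)))  # 5.0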
Example #3
 def add_obj_updates(self, updates, obj_n_mc=None, obj_optimizer=adam,
                     more_obj_params=None, more_replacements=None):
     if obj_n_mc is not None:
         _warn_not_used('obj_n_mc', self.op)
     d_obj_padams = self(None)
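     # apply caller-supplied graph replacements before building optimizer updates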
     d_obj_padams = theano.clone(d_obj_padams, more_replacements, strict=False)
     updates.update(obj_optimizer([d_obj_padams], self.obj_params))
Example #4
def join_nonshared_inputs(xs, vars, shared):
    """
    Takes a list of theano Variables and joins their non shared inputs into a single input.
    
    Parameters
    ----------
    xs : list of theano tensors
    vars : list of variables to join

    Returns
    -------
    tensors, inarray
    tensors : list of same tensors but with inarray as input
    inarray : vector of inputs
    """
    joined = theano.tensor.concatenate([var.ravel() for var in vars])

    tensor_type = joined.type
    inarray = tensor_type('inarray')
    ordering = ArrayOrdering(vars)
    inarray.tag.test_value = joined.tag.test_value

    get_var = {var.name: var for var in vars}
    replace = {
        get_var[var]: reshape_t(inarray[slc], shp).astype(dtyp)
        for var, slc, shp, dtyp in ordering.vmap
    }

    replace.update(shared)

    xs_special = [theano.clone(x, replace, strict=False) for x in xs]
    return xs_special, inarray
Example #5
 def logp_norm(self):
     sized_symbolic_logp = self.approx.sized_symbolic_logp
     if self.use_histogram:
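          # evaluate logp at the stored sample set ("histogram") instead of fresh symbolic draws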
         sized_symbolic_logp = theano.clone(
             sized_symbolic_logp,
             dict(zip(self.approx.symbolic_randoms, self.approx.collect('histogram')))
         )
     return sized_symbolic_logp / self.approx.symbolic_normalizing_constant
Example #6
    def __call__(self, input):
        """ Replaces the single input of symbolic variable to be the passed argument.

        Parameters
        ----------
        input : TensorVariable
        """
        oldinput, = inputvars(self.tensor)
        return theano.clone(self.tensor, {oldinput: input}, strict=False)
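A standalone sketch of the same single-input substitution; all names here are illustrative, not from the pymc3 source:

import theano
import theano.tensor as tt

x = tt.scalar('x')
tensor = tt.exp(x) + 1                 # graph with one free input

y = tt.scalar('y')
replaced = theano.clone(tensor, {x: y * 2}, strict=False)
f = theano.function([y], replaced)
assert abs(f(0.0) - 2.0) < 1e-6        # exp(0 * 2) + 1 == 2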
Example #7
 def test_gen_cloning_with_shape_change(self):
     data = floatX(np.random.uniform(size=(1000, 10)))
     minibatches = DataSampler(data, batchsize=50)
     gen = generator(minibatches)
     gen_r = tt_rng().normal(size=gen.shape).T
     X = gen.dot(gen_r)
     res, _ = theano.scan(lambda x: x.sum(), X, n_steps=X.shape[0])
     assert res.eval().shape == (50,)
     shared = theano.shared(data)
     res2 = theano.clone(res, {gen: shared**2})
     assert res2.eval().shape == (1000, )
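A reduced sketch of the same shape-change behaviour without the generator machinery; theano.clone rebuilds the scan against the new outer input, so the output length follows the replacement:

import numpy as np
import theano
import theano.tensor as tt

X = tt.matrix('X')
res, _ = theano.scan(lambda row: row.sum(), X, n_steps=X.shape[0])

big = theano.shared(np.ones((1000, 10), dtype=theano.config.floatX))
res2 = theano.clone(res, {X: big ** 2})
assert res2.eval().shape == (1000,)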
Example #8
 def __call__(self, z, **kwargs):
     op = self.op  # type: KSD
     grad = op.apply(self.tf)
     if 'more_obj_params' in kwargs:
         params = self.obj_params + kwargs['more_obj_params']
     else:
         params = self.test_params + kwargs['more_tf_params']
         grad *= pm.floatX(-1)
     grad = theano.clone(grad, {op.input_matrix: z})
     grad = tt.grad(None, params, known_grads={z: grad})
     return grad
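The tt.grad(None, params, known_grads=...) idiom seen here injects an externally computed gradient at an intermediate node and backpropagates it to the parameters. A minimal illustration:

import numpy as np
import theano
import theano.tensor as tt

w = tt.vector('w')
z = w * 2                              # intermediate node
g_z = tt.ones_like(z)                  # externally supplied gradient for z
g_w = tt.grad(None, w, known_grads={z: g_z})  # backprop it down to w
f = theano.function([w], g_w)
print(f(np.zeros(3, dtype=theano.config.floatX)))  # [2. 2. 2.]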
Example #9
    def _build_joined(self, cost, args, vmap):
        args_joined = tt.vector('__args_joined')
        args_joined.tag.test_value = np.zeros(self.size, dtype=self.dtype)

        joined_slices = {}
        for vm in vmap:  # avoid shadowing the vmap argument
            sliced = args_joined[vm.slc].reshape(vm.shp)
            sliced.name = vm.var
            joined_slices[vm.var] = sliced

        replace = {var: joined_slices[var.name] for var in args}
        return args_joined, theano.clone(cost, replace=replace)
Example #10
 def dlogp(self):
     loc_random = self.input_matrix[..., :self.approx.local_size]
     glob_random = self.input_matrix[..., self.approx.local_size:]
     loc_grad, glob_grad = tt.grad(
         self.logp_norm.sum(),
         [self.approx.symbolic_random_local_matrix,
          self.approx.symbolic_random_global_matrix],
         disconnected_inputs='ignore'
     )
     loc_grad, glob_grad = theano.clone(
         [loc_grad, glob_grad],
         {self.approx.symbolic_random_local_matrix: loc_random,
          self.approx.symbolic_random_global_matrix: glob_random}
     )
     return tt.concatenate([loc_grad, glob_grad], axis=-1)
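theano.clone also accepts a list of outputs, as used here, cloning all of them against one replacement dict in a single pass. A small sketch:

import theano
import theano.tensor as tt

a = tt.vector('a')
outs = [a.sum(), (a ** 2).sum()]
b = tt.vector('b')
outs_b = theano.clone(outs, {a: b})    # both graphs now read from b
f = theano.function([b], outs_b)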