def test_bcast_semantics():
    """Dygraph: incompatible non-1 dims (3 vs 8) must make broadcast_tensors fail.

    NOTE(review): the original passed name="x9"/"x10" to np.ones, which is not
    a valid numpy keyword — numpy raised TypeError before paddle was ever
    called, so the broadcast-semantics error path was never exercised.
    """
    inputs = [
        paddle.to_tensor(np.ones(shape=[1, 3, 1, 1], dtype='float32')),
        paddle.to_tensor(np.ones(shape=[1, 8, 1, 1], dtype='float32')),
    ]
    paddle.broadcast_tensors(inputs)
def test_type():
    """Dygraph: mixed dtypes (float32 vs float64) must make broadcast_tensors fail.

    NOTE(review): the original passed name="x4"/"x5" to np.ones, which is not
    a valid numpy keyword — numpy raised TypeError before paddle was ever
    called, so the dtype-mismatch error path was never exercised.
    """
    inputs = [
        paddle.to_tensor(np.ones(shape=[1, 1, 1, 1], dtype='float32')),
        paddle.to_tensor(np.ones(shape=[1, 4, 1, 1], dtype='float64')),
    ]
    paddle.broadcast_tensors(inputs)
def test_dtype():
    """Dygraph: int8 is an unsupported dtype and must make broadcast_tensors fail.

    NOTE(review): the original passed name="x6"/"x7" to np.ones, which is not
    a valid numpy keyword — numpy raised TypeError before paddle was ever
    called, so the unsupported-dtype error path was never exercised.
    """
    inputs = [
        paddle.to_tensor(np.ones(shape=[1, 1, 1, 1], dtype='int8')),
        paddle.to_tensor(np.ones(shape=[1, 4, 1, 1], dtype='int8')),
    ]
    paddle.broadcast_tensors(inputs)
def test_dtype():
    """Static graph: int8 inputs should be rejected by broadcast_tensors."""
    lhs = paddle.fluid.layers.data(shape=[1, 1, 1, 1], dtype='int8', name="x6")
    rhs = paddle.fluid.layers.data(shape=[1, 4, 1, 1], dtype='int8', name="x7")
    paddle.broadcast_tensors([lhs, rhs])
def test_bcast_semantics():
    """Static graph: non-broadcastable dims (3 vs 8) should be rejected."""
    lhs = paddle.fluid.layers.data(
        shape=[1, 3, 1, 1], dtype='float32', name="x9")
    rhs = paddle.fluid.layers.data(
        shape=[1, 8, 1, 1], dtype='float32', name="x10")
    paddle.broadcast_tensors([lhs, rhs])
def test_type():
    """Static graph: mismatched dtypes (float32 vs float64) should be rejected."""
    lhs = paddle.fluid.layers.data(
        shape=[1, 1, 1, 1], dtype='float32', name="x4")
    rhs = paddle.fluid.layers.data(
        shape=[1, 4, 1, 1], dtype='float64', name="x5")
    paddle.broadcast_tensors([lhs, rhs])
def test_static():
    """Static graph: broadcast two compatible 4-D placeholders."""
    lhs = paddle.fluid.layers.data(
        shape=[4, 1, 4, 1], dtype='float32', name="x0")
    rhs = paddle.fluid.layers.data(
        shape=[1, 4, 1, 4], dtype='float32', name="x1")
    paddle.broadcast_tensors([lhs, rhs])
def test_dynamic():
    """Dygraph: broadcast two compatible random 4-D tensors.

    Static mode is restored in ``finally`` so a failure inside the dygraph
    section cannot leak dynamic mode into subsequent tests.
    """
    paddle.disable_static()
    try:
        lhs = paddle.to_tensor(
            np.random.random([4, 1, 4, 1]).astype("float32"))
        rhs = paddle.to_tensor(
            np.random.random([1, 4, 1, 4]).astype("float32"))
        paddle.broadcast_tensors([lhs, rhs])
    finally:
        paddle.enable_static()
def __init__(self, alpha, beta):
    """Build a Beta distribution from scalar or tensor concentration parameters.

    Real-number arguments are promoted to 1-element tensors, then both
    parameters are broadcast to a common shape.
    """
    alpha, beta = (
        paddle.full(shape=[1], fill_value=p) if isinstance(p, numbers.Real) else p
        for p in (alpha, beta)
    )
    self.alpha, self.beta = paddle.broadcast_tensors([alpha, beta])
    # Beta(alpha, beta) is realized internally as a two-component Dirichlet
    # over the stacked concentrations.
    self._dirichlet = Dirichlet(paddle.stack([self.alpha, self.beta], -1))
    super(Beta, self).__init__(self._dirichlet._batch_shape)
def log_prob(self, value):
    """Log of the probability mass function evaluated at ``value``.

    Args:
        value (Tensor): counts at which to evaluate the log-pmf; integer
            tensors are cast to the dtype of ``self.probs``.

    Returns:
        Tensor: log-probability of ``value``.
    """
    if paddle.is_integer(value):
        value = paddle.cast(value, self.probs.dtype)

    log_p, value = paddle.broadcast_tensors([paddle.log(self.probs), value])
    # A zero count contributes nothing to the pmf even when the matching
    # log-probability is -inf, so zero out those entries before multiplying.
    log_p[(value == 0) & (paddle.isinf(log_p))] = 0

    total = value.sum(-1)
    return (paddle.lgamma(total + 1)
            - paddle.lgamma(value + 1).sum(-1)
            + (value * log_p).sum(-1))