Example #1
def local_rv_size_lift(fgraph, node):
    """Lift the ``size`` parameter in a ``RandomVariable``.

    In other words, this will broadcast the distribution parameters by adding
    the extra dimensions implied by the ``size`` parameter, and remove the
    ``size`` parameter in the process.

    For example, ``normal(0, 1, size=(1, 2))`` becomes
    ``normal([[0, 0]], [[1, 1]], size=())``.

    """

    if not isinstance(node.op, RandomVariable):
        return

    rng, size, dtype, *dist_params = node.inputs

    dist_params = broadcast_params(dist_params, node.op.ndims_params)

    if get_vector_length(size) == 0:
        return

    dist_params = [
        broadcast_to(
            p, (tuple(size) + tuple(p.shape)) if node.op.ndim_supp > 0 else size
        )
        for p in dist_params
    ]

    new_node = node.op.make_node(rng, None, dtype, *dist_params)

    if config.compute_test_value != "off":
        compute_test_value(new_node)

    return new_node.outputs
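To see concretely what this rewrite does, the docstring's transformation can be reproduced with plain NumPy (a minimal sketch; ``np.broadcast_to`` stands in for Aesara's ``broadcast_to``):

import numpy as np

# normal(0, 1, size=(1, 2)) -> normal([[0, 0]], [[1, 1]], size=())
loc, scale, size = np.asarray(0.0), np.asarray(1.0), (1, 2)

# Lifting moves ``size`` into the parameters by broadcasting each of them.
lifted_loc = np.broadcast_to(loc, size)      # [[0., 0.]]
lifted_scale = np.broadcast_to(scale, size)  # [[1., 1.]]

# Drawing with the lifted parameters and an empty ``size`` is equivalent.
rng = np.random.default_rng(0)
assert rng.normal(lifted_loc, lifted_scale).shape == (1, 2)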
Example #2
def lift_rv_shapes(node):
    """Lift `RandomVariable`'s shape-related parameters.

    In other words, this will broadcast the distribution parameters by adding
    the extra dimensions implied by the `size` parameter, and remove the
    `size` parameter in the process.

    For example, ``normal([0.0, 1.0], 5.0, size=(3, 2))`` becomes
    ``normal([[0., 1.], [0., 1.], [0., 1.]], [[5., 5.], [5., 5.], [5., 5.]])``.

    """

    if not isinstance(node.op, RandomVariable):
        return False

    rng, size, dtype, *dist_params = node.inputs

    dist_params = broadcast_params(dist_params, node.op.ndims_params)

    dist_params = [
        broadcast_to(
            p, (tuple(size) + tuple(p.shape)) if node.op.ndim_supp > 0 else size
        )
        for p in dist_params
    ]

    return node.op.make_node(rng, None, dtype, *dist_params)
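The docstring's example can likewise be sanity-checked with NumPy (again a sketch, not the Aesara implementation):

import numpy as np

loc, scale, size = np.array([0.0, 1.0]), np.asarray(5.0), (3, 2)

# Both parameters are lifted to the full ``size`` shape from the docstring.
lifted_loc = np.broadcast_to(loc, size)    # [[0., 1.], [0., 1.], [0., 1.]]
lifted_scale = np.broadcast_to(scale, size)

assert np.random.default_rng(0).normal(lifted_loc, lifted_scale).shape == size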
Example #3
    def test_avoid_useless_subtensors(self):
        x = scalar()
        y = broadcast_to(x, (1, 2))
        # There shouldn't be any unnecessary `Subtensor` operations
        # (e.g. from `at.as_tensor((1, 2))[0]`)
        assert y.owner.inputs[1].owner is None
        assert y.owner.inputs[2].owner is None
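For context, a minimal interactive check of the same property (assuming Aesara's public module layout):

import aesara.tensor as at
from aesara.tensor.extra_ops import broadcast_to

x = at.scalar()
y = broadcast_to(x, (1, 2))
# Each literal dimension should arrive as a TensorConstant (owner is None)
# rather than as a Subtensor slice of at.as_tensor((1, 2)).
print([inp.owner for inp in y.owner.inputs[1:]])  # [None, None]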
Example #4
    @config.change_flags(compute_test_value="raise")
    def test_perform(self):
        a = scalar()
        a.tag.test_value = 5

        s_1 = iscalar("s_1")
        s_1.tag.test_value = 4
        shape = (s_1, 1)

        bcast_res = broadcast_to(a, shape)

        assert bcast_res.broadcastable == (False, True)

        bcast_np = np.broadcast_to(5, (4, 1))
        bcast_aet = bcast_res.get_test_value()

        assert np.array_equal(bcast_aet, bcast_np)
        assert np.shares_memory(bcast_aet, a.get_test_value())
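The final assertion leans on a NumPy guarantee worth noting: ``np.broadcast_to`` returns a read-only view, so no data is copied. A standalone check:

import numpy as np

a = np.array(5.0)
b = np.broadcast_to(a, (4, 1))

assert np.shares_memory(a, b)   # a view, not a copy
assert not b.flags.writeable    # and it is read-only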
Example #5
    def test_inplace(self):
        """Make sure that in-place optimizations are *not* performed on the output of a ``BroadcastTo``."""
        a = aet.zeros((5,))
        d = aet.vector("d")
        c = aet.set_subtensor(a[np.r_[0, 1, 3]], d)
        b = broadcast_to(c, (5,))
        q = b[np.r_[0, 1, 3]]
        e = aet.set_subtensor(q, np.r_[0, 0, 0])

        opts = Query(include=["inplace"])
        py_mode = Mode("py", opts)
        e_fn = function([d], e, mode=py_mode)

        advincsub_node = e_fn.maker.fgraph.outputs[0].owner
        assert isinstance(advincsub_node.op, AdvancedIncSubtensor1)
        assert isinstance(advincsub_node.inputs[0].owner.op, BroadcastTo)

        assert advincsub_node.op.inplace is False
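Why in-place is unsafe here: the view produced for a ``BroadcastTo`` output aliases the input's memory, so writing into it would corrupt the original value. A NumPy illustration of the hazard:

import numpy as np

c = np.zeros(5)
b = np.broadcast_to(c, (5,))

# Writing into ``b`` would alias ``c``'s memory, which is why the rewrite
# must leave ``inplace=False`` on this node; NumPy refuses outright
# because the broadcast view is read-only.
try:
    b[np.r_[0, 1, 3]] = 1.0
except ValueError as err:
    print(err)  # assignment destination is read-only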
Example #6
class TestBroadcastTo(utt.InferShapeTester):

    rng = np.random.RandomState(43)

    def setup_method(self):
        super().setup_method()
        self.op_class = BroadcastTo
        self.op = broadcast_to

    @config.change_flags(compute_test_value="raise")
    def test_perform(self):
        a = scalar()
        a.tag.test_value = 5

        s_1 = iscalar("s_1")
        s_1.tag.test_value = 4
        shape = (s_1, 1)

        bcast_res = broadcast_to(a, shape)

        assert bcast_res.broadcastable == (False, True)

        bcast_np = np.broadcast_to(5, (4, 1))
        bcast_aet = bcast_res.get_test_value()

        assert np.array_equal(bcast_aet, bcast_np)
        assert np.shares_memory(bcast_aet, a.get_test_value())

    @pytest.mark.parametrize(
        "fn,input_dims",
        [
            [lambda x: broadcast_to(x, (1,)), (1,)],
            [lambda x: broadcast_to(x, (6, 2, 5, 3)), (1,)],
            [lambda x: broadcast_to(x, (6, 2, 5, 3)), (5, 1)],
            [lambda x: broadcast_to(x, (6, 2, 1, 3)), (2, 1, 3)],
        ],
    )
    def test_gradient(self, fn, input_dims):
        utt.verify_grad(
            fn,
            [np.random.rand(*input_dims).astype(config.floatX)],
            n_tests=1,
            rng=self.rng,
        )

    def test_infer_shape(self):
        a = tensor(config.floatX, [False, True, False])
        shape = list(a.shape)
        out = self.op(a, shape)

        self._compile_and_check(
            [a] + shape,
            [out],
            [np.random.rand(2, 1, 3).astype(config.floatX), 2, 1, 3],
            self.op_class,
        )

        a = tensor(config.floatX, [False, True, False])
        shape = [iscalar() for i in range(4)]
        self._compile_and_check(
            [a] + shape,
            [self.op(a, shape)],
            [np.random.rand(2, 1, 3).astype(config.floatX), 6, 2, 5, 3],
            self.op_class,
        )

    def test_inplace(self):
        """Make sure that in-place optimizations are *not* performed on the output of a ``BroadcastTo``."""
        a = aet.zeros((5,))
        d = aet.vector("d")
        c = aet.set_subtensor(a[np.r_[0, 1, 3]], d)
        b = broadcast_to(c, (5,))
        q = b[np.r_[0, 1, 3]]
        e = aet.set_subtensor(q, np.r_[0, 0, 0])

        opts = Query(include=["inplace"])
        py_mode = Mode("py", opts)
        e_fn = function([d], e, mode=py_mode)

        advincsub_node = e_fn.maker.fgraph.outputs[0].owner
        assert isinstance(advincsub_node.op, AdvancedIncSubtensor1)
        assert isinstance(advincsub_node.inputs[0].owner.op, BroadcastTo)

        assert advincsub_node.op.inplace is False
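As an aside on ``test_gradient``: the adjoint of broadcasting is a sum over the broadcast axes, which is what ``verify_grad`` checks numerically. A hand-rolled sketch for one of the parametrized cases:

import numpy as np

x = np.random.rand(5, 1)
g_out = np.ones((6, 2, 5, 3))  # upstream gradient w.r.t. broadcast_to(x, (6, 2, 5, 3))

# Sum over the prepended axes (0, 1) and the stretched trailing axis.
g_x = g_out.sum(axis=(0, 1)).sum(axis=-1, keepdims=True)
assert g_x.shape == x.shape  # (5, 1)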
Example #7
    def test_avoid_useless_scalars(self):
        x = scalar()
        y = broadcast_to(x, ())
        assert y is x
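Note the contrast with NumPy, where even a no-op broadcast yields a new view object; the Aesara helper instead returns the original variable unchanged:

import numpy as np

x = np.float64(5.0)
y = np.broadcast_to(x, ())

assert y.shape == () and y is not x  # NumPy wraps; broadcast_to(x, ()) above does not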
Example #8
def test_BroadcastTo(x, shape):
    out = at_extra_ops.broadcast_to(x, shape)
    fgraph = FunctionGraph(outputs=[out])
    compare_jax_and_py(fgraph, [get_test_value(i) for i in fgraph.inputs])
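Under the hood the JAX backend presumably maps this Op onto ``jnp.broadcast_to``, which behaves like its NumPy namesake:

import jax.numpy as jnp

out = jnp.broadcast_to(jnp.zeros((1, 3)), (2, 3))
assert out.shape == (2, 3)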