Code example #1
File: opt.py Project: luke14free/Theano-PyMC
def lift_rv_shapes(node):
    """Lift `RandomVariable`'s shape-related parameters.

    In other words, this will broadcast the distribution parameters and
    extra dimensions added by the `size` parameter.

    For example, ``normal([0.0, 1.0], 5.0, size=(3, 2))`` becomes
    ``normal([[0., 1.], [0., 1.], [0., 1.]], [[5., 5.], [5., 5.], [5., 5.]])``.

    """

    if not isinstance(node.op, RandomVariable):
        return False

    rng, size, dtype, *dist_params = node.inputs

    dist_params = broadcast_params(dist_params, node.op.ndims_params)

    dist_params = [
        broadcast_to(
            p, (tuple(size) + tuple(p.shape)) if node.op.ndim_supp > 0 else size
        )
        for p in dist_params
    ]

    return node.op.make_node(rng, None, dtype, *dist_params)
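
The broadcasting described in the docstring can be reproduced with plain NumPy. The sketch below is an illustration only (it is not part of opt.py) and mirrors the two steps above for ``normal([0.0, 1.0], 5.0, size=(3, 2))``: the parameters are first broadcast against each other, then expanded to the requested `size`.

# Illustration only: the same broadcasting with NumPy, following the
# docstring example ``normal([0.0, 1.0], 5.0, size=(3, 2))``.
import numpy as np

size = (3, 2)
loc = np.array([0.0, 1.0])  # shape (2,)
scale = np.array(5.0)       # scalar

# Step 1: broadcast the parameters against each other (cf. `broadcast_params`).
loc_b, scale_b = np.broadcast_arrays(loc, scale)

# Step 2: expand each parameter to the requested `size` (cf. the `broadcast_to` calls).
loc_full = np.broadcast_to(loc_b, size)      # [[0., 1.], [0., 1.], [0., 1.]]
scale_full = np.broadcast_to(scale_b, size)  # [[5., 5.], [5., 5.], [5., 5.]]

assert loc_full.shape == scale_full.shape == (3, 2)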
Code example #2
class TestBroadcastTo(utt.InferShapeTester):

    rng = np.random.RandomState(43)

    def setup_method(self):
        super().setup_method()
        self.op_class = BroadcastTo
        self.op = broadcast_to

    @config.change_flags(compute_test_value="raise")
    def test_perform(self):
        a = tt.scalar()
        a.tag.test_value = 5

        s_1 = tt.iscalar("s_1")
        s_1.tag.test_value = 4
        shape = (s_1, 1)

        bcast_res = broadcast_to(a, shape)

        assert bcast_res.broadcastable == (False, True)

        bcast_np = np.broadcast_to(5, (4, 1))
        bcast_tt = bcast_res.get_test_value()

        assert np.array_equal(bcast_tt, bcast_np)
        assert np.shares_memory(bcast_tt, a.get_test_value())

    @pytest.mark.parametrize(
        "fn,input_dims",
        [
            [lambda x: broadcast_to(x, (1,)), (1,)],
            [lambda x: broadcast_to(x, (6, 2, 5, 3)), (1,)],
            [lambda x: broadcast_to(x, (6, 2, 5, 3)), (5, 1)],
            [lambda x: broadcast_to(x, (6, 2, 1, 3)), (2, 1, 3)],
        ],
    )
    def test_gradient(self, fn, input_dims):
        utt.verify_grad(
            fn,
            [np.random.rand(*input_dims).astype(config.floatX)],
            n_tests=1,
            rng=self.rng,
        )

    def test_infer_shape(self):
        a = tt.tensor(config.floatX, [False, True, False])
        shape = list(a.shape)
        out = self.op(a, shape)

        self._compile_and_check(
            [a] + shape,
            [out],
            [np.random.rand(2, 1, 3).astype(config.floatX), 2, 1, 3],
            self.op_class,
        )

        a = tt.tensor(config.floatX, [False, True, False])
        shape = [tt.iscalar() for i in range(4)]
        self._compile_and_check(
            [a] + shape,
            [self.op(a, shape)],
            [np.random.rand(2, 1, 3).astype(config.floatX), 6, 2, 5, 3],
            self.op_class,
        )
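
Outside the test-value machinery, the same Op can also be exercised through a compiled function. The sketch below is an illustration only, not part of the test file, and assumes `broadcast_to` is importable from `theano.tensor.extra_ops` as in the module under test.

# Illustration only: compile `broadcast_to` into a function and compare the
# result against NumPy. Import path is an assumption based on the
# Theano-PyMC layout used by these tests.
import numpy as np
import theano
import theano.tensor as tt
from theano.tensor.extra_ops import broadcast_to

a = tt.scalar("a")
s_1 = tt.iscalar("s_1")
out = broadcast_to(a, (s_1, 1))

f = theano.function([a, s_1], out)
assert np.array_equal(f(5.0, 4), np.broadcast_to(5.0, (4, 1)))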