# Code example #1 — file: utils.py, project: pyzeon/pymc3-hmm
def tt_broadcast_arrays(*args: TensorVariable):
    """Broadcast any number of arrays against each other.

    Parameters
    ----------
    `*args` : array_likes
        The arrays to broadcast.

    """
    # Determine the common broadcast shape once, then expand every
    # input up to that shape.
    common_shape = broadcast_shape(*args)
    expanded = [at_broadcast_to(arr, common_shape) for arr in args]
    return tuple(expanded)
# Code example #2 — file: test_extra_ops.py, project: lucianopaz/aesara
def test_broadcast_shape_symbolic(s1_vals, s2_vals, exp_res):
    """Check `broadcast_shape` when both shapes are purely symbolic scalars."""
    eval_point = {}

    def make_symbolic_shape(vals):
        # One symbolic long scalar per concrete dimension; record the
        # substitution needed to evaluate the result, and attach a test
        # value for debug/test-value mode.
        scalars = aet.lscalars(len(vals))
        for var, val in zip(scalars, vals):
            eval_point[var] = val
            var.tag.test_value = val
        return scalars

    s1s = make_symbolic_shape(s1_vals)
    s2s = make_symbolic_shape(s2_vals)

    res = broadcast_shape(s1s, s2s, arrays_are_shapes=True)
    res = aet.as_tensor(res)

    assert tuple(res.eval(eval_point)) == exp_res
# Code example #3 — file: test_extra_ops.py, project: blueskysir/aesara
def test_broadcast_shape():
    """Exercise `broadcast_shape` on constant, partially constant, and fully
    symbolic inputs.

    For each case the computed broadcast shape is compared against NumPy's
    `np.broadcast`, and the presence or absence of `Assert` nodes in the
    resulting graph is checked (constant-foldable cases should need no
    runtime shape assertions).
    """
    def shape_tuple(x, use_bcast=True):
        # Return `x`'s shape as a tuple of per-dimension scalars.  With
        # `use_bcast`, broadcastable dimensions become the constant 1
        # (statically known); otherwise every dimension stays symbolic.
        if use_bcast:
            return tuple(s if not bcast else 1
                         for s, bcast in zip(tuple(x.shape), x.broadcastable))
        else:
            return tuple(s for s in tuple(x.shape))

    # Case: (3, 1) broadcast against (3,) -> (3, 3), passing the arrays
    # themselves.
    x = np.array([[1], [2], [3]])
    y = np.array([4, 5, 6])
    b = np.broadcast(x, y)
    x_aet = aet.as_tensor_variable(x)
    y_aet = aet.as_tensor_variable(y)
    b_aet = broadcast_shape(x_aet, y_aet)
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    # Now, we try again using shapes as the inputs
    #
    # This case also confirms that a broadcast dimension will
    # broadcast against a non-broadcast dimension when they're
    # both symbolic (i.e. we couldn't obtain constant values).
    b_aet = broadcast_shape(
        shape_tuple(x_aet, use_bcast=False),
        shape_tuple(y_aet, use_bcast=False),
        arrays_are_shapes=True,
    )
    # Fully symbolic dimensions require runtime `Assert` checks.
    assert any(
        isinstance(node.op, Assert)
        for node in applys_between([x_aet, y_aet], b_aet))
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    b_aet = broadcast_shape(shape_tuple(x_aet),
                            shape_tuple(y_aet),
                            arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    # These are all constants, so there shouldn't be any asserts in the
    # resulting graph.
    assert not any(
        isinstance(node.op, Assert)
        for node in applys_between([x_aet, y_aet], b_aet))

    # Case: identical shapes (3,) and (3,) -> (3,).
    x = np.array([1, 2, 3])
    y = np.array([4, 5, 6])
    b = np.broadcast(x, y)
    x_aet = aet.as_tensor_variable(x)
    y_aet = aet.as_tensor_variable(y)
    b_aet = broadcast_shape(x_aet, y_aet)
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    b_aet = broadcast_shape(shape_tuple(x_aet),
                            shape_tuple(y_aet),
                            arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    # TODO: This will work when/if we use a more sophisticated `is_same_graph`
    # implementation.
    # assert not any(
    #     isinstance(node.op, Assert)
    #     for node in graph_ops([x_aet, y_aet], b_aet)
    # )

    # Case: (1, 2, 3) broadcast against a 0-d array -> (1, 2, 3).
    x = np.empty((1, 2, 3))
    y = np.array(1)
    b = np.broadcast(x, y)
    x_aet = aet.as_tensor_variable(x)
    y_aet = aet.as_tensor_variable(y)
    b_aet = broadcast_shape(x_aet, y_aet)
    # The leading broadcastable dimension should fold to the constant 1.
    assert b_aet[0].value == 1
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    assert not any(
        isinstance(node.op, Assert)
        for node in applys_between([x_aet, y_aet], b_aet))
    b_aet = broadcast_shape(shape_tuple(x_aet),
                            shape_tuple(y_aet),
                            arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_aet], b.shape)

    # Case: (2, 1, 3) broadcast against (2, 1, 1) -> (2, 1, 3).
    x = np.empty((2, 1, 3))
    y = np.empty((2, 1, 1))
    b = np.broadcast(x, y)
    x_aet = aet.as_tensor_variable(x)
    y_aet = aet.as_tensor_variable(y)
    b_aet = broadcast_shape(x_aet, y_aet)
    # The middle dimension is broadcastable in both inputs and should
    # fold to the constant 1.
    assert b_aet[1].value == 1
    assert np.array_equal([z.eval() for z in b_aet], b.shape)
    # TODO: This will work when/if we use a more sophisticated `is_same_graph`
    # implementation.
    # assert not any(
    #     isinstance(node.op, Assert)
    #     for node in graph_ops([x_aet, y_aet], b_aet)
    # )
    b_aet = broadcast_shape(shape_tuple(x_aet),
                            shape_tuple(y_aet),
                            arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_aet], b.shape)

    # Case: fully symbolic dimensions.  (1, x1, x2) against (y1, 1, x2)
    # should broadcast to (y1, x1, x2); evaluated with concrete values.
    x1_shp_aet = iscalar("x1")
    x2_shp_aet = iscalar("x2")
    y1_shp_aet = iscalar("y1")
    x_shapes = (1, x1_shp_aet, x2_shp_aet)
    x_aet = aet.ones(x_shapes)
    y_shapes = (y1_shp_aet, 1, x2_shp_aet)
    y_aet = aet.ones(y_shapes)
    b_aet = broadcast_shape(x_aet, y_aet)
    # TODO: This will work when/if we use a more sophisticated `is_same_graph`
    # implementation.
    # assert not any(
    #     isinstance(node.op, Assert)
    #     for node in graph_ops([x_aet, y_aet], b_aet)
    # )
    res = aet.as_tensor(b_aet).eval({
        x1_shp_aet: 10,
        x2_shp_aet: 4,
        y1_shp_aet: 2,
    })
    assert np.array_equal(res, (2, 10, 4))

    # Two distinct symbolic dimensions (x2 vs. y1) cannot be proven equal
    # at graph-construction time, so an `Assert` must guard the result.
    y_shapes = (y1_shp_aet, 1, y1_shp_aet)
    y_aet = aet.ones(y_shapes)
    b_aet = broadcast_shape(x_aet, y_aet)
    assert isinstance(b_aet[-1].owner.op, Assert)
# Code example #4
def test_broadcast_shape_basic():
    """Exercise `broadcast_shape` on constant and symbolic inputs.

    Each computed shape is compared against NumPy's `np.broadcast`; the
    fully-symbolic cases also check for the expected `Assert` nodes in
    the resulting graph.
    """
    def shape_tuple(x, use_bcast=True):
        # Return `x`'s shape as a tuple of per-dimension scalars.  With
        # `use_bcast`, broadcastable dimensions become the constant 1
        # (statically known); otherwise every dimension stays symbolic.
        if use_bcast:
            return tuple(
                s if not bcast else 1
                for s, bcast in zip(tuple(x.shape), x.broadcastable)
            )
        else:
            return tuple(s for s in tuple(x.shape))

    # Case: (3, 1) broadcast against (3,) -> (3, 3), passing the arrays
    # themselves.
    x = np.array([[1], [2], [3]])
    y = np.array([4, 5, 6])
    b = np.broadcast(x, y)
    x_at = at.as_tensor_variable(x)
    y_at = at.as_tensor_variable(y)
    b_at = broadcast_shape(x_at, y_at)
    assert np.array_equal([z.eval() for z in b_at], b.shape)
    # Now, we try again using shapes as the inputs
    #
    # This case also confirms that a broadcast dimension will
    # broadcast against a non-broadcast dimension when they're
    # both symbolic (i.e. we couldn't obtain constant values).
    b_at = broadcast_shape(
        shape_tuple(x_at, use_bcast=False),
        shape_tuple(y_at, use_bcast=False),
        arrays_are_shapes=True,
    )
    # Fully symbolic dimensions require runtime `Assert` checks.
    assert any(
        isinstance(node.op, Assert) for node in applys_between([x_at, y_at], b_at)
    )
    assert np.array_equal([z.eval() for z in b_at], b.shape)
    b_at = broadcast_shape(shape_tuple(x_at), shape_tuple(y_at), arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_at], b.shape)

    # Case: identical shapes (3,) and (3,) -> (3,).
    x = np.array([1, 2, 3])
    y = np.array([4, 5, 6])
    b = np.broadcast(x, y)
    x_at = at.as_tensor_variable(x)
    y_at = at.as_tensor_variable(y)
    b_at = broadcast_shape(x_at, y_at)
    assert np.array_equal([z.eval() for z in b_at], b.shape)
    b_at = broadcast_shape(shape_tuple(x_at), shape_tuple(y_at), arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_at], b.shape)

    # Case: (1, 2, 3) broadcast against a 0-d array -> (1, 2, 3).
    x = np.empty((1, 2, 3))
    y = np.array(1)
    b = np.broadcast(x, y)
    x_at = at.as_tensor_variable(x)
    y_at = at.as_tensor_variable(y)
    b_at = broadcast_shape(x_at, y_at)
    # The leading broadcastable dimension should fold to the constant 1.
    assert b_at[0].value == 1
    assert np.array_equal([z.eval() for z in b_at], b.shape)
    b_at = broadcast_shape(shape_tuple(x_at), shape_tuple(y_at), arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_at], b.shape)

    # Case: (2, 1, 3) broadcast against (2, 1, 1) -> (2, 1, 3).
    x = np.empty((2, 1, 3))
    y = np.empty((2, 1, 1))
    b = np.broadcast(x, y)
    x_at = at.as_tensor_variable(x)
    y_at = at.as_tensor_variable(y)
    b_at = broadcast_shape(x_at, y_at)
    # The middle dimension is broadcastable in both inputs and should
    # fold to the constant 1.
    assert b_at[1].value == 1
    assert np.array_equal([z.eval() for z in b_at], b.shape)
    b_at = broadcast_shape(shape_tuple(x_at), shape_tuple(y_at), arrays_are_shapes=True)
    assert np.array_equal([z.eval() for z in b_at], b.shape)

    # Case: fully symbolic dimensions.  (1, x1, x2) against (y1, 1, x2)
    # should broadcast to (y1, x1, x2); evaluated with concrete values.
    x1_shp_at = iscalar("x1")
    x2_shp_at = iscalar("x2")
    y1_shp_at = iscalar("y1")
    x_shapes = (1, x1_shp_at, x2_shp_at)
    x_at = at.ones(x_shapes)
    y_shapes = (y1_shp_at, 1, x2_shp_at)
    y_at = at.ones(y_shapes)
    b_at = broadcast_shape(x_at, y_at)
    res = at.as_tensor(b_at).eval(
        {
            x1_shp_at: 10,
            x2_shp_at: 4,
            y1_shp_at: 2,
        }
    )
    assert np.array_equal(res, (2, 10, 4))

    # Two distinct symbolic dimensions (x2 vs. y1) cannot be proven equal
    # at graph-construction time, so an `Assert` must guard the result.
    y_shapes = (y1_shp_at, 1, y1_shp_at)
    y_at = at.ones(y_shapes)
    b_at = broadcast_shape(x_at, y_at)
    assert isinstance(b_at[-1].owner.op, Assert)