def worker():
    rank = dist.get_rank()
    if rank in ranks:
        group = dist.new_group(ranks)
        assert group.size == 2
        assert group.key == "2,0"
        assert group.rank == ranks.index(rank)
        dt = get_default_device()[:-1]
        assert group.comp_node == "{}{}:2".format(dt, rank)
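The fragment above closes over a ranks list defined in the enclosing test and is meant to run once per process. A minimal, hedged sketch of that scaffolding, assuming MegEngine's dist.launcher and a three-process run so that ranks 0 and 2 exist (the ranks value and n_gpus are assumptions, not taken from the snippet):

import megengine.distributed as dist

def test_new_group():
    ranks = [2, 0]  # assumed value, consistent with the "2,0" key asserted above

    @dist.launcher(n_gpus=3)  # n_gpus is an assumption; ranks 0 and 2 must exist
    def worker():
        ...  # body as in the snippet above, closing over ranks

    worker()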
Example #2
def worker(val, shape):
    rank = dist.get_rank()
    if rank == 0:  # remote send
        x = tensor(val, device="xpu0")
        remote_send(x, 1)
        sync()
    else:  # remote recv
        y = remote_recv(0)
        assert y.device == get_default_device()
        np.testing.assert_almost_equal(val, y.numpy())
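remote_send and remote_recv form a point-to-point pair here: rank 0 builds the tensor and sends it, rank 1 receives it onto its default device and checks the values. A hedged sketch of how such a worker is usually driven, assuming a two-process dist.launcher run and made-up test data:

import numpy as np
import megengine.distributed as dist

def test_remote_send_recv():
    val = np.random.random((4, 5)).astype("float32")  # made-up test data
    launched = dist.launcher(worker, n_gpus=2)  # worker as defined above
    launched(val, val.shape)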
Example #3
        )
        invsqrt_channel_var = f("**", f("+", channel_var, eps), c(-0.5))
        inv_var_wt = f("*", invsqrt_channel_var, weight)
        neg_channel_mean = f("-", channel_mean)
        outvar = f(
            "fma3",
            input,
            inv_var_wt,
            f("fma3", neg_channel_mean, inv_var_wt, bias),
        )
        return (outvar, ), (True, )

    return batch_norm_nd
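
# Reference algebra for the fma-based construction above (a sketch, not part
# of the original test): with w = weight / sqrt(var + eps), the nested fma3
# ops compute x * w + (-mean * w + bias), which is the usual batch norm
# (x - mean) / sqrt(var + eps) * weight + bias refactored into two fused
# multiply-adds per element.
def _batch_norm_reference(x, mean, var, weight, bias, eps):
    import numpy as np  # local import; assumes numpy is not imported in this sketch's scope

    inv_var_wt = weight / np.sqrt(var + eps)  # invsqrt_channel_var * weight
    return x * inv_var_wt + (-mean * inv_var_wt + bias)  # fma3(x, w, fma3(-mean, w, bias))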


@pytest.mark.parametrize("device", [get_default_device(), "cpux"])
@pytest.mark.parametrize("batch_size", [1, 8])
@pytest.mark.parametrize("channels", [3])
@pytest.mark.parametrize("use_trace, symbolic", [(False, None), (True, False),
                                                 (True, True)])
@pytest.mark.parametrize("gopt_level", [None, 1, 2])
@pytest.mark.parametrize("dtype", ["float32"])
def test_subgraph(device, batch_size, channels, use_trace, symbolic,
                  gopt_level, dtype):
    device = CompNode(device)

    def subgraph_batch_norm(inp, weight, bias, eps, diff):
        inp = inp.detach()
        with GradManager().attach(inp) as gm:
            batch_norm_fn = _get_batch_norm_fn(dtype,
                                               device,