Code Example #1
def test_chainer_parameter_grad_setter(shape):
    arr = numpy.full(shape, 17, 'float32')
    chainer_param = chainer.Parameter(arr)

    # Conversion
    torch_param = cpm.ChainerParameter(chainer_param)
    # Initialize the gradient by routing the parameter through an optimizer
    torch_param.requires_grad = True
    optimizer = torch.optim.SGD([torch_param], lr=0.01, momentum=0.9)
    optimizer.zero_grad()

    # Setter: assign a new gradient through the torch parameter
    grad = torch.full(shape, 9, dtype=torch.float32)
    torch_param.grad = grad
    numpy.testing.assert_array_equal(grad, torch_param.grad)
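
Each of these examples is a pytest test that receives a `shape` argument and omits its imports. A minimal harness to make them runnable might look as follows; the parametrized shapes are illustrative assumptions, and `cpm` is taken to be the chainer-pytorch-migration package:

# Imports assumed by all four examples.
import numpy
import pytest
import torch

import chainer
import chainer_pytorch_migration as cpm

# Hypothetical parametrization supplying the `shape` argument;
# any tuple of dimensions would do.
@pytest.mark.parametrize('shape', [(2, 3), (5,)])
def test_chainer_parameter_grad_setter(shape):
    ...  # body as in Code Example #1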
Code Example #2
def test_chainer_parameter(shape):
    # Initialized parameter
    arr = numpy.full(shape, 17, 'float32')
    chainer_param = chainer.Parameter(arr)

    # Conversion
    torch_param = cpm.ChainerParameter(chainer_param)

    assert isinstance(torch_param, torch.nn.Parameter)
    assert torch_param.shape == shape
    assert (torch_param.data.numpy() == numpy.full(shape, 17, 'float32')).all()

    # Test memory sharing
    new_arr = numpy.random.randint(-4, 4, shape)
    torch_param.data[...] = torch.tensor(new_arr.copy())
    assert (chainer_param.array == new_arr).all()
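
The memory-sharing check above writes through the torch tensor and reads through the Chainer array. Assuming the conversion is a zero-copy view in both directions (the original test only exercises the torch-to-Chainer direction), an in-place write through the Chainer array should likewise be visible from the torch parameter; a minimal sketch:

def check_sharing_from_chainer_side(shape=(2, 3)):
    chainer_param = chainer.Parameter(numpy.full(shape, 17, 'float32'))
    torch_param = cpm.ChainerParameter(chainer_param)

    # In-place update through the Chainer array...
    chainer_param.array[...] = 42

    # ...should be reflected in the shared torch tensor.
    assert (torch_param.data.numpy() == 42).all()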
Code Example #3
def test_chainer_parameter_grad_getter(shape):
    arr = numpy.full(shape, 17, 'float32')
    grad = numpy.full(shape, 9, 'float32')
    chainer_param = chainer.Parameter(arr)
    chainer_param.grad = grad.copy()

    # Conversion
    torch_param = cpm.ChainerParameter(chainer_param)

    # Getter: read the gradient through the torch parameter
    torch_grad = torch_param.grad

    assert isinstance(torch_grad, torch.Tensor)
    assert (torch_grad.numpy() == grad).all()

    # Test memory sharing
    new_arr = numpy.random.randint(-4, 4, shape)
    torch_grad[...] = torch.tensor(new_arr.copy())
    assert (chainer_param.grad == new_arr).all()
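
Combining the gradient setter with the data sharing suggests that a PyTorch optimizer step on the converted parameter updates the underlying Chainer parameter in place. The original examples do not test this end to end; a sketch under the assumption that cpm.ChainerParameter shares storage as shown above:

def sketch_optimizer_step_updates_chainer(shape=(2, 3)):
    chainer_param = chainer.Parameter(numpy.zeros(shape, 'float32'))
    torch_param = cpm.ChainerParameter(chainer_param)
    torch_param.requires_grad = True

    optimizer = torch.optim.SGD([torch_param], lr=0.5)
    torch_param.grad = torch.full(shape, 2.0)

    # One SGD step: param <- param - lr * grad = 0 - 0.5 * 2 = -1.
    optimizer.step()

    assert (chainer_param.array == -1.0).all()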
Code Example #4
def test_chainer_parameter_uninitialized():
    # Uninitialized parameters are not supported
    chainer_param = chainer.Parameter()

    with pytest.raises(TypeError):
        cpm.ChainerParameter(chainer_param)
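
To convert a parameter that was created without data, the Chainer parameter must be initialized first. A minimal sketch using Chainer's standard initializer API (the shape and initializer here are illustrative):

chainer_param = chainer.Parameter(initializer=chainer.initializers.Zero())
chainer_param.initialize((2, 3))  # materialize the underlying array
torch_param = cpm.ChainerParameter(chainer_param)  # no longer raises TypeError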