Example #1
def test_conv2d():
    """Test 2D custom convolution module

    We compare our custom reflection-padded convolution to the builtin
    Torch convolution.

    We check that:
    - The output shapes are equal
    - The output values are equal in the center, where the
      reflection padding should not have influenced the result.

    """

    batch_sz = 5
    in_channels = 5
    kernel_size = 3
    padding_torch = 1
    size = (45, 55)

    # xi: for inplace
    # xc: for normal torch convolution
    xi = torch.randn(batch_sz, in_channels, *size).cuda()
    xc = xi.detach().clone()

    output = torch.ones(batch_sz, 1, *size).cuda()
    ci = conv_relu.ConvRelu2dInPlaceModule(output,
                                           in_channels,
                                           1,
                                           kernel_size=kernel_size)
    cc = nn.Sequential(
        nn.Conv2d(in_channels,
                  1,
                  kernel_size=kernel_size,
                  padding=padding_torch),
        nn.ReLU(),
    )

    for c in [ci, *cc.modules()]:
        c.cuda()
        try:
            c.weight.data.fill_(1)
            c.bias.data.zero_()
        except AttributeError:
            pass  # skip modules without parameters (the Sequential container, ReLU)

    xiv = xi.requires_grad_(True)
    xcv = xc.requires_grad_(True)

    yi = ci(xiv)
    yc = cc(xcv)
    assert yi.shape == yc.shape

    # Check center of output, where the output should be equal.
    d = 1
    yi_ = yi[:, :, d:-d, d:-d]
    yc_ = yc[:, :, d:-d, d:-d]

    # Check that pytorch and own convolution agree in the center:
    assert torch_equal(yi_, yc_)
    assert torch_equal(yi.data, output)
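
If the custom module indeed pads by (kernel_size - 1) // 2 with reflection, the same comparison could be extended to the full output by building the reference path from torch's own nn.ReflectionPad2d. A minimal sketch under that assumption (the helper name reflection_reference is ours, not part of the test suite):

import torch.nn as nn

def reflection_reference(in_channels, out_channels=1, kernel_size=3):
    # Explicit reflection padding followed by an unpadded convolution; with
    # matching weights this should agree with the custom module everywhere,
    # not only in the center, assuming the custom module reflects its borders.
    pad = (kernel_size - 1) // 2
    return nn.Sequential(
        nn.ReflectionPad2d(pad),
        nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, padding=0),
        nn.ReLU(),
    )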
Example #2
def test_conv3d():
    """Test 3D custom convolution module

    We compare our custom reflection-padded convolution to the builtin
    Torch convolution.

    We check that:
    - The output shapes are equal
    - The output values are equal in the center, where the
      reflection padding should not have influenced the result.

    """

    batch_sz = 5
    in_channels = 5
    kernel_size = 3
    padding_torch = 1
    size = (13, 7, 19)

    # xi: for inplace
    # xc: for normal torch convolution
    xi = torch.ones(batch_sz, in_channels, *size).cuda()
    xc = torch.ones(batch_sz, in_channels, *size).cuda()

    output = torch.ones(batch_sz, 1, *size).cuda()
    ci = Conv3DModule(output, in_channels, 1, kernel_size=kernel_size)
    cc = nn.Conv3d(in_channels, 1, kernel_size=kernel_size, padding=padding_torch)

    for c in [ci, cc]:
        c.cuda()
        c.weight.data.fill_(1)
        c.bias.data.zero_()

    xi.requires_grad = True
    xc.requires_grad = True

    yi = ci(xi)
    yc = cc(xc)
    assert yi.shape == yc.shape

    # Check center of output, where the output should be equal.
    d = 1
    yi_ = yi[:, :, d:-d, d:-d, d:-d]
    yc_ = yc[:, :, d:-d, d:-d, d:-d]

    # Check that pytorch and own convolution agree in the center:
    assert torch_equal(yi_, yc_)
    assert torch_equal(yi.data, output)
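
Because this test feeds an all-ones input through all-ones weights with zero bias, the interior output value can also be checked analytically: each interior voxel sums in_channels * kernel_size**3 inputs, i.e. 5 * 3**3 = 135. A small sanity check along those lines could be appended to the test (a sketch reusing the names defined above, indented as it would appear inside the function):

    # Every interior output value is the number of input elements under the
    # kernel: in_channels * kernel_size**3 = 5 * 27 = 135.
    expected = float(in_channels * kernel_size ** 3)
    assert torch.allclose(yc_, torch.full_like(yc_, expected))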
Example #3
#%%
import os, sys
# __file__ is undefined in a notebook, so the quoted string '__file__' is used:
# abspath('__file__') resolves against the current working directory, and
# dirname() then yields the project directory.
class_path = os.path.dirname(os.path.abspath('__file__')) + "/make_class/"
class_path
# '/Users/user/Projects/neural_tutorial/make_class/'
#%%
sys.path.insert(1, class_path)
print('module path was inserted.')

#%%
import CustomClass as cc
print(cc.introduce())

#%%
from CustomClass import CustomClass as cc
mycc = cc()  # in Jupyter the return value is echoed without an explicit print
mycc.plus(1, 1)

#%%
from CustomClass import Mathmatics as math  # note: rebinds the name math, shadowing the standard-library module
math().square(2, 2, debug=True)
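
For the cells above to run, make_class/CustomClass.py has to expose a module-level introduce(), a CustomClass with plus(), and a Mathmatics class with square(..., debug=...). The original file is not shown, so the following is only a hypothetical layout that would satisfy those calls; the real bodies (in particular what square computes from its two arguments) may differ.

# make_class/CustomClass.py -- hypothetical contents, reconstructed from the
# calls above; the actual implementation may differ.

def introduce():
    # used as: import CustomClass as cc; cc.introduce()
    return "Hello from the CustomClass module."

class CustomClass:
    def plus(self, a, b):
        # used as: mycc.plus(1, 1); Jupyter echoes the returned value
        return a + b

class Mathmatics:
    def square(self, a, b, debug=False):
        # used as: Mathmatics().square(2, 2, debug=True); the exact formula is
        # a guess -- here: the elementwise squares of both arguments
        result = (a ** 2, b ** 2)
        if debug:
            print(f"square({a}, {b}) -> {result}")
        return result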