def __init__(self):
    super(MyConv, self).__init__()
    # exnn.Conv2d takes (out_channels, kernel_size); in_channels is inferred from the input.
    self.conv = nn.Sequential(
        exnn.Conv2d(20, 2),
        exnn.Conv2d(30, 2),
        exnn.Flatten(),
    )
def __init__(self):
    super(Net, self).__init__()
    self.conv = nn.Sequential(
        exnn.Conv2d(10, kernel_size=5),
        nn.MaxPool2d(2),
        nn.ReLU(),
        exnn.Conv2d(20, kernel_size=5),
        nn.Dropout2d(),
        nn.MaxPool2d(2),
        nn.ReLU(),
    )
    self.linear = nn.Sequential(
        exnn.Linear(320, 50),
        nn.ReLU(),
        nn.Dropout(),
        exnn.Linear(50, 10),
        nn.LogSoftmax(dim=1),
    )
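The Net snippet above only shows __init__. A minimal forward pass is sketched below; it is not part of the original snippet and assumes a single-channel 28x28 input, for which the two conv (kernel 5) + max-pool (2) stages yield 20 x 4 x 4 = 320 features, matching exnn.Linear(320, 50).

def forward(self, x):
    # Hypothetical forward pass for the Net module above (assumption, not from the source).
    x = self.conv(x)              # (batch, 20, 4, 4) for a 1x28x28 input
    x = x.view(x.size(0), -1)     # flatten to (batch, 320)
    return self.linear(x)         # log-probabilities over 10 classes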
def conv2d(out_channels, kernel_size, stride):
    conv = nn.Sequential(
        exnn.Conv2d(out_channels=out_channels, kernel_size=kernel_size, stride=stride),
        nn.ReLU(),
    )
    return conv
def conv2d(out_channels, kernel_size, stride):
    return exnn.Conv2d(out_channels=out_channels, kernel_size=kernel_size, stride=stride)
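Either factory can be called without specifying in_channels, since exnn.Conv2d infers it from the first input it sees. A hypothetical usage (the tensor shape and parameter values are chosen purely for illustration):

# Hypothetical usage of the conv2d factory above.
block = conv2d(out_channels=16, kernel_size=3, stride=1)
y = block(torch.randn(8, 3, 32, 32))   # in_channels=3 is inferred on the first call
print(y.shape)                         # torch.Size([8, 16, 30, 30]): 32 - 3 + 1 = 30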
def test_cuda_conv2d_with_seq():
    net = nn.Sequential(exnn.Conv2d(3, 2))
    net = net.to('cuda')
    x = torch.randn(10, 20, 28, 28).to('cuda')
    y = net(x)
    assert list(y.shape) == [10, 3, 27, 27]
def test_cuda_conv2d():
    net = exnn.Conv2d(3, 2).to('cuda')
    x = torch.randn(10, 20, 28, 28).to('cuda')
    y = net(x)
    # kernel_size=2, stride=1, no padding: 28 - 2 + 1 = 27 along each spatial dim
    assert list(y.shape) == [10, 3, 27, 27]
def __init__(self):
    super(MyConv, self).__init__()
    self.conv = exnn.Conv2d(3, 2)
def __init__(self, out_channels, kernel_size, stride):
    super(ConvRelu, self).__init__()
    self.conv = exnn.Conv2d(out_channels=out_channels, kernel_size=kernel_size, stride=stride)
    self.relu = nn.ReLU()
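The ConvRelu snippet above defines only __init__. A minimal sketch of the matching forward method is shown below; it is an assumption based on the attributes defined above, not part of the original snippet.

def forward(self, x):
    # Hypothetical forward method for ConvRelu: convolution followed by ReLU.
    return self.relu(self.conv(x))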