def test_channel_shuffle():
    """Test ``channel_shuffle``: invalid group count raises, and the output
    matches a vectorized reference permutation for ``groups=3``."""
    x = torch.randn(1, 24, 56, 56)
    with pytest.raises(AssertionError):
        # num_channels should be divisible by groups
        channel_shuffle(x, 7)

    groups = 3
    batch_size, num_channels, height, width = x.size()
    channels_per_group = num_channels // groups
    out = channel_shuffle(x, groups)
    # Vectorized reference for the shuffle: input channel c = g*cpg + k must
    # land at output channel k*groups + g, i.e. the original per-element check
    # ``c_out = c % channels_per_group * groups + c // channels_per_group``.
    # view -> (B, groups, cpg, H, W), transpose -> (B, cpg, groups, H, W),
    # reshape back flattens to channel index k*groups + g.
    expected = x.view(batch_size, groups, channels_per_group, height,
                      width).transpose(1, 2).reshape(batch_size, num_channels,
                                                     height, width)
    assert torch.equal(out, expected)
def _inner_forward(x):
    """Run the unit's branches, concatenate, and shuffle the channels."""
    if self.stride > 1:
        # Strided unit: both branches consume the full input tensor.
        merged = torch.cat((self.branch1(x), self.branch2(x)), dim=1)
    else:
        # Stride-1 unit: split the channels in two and transform
        # only the second half; the first half passes through.
        left, right = x.chunk(2, dim=1)
        merged = torch.cat((left, self.branch2(right)), dim=1)
    return channel_shuffle(merged, 2)
def _inner_forward(x):
    """Run the unit's branches, concatenate, and shuffle the channels."""
    if self.stride > 1:
        # Strided unit: both branches consume the full input tensor.
        merged = torch.cat((self.branch1(x), self.branch2(x)), dim=1)
    else:
        # Channel Split operation. Using explicit slicing instead of
        # ``chunk(x, 2, dim=1)`` makes it easier to deploy a
        # shufflenetv2 model by using mmdeploy.
        # ``(channels + 1) // 2`` is the ceil half, matching the size of
        # the first piece that ``chunk`` would produce.
        split = (x.shape[1] + 1) // 2
        first_half = x[:, :split, :, :]
        second_half = x[:, split:, :, :]
        merged = torch.cat((first_half, self.branch2(second_half)), dim=1)
    return channel_shuffle(merged, 2)
def _inner_forward(x):
    """Compress, depthwise-conv, shuffle, expand, then merge with the
    identity path according to ``self.combine``."""
    residual = x

    out = self.g_conv_1x1_compress(x)
    out = self.depthwise_conv3x3_bn(out)

    # The shuffle is only meaningful when the grouped 1x1 conv actually
    # partitions the channels (groups > 1).
    if self.groups > 1:
        out = channel_shuffle(out, self.groups)
    out = self.g_conv_1x1_expand(out)

    if self.combine == 'concat':
        # Concat variant: downsample the identity path and activate the
        # main path *before* merging.
        residual = self.avgpool(residual)
        out = self.act(out)
        return self._combine_func(residual, out)

    # Add variant: merge first, then activate the combined result.
    out = self._combine_func(residual, out)
    return self.act(out)