Example #1
 def fuse(self):  # fuse model Conv2d() + BatchNorm2d() layers
     print('Fusing layers... ')
     for m in self.model.modules():
         if type(m) is Conv:
             m._non_persistent_buffers_set = set()  # pytorch 1.6.0 compatibility
             m.conv = fuse_conv_and_bn(m.conv, m.bn)  # update conv
             m.bn = None  # remove batchnorm
             m.forward = m.fuseforward  # update forward
     self.info()
     return self
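
All five examples delegate the actual weight folding to a fuse_conv_and_bn helper. Below is a minimal sketch of such a helper, assuming plain nn.Conv2d / nn.BatchNorm2d layers; the helpers used in the repositories above follow this pattern but may differ in detail. It folds the BatchNorm scale and shift into the convolution's weight and bias, so the fused layer computes the same function in eval mode.

import torch
import torch.nn as nn

def fuse_conv_and_bn(conv, bn):
    # Return a single Conv2d equivalent to `conv` followed by `bn` (inference only)
    fusedconv = nn.Conv2d(conv.in_channels,
                          conv.out_channels,
                          kernel_size=conv.kernel_size,
                          stride=conv.stride,
                          padding=conv.padding,
                          groups=conv.groups,
                          bias=True).requires_grad_(False).to(conv.weight.device)

    # fused weight: W' = diag(gamma / sqrt(running_var + eps)) @ W
    w_conv = conv.weight.clone().view(conv.out_channels, -1)
    w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.running_var + bn.eps)))
    fusedconv.weight.copy_(torch.mm(w_bn, w_conv).view(fusedconv.weight.shape))

    # fused bias: b' = gamma * (b - running_mean) / sqrt(running_var + eps) + beta
    b_conv = torch.zeros(conv.out_channels, device=conv.weight.device) if conv.bias is None else conv.bias
    b_bn = bn.bias - bn.weight.mul(bn.running_mean).div(torch.sqrt(bn.running_var + bn.eps))
    fusedconv.bias.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).reshape(-1) + b_bn)

    return fusedconv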
Example #2
 def fuse(self):
     # Fuse Conv2d + BatchNorm2d layers throughout model
     fused_list = nn.ModuleList()
     for a in list(self.children())[0]:
         if isinstance(a, nn.Sequential):
             for i, b in enumerate(a):
                 if isinstance(b, nn.modules.batchnorm.BatchNorm2d):
                     # fuse this bn layer with the previous conv2d layer
                     conv = a[i - 1]
                     fused = torch_utils.fuse_conv_and_bn(conv, b)
                     a = nn.Sequential(fused, *list(a.children())[i + 1:])
                     break
         fused_list.append(a)
     self.module_list = fused_list
Example #3
 def fuse(self):
     # Fuse Conv2d + BatchNorm2d layers throughout model
     print('Fusing layers...')
     fused_list = nn.ModuleList()
     for a in list(self.children())[0]:
         if isinstance(a, nn.Sequential):
             for i, b in enumerate(a):
                 if isinstance(b, nn.modules.batchnorm.BatchNorm2d):
                     # fuse this bn layer with the previous conv2d layer
                     conv = a[i - 1]
                     fused = torch_utils.fuse_conv_and_bn(conv, b)
                     a = nn.Sequential(fused, *list(a.children())[i + 1:])
                     break
         fused_list.append(a)
     self.module_list = fused_list
     self.info() if not ONNX_EXPORT else None  # yolov3-spp reduced from 225 to 152 layers
Example #4
        print('Remove Focus...')
        old_focus = model.model[0]

        # new focus
        conv1_out_channels = 0
        for k, v in old_focus.named_parameters():
            if k == 'conv.conv.bias':
                conv1_out_channels = v.shape[0]

        assert conv1_out_channels > 0
        new_focus = FocusNew(3, conv1_out_channels, 3)

        # fuse conv and bn
        for m in new_focus.modules():
            if type(m) is Conv and hasattr(m, 'bn'):
                m.conv = fuse_conv_and_bn(m.conv, m.bn)  # update conv
                delattr(m, 'bn')  # remove batchnorm
                m.forward = m.fuseforward  # update forward

        tt = str(FocusNew)[8:-2].replace('__main__.', '')  # module type
        npp = sum([x.numel() for x in new_focus.parameters()])  # number params
        new_focus.i, new_focus.f, new_focus.type, new_focus.np = 0, -1, tt, npp  # attach index, 'from' index, type, number params

        # copy weights and buffers, including the BN running statistics (running_mean / running_var)
        focus_weights = dict()
        for param_tensor in old_focus.state_dict():
            focus_weights[param_tensor] = old_focus.state_dict()[param_tensor]

        # for k, v in model.named_parameters():
        #     if k == "model.0.conv.conv.weight":
        #         focus_weights["conv.conv.weight"] = v
Example #5
def fuse(model):
    for m in model.modules():
        if type(m) is Conv:
            m.conv = torch_utils.fuse_conv_and_bn(m.conv, m.bn)  # update conv
            m.bn = None  # remove batchnorm
            m.forward = m.fuseforward  # update forward
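
A quick self-contained check (an illustration, not taken from the examples above) that fusing a Conv2d + BatchNorm2d pair leaves the forward pass numerically unchanged, using the fuse_conv_and_bn sketch after Example #1:

import torch
import torch.nn as nn

conv = nn.Conv2d(3, 16, 3, padding=1, bias=False)
bn = nn.BatchNorm2d(16)
with torch.no_grad():  # give the BN non-trivial (pretend-trained) statistics
    bn.weight.uniform_(0.5, 1.5)
    bn.bias.uniform_(-0.5, 0.5)
    bn.running_mean.uniform_(-1.0, 1.0)
    bn.running_var.uniform_(0.5, 2.0)
conv.eval()
bn.eval()  # fusion relies on the frozen running statistics, so use eval mode

fused = fuse_conv_and_bn(conv, bn)

x = torch.randn(1, 3, 32, 32)
print(torch.allclose(bn(conv(x)), fused(x), atol=1e-5))  # expected: True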