Example no. 1
0
    def fuse(self):
        """Fuse Conv2d() + BatchNorm2d() pairs in-place for faster inference.

        Returns:
            self, so the call can be chained.
        """
        for m in self.model.modules():
            # isinstance is the idiomatic type check (type(m) is Conv would
            # also reject Conv subclasses); hasattr guards already-fused layers.
            if isinstance(m, Conv) and hasattr(m, 'bn'):
                m.conv = fuse_conv_and_bn(m.conv, m.bn)  # fold BN stats into conv weights
                delattr(m, 'bn')  # remove batchnorm
                m.forward = m.fuseforward  # forward path that skips BN
        self.info()
        return self
Example no. 2
0
 def fuse(self):
     """Fuse Conv2d() + BatchNorm2d() layers in-place and return self."""
     LOGGER.info('Fusing layers... ')
     for module in self.model.modules():
         # Skip everything except unfused (DW)Conv blocks that still carry a BN.
         if not (isinstance(module, (Conv, DWConv)) and hasattr(module, 'bn')):
             continue
         module.conv = fuse_conv_and_bn(module.conv, module.bn)  # fold BN into conv weights
         delattr(module, 'bn')  # batchnorm no longer needed
         module.forward = module.forward_fuse  # forward path without BN
     self.info()
     return self
Example no. 3
0
 def fuse(self):
     """Fuse Conv2d() + BatchNorm2d() layers in-place for faster inference.

     Safe to call more than once: layers whose ``bn`` was already cleared
     (set to ``None`` by a previous fuse) are skipped instead of being
     passed to ``fuse_conv_and_bn`` again.

     Returns:
         self, so the call can be chained.
     """
     for m in self.model.modules():
         # isinstance over `type(m) is Conv`; the getattr guard prevents a
         # crash when bn is already None from an earlier fuse() call.
         if isinstance(m, Conv) and getattr(m, 'bn', None) is not None:
             m._non_persistent_buffers_set = set(
             )  # pytorch 1.6.0 compatability
             m.conv = fuse_conv_and_bn(m.conv, m.bn)  # fold BN stats into conv weights
             m.bn = None  # remove batchnorm (kept as attribute for state-dict compat)
             m.forward = m.fuseforward  # forward path that skips BN
     return self