示例#1
0
 def __init__(self):
     """Video stem: replication-pad the clip, then conv -> BN -> ReLU.

     H/W are padded by 3 and time by 1 via replication, so the 3x7x7
     conv itself runs with zero built-in padding.
     """
     layers = [
         ReplicationPad3d((3, 3, 3, 3, 1, 1)),
         nn.Conv3d(3, 64, kernel_size=(3, 7, 7), stride=(1, 2, 2),
                   padding=(0, 0, 0), bias=False),
         nn.BatchNorm3d(64),
         nn.ReLU(inplace=True),
     ]
     super(BasicStem, self).__init__(*layers)
示例#2
0
 def __init__(self):
     """Frame-wise (2D-style) stem: pad spatially, then conv -> BN -> ReLU.

     The 1x7x7 kernel has no temporal extent, so only H/W are
     replication-padded (by 3) and the conv uses no built-in padding.
     """
     layers = [
         ReplicationPad3d((3, 3, 3, 3, 0, 0)),
         nn.Conv3d(3, 64, kernel_size=(1, 7, 7), stride=(1, 2, 2),
                   padding=(0, 0, 0), bias=False),
         # HACK maybe don't use 3d batch norm? to make life harder
         nn.BatchNorm3d(64),
         nn.ReLU(inplace=True),
     ]
     super(BasicStem2D, self).__init__(*layers)
示例#3
0
    def __init__(self, denselayer2d, inflate_convs=False):
        """Inflate a 2D DenseNet dense layer into its 3D counterpart.

        Walks the children of ``denselayer2d`` and mirrors each one:
        BatchNorm2d -> inflated batch norm, ReLU reused as-is, and
        Conv2d -> inflated 3D conv.

        Args:
            denselayer2d: 2D dense layer whose children are copied/inflated.
            inflate_convs: if True, convs with spatial kernel > 1 also get a
                temporal extent equal to their spatial kernel size, preceded
                by replication padding in time so the clip length is kept.

        Raises:
            ValueError: if a child layer type is not handled.
        """
        super(_DenseLayer3d, self).__init__()

        self.inflate_convs = inflate_convs
        for name, child in denselayer2d.named_children():
            if isinstance(child, torch.nn.BatchNorm2d):
                self.add_module(name, inflate.inflate_batch_norm(child))
            elif isinstance(child, torch.nn.ReLU):
                self.add_module(name, child)
            elif isinstance(child, torch.nn.Conv2d):
                # assumes square spatial kernels — only index 0 is inspected
                kernel_size = child.kernel_size[0]
                if inflate_convs and kernel_size > 1:
                    # Symmetric temporal padding needs an odd kernel.
                    # (Original message read "odd be got" and, via a
                    # backslash continuation, embedded a run of spaces.)
                    assert kernel_size % 2 == 1, \
                        'kernel size should be odd but got {}'.format(
                            kernel_size)
                    pad_size = kernel_size // 2
                    # Pad input in the time dimension
                    pad_time = ReplicationPad3d(
                        (0, 0, 0, 0, pad_size, pad_size))
                    # BUG FIX: nn.Module.add_module rejects names containing
                    # '.' with a KeyError, so 'padding.1' could never be
                    # registered.
                    self.add_module('padding_1', pad_time)
                    # Add time dimension of same dim as the space one
                    self.add_module(name,
                                    inflate.inflate_conv(child, kernel_size))
                else:
                    self.add_module(name, inflate.inflate_conv(child, 1))
            else:
                raise ValueError('{} is not among handled layer types'.format(
                    type(child)))
        self.drop_rate = denselayer2d.drop_rate
示例#4
0
 def __init__(self,
              in_planes,
              out_planes,
              midplanes,
              stride=1,
              padding=1):
     """(2+1)D factored convolution: spatial 1x3x3 conv, then temporal 3x1x1.

     Each conv is preceded by a ReplicationPad3d of the dimensions it
     convolves over, so both convs run with zero built-in padding.

     BUG FIX: the original passed padding=(0, padding, padding) and
     padding=(padding, 0, 0) to the convs *in addition to* the explicit
     replication pads, padding the input twice and enlarging the output.
     Examples BasicStem / Conv3DSimplePad / Conv3DNoTemporalPad in this
     file all use padding=(0, 0, 0) after an explicit pad; this now
     matches them.

     Args:
         in_planes: input channels.
         out_planes: output channels.
         midplanes: channels between the spatial and temporal convs.
         stride: spatial stride of the first conv / temporal stride of
             the second.
         padding: replication-pad amount per side.
     """
     super(Conv2Plus1D, self).__init__(
         # Pad H/W only; the 1x3x3 conv needs no zero padding.
         ReplicationPad3d((padding, padding, padding, padding, 0, 0)),
         nn.Conv3d(in_planes, midplanes, kernel_size=(1, 3, 3),
                   stride=(1, stride, stride), padding=(0, 0, 0),
                   bias=False),
         nn.BatchNorm3d(midplanes),
         nn.ReLU(inplace=True),
         # Pad time only; the 3x1x1 conv needs no zero padding.
         ReplicationPad3d((0, 0, 0, 0, padding, padding)),
         nn.Conv3d(midplanes, out_planes, kernel_size=(3, 1, 1),
                   stride=(stride, 1, 1), padding=(0, 0, 0),
                   bias=False))
示例#5
0
 def __init__(self,
              in_planes,
              out_planes,
              midplanes=None,
              stride=1,
              padding=1):
     """Wrap Conv3DSimple with replication padding on all six sides.

     The pad replaces the conv's own zero padding, which is set to
     (0, 0, 0).
     """
     super(Conv3DSimplePad, self).__init__()
     pad = ReplicationPad3d((padding,) * 6)
     conv = Conv3DSimple(in_planes, out_planes, midplanes, stride,
                         (0, 0, 0))
     self.pieces = nn.Sequential(pad, conv)
示例#6
0
 def __init__(self,
              in_planes,
              out_planes,
              midplanes=None,
              stride=1,
              padding=1):
     """Spatial-only 1x3x3 conv with replication padding on H/W.

     No temporal extent, so time is neither padded nor convolved over;
     the conv itself uses zero built-in padding.
     """
     super(Conv3DNoTemporalPad, self).__init__()
     spatial_pad = ReplicationPad3d(
         (padding, padding, padding, padding, 0, 0))
     conv = nn.Conv3d(in_channels=in_planes,
                      out_channels=out_planes,
                      kernel_size=(1, 3, 3),
                      stride=(1, stride, stride),
                      padding=(0, 0, 0),
                      bias=False)
     self.pieces = nn.Sequential(spatial_pad, conv)
示例#7
0
 def __init__(self, transition2d, inflate_conv=False):
     """Inflate a 2D DenseNet transition layer into 3D.

     Mirrors each child of ``transition2d``: BatchNorm2d -> inflated
     batch norm, ReLU reused as-is, Conv2d -> inflated 3D conv
     (temporal size 3 with replication padding in time when
     ``inflate_conv`` is set, otherwise 1), AvgPool2d -> inflated pool.

     Args:
         transition2d: the 2D transition layer to inflate.
         inflate_conv: if True, give the conv a temporal extent of 3.

     Raises:
         ValueError: if a child layer type is not handled.
     """
     super(_Transition3d, self).__init__()
     for name, layer in transition2d.named_children():
         if isinstance(layer, torch.nn.BatchNorm2d):
             self.add_module(name, inflate.inflate_batch_norm(layer))
         elif isinstance(layer, torch.nn.ReLU):
             self.add_module(name, layer)
         elif isinstance(layer, torch.nn.Conv2d):
             if inflate_conv:
                 pad_time = ReplicationPad3d((0, 0, 0, 0, 1, 1))
                 # BUG FIX: nn.Module.add_module rejects names containing
                 # '.' with a KeyError, so 'padding.1' could never be
                 # registered.
                 self.add_module('padding_1', pad_time)
                 self.add_module(name, inflate.inflate_conv(layer, 3))
             else:
                 self.add_module(name, inflate.inflate_conv(layer, 1))
         elif isinstance(layer, torch.nn.AvgPool2d):
             self.add_module(name, inflate.inflate_pool(layer, 2))
         else:
             raise ValueError('{} is not among handled layer types'.format(
                 type(layer)))