Example #1
def residual_block(m, a, b, leakiness=0.01, dimensions=2):
    """
    Append a residual block to a Sequential module:
    compute [identity, 3x3 + 3x3] branches, then add them together.

    inputs
    ------
    m [scn.Sequential module]: network to add layers to
    a [int]: number of input channels
    b [int]: number of output channels
    leakiness [float]: leakiness of ReLU activations
    dimensions [int]: dimensions of the input sparse tensor

    modifies
    --------
    m: adds layers
    """
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, a, b, 3, False))
               .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, b, b, 3, False))))
    m.add(scn.AddTable())
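# Usage sketch (not part of the original example): assumes sparseconvnet is
# importable as scn and residual_block() above is in scope; channel counts
# are illustrative.
import sparseconvnet as scn

m = scn.Sequential()
residual_block(m, 16, 16)  # a == b: identity shortcut
residual_block(m, 16, 32)  # a != b: NetworkInNetwork (1x1) shortcut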
Example #2
 def bar(nPlanes, bias):
     m = scn.Sequential()
     m.add(scn.BatchNormReLU(nPlanes))
     m.add(scn.NetworkInNetwork(
         nPlanes, nClasses,
         bias))  # accumulate softmax input, only one set of biases
     return m
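# Usage sketch (not part of the original example): values illustrative;
# nClasses is a free variable bar() captures from its enclosing scope.
nClasses = 10
head = bar(nPlanes=64, bias=True)  # BatchNormReLU, then 1x1 projection to nClasses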
Example #3
    def _make_transpose(self, transblock, planes, blocks, stride=1):

        upsample = None
        if stride != 1:
            upsample = scn.Sequential(
                scn.SparseToDense(2, self.inplanes * transblock.expansion),
                nn.ConvTranspose2d(self.inplanes * transblock.expansion, planes,
                                   kernel_size=2, stride=stride, padding=0,
                                   bias=False),
                scn.DenseToSparse(2),
                scn.BatchNormalization(planes)
            )
        elif self.inplanes * transblock.expansion != planes:
            upsample = scn.Sequential(
                scn.NetworkInNetwork(self.inplanes * transblock.expansion, planes, False),
                scn.BatchNormalization(planes)
            )

        layers = []
        
        for i in range(1, blocks):
            layers.append(transblock(self.inplanes, self.inplanes * transblock.expansion))

        layers.append(transblock(self.inplanes, planes, stride, upsample))
        self.inplanes = planes // transblock.expansion

        return scn.Sequential(*layers)
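# Design note (sketch, not part of the original example): the SparseToDense ->
# ConvTranspose2d -> DenseToSparse sandwich above is a bridge for applying a
# dense-only operation to a sparse tensor. Hypothetical standalone helper, 2D:
import torch.nn as nn
import sparseconvnet as scn

def dense_upsample2d(c_in, c_out, stride):
    # Densify, upsample with a dense transposed convolution, re-sparsify.
    return scn.Sequential(
        scn.SparseToDense(2, c_in),
        nn.ConvTranspose2d(c_in, c_out, kernel_size=2, stride=stride,
                           padding=0, bias=False),
        scn.DenseToSparse(2),
    )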
Example #4
    def block(self,
              m,
              a,
              b,
              dimension=3,
              residual_blocks=False,
              leakiness=0,
              kernel_size=3,
              use_batch_norm=True):  # default using residual_block
        if use_batch_norm:
            Activation = lambda channels: scn.BatchNormLeakyReLU(
                channels, leakiness=leakiness)
        else:
            # scn.LeakyReLU takes no channel count; the argument is ignored.
            Activation = lambda channels: scn.LeakyReLU(leakiness)

        if residual_blocks:  # ResNet style blocks
            m.add(scn.ConcatTable()
                  .add(scn.Identity() if a == b
                       else scn.NetworkInNetwork(a, b, False))
                  .add(scn.Sequential()
                       .add(Activation(a))
                       .add(scn.SubmanifoldConvolution(
                           dimension, a, b, kernel_size, False))
                       .add(Activation(b))
                       .add(scn.SubmanifoldConvolution(
                           dimension, b, b, kernel_size, False))))
            m.add(scn.AddTable())
        else:  # VGG style blocks
            m.add(scn.Sequential()
                  .add(Activation(a))
                  .add(scn.SubmanifoldConvolution(
                      dimension, a, b, kernel_size, False)))
Example #5
 def residual(nIn, nOut, stride):
     if stride > 1:
         return scn.Convolution(dimension, nIn, nOut, 3, stride, False)
     elif nIn != nOut:
         return scn.NetworkInNetwork(nIn, nOut, False)
     else:
         return scn.Identity()
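# Orientation sketch (not part of the original example): the three shortcut
# cases residual() distinguishes; `dimension` is a free variable captured
# from the enclosing scope, e.g. dimension = 3. Values are illustrative.
residual(32, 32, 1)  # scn.Identity(): shapes already match
residual(32, 64, 1)  # scn.NetworkInNetwork: 1x1 channel projection
residual(32, 64, 2)  # scn.Convolution: strided spatial downsampling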
Example #6
 def block(m, a, b):
     if residual_blocks:  # ResNet style blocks
         m.add(scn.ConcatTable()
               .add(scn.Identity() if a == b
                    else scn.NetworkInNetwork(a, b, False))
               .add(scn.Sequential()
                    .add(scn.BatchNormLeakyReLU(
                        a, momentum=bn_momentum, leakiness=leakiness,
                        track_running_stats=track_running_stats))
                    .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                    .add(scn.BatchNormLeakyReLU(
                        b, momentum=bn_momentum, leakiness=leakiness,
                        track_running_stats=track_running_stats))
                    .add(scn.SubmanifoldConvolution(dimension, b, b, 3,
                                                    False))))
         m.add(scn.AddTable())
     else:  # VGG style blocks
         m.add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(
                   a, momentum=bn_momentum, leakiness=leakiness,
                   track_running_stats=track_running_stats))
               .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
     operation = {'kernel': [1, 1, 1], 'stride': [1, 1, 1]}
     return operation
Example #7
    def _make_skip_layer(self, inplanes, planes):

        layers = scn.Sequential(
            scn.NetworkInNetwork(inplanes, planes, False),
            scn.BatchNormReLU(planes)
        )
        return layers
Example #8
 def block(self, n_in, n_out):
     m = scn.Sequential()
     if self.residual_blocks:  # ResNet style blocks
         m.add(scn.ConcatTable()
               .add(scn.Identity() if n_in == n_out
                    else scn.NetworkInNetwork(n_in, n_out, False))
               .add(scn.Sequential()
                    .add(scn.BatchNormLeakyReLU(n_in, leakiness=self.leakiness))
                    .add(scn.SubmanifoldConvolution(
                        self.dimension, n_in, n_out, 3, False))
                    .add(scn.BatchNormLeakyReLU(n_out, leakiness=self.leakiness))
                    .add(scn.SubmanifoldConvolution(
                        self.dimension, n_out, n_out, 3, False))))
         m.add(scn.AddTable())
     else:  # VGG style blocks
         m.add(scn.BatchNormLeakyReLU(n_in, leakiness=self.leakiness))
         m.add(scn.SubmanifoldConvolution(self.dimension, n_in, n_out, 3,
                                          False))
     return m
Example #9
def res(m, dimension, a, b):
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormReLU(a))
               .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
               .add(scn.BatchNormReLU(b))
               .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))\
     .add(scn.AddTable())
Example #10
 def block(m, a, b):  # ResNet style blocks
     m.add(scn.ConcatTable()
           .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
           .add(scn.Sequential()
             .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
             .add(scn.SubmanifoldConvolution(self._dimension, a, b, 3, False))
             .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
             .add(scn.SubmanifoldConvolution(self._dimension, b, b, 3, False)))
      ).add(scn.AddTable())
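# Analogy sketch (not part of the original example): the ConcatTable/AddTable
# idiom above is the sparse counterpart of this hypothetical dense residual
# module written with plain torch.nn.
import torch.nn as nn

class DenseRes(nn.Module):
    def __init__(self, a, b):
        super().__init__()
        self.shortcut = nn.Identity() if a == b else nn.Conv2d(a, b, 1, bias=False)
        self.body = nn.Sequential(
            nn.BatchNorm2d(a), nn.ReLU(),
            nn.Conv2d(a, b, 3, padding=1, bias=False),
            nn.BatchNorm2d(b), nn.ReLU(),
            nn.Conv2d(b, b, 3, padding=1, bias=False))

    def forward(self, x):
        # ConcatTable feeds x to both branches; AddTable sums them.
        return self.shortcut(x) + self.body(x)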
Example #11
 def __init__(self, inplanes, planes, stride=1, upsample=None, **kwargs):
     super(TransBottleneckSparse, self).__init__()
     self.conv1 = scn.NetworkInNetwork(inplanes * 4, inplanes, False)
     self.bn1 = scn.BatchNormReLU(inplanes)
     if upsample is not None and stride != 1:
         self.conv2 = scn.Sequential(
             scn.SparseToDense(2, inplanes),
             nn.ConvTranspose2d(inplanes, inplanes,
                                kernel_size=2, stride=stride, padding=0,
                                output_padding=0, bias=False),
             scn.DenseToSparse(2)
         )
     else:
         self.conv2 = conv3x3_sparse(inplanes, inplanes, stride)
     self.bn2 = scn.BatchNormReLU(inplanes)
     self.conv3 = scn.NetworkInNetwork(inplanes, planes, False)
     self.bn3 = scn.BatchNormalization(planes)
     self.relu = scn.ReLU()
     self.add = scn.AddTable()
     self.upsample = upsample
     self.stride = stride
Example #12
 def f(m, a, b):
     m.add(scn.ConcatTable()
           .add(scn.Identity() if a == b
                else scn.NetworkInNetwork(a, b, self.allow_bias))
           .add(scn.Sequential()
                .add(norm_layer(a, leakiness=self.leakiness))
                .add(scn.SubmanifoldConvolution(
                    self.dimension, a, b, 3, self.allow_bias))
                .add(norm_layer(b, leakiness=self.leakiness))
                .add(scn.SubmanifoldConvolution(
                    self.dimension, b, b, 3, self.allow_bias))))
     m.add(scn.AddTable())
     return m
Example #13
 def __init__(self, transblock, layers, num_classes=150):
     self.inplanes = 512
     super(ResNetTransposeSparse, self).__init__()
     
     self.dense_to_sparse = scn.DenseToSparse(2)
     self.add = AddSparseDense()
     
     self.deconv1 = self._make_transpose(transblock, 256 * transblock.expansion, layers[0], stride=2)
     self.deconv2 = self._make_transpose(transblock, 128 * transblock.expansion, layers[1], stride=2)
     self.deconv3 = self._make_transpose(transblock, 64 * transblock.expansion, layers[2], stride=2)
     self.deconv4 = self._make_transpose(transblock, 64 * transblock.expansion, layers[3], stride=2)
     
     self.skip0 = self._make_skip_layer(128, 64 * transblock.expansion)
     self.skip1 = self._make_skip_layer(256, 64 * transblock.expansion)
     self.skip2 = self._make_skip_layer(512, 128 * transblock.expansion)
     self.skip3 = self._make_skip_layer(1024, 256 * transblock.expansion)
     self.skip4 = self._make_skip_layer(2048, 512 * transblock.expansion)
     
     self.densify0 = scn.SparseToDense(2, 64 * transblock.expansion)
     self.densify1 = scn.SparseToDense(2, 64 * transblock.expansion)
     self.densify2 = scn.SparseToDense(2, 128 * transblock.expansion)
     self.densify3 = scn.SparseToDense(2, 256 * transblock.expansion)
     
     self.inplanes = 64
     self.final_conv = self._make_transpose(transblock, 64 * transblock.expansion, 3)
     
     self.final_deconv = scn.Sequential(
         scn.SparseToDense(2, self.inplanes * transblock.expansion),
         nn.ConvTranspose2d(self.inplanes * transblock.expansion, num_classes,
                            kernel_size=2, stride=2, padding=0, bias=True)
     )
     
     self.out6_conv = nn.Conv2d(2048, num_classes, kernel_size=1, stride=1, bias=True)
     self.out5_conv = scn.NetworkInNetwork(256 * transblock.expansion, num_classes, True)
     self.out4_conv = scn.NetworkInNetwork(128 * transblock.expansion, num_classes, True)
     self.out3_conv = scn.NetworkInNetwork(64 * transblock.expansion, num_classes, True)
     self.out2_conv = scn.NetworkInNetwork(64 * transblock.expansion, num_classes, True)
     
     self.sparse_to_dense = scn.SparseToDense(2, num_classes)
Example #14
 def block(m, a, b):
     if residual_blocks:  # ResNet style blocks
         m.add(scn.ConcatTable()
               .add(scn.Identity() if a == b
                    else scn.NetworkInNetwork(a, b, False))
               .add(scn.Sequential()
                    .add(scn.BatchNormReLU(a))
                    .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                    .add(scn.BatchNormReLU(b))
                    .add(scn.SubmanifoldConvolution(dimension, b, b, 3,
                                                    False))))
         m.add(scn.AddTable())
     else:  # VGG style blocks
         m.add(scn.Sequential()
               .add(scn.BatchNormReLU(a))
               .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
Example #15
 def block(self, m, a, b, dimension=3, residual_blocks=False, leakiness=0):  # default using residual_block
     if residual_blocks: #ResNet style blocks
         m.add(scn.ConcatTable()
               .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
               .add(scn.Sequential()
                  .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
                  .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                  .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
                  .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False)))
           ).add(scn.AddTable())
     else:  # VGG style blocks
         m.add(scn.Sequential()
              .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
Example #16
 def _nin_block(self, module, a, b):
     '''
     Utility method for attaching feature-dimension-reducing
     BN + NetworkInNetwork blocks.
     INPUTS:
         - module (scn Module): network module to attach the block to.
         - a (int): number of input feature dimensions
         - b (int): number of output feature dimensions
     RETURNS:
         None (operation is in-place)
     '''
     module.add(scn.Sequential().add(
         scn.BatchNormLeakyReLU(a, leakiness=self.leakiness)).add(
             scn.NetworkInNetwork(a, b, self.allow_bias)))
Example #17
def get_channel_changer_or_identity(
        num_dims, sparse, input_channels, output_channels=None,
        residual_changer_bias=True):
    stride = np.full(num_dims, 1)

    if output_channels is None or input_channels == output_channels:
        return get_identity(num_dims, sparse, input_channels)

    if sparse:
        layer = scn.NetworkInNetwork(
                    input_channels, output_channels, residual_changer_bias)
    else:
        conv_class = get_dense_conv_class(num_dims)
        layer = conv_class(
            input_channels, output_channels, kernel_size=1,
            bias=residual_changer_bias)

    return sparse, stride, output_channels, layer
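# Usage sketch (not part of the original example): sparse branch, illustrative
# values; get_identity and get_dense_conv_class are helpers defined elsewhere
# in this source.
sparse, stride, out_channels, layer = get_channel_changer_or_identity(
    num_dims=3, sparse=True, input_channels=32, output_channels=64)
# layer is scn.NetworkInNetwork(32, 64, True); stride is np.full(3, 1)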
Example #18
def resnet_block(dimension,
                 n_in,
                 n_out,
                 kernel,
                 leakiness=0,
                 computation='convolution'):
    '''Build and return a ResNet block.
    '''

    assert computation in [
        'submanifoldconvolution', 'convolution', 'fullconvolution',
        'deconvolution'
    ]
    if computation == 'convolution':
        computation = lambda n_in, n_out: scn.Convolution(
            dimension, n_in, n_out, kernel[0], kernel[1], False)
    elif computation == 'submanifoldconvolution':
        assert type(kernel) == int, \
            f"`kernel` must be int, {type(kernel)} was provided"
        computation = lambda n_in, n_out: scn.SubmanifoldConvolution(
            dimension, n_in, n_out, kernel, False)
    elif computation == 'deconvolution':
        assert type(kernel) == int, \
            f"`kernel` must be int, {type(kernel)} was provided"
        computation = lambda n_in, n_out: scn.Deconvolution(
            dimension, n_in, n_out, kernel, kernel, False)
    else:
        computation = lambda n_in, n_out: scn.FullConvolution(
            dimension, n_in, n_out, kernel[0], kernel[1], False)

    block = scn.Sequential()
    block.add(scn.ConcatTable()
              .add(scn.NetworkInNetwork(n_in, n_out, False))
              .add(scn.Sequential()
                   # scn.BatchNormLeakyReLU(n_in, leakiness=leakiness)
                   .add(scn.LeakyReLU(leakiness))
                   .add(computation(n_in, n_out))
                   # scn.BatchNormLeakyReLU(n_out, leakiness=leakiness)
                   .add(scn.LeakyReLU(leakiness))
                   .add(computation(n_out, n_out))))
    block.add(scn.AddTable())
    return block
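# Usage sketch (not part of the original example): assumes sparseconvnet as
# scn; values illustrative.
block = resnet_block(3, 32, 64, kernel=3,
                     computation='submanifoldconvolution')
# For 'convolution' (and 'fullconvolution'), kernel is a
# [filter_size, filter_stride] pair instead of an int:
down = resnet_block(3, 32, 64, kernel=[2, 2], computation='convolution')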
Example #19
 def _resnet_block(self, module, a, b):
     '''
     Utility method for attaching ResNet-style blocks.
     INPUTS:
         - module (scn Module): network module to attach the ResNet block to.
         - a (int): number of input feature dimensions
         - b (int): number of output feature dimensions
     RETURNS:
         None (operation is in-place)
     '''
     module.add(scn.ConcatTable()
                .add(scn.Identity() if a == b
                     else scn.NetworkInNetwork(a, b, self.allow_bias))
                .add(scn.Sequential()
                     .add(scn.BatchNormLeakyReLU(a, leakiness=self.leakiness))
                     .add(scn.SubmanifoldConvolution(
                         self.dimension, a, b, 3, self.allow_bias))
                     .add(scn.BatchNormLeakyReLU(b, leakiness=self.leakiness))
                     .add(scn.SubmanifoldConvolution(
                         self.dimension, b, b, 3, self.allow_bias))))
     module.add(scn.AddTable())
Example #20
 def bar(m, nPlanes, bias):
     m.add(scn.BatchNormLeakyReLU(nPlanes, leakiness=leakiness))
     m.add(scn.NetworkInNetwork(
         nPlanes, nClasses,
         bias))  # accumulate softmax input, only one set of biases