Esempio n. 1
0
    def __init__(self, inplanes, outplanes, batch_norm, leaky_relu):
        """Residual block: two 3x3 submanifold convolutions plus a skip path."""
        nn.Module.__init__(self)

        self.batch_norm = batch_norm
        self.leaky_relu = leaky_relu

        # First 3D submanifold convolution: inplanes -> outplanes.
        self.conv1 = scn.SubmanifoldConvolution(
            dimension=3, nIn=inplanes, nOut=outplanes, filter_size=3,
            bias=False)

        if self.batch_norm:
            # Batch norm fused with the activation after the first conv.
            self.bn1 = (scn.BatchNormLeakyReLU(outplanes)
                        if self.leaky_relu else scn.BatchNormReLU(outplanes))

        # Second submanifold convolution keeps the channel count.
        self.conv2 = scn.SubmanifoldConvolution(
            dimension=3, nIn=outplanes, nOut=outplanes, filter_size=3,
            bias=False)

        if self.batch_norm:
            self.bn2 = scn.BatchNormalization(outplanes)

        self.residual = scn.Identity()

        # Activation applied after the residual addition.
        self.relu = scn.LeakyReLU() if self.leaky_relu else scn.ReLU()

        self.add = scn.AddTable()
Esempio n. 2
0
    def block(self,
              m,
              a,
              b,
              dimension=3,
              residual_blocks=False,
              leakiness=0,
              kernel_size=3,
              use_batch_norm=True):
        """Append one block (a -> b channels) to the sequential container m.

        residual_blocks=True builds a ResNet-style block (shortcut summed
        with a two-conv branch); otherwise a VGG-style activation + conv.
        """
        # Activation factory: fused BN+LeakyReLU when batch norm is on,
        # otherwise a plain LeakyReLU.
        if use_batch_norm:
            def activation(channels):
                return scn.BatchNormLeakyReLU(channels, leakiness=leakiness)
        else:
            def activation(channels):
                return scn.LeakyReLU(leakiness)

        if residual_blocks:
            # ResNet style: identity shortcut, or a 1x1 projection when the
            # channel count changes.
            shortcut = (scn.Identity()
                        if a == b else scn.NetworkInNetwork(a, b, False))
            branch = scn.Sequential()
            branch.add(activation(a))
            branch.add(
                scn.SubmanifoldConvolution(dimension, a, b, kernel_size,
                                           False))
            branch.add(activation(b))
            branch.add(
                scn.SubmanifoldConvolution(dimension, b, b, kernel_size,
                                           False))
            m.add(scn.ConcatTable().add(shortcut).add(branch))
            m.add(scn.AddTable())
        else:
            # VGG style: one activation followed by one convolution.
            m.add(scn.Sequential().add(activation(a)).add(
                scn.SubmanifoldConvolution(dimension, a, b, kernel_size,
                                           False)))
Esempio n. 3
0
    def __init__(self,
                 dimension,
                 reps,
                 n_layers,
                 leakiness=0,
                 input_layer=None,
                 name='encoder',
                 device=None):
        """Build a sparse-convolution encoder (downsampling path).

        Args:
            dimension: spatial dimensionality of the sparse tensors.
            reps: number of ResNet blocks per stage (>= 1).
            n_layers: per-stage specs; each entry is (n_in, n_out) or
                (n_in, n_out, op) where op is 'submanifoldconvolution',
                'maxpool' or 'avgpool' (default: strided scn.Convolution).
            leakiness: negative slope for the LeakyReLU activations.
            input_layer: spatial size for an optional scn.InputLayer.
            name: prefix used when generating block names.
            device: stored for use by callers; not used here.
        """
        super(Encoder, self).__init__()
        self.dimension = dimension
        self.reps = reps
        self.n_layers = n_layers
        self.leakiness = leakiness
        self.name = name
        self.device = device

        # Fix: compare against None with 'is not' (identity), not '!='.
        if input_layer is not None:
            self.input_layer = scn.InputLayer(len(input_layer), input_layer)

        self.blocks = []
        self.block_names = {}
        n_in, n_out = 1, 1
        for i in range(len(n_layers)):
            block = scn.Sequential()
            # reps ResNet blocks; they run at the previous stage's channel
            # count (n_out is only bumped below, before the downsample op).
            for rep in range(reps):
                block.add(
                    resnet_block(dimension,
                                 n_in,
                                 n_out,
                                 1,
                                 leakiness,
                                 computation='submanifoldconvolution'))
                n_in = n_out
            n_out = n_layers[i][1]
            block.add(scn.LeakyReLU(leakiness))
            # Downsample / channel-change op selected by the stage spec.
            if len(n_layers[i]) == 2:
                block.add(scn.Convolution(dimension, n_in, n_out, 2, 2, False))
            elif len(n_layers[i]
                     ) == 3 and n_layers[i][2] == 'submanifoldconvolution':
                block.add(
                    scn.SubmanifoldConvolution(dimension, n_in, n_out, 2,
                                               False))
            elif len(n_layers[i]) == 3 and n_layers[i][2] == 'maxpool':
                block.add(scn.MaxPooling(dimension, 2, 2))
            elif len(n_layers[i]) == 3 and n_layers[i][2] == 'avgpool':
                block.add(scn.AveragePooling(dimension, 2, 2))
            block_name = get_block_name(name, dimension, reps, n_in, n_out,
                                        leakiness)
            n_in = n_out
            self.blocks.append(block)
            self.block_names[block_name] = len(self.blocks) - 1
        # Register the blocks so torch tracks their parameters.
        self.blocks = torch.nn.ModuleList(self.blocks)
Esempio n. 4
0
 def __init__(self):
     """Sparse 3D network: stem conv, residual stages with downsampling,
     then a dense head producing a single output."""
     super(Model, self).__init__()

     def _res_branch(channels):
         # conv -> BN+LeakyReLU -> conv branch used inside residual blocks.
         return scn.Sequential().add(
             scn.SubmanifoldConvolution(dimension, channels, channels, 3,
                                        False)).add(
                 scn.BatchNormLeakyReLU(channels)).add(
                     scn.SubmanifoldConvolution(dimension, channels, channels,
                                                3, False))

     def _down(n_in, n_out, stride):
         # Strided convolution + BN+LeakyReLU downsampling stage.
         return scn.Sequential().add(
             scn.Convolution(dimension, n_in, n_out, stride, stride,
                             False)).add(scn.BatchNormLeakyReLU(n_out))

     self.inputLayer = scn.InputLayer(dimension, spatial_size=512, mode=3)
     self.initialconv = scn.SubmanifoldConvolution(dimension, nPlanes, 64, 7,
                                                   False)
     self.residual = scn.Identity()
     self.add = scn.AddTable()
     self.sparsebl11 = _res_branch(64)
     self.sparsebl12 = _res_branch(64)
     self.sparsebl21 = _res_branch(128)
     self.sparsebl22 = _res_branch(128)
     # NOTE(review): scn.LeakyReLU's argument is the leak coefficient;
     # 64/128 here look like channel counts — confirm intent.
     self.relu1 = scn.LeakyReLU(64)
     self.relu2 = scn.LeakyReLU(128)
     self.downsample1 = _down(64, 64, [2, 2, 2])
     self.downsample2 = _down(64, 128, [2, 2, 2])
     self.downsample3 = _down(128, 64, [4, 4, 4])
     self.downsample4 = _down(64, 2, [4, 4, 4])
     self.sparsetodense = scn.SparseToDense(dimension, 2)
     self.dropout1 = nn.Dropout(0.5)
     self.dropout2 = nn.Dropout(0.5)
     self.linear2 = nn.Linear(2 * 8 * 8 * 8, 2)
     self.linear3 = nn.Linear(2, 1)
Esempio n. 5
0
def resnet_block(dimension,
                 n_in,
                 n_out,
                 kernel,
                 leakiness=0,
                 computation='convolution'):
    '''Build and return a pre-activation ResNet block.

    The block is a ConcatTable of a NetworkInNetwork shortcut
    (n_in -> n_out) and a LeakyReLU/conv/LeakyReLU/conv main branch,
    merged with AddTable.

    Args:
        dimension: spatial dimensionality of the sparse tensors.
        n_in, n_out: input / output channel counts.
        kernel: int for submanifold/deconvolution, or a
            (filter_size, filter_stride) pair for (full) convolution.
        leakiness: negative slope of the LeakyReLU activations.
        computation: one of 'submanifoldconvolution', 'convolution',
            'fullconvolution', 'deconvolution'.
    '''
    assert computation in [
        'submanifoldconvolution', 'convolution', 'fullconvolution',
        'deconvolution'
    ]
    # Fixes: use isinstance() for the type check instead of
    # type(...) == int, and bind the conv factory to its own name instead
    # of shadowing the `computation` argument.
    if computation == 'convolution':
        conv = lambda a, b: scn.Convolution(dimension, a, b, kernel[0],
                                            kernel[1], False)
    elif computation == 'submanifoldconvolution':
        assert isinstance(
            kernel, int), f"`kernel` must be int, {type(kernel)} was provided"
        conv = lambda a, b: scn.SubmanifoldConvolution(dimension, a, b,
                                                       kernel, False)
    elif computation == 'deconvolution':
        assert isinstance(
            kernel, int), f"`kernel` must be int, {type(kernel)} was provided"
        conv = lambda a, b: scn.Deconvolution(dimension, a, b, kernel, kernel,
                                              False)
    else:  # 'fullconvolution'
        conv = lambda a, b: scn.FullConvolution(dimension, a, b, kernel[0],
                                                kernel[1], False)

    block = scn.Sequential()
    block.add(scn.ConcatTable(
    ).add(scn.NetworkInNetwork(n_in, n_out, False)).add(scn.Sequential().add(
        scn.LeakyReLU(leakiness)).add(conv(n_in, n_out)).add(
            scn.LeakyReLU(leakiness)).add(conv(n_out, n_out)))).add(
                scn.AddTable())
    return block
Esempio n. 6
0
 def block(self,
           m,
           a,
           b,
           dimension=3,
           residual_blocks=False,
           leakiness=0,
           kernel_size=3):
     """Append one block (a -> b channels) to the sequential container m.

     residual_blocks=True gives a ResNet-style block; otherwise a
     VGG-style LeakyReLU + submanifold convolution is appended.
     """
     if residual_blocks:
         # ResNet style: shortcut (identity, or a 1x1 projection when the
         # channel count changes) summed with a two-conv main branch.
         shortcut = (scn.Identity()
                     if a == b else scn.NetworkInNetwork(a, b, False))
         branch = scn.Sequential()
         branch.add(scn.LeakyReLU(leakiness))
         branch.add(
             scn.SubmanifoldConvolution(dimension, a, b, kernel_size, False))
         branch.add(scn.LeakyReLU(leakiness))
         branch.add(
             scn.SubmanifoldConvolution(dimension, b, b, kernel_size, False))
         m.add(scn.ConcatTable().add(shortcut).add(branch))
         m.add(scn.AddTable())
     else:
         # VGG style: one activation followed by one convolution.
         m.add(scn.Sequential().add(scn.LeakyReLU(leakiness)).add(
             scn.SubmanifoldConvolution(dimension, a, b, kernel_size, False)))
Esempio n. 7
0
    def __init__(self, inplanes, outplanes, batch_norm, leaky_relu, nplanes=1):
        """Downsampling block: strided convolution over the two trailing
        spatial axes, optional batch norm, then a (Leaky)ReLU."""
        nn.Module.__init__(self)

        self.batch_norm = batch_norm
        self.leaky_relu = leaky_relu

        # Stride-2 convolution in the last two axes only; the first axis is
        # covered by an nplanes-wide filter with stride 1.
        self.conv = scn.Convolution(dimension=3,
                                    nIn=inplanes,
                                    nOut=outplanes,
                                    filter_size=[nplanes, 2, 2],
                                    filter_stride=[1, 2, 2],
                                    bias=False)

        if self.batch_norm:
            self.bn = scn.BatchNormalization(outplanes)

        self.relu = scn.LeakyReLU() if self.leaky_relu else scn.ReLU()
Esempio n. 8
0
    def __init__(self, inplanes, outplanes, batch_norm, leaky_relu, nplanes=1):
        """Single submanifold-convolution block with either a fused
        BN+activation or a standalone activation."""
        nn.Module.__init__(self)

        self.batch_norm = batch_norm
        self.leaky_relu = leaky_relu

        # 3x3 convolution over the trailing axes, nplanes wide in the first.
        self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                                nIn=inplanes,
                                                nOut=outplanes,
                                                filter_size=[nplanes, 3, 3],
                                                bias=False)

        if self.batch_norm:
            # Batch norm fused with the activation.
            self.bn1 = (scn.BatchNormLeakyReLU(outplanes)
                        if self.leaky_relu else scn.BatchNormReLU(outplanes))
        else:
            self.relu = scn.LeakyReLU() if self.leaky_relu else scn.ReLU()
Esempio n. 9
0
    def __init__(self,
                 dimension,
                 n_layers,
                 unet=False,
                 name='decoder',
                 use_sparsify=True,
                 device=None):
        """Build a sparse-convolution decoder (upsampling path).

        Args:
            dimension: spatial dimensionality of the sparse tensors.
            n_layers: per-stage specs mirroring the encoder; each entry is
                (n_out, n_in) or (n_out, n_in, False), where a third
                element of False disables upsampling for that stage.
            unet: stored flag; not read in this constructor.
            name: prefix used when generating block names.
            use_sparsify: if True, append scn.Sparsify to every stage.
            device: stored for use by callers; not used here.
        """
        super(Decoder, self).__init__()
        self.dimension = dimension
        self.n_layers = n_layers
        self.unet = unet
        self.name = name
        self.device = device

        self.blocks = []
        self.block_names = {}

        for i in range(len(n_layers)):
            # Channel counts come from the encoder spec, reversed: input
            # channels from index 1, output channels from index 0.
            n_in, n_out = n_layers[i][1], n_layers[i][0]
            block = scn.Sequential()
            '''
            block.add(
                scn.BatchNormLeakyReLU(n_in, 0)
            )
            '''
            block.add(scn.LeakyReLU(0))
            if len(n_layers[i]) == 2:
                # Default: 2x upsampling via transposed ("full") convolution.
                block.add(
                    scn.FullConvolution(dimension, n_in, n_out, 2, 2, False))
            elif len(n_layers[i]) == 3 and n_layers[i][2] == False:
                # don't upsample
                block.add(
                    scn.SubmanifoldConvolution(dimension, n_in, n_out, 1,
                                               False))
            if use_sparsify:
                block.add(scn.Sparsify(dimension, n_out))
            block_name = get_block_name(name, dimension, 0, n_in, n_out, 0)
            self.blocks.append(block)
            self.block_names[block_name] = len(self.blocks) - 1
        # Register the blocks so torch tracks their parameters.
        self.blocks = torch.nn.ModuleList(self.blocks)
Esempio n. 10
0
    def __init__(
        self,
        output_shape,
        use_norm=True,  # NOTE(review): accepted but never read here — confirm
        name='Pyramid_LightNoBN',
        use_residual=True,
        blocks=[
            (64, 15), (80, 11), (96, 7), (128, 5)
        ],  # define blocks, the tuple represent (num_filters, kernel). (blocks are divided by one downsample op)
        layers_per_block=2,  # each layer consists of 2 sscnn if use residual is true, and 1 if false
        downsample_type='max_pool2',
        leakiness=0,
        #  dense_blocks=[(160, (3, 3, 3), (2, 1, 1)), (192, (3, 3, 3), (2, 1, 1)), (224, (3, 3, 3), (2, 1, 1))], # define final dense blocks, with (num_filters, kernel, stride)
        #  out_filters=512,
        #  final_z_dim=12,
        **kwargs):
        """Sparse-convolution pyramid backbone without batch norm.

        Builds one group of conv blocks per entry in `blocks`; groups are
        separated by LeakyReLU + 2x2x2 max-pool downsampling, followed by a
        final activation + downsample, SparseToDense, and a grouped Conv3d
        that collapses the z axis.

        Note: the mutable default for `blocks` is only read, never mutated.
        """
        super(Pyramid_LightNoBN, self).__init__()
        self.name = name
        self.use_residual = use_residual
        self.layers_per_block = layers_per_block
        self.blocks = blocks
        if downsample_type == 'max_pool2':
            # 2x2x2 max pooling, stride 2 — the only supported downsample.
            Downsample = change_default_args(dimension=3,
                                             pool_size=(2, 2, 2),
                                             pool_stride=(2, 2,
                                                          2))(scn.MaxPooling)
        else:
            # scn.Convolution(dimension, num_filter_fpn[k-1], num_filter_fpn[k], 3, 2, False)
            raise ValueError('Invalid downsample type')

        sparse_shape = np.array(output_shape[1:4])  # + [1, 0, 0]

        self.scn_input = scn.InputLayer(3, sparse_shape.tolist())
        self.voxel_output_shape = output_shape

        # First block group: 1 input channel -> num_filters, no downsample.
        m = scn.Sequential()
        (num_filters, kernel_size) = blocks[0]
        # TODO fix, should add a subsparseconv
        self.block(m,
                   1,
                   num_filters,
                   dimension=3,
                   residual_blocks=use_residual,
                   kernel_size=kernel_size)
        for _ in range(layers_per_block - 1):
            self.block(m,
                       num_filters,
                       num_filters,
                       dimension=3,
                       residual_blocks=use_residual,
                       kernel_size=kernel_size)

        self.block_models = [m]
        prev_num_filters = num_filters

        # Remaining groups, each preceded by activation + downsample.
        for k, (num_filters, kernel_size) in enumerate(blocks[1:]):
            k = k + 1

            m = scn.Sequential()
            # downsample
            m.add(scn.LeakyReLU(leakiness))
            m.add(Downsample())

            self.block(m,
                       prev_num_filters,
                       num_filters,
                       dimension=3,
                       residual_blocks=use_residual,
                       kernel_size=kernel_size)
            for _ in range(layers_per_block - 1):
                self.block(m,
                           num_filters,
                           num_filters,
                           dimension=3,
                           residual_blocks=use_residual,
                           kernel_size=kernel_size)

            self.block_models.append(m)
            prev_num_filters = num_filters

        # Final activation + downsample appended after all block groups.
        self.block_models.append(scn.Sequential().add(
            scn.LeakyReLU(leakiness)).add(Downsample()))
        self.block_models = ListModule(*self.block_models)

        self.sparse_to_dense = scn.SparseToDense(3, prev_num_filters)

        # Depthwise (grouped) conv with kernel (8, 1, 1) collapsing the z
        # axis — assumes the dense z extent is 8; TODO confirm.
        self.z_combiner = nn.Sequential(
            nn.Conv3d(prev_num_filters,
                      prev_num_filters, (8, 1, 1),
                      groups=prev_num_filters),
            # nn.BatchNorm3d(num_filters),
            nn.LeakyReLU(negative_slope=leakiness))