def v(depth, nPlanes):
    m = scn.Sequential()
    for _ in range(reps):
        res(m, nPlanes, nPlanes, dropout_p)
    if depth > 1:
        if dropout_width:
            m.add(scn.ConcatTable()
                  .add(scn.Identity())
                  .add(scn.Sequential()
                       .add(scn.BatchNormReLU(nPlanes))
                       # in place of max pooling
                       .add(scn.Convolution(dimension, nPlanes, nPlanes, 2, 2, False))
                       .add(scn.Dropout(dropout_p))
                       .add(v(depth - 1, nPlanes))
                       .add(scn.BatchNormReLU(nPlanes))
                       .add(scn.Deconvolution(dimension, nPlanes, nPlanes, 2, 2, False))))
        else:
            m.add(scn.ConcatTable()
                  .add(scn.Identity())
                  .add(scn.Sequential()
                       .add(scn.BatchNormReLU(nPlanes))
                       .add(scn.Convolution(dimension, nPlanes, nPlanes, 2, 2, False))
                       .add(v(depth - 1, nPlanes))
                       .add(scn.BatchNormReLU(nPlanes))
                       .add(scn.Deconvolution(dimension, nPlanes, nPlanes, 2, 2, False))))
        m.add(scn.JoinTable())
        for i in range(reps):
            res(m, 2 * nPlanes if i == 0 else nPlanes, nPlanes, dropout_p)
    return m
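# Hedged sketch of the closure context that the recursive builder v above
# assumes (names and values are illustrative, not from the source): `reps`,
# `dropout_p`, `dropout_width`, `dimension`, and a `res` block helper must
# already be in scope when v is defined.
import sparseconvnet as scn
dimension = 3
reps, dropout_p, dropout_width = 2, 0.3, True
def res(m, a, b, p):  # e.g. a pre-activated residual block, as in the snippets below
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormReLU(a))
               .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
               .add(scn.Dropout(p))))
    m.add(scn.AddTable())
unet = v(depth=4, nPlanes=32)  # 4 resolution levels, 32 filters throughout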
def block(self, nPlanes, n, reps, stride):
    m = scn.Sequential()
    for rep in range(reps):
        if rep == 0:
            m.add(scn.BatchNormReLU(nPlanes))
            m.add(scn.ConcatTable()
                  .add(self.residual(nPlanes, n, stride))
                  .add(scn.Sequential()
                       .add(scn.SubmanifoldConvolution(self.dimension, nPlanes, n, 3, False)
                            if stride == 1 else
                            scn.Convolution(self.dimension, nPlanes, n, 2, stride, False))
                       .add(scn.BatchNormReLU(n))
                       .add(scn.SubmanifoldConvolution(self.dimension, n, n, 3, False))))
        else:
            m.add(scn.ConcatTable()
                  .add(scn.Sequential()
                       .add(scn.BatchNormReLU(nPlanes))
                       .add(scn.SubmanifoldConvolution(self.dimension, nPlanes, n, 3, False))
                       .add(scn.BatchNormReLU(n))
                       .add(scn.SubmanifoldConvolution(self.dimension, n, n, 3, False)))
                  .add(scn.Identity()))
        m.add(scn.AddTable())
        nPlanes = n
    return m
def block(self, n_in, n_out):
    m = scn.Sequential()
    if self.residual_blocks:  # ResNet-style block
        m.add(scn.ConcatTable()
              .add(scn.Identity() if n_in == n_out
                   else scn.NetworkInNetwork(n_in, n_out, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(n_in, leakiness=self.leakiness))
                   .add(scn.SubmanifoldConvolution(self.dimension, n_in, n_out, 3, False))
                   .add(scn.BatchNormLeakyReLU(n_out, leakiness=self.leakiness))
                   .add(scn.SubmanifoldConvolution(self.dimension, n_out, n_out, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG-style block
        m.add(scn.BatchNormLeakyReLU(n_in, leakiness=self.leakiness))
        m.add(scn.SubmanifoldConvolution(self.dimension, n_in, n_out, 3, False))
    return m
def residual_block(m, a, b, leakiness=0.01, dimensions=2):
    """Append a residual block to a sequential module.

    Produces [identity, 3x3 + 3x3] branches and adds them together.

    inputs
    ------
    m [scn.Sequential]: network to add layers to
    a [int]: number of input channels
    b [int]: number of output channels
    leakiness [float]: leakiness of the LeakyReLU activations
    dimensions [int]: dimension of the input sparse tensor

    modifies
    --------
    m: adds layers
    """
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, a, b, 3, False))
               .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, b, b, 3, False))))
    m.add(scn.AddTable())
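# Minimal usage sketch for residual_block above (assumes `import sparseconvnet
# as scn`; the channel counts are illustrative).
net = scn.Sequential()
residual_block(net, 16, 32, leakiness=0.01, dimensions=2)  # NiN shortcut (16 != 32)
residual_block(net, 32, 32)                                # identity shortcut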
def __init__(self, inplanes, outplanes, batch_norm, leaky_relu):
    nn.Module.__init__(self)
    self.batch_norm = batch_norm
    self.leaky_relu = leaky_relu
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=False)
    if self.batch_norm:
        if self.leaky_relu:
            self.bn1 = scn.BatchNormLeakyReLU(outplanes)
        else:
            self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=False)
    if self.batch_norm:
        self.bn2 = scn.BatchNormalization(outplanes)
    self.residual = scn.Identity()
    if self.leaky_relu:
        self.relu = scn.LeakyReLU()
    else:
        self.relu = scn.ReLU()
    self.add = scn.AddTable()
def __init__(self, inplanes, outplanes, nplanes=1):
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=False)
    # if FLAGS.BATCH_NORM:
    self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=False)
    # if FLAGS.BATCH_NORM:
    self.bn2 = scn.BatchNormalization(outplanes)
    self.residual = scn.Identity()
    self.relu = scn.ReLU()
    self.add = scn.AddTable()
def __init__(self, nf_in, nf, input_sparsetensor, return_sparsetensor, max_data_size):
    nn.Module.__init__(self)
    data_dim = 3
    self.nf_in = nf_in
    self.nf = nf
    self.input_sparsetensor = input_sparsetensor
    self.return_sparsetensor = return_sparsetensor
    self.max_data_size = max_data_size
    if not self.input_sparsetensor:
        self.p0 = scn.InputLayer(data_dim, self.max_data_size, mode=0)
    self.p1 = scn.SubmanifoldConvolution(data_dim, nf_in, nf, filter_size=FSIZE0, bias=False)
    self.p2 = scn.Sequential()
    self.p2.add(scn.ConcatTable()
                .add(scn.Identity())
                .add(scn.Sequential()
                     .add(scn.BatchNormReLU(nf))
                     .add(scn.SubmanifoldConvolution(data_dim, nf, nf, FSIZE0, False))
                     .add(scn.BatchNormReLU(nf))
                     .add(scn.SubmanifoldConvolution(data_dim, nf, nf, FSIZE0, False))))
    self.p2.add(scn.AddTable())
    self.p2.add(scn.BatchNormReLU(nf))
    # downsample space by a factor of 2
    self.p3 = scn.Sequential().add(scn.Convolution(data_dim, nf, nf, FSIZE1, 2, False))
    self.p3.add(scn.BatchNormReLU(nf))
    if not self.return_sparsetensor:
        self.p4 = scn.SparseToDense(data_dim, nf)
def residual(nIn, nOut, stride):
    if stride > 1:
        return scn.Convolution(dimension, nIn, nOut, 3, stride, False)
    elif nIn != nOut:
        return scn.NetworkInNetwork(nIn, nOut, False)
    else:
        return scn.Identity()
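# Sketch pairing the shortcut chooser above with a ConcatTable/AddTable pair,
# as the ResNet snippets here do (channel counts are illustrative; assumes
# `import sparseconvnet as scn` and `dimension` in scope).
dimension = 3
tab = scn.ConcatTable()
tab.add(residual(32, 64, stride=2))  # projection shortcut: strided 3x3 conv
tab.add(scn.Sequential()
        .add(scn.Convolution(dimension, 32, 64, 3, 2, False))
        .add(scn.BatchNormReLU(64))
        .add(scn.SubmanifoldConvolution(dimension, 64, 64, 3, False)))
downsample_block = scn.Sequential().add(tab).add(scn.AddTable())  # sum the branches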
def block(m, a, b):
    if residual_blocks:  # ResNet-style block
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(a,
                                               momentum=bn_momentum,
                                               leakiness=leakiness,
                                               track_running_stats=track_running_stats))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                   .add(scn.BatchNormLeakyReLU(b,
                                               momentum=bn_momentum,
                                               leakiness=leakiness,
                                               track_running_stats=track_running_stats))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG-style block
        m.add(scn.Sequential()
              .add(scn.BatchNormLeakyReLU(a,
                                          momentum=bn_momentum,
                                          leakiness=leakiness,
                                          track_running_stats=track_running_stats))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
    operation = {'kernel': [1, 1, 1], 'stride': [1, 1, 1]}
    return operation
def block(self, m, a, b, dimension=3, residual_blocks=False, leakiness=0,
          kernel_size=3, use_batch_norm=True):
    # default using residual_block
    if use_batch_norm:
        Activation = lambda channels: scn.BatchNormLeakyReLU(channels, leakiness=leakiness)
    else:
        Activation = lambda channels: scn.LeakyReLU(leakiness)
    if residual_blocks:  # ResNet-style block
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(Activation(a))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, kernel_size, False))
                   .add(Activation(b))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, kernel_size, False))))
        m.add(scn.AddTable())
    else:  # VGG-style block
        m.add(scn.Sequential()
              .add(Activation(a))
              .add(scn.SubmanifoldConvolution(dimension, a, b, kernel_size, False)))
def __init__(self, inplanes, outplanes, bias, batch_norm):
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=bias)
    if batch_norm:
        self.activation1 = scn.BatchNormReLU(outplanes, momentum=0.5)
    else:
        self.activation1 = scn.ReLU()
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=bias)
    if batch_norm:
        self.activation2 = scn.BatchNormReLU(outplanes, momentum=0.5)
    else:
        self.activation2 = scn.ReLU()
    self.residual = scn.Identity()
    self.add = scn.AddTable()
def residual_block(m, ninputchs, noutputchs, leakiness=0.01, dimensions=2):
    """Residual module block.

    Intended to append to a sequential module (m): produces
    [identity, 3x3 + 3x3] branches and adds them together.

    inputs
    ------
    m [scn.Sequential]: network to add layers to
    ninputchs [int]: number of input channels
    noutputchs [int]: number of output channels
    leakiness [float]: leakiness of the LeakyReLU activations
    dimensions [int]: dimension of the input sparse tensor

    modifies
    --------
    m: adds layers
    """
    inoutsame = ninputchs == noutputchs
    m.add(scn.ConcatTable()
          .add(scn.Identity() if inoutsame
               else scn.NetworkInNetwork(ninputchs, noutputchs, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(ninputchs, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, ninputchs, noutputchs, 3, False))
               .add(scn.BatchNormLeakyReLU(noutputchs, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, noutputchs, noutputchs, 3, False))))
    m.add(scn.AddTable())
def U(nPlanes, n_input_planes=-1):  # recursive function
    m = scn.Sequential()
    for i in range(reps):
        block(m, n_input_planes if n_input_planes != -1 else nPlanes[0], nPlanes[0])
        n_input_planes = -1
    if len(nPlanes) > 1:
        m.add(scn.ConcatTable()
              .add(scn.Identity())
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(nPlanes[0], leakiness=leakiness))
                   .add(scn.Convolution(dimension, nPlanes[0], nPlanes[1],
                                        downsample[0], downsample[1], False))
                   .add(U(nPlanes[1:]))
                   .add(scn.BatchNormLeakyReLU(nPlanes[1], leakiness=leakiness))
                   .add(scn.Deconvolution(dimension, nPlanes[1], nPlanes[0],
                                          downsample[0], downsample[1], False))))
        m.add(scn.JoinTable())
        for i in range(reps):
            block(m, nPlanes[0] * (2 if i == 0 else 1), nPlanes[0])
    return m
def __init__(self, *, inplanes, outplanes, nplanes=1, params):
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=params.use_bias)
    self.do_batch_norm = False
    if params.batch_norm:
        self.do_batch_norm = True
        self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=False)
    if params.batch_norm:
        self.bn2 = scn.BatchNormalization(outplanes)
    self.residual = scn.Identity()
    self.relu = scn.ReLU()
    self.add = scn.AddTable()
def res(m, dimension, a, b):
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormReLU(a))
               .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
               .add(scn.BatchNormReLU(b))
               .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
    m.add(scn.AddTable())
def block(m, a, b):  # ResNet-style block
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(self._dimension, a, b, 3, False))
               .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(self._dimension, b, b, 3, False))))
    m.add(scn.AddTable())
def SparseResNet(dimension, nInputPlanes, layers):
    """Pre-activated ResNet, e.g. layers = [[16, 2, 1], [32, 2, 2]]."""
    import sparseconvnet as scn
    nPlanes = nInputPlanes
    m = scn.Sequential()

    def residual(nIn, nOut, stride):
        if stride > 1:
            return scn.Convolution(dimension, nIn, nOut, 2, stride, False)
        elif nIn != nOut:
            return scn.NetworkInNetwork(nIn, nOut, False)
        else:
            return scn.Identity()

    for n, reps, stride in layers:
        for rep in range(reps):
            if rep == 0:
                m.add(scn.BatchNormReLU(nPlanes))
                tab = scn.ConcatTable()
                tab_seq = scn.Sequential()
                if stride == 1:
                    tab_seq.add(scn.SubmanifoldConvolution(dimension, nPlanes, n, 3, False))
                else:
                    tab_seq.add(scn.Convolution(dimension, nPlanes, n, 2, stride, False))
                tab_seq.add(scn.BatchNormReLU(n))
                tab_seq.add(scn.SubmanifoldConvolution(dimension, n, n, 3, False))
                tab.add(tab_seq)
                tab.add(residual(nPlanes, n, stride))
                m.add(tab)
            else:
                tab = scn.ConcatTable()
                tab_seq = scn.Sequential()
                tab_seq.add(scn.BatchNormReLU(nPlanes))
                tab_seq.add(scn.SubmanifoldConvolution(dimension, nPlanes, n, 3, False))
                tab_seq.add(scn.BatchNormReLU(n))
                tab_seq.add(scn.SubmanifoldConvolution(dimension, n, n, 3, False))
                tab.add(tab_seq)
                tab.add(scn.Identity())
                m.add(tab)
            nPlanes = n
            m.add(scn.AddTable())
    m.add(scn.BatchNormReLU(nPlanes))
    return m
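# Hypothetical usage of SparseResNet above (spatial size and layer spec are
# illustrative): wrap it with an InputLayer/SparseToDense pair to map point
# lists to a dense feature map.
import torch
import sparseconvnet as scn
model = (scn.Sequential()
         .add(scn.InputLayer(2, torch.LongTensor([64, 64]), mode=3))
         .add(scn.SubmanifoldConvolution(2, 1, 16, 3, False))
         .add(SparseResNet(2, 16, [[16, 2, 1], [32, 2, 2]]))
         .add(scn.SparseToDense(2, 32)))
# coords: LongTensor [N, 3] with the batch index in the last column;
# feats: FloatTensor [N, 1]
# out = model([coords, feats])   # dense tensor, here [batch, 32, 32, 32]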
def f(m, a, b):
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, self.allow_bias))
          .add(scn.Sequential()
               .add(norm_layer(a, leakiness=self.leakiness))
               .add(scn.SubmanifoldConvolution(self.dimension, a, b, 3, self.allow_bias))
               .add(norm_layer(b, leakiness=self.leakiness))
               .add(scn.SubmanifoldConvolution(self.dimension, b, b, 3, self.allow_bias))))
    m.add(scn.AddTable())
    return m
def decoder_block(self, nPlanes, n, reps, stride):
    m = scn.Sequential()
    for rep in range(reps):
        m.add(scn.ConcatTable()
              .add(scn.Sequential()
                   .add(scn.BatchNormReLU(nPlanes))
                   .add(scn.SubmanifoldConvolution(self.dimension, nPlanes, n, 3, False))
                   # .add(scn.BatchNormReLU(n))
                   # .add(scn.SubmanifoldConvolution(dimension, n, n, 3, False))
                   )
              .add(scn.Identity()))
        m.add(scn.AddTable())
        nPlanes = n
    return m
def block(m, a, b):
    if residual_blocks:  # ResNet-style block
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormReLU(a))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                   .add(scn.BatchNormReLU(b))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG-style block
        m.add(scn.Sequential()
              .add(scn.BatchNormReLU(a))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
def foo(m, np):
    for _ in range(reps):
        if residual:  # ResNet-style block
            m.add(scn.ConcatTable()
                  .add(scn.Identity())
                  .add(scn.Sequential()
                       .add(scn.BatchNormLeakyReLU(np, leakiness=leakiness))
                       .add(scn.SubmanifoldConvolution(dimension, np, np, 3, False))
                       .add(scn.BatchNormLeakyReLU(np, leakiness=leakiness))
                       .add(scn.SubmanifoldConvolution(dimension, np, np, 3, False))))
            m.add(scn.AddTable())
        else:  # VGG-style block
            m.add(scn.BatchNormLeakyReLU(np, leakiness=leakiness))
            m.add(scn.SubmanifoldConvolution(dimension, np, np, 3, False))
def block(self, m, a, b, dimension=3, residual_blocks=False, leakiness=0):
    # default using residual_block
    if residual_blocks:  # ResNet-style block
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                   .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG-style block
        m.add(scn.Sequential()
              .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
def SparseResNet(dimension, nInputPlanes, layers, mom=0.99):
    """Pre-activated ResNet, e.g. layers = [['basic', 16, 2, 1], ['basic', 32, 2, 2]]."""
    nPlanes = nInputPlanes
    m = scn.Sequential()

    def residual(nIn, nOut, stride):
        if stride > 1:
            return scn.Convolution(dimension, nIn, nOut, 3, stride, False)
        elif nIn != nOut:
            return scn.NetworkInNetwork(nIn, nOut, False)
        else:
            return scn.Identity()

    for blockType, n, reps, stride in layers:
        for rep in range(reps):
            if blockType[0] == 'b':  # basic block
                if rep == 0:
                    m.add(scn.BatchNormReLU(nPlanes, momentum=mom, eps=1e-5))
                    m.add(scn.ConcatTable()
                          .add(scn.Sequential()
                               .add(scn.SubmanifoldConvolution(dimension, nPlanes, n, 3, False)
                                    if stride == 1 else
                                    scn.Convolution(dimension, nPlanes, n, 3, stride, False))
                               .add(scn.BatchNormReLU(n, momentum=mom, eps=1e-5))
                               .add(scn.SubmanifoldConvolution(dimension, n, n, 3, False)))
                          .add(residual(nPlanes, n, stride)))
                else:
                    m.add(scn.ConcatTable()
                          .add(scn.Sequential()
                               .add(scn.BatchNormReLU(nPlanes, momentum=mom, eps=1e-5))
                               .add(scn.SubmanifoldConvolution(dimension, nPlanes, n, 3, False))
                               .add(scn.BatchNormReLU(n, momentum=mom, eps=1e-5))
                               .add(scn.SubmanifoldConvolution(dimension, n, n, 3, False)))
                          .add(scn.Identity()))
            nPlanes = n
            m.add(scn.AddTable())
    m.add(scn.BatchNormReLU(nPlanes, momentum=mom, eps=1e-5))
    return m
def U(nPlanes):  # recursive function
    m = scn.Sequential()
    for _ in range(reps):
        block(m, nPlanes[0], nPlanes[0])
    if len(nPlanes) > 1:
        m.add(scn.ConcatTable()
              .add(scn.Identity())
              .add(scn.Sequential()
                   .add(scn.BatchNormReLU(nPlanes[0]))
                   .add(scn.Convolution(dimension, nPlanes[0], nPlanes[1],
                                        downsample[0], downsample[1], False))
                   .add(U(nPlanes[1:]))
                   .add(scn.UnPooling(dimension, downsample[0], downsample[1]))))
        m.add(scn.JoinTable())
    return m
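# Hedged sketch of the closure context that the recursive U above assumes (all
# values illustrative): `reps`, `downsample`, `dimension`, and a `block` helper
# must be in scope; nPlanes is a list of per-level channel counts.
dimension = 3
reps = 2
downsample = [2, 2]  # [filter_size, filter_stride]
def block(m, a, b):  # e.g. the VGG-style block from the snippets above
    m.add(scn.BatchNormReLU(a))
    m.add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
unet = U([16, 32, 64])  # three resolution levels: 16, 32, 64 filters
# Note: the JoinTable concatenates skip and unpooled features, so downstream
# layers must expect nPlanes[0] + nPlanes[1] channels at each level.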
def __init__(self):
    super(Model, self).__init__()
    self.inputLayer = scn.InputLayer(dimension, spatial_size=512, mode=3)
    self.initialconv = scn.SubmanifoldConvolution(dimension, nPlanes, 64, 7, False)
    self.residual = scn.Identity()
    self.add = scn.AddTable()
    self.sparsebl11 = (scn.Sequential()
                       .add(scn.SubmanifoldConvolution(dimension, 64, 64, 3, False))
                       .add(scn.BatchNormLeakyReLU(64))
                       .add(scn.SubmanifoldConvolution(dimension, 64, 64, 3, False)))
    self.sparsebl12 = (scn.Sequential()
                       .add(scn.SubmanifoldConvolution(dimension, 64, 64, 3, False))
                       .add(scn.BatchNormLeakyReLU(64))
                       .add(scn.SubmanifoldConvolution(dimension, 64, 64, 3, False)))
    self.sparsebl21 = (scn.Sequential()
                       .add(scn.SubmanifoldConvolution(dimension, 128, 128, 3, False))
                       .add(scn.BatchNormLeakyReLU(128))
                       .add(scn.SubmanifoldConvolution(dimension, 128, 128, 3, False)))
    self.sparsebl22 = (scn.Sequential()
                       .add(scn.SubmanifoldConvolution(dimension, 128, 128, 3, False))
                       .add(scn.BatchNormLeakyReLU(128))
                       .add(scn.SubmanifoldConvolution(dimension, 128, 128, 3, False)))
    # scn.LeakyReLU takes a leakiness value, not a channel count; the original
    # passed 64 and 128 here, which would set an absurd leakiness.
    self.relu1 = scn.LeakyReLU()
    self.relu2 = scn.LeakyReLU()
    self.downsample1 = (scn.Sequential()
                        .add(scn.Convolution(dimension, 64, 64, [2, 2, 2], [2, 2, 2], False))
                        .add(scn.BatchNormLeakyReLU(64)))
    self.downsample2 = (scn.Sequential()
                        .add(scn.Convolution(dimension, 64, 128, [2, 2, 2], [2, 2, 2], False))
                        .add(scn.BatchNormLeakyReLU(128)))
    self.downsample3 = (scn.Sequential()
                        .add(scn.Convolution(dimension, 128, 64, [4, 4, 4], [4, 4, 4], False))
                        .add(scn.BatchNormLeakyReLU(64)))
    self.downsample4 = (scn.Sequential()
                        .add(scn.Convolution(dimension, 64, 2, [4, 4, 4], [4, 4, 4], False))
                        .add(scn.BatchNormLeakyReLU(2)))
    self.sparsetodense = scn.SparseToDense(dimension, 2)
    self.dropout1 = nn.Dropout(0.5)
    self.dropout2 = nn.Dropout(0.5)
    self.linear2 = nn.Linear(2 * 8 * 8 * 8, 2)
    self.linear3 = nn.Linear(2, 1)
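# Hedged sketch of a forward pass this __init__ appears to support; the exact
# wiring is an assumption, chosen so the spatial sizes work out
# (512 -> 256 -> 128 -> 32 -> 8, matching linear2's 2*8*8*8 input).
def forward(self, x):
    x = self.inputLayer(x)       # [coords, feats] -> SparseConvNetTensor (512^3 grid)
    x = self.initialconv(x)      # nPlanes -> 64 channels
    x = self.relu1(self.add([self.sparsebl11(x), self.residual(x)]))
    x = self.downsample1(x)      # 512 -> 256
    x = self.relu1(self.add([self.sparsebl12(x), self.residual(x)]))
    x = self.downsample2(x)      # 256 -> 128; 64 -> 128 channels
    x = self.relu2(self.add([self.sparsebl21(x), self.residual(x)]))
    x = self.relu2(self.add([self.sparsebl22(x), self.residual(x)]))
    x = self.downsample4(self.downsample3(x))  # 128 -> 32 -> 8; channels -> 2
    x = self.sparsetodense(x)    # dense tensor [B, 2, 8, 8, 8]
    x = x.view(x.size(0), -1)
    x = self.linear2(self.dropout1(x))
    return self.linear3(self.dropout2(x))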
def _resnet_block(self, module, a, b):
    '''
    Utility method for attaching a ResNet-style block.

    INPUTS:
        - module (scn Module): network module to attach the ResNet block to.
        - a (int): input feature dimension
        - b (int): output feature dimension

    RETURNS:
        None (operation is in-place)
    '''
    module.add(scn.ConcatTable()
               .add(scn.Identity() if a == b
                    else scn.NetworkInNetwork(a, b, self.allow_bias))
               .add(scn.Sequential()
                    .add(scn.BatchNormLeakyReLU(a, leakiness=self.leakiness))
                    .add(scn.SubmanifoldConvolution(self.dimension, a, b, 3, self.allow_bias))
                    .add(scn.BatchNormLeakyReLU(b, leakiness=self.leakiness))
                    .add(scn.SubmanifoldConvolution(self.dimension, b, b, 3, self.allow_bias))))
    module.add(scn.AddTable())
def __init__(self, cfg, name='ynet_full'):
    super().__init__(cfg, name)
    self.model_config = cfg[name]
    self.num_filters = self.model_config.get('filters', 16)
    self.seed_dim = self.model_config.get('seed_dim', 1)
    self.sigma_dim = self.model_config.get('sigma_dim', 1)
    self.embedding_dim = self.model_config.get('embedding_dim', 3)
    self.inputKernel = self.model_config.get('input_kernel_size', 3)
    self.coordConv = self.model_config.get('coordConv', False)

    # YResNet configuration: operation for mapping latent secondary
    # features to primary features.
    self.mapping_op = self.model_config.get('mapping_operation', 'pool')
    assert self.mapping_op in self.supported_mapping_ops

    # Network freezing options
    self.encoder_freeze = self.model_config.get('encoder_freeze', False)
    self.embedding_freeze = self.model_config.get('embedding_freeze', False)
    self.seediness_freeze = self.model_config.get('seediness_freeze', False)

    # Input layer configuration and commonly used scn operations.
    self.input = scn.Sequential().add(
        scn.InputLayer(self.dimension, self.spatial_size, mode=3)).add(
        scn.SubmanifoldConvolution(self.dimension, self.nInputFeatures,
                                   self.num_filters, self.inputKernel,
                                   self.allow_bias))  # kernel size 3, no bias
    self.add = scn.AddTable()

    # Preprocessing logic for the secondary branch
    self.t_bn = scn.BatchNormLeakyReLU(1, leakiness=self.leakiness)
    self.netinnet = scn.Sequential()
    self._resnet_block(self.netinnet, 1, self.num_filters)

    # Timing information
    max_seq_len = self.model_config.get('max_seq_len', 5)
    self.pe = SinusoidalPositionalEncoding(max_seq_len, 1)

    # Backbone YResNet. Do NOT change the names!
    self.primary_encoder = YResNetEncoder(cfg, name='yresnet_encoder')
    self.secondary_encoder = YResNetEncoder(cfg, name='yresnet_encoder')
    if self.mapping_op == 'conv':
        self.mapping = ConvolutionalFeatureMapping(
            self.dimension, self.nPlanes[-1], self.nPlanes[-1], 2, 2, False)
    elif self.mapping_op == 'pool':
        self.mapping = PoolFeatureMapping(self.dimension, 2, 2)
    self.seed_net = YResNetDecoder(cfg, name='seediness_decoder')
    self.cluster_net = YResNetDecoder(cfg, name='embedding_decoder')

    # Encoder-decoder 1x1 connections
    encoder_planes = [i for i in self.primary_encoder.nPlanes]
    cluster_planes = [i for i in self.cluster_net.nPlanes]
    seed_planes = [i for i in self.seed_net.nPlanes]
    self.skip_mode = self.model_config.get('skip_mode', 'default')
    self.cluster_skip = scn.Sequential()
    self.seed_skip = scn.Sequential()

    # Output layers
    self.output_cluster = scn.Sequential()
    self._nin_block(self.output_cluster, self.cluster_net.num_filters, 4)
    self.output_cluster.add(scn.OutputLayer(self.dimension))
    self.output_seediness = scn.Sequential()
    self._nin_block(self.output_seediness, self.seed_net.num_filters, 1)
    self.output_seediness.add(scn.OutputLayer(self.dimension))

    if self.skip_mode == 'default':
        for p1, p2 in zip(encoder_planes, cluster_planes):
            self.cluster_skip.add(scn.Identity())
        for p1, p2 in zip(encoder_planes, seed_planes):
            self.seed_skip.add(scn.Identity())
    elif self.skip_mode == '1x1':
        for p1, p2 in zip(encoder_planes, cluster_planes):
            self._nin_block(self.cluster_skip, p1, p2)
        for p1, p2 in zip(encoder_planes, seed_planes):
            self._nin_block(self.seed_skip, p1, p2)
    else:
        raise ValueError('Invalid skip connection mode!')

    # Freeze layers
    if self.encoder_freeze:
        for p in self.encoder.parameters():
            p.requires_grad = False
    if self.embedding_freeze:
        for p in self.cluster_net.parameters():
            p.requires_grad = False
        for p in self.output_cluster.parameters():
            p.requires_grad = False
    if self.seediness_freeze:
        for p in self.seed_net.parameters():
            p.requires_grad = False
        for p in self.output_seediness.parameters():
            p.requires_grad = False

    # PyTorch activations
    self.tanh = nn.Tanh()
    self.sigmoid = nn.Sigmoid()
def get_identity(num_dims, sparse, input_channels, output_channels=None):
    assert input_channels == output_channels or output_channels is None
    stride = np.full(num_dims, 1)
    identity = scn.Identity() if sparse else nn.Identity()
    return sparse, stride, input_channels, identity
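# Illustrative call (all values hypothetical): an identity "layer" descriptor
# for a 3D sparse tensor with 32 channels; the stride vector stays all ones.
import numpy as np
is_sparse, stride, channels, layer = get_identity(3, True, 32)
# is_sparse == True, stride == array([1, 1, 1]), channels == 32,
# layer is an scn.Identity() module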
def __init__(self, cfg, name='unet_full'):
    super().__init__(cfg, name)
    self.model_config = cfg[name]
    self.num_filters = self.model_config.get('filters', 16)
    self.ghost = self.model_config.get('ghost', False)
    self.seed_dim = self.model_config.get('seed_dim', 1)
    self.sigma_dim = self.model_config.get('sigma_dim', 1)
    self.embedding_dim = self.model_config.get('embedding_dim', 3)
    self.num_classes = self.model_config.get('num_classes', 5)
    self.num_gnn_features = self.model_config.get('num_gnn_features', 16)
    self.inputKernel = self.model_config.get('input_kernel_size', 3)
    self.coordConv = self.model_config.get('coordConv', False)

    # Network freezing options
    self.encoder_freeze = self.model_config.get('encoder_freeze', False)
    self.ppn_freeze = self.model_config.get('ppn_freeze', False)
    self.segmentation_freeze = self.model_config.get('segmentation_freeze', False)
    self.embedding_freeze = self.model_config.get('embedding_freeze', False)
    self.seediness_freeze = self.model_config.get('seediness_freeze', False)

    # Input layer configuration and commonly used scn operations.
    self.input = scn.Sequential().add(
        scn.InputLayer(self.dimension, self.spatial_size, mode=3)).add(
        scn.SubmanifoldConvolution(self.dimension, self.nInputFeatures,
                                   self.num_filters, self.inputKernel,
                                   self.allow_bias))  # kernel size 3, no bias
    self.concat = scn.JoinTable()
    self.add = scn.AddTable()

    # Backbone UResNet. Do NOT change the names!
    self.encoder = UResNetEncoder(cfg, name='uresnet_encoder')
    # self.seg_net = UResNetDecoder(cfg, name='segmentation_decoder')
    self.seed_net = UResNetDecoder(cfg, name='seediness_decoder')
    self.cluster_net = UResNetDecoder(cfg, name='embedding_decoder')

    # Encoder-decoder 1x1 connections
    encoder_planes = [i for i in self.encoder.nPlanes]
    # seg_planes = [i for i in self.seg_net.nPlanes]
    cluster_planes = [i for i in self.cluster_net.nPlanes]
    seed_planes = [i for i in self.seed_net.nPlanes]
    # print("Encoder Planes: ", encoder_planes)
    # print("Cluster Planes: ", cluster_planes)
    # print("Seediness Planes: ", seed_planes)

    self.skip_mode = self.model_config.get('skip_mode', 'default')
    # self.seg_skip = scn.Sequential()
    self.cluster_skip = scn.Sequential()
    self.seed_skip = scn.Sequential()

    # Output layers
    self.output_cluster = scn.Sequential()
    self._nin_block(self.output_cluster, self.cluster_net.num_filters, 4)
    self.output_cluster.add(scn.OutputLayer(self.dimension))

    self.output_seediness = scn.Sequential()
    self._nin_block(self.output_seediness, self.seed_net.num_filters, 1)
    self.output_seediness.add(scn.OutputLayer(self.dimension))

    '''
    self.output_segmentation = scn.Sequential()
    self._nin_block(self.output_segmentation, self.seg_net.num_filters,
                    self.num_classes)
    self.output_segmentation.add(scn.OutputLayer(self.dimension))
    '''
    '''
    self.output_gnn_features = scn.Sequential()
    sum_filters = (self.seg_net.num_filters + self.seed_net.num_filters
                   + self.cluster_net.num_filters)
    self._resnet_block(self.output_gnn_features, sum_filters,
                       self.num_gnn_features)
    self._nin_block(self.output_gnn_features, self.num_gnn_features,
                    self.num_gnn_features)
    self.output_gnn_features.add(scn.OutputLayer(self.dimension))
    '''

    if self.ghost:
        self.linear_ghost = scn.Sequential()
        self._nin_block(self.linear_ghost, self.num_filters, 2)
        # self.linear_ghost.add(scn.OutputLayer(self.dimension))

    # PPN
    # self.ppn = PPN(cfg)

    if self.skip_mode == 'default':
        '''
        for p1, p2 in zip(encoder_planes, seg_planes):
            self.seg_skip.add(scn.Identity())
        '''
        for p1, p2 in zip(encoder_planes, cluster_planes):
            self.cluster_skip.add(scn.Identity())
        for p1, p2 in zip(encoder_planes, seed_planes):
            self.seed_skip.add(scn.Identity())
        '''
        self.ppn_transform = scn.Sequential()
        ppn1_num_filters = seg_planes[self.ppn.ppn1_stride - self.ppn._num_strides]
        self._nin_block(self.ppn_transform, encoder_planes[-1], ppn1_num_filters)
        '''
    elif self.skip_mode == '1x1':
        '''
        for p1, p2 in zip(encoder_planes, seg_planes):
            self._nin_block(self.seg_skip, p1, p2)
        '''
        for p1, p2 in zip(encoder_planes, cluster_planes):
            self._nin_block(self.cluster_skip, p1, p2)
        for p1, p2 in zip(encoder_planes, seed_planes):
            self._nin_block(self.seed_skip, p1, p2)
        # self.ppn_transform = scn.Identity()
    else:
        raise ValueError('Invalid skip connection mode!')

    # Freeze layers
    if self.encoder_freeze:
        for p in self.encoder.parameters():
            p.requires_grad = False
        print('Encoder frozen')
    '''
    if self.ppn_freeze:
        for p in self.ppn.parameters():
            p.requires_grad = False
        print('PPN frozen')
    '''
    '''
    if self.segmentation_freeze:
        for p in self.seg_net.parameters():
            p.requires_grad = False
        for p in self.output_segmentation.parameters():
            p.requires_grad = False
        print('Segmentation branch frozen')
    '''
    if self.embedding_freeze:
        for p in self.cluster_net.parameters():
            p.requires_grad = False
        for p in self.output_cluster.parameters():
            p.requires_grad = False
        print('Clustering branch frozen')
    if self.seediness_freeze:
        for p in self.seed_net.parameters():
            p.requires_grad = False
        for p in self.output_seediness.parameters():
            p.requires_grad = False
        print('Seediness branch frozen')

    # PyTorch activations
    self.tanh = nn.Tanh()
    self.sigmoid = nn.Sigmoid()