def __init__(self, *, inplanes, outplanes, nplanes=1, params):
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=params.use_bias)
    self.do_batch_norm = False
    if params.batch_norm:
        self.do_batch_norm = True
        self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=False)
    if params.batch_norm:
        self.bn2 = scn.BatchNormalization(outplanes)
    self.residual = scn.Identity()
    self.relu = scn.ReLU()
    self.add = scn.AddTable()
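# Hedged sketch (not from the original source): the layers registered above
# imply a standard pre-add residual forward pass; this method is an assumption
# about how the block is applied.
def forward(self, x):
    residual = self.residual(x)        # identity shortcut branch
    out = self.conv1(x)
    if self.do_batch_norm:
        out = self.bn1(out)            # BatchNormReLU fuses norm + activation
    out = self.conv2(out)
    if self.do_batch_norm:
        out = self.bn2(out)            # plain BatchNormalization, no ReLU
    out = self.add([out, residual])    # scn.AddTable sums a list of tensors
    return self.relu(out)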
def residual_block(m, a, b, leakiness=0.01, dimensions=2):
    """
    Append a residual block to a sequential module: produce
    [identity, 3x3+3x3] branches, then add them together.

    inputs
    ------
    m [scn.Sequential module]: network to add layers to
    a [int]: number of input channels
    b [int]: number of output channels
    leakiness [float]: leakiness of ReLU activations
    dimensions [int]: dimensions of input sparse tensor

    modifies
    --------
    m: adds layers
    """
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b
               else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, a, b, 3, False))
               .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, b, b, 3, False))))
    m.add(scn.AddTable())
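# Usage sketch for residual_block (hypothetical channel counts): stacking two
# blocks onto an scn.Sequential; the NetworkInNetwork shortcut handles 16 != 32.
stem = scn.Sequential()
residual_block(stem, 16, 16, leakiness=0.01, dimensions=2)
residual_block(stem, 16, 32, leakiness=0.01, dimensions=2)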
def __init__(self, nf_in, nf, input_sparsetensor, return_sparsetensor,
             max_data_size):
    nn.Module.__init__(self)
    data_dim = 3
    self.nf_in = nf_in
    self.nf = nf
    self.input_sparsetensor = input_sparsetensor
    self.return_sparsetensor = return_sparsetensor
    self.max_data_size = max_data_size
    if not self.input_sparsetensor:
        self.p0 = scn.InputLayer(data_dim, self.max_data_size, mode=0)
    # FSIZE0 / FSIZE1 are filter-size constants defined elsewhere in the module.
    self.p1 = scn.SubmanifoldConvolution(data_dim, nf_in, nf,
                                         filter_size=FSIZE0, bias=False)
    self.p2 = scn.Sequential()
    self.p2.add(scn.ConcatTable()
                .add(scn.Identity())
                .add(scn.Sequential()
                     .add(scn.BatchNormReLU(nf))
                     .add(scn.SubmanifoldConvolution(data_dim, nf, nf,
                                                     FSIZE0, False))
                     .add(scn.BatchNormReLU(nf))
                     .add(scn.SubmanifoldConvolution(data_dim, nf, nf,
                                                     FSIZE0, False))))
    self.p2.add(scn.AddTable())
    self.p2.add(scn.BatchNormReLU(nf))
    # downsample space by factor of 2
    self.p3 = scn.Sequential().add(
        scn.Convolution(data_dim, nf, nf, FSIZE1, 2, False))
    self.p3.add(scn.BatchNormReLU(nf))
    if not self.return_sparsetensor:
        self.p4 = scn.SparseToDense(data_dim, nf)
def block(self, m, a, b, dimension=3, residual_blocks=False, leakiness=0,
          kernel_size=3, use_batch_norm=True):
    # default using residual_block
    if use_batch_norm:
        Activation = lambda channels: scn.BatchNormLeakyReLU(
            channels, leakiness=leakiness)
    else:
        Activation = lambda channels: scn.LeakyReLU(leakiness)
    if residual_blocks:  # ResNet style blocks
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b
                   else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(Activation(a))
                   .add(scn.SubmanifoldConvolution(dimension, a, b,
                                                   kernel_size, False))
                   .add(Activation(b))
                   .add(scn.SubmanifoldConvolution(dimension, b, b,
                                                   kernel_size, False))))
        m.add(scn.AddTable())
    else:  # VGG style blocks
        m.add(scn.Sequential()
              .add(Activation(a))
              .add(scn.SubmanifoldConvolution(dimension, a, b,
                                              kernel_size, False)))
def __init__(self, inplanes, outplanes, nplanes=1):
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=False)
    # if FLAGS.BATCH_NORM:
    self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=[nplanes, 3, 3],
                                            bias=False)
    # if FLAGS.BATCH_NORM:
    self.bn2 = scn.BatchNormalization(outplanes)
    self.residual = scn.Identity()
    self.relu = scn.ReLU()
    self.add = scn.AddTable()
def __init__(self, inplanes, outplanes, bias, batch_norm):
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=bias)
    if batch_norm:
        self.activation1 = scn.BatchNormReLU(outplanes, momentum=0.5)
    else:
        self.activation1 = scn.ReLU()
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=bias)
    if batch_norm:
        self.activation2 = scn.BatchNormReLU(outplanes, momentum=0.5)
    else:
        self.activation2 = scn.ReLU()
    self.residual = scn.Identity()
    self.add = scn.AddTable()
def block(m, a, b):
    if residual_blocks:  # ResNet style blocks
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b
                   else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(
                        a, momentum=bn_momentum, leakiness=leakiness,
                        track_running_stats=track_running_stats))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                   .add(scn.BatchNormLeakyReLU(
                        b, momentum=bn_momentum, leakiness=leakiness,
                        track_running_stats=track_running_stats))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG style blocks
        m.add(scn.Sequential()
              .add(scn.BatchNormLeakyReLU(
                   a, momentum=bn_momentum, leakiness=leakiness,
                   track_running_stats=track_running_stats))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
    operation = {'kernel': [1, 1, 1], 'stride': [1, 1, 1]}
    return operation
def residual_block(m, ninputchs, noutputchs, leakiness=0.01, dimensions=2):
    """
    Residual Module Block

    Intended to append to a sequential module (m): produce
    [identity, 3x3+3x3] branches, then add them together.

    inputs
    ------
    m [scn.Sequential module]: network to add layers to
    ninputchs [int]: number of input channels
    noutputchs [int]: number of output channels
    leakiness [float]: leakiness of ReLU activations
    dimensions [int]: dimensions of input sparse tensor

    modifies
    --------
    m: adds layers
    """
    inoutsame = ninputchs == noutputchs
    m.add(scn.ConcatTable()
          .add(scn.Identity() if inoutsame
               else scn.NetworkInNetwork(ninputchs, noutputchs, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(ninputchs, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, ninputchs,
                                               noutputchs, 3, False))
               .add(scn.BatchNormLeakyReLU(noutputchs, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(dimensions, noutputchs,
                                               noutputchs, 3, False))))
    m.add(scn.AddTable())
def sparse_residual(inner_block, residual_changer, activation):
    layer = scn.Sequential(
        scn.ConcatTable(residual_changer, inner_block),
        scn.AddTable())
    if activation is not None:
        layer.add(activation)  # scn.Sequential extends via .add(), not .append()
    return layer
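# Usage sketch for sparse_residual (assumed dimensions and channel counts):
# wrap a 3x3 + 3x3 submanifold stack with an NiN shortcut and a trailing ReLU.
inner = scn.Sequential(
    scn.SubmanifoldConvolution(3, 32, 64, 3, False),
    scn.BatchNormReLU(64),
    scn.SubmanifoldConvolution(3, 64, 64, 3, False))
block = sparse_residual(inner,
                        residual_changer=scn.NetworkInNetwork(32, 64, False),
                        activation=scn.ReLU())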
def block(self, nPlanes, n, reps, stride):
    m = scn.Sequential()
    for rep in range(reps):
        if rep == 0:
            m.add(scn.BatchNormReLU(nPlanes))
            m.add(scn.ConcatTable()
                  .add(self.residual(nPlanes, n, stride))
                  .add(scn.Sequential()
                       .add(scn.SubmanifoldConvolution(
                                self.dimension, nPlanes, n, 3, False)
                            if stride == 1 else
                            scn.Convolution(
                                self.dimension, nPlanes, n, 2, stride, False))
                       .add(scn.BatchNormReLU(n))
                       .add(scn.SubmanifoldConvolution(
                            self.dimension, n, n, 3, False))))
        else:
            m.add(scn.ConcatTable()
                  .add(scn.Sequential()
                       .add(scn.BatchNormReLU(nPlanes))
                       .add(scn.SubmanifoldConvolution(
                            self.dimension, nPlanes, n, 3, False))
                       .add(scn.BatchNormReLU(n))
                       .add(scn.SubmanifoldConvolution(
                            self.dimension, n, n, 3, False)))
                  .add(scn.Identity()))
        m.add(scn.AddTable())
        nPlanes = n
    return m
def __init__(self, inplanes, outplanes, batch_norm, leaky_relu):
    nn.Module.__init__(self)
    self.batch_norm = batch_norm
    self.leaky_relu = leaky_relu
    self.conv1 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=inplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=False)
    if self.batch_norm:
        if self.leaky_relu:
            self.bn1 = scn.BatchNormLeakyReLU(outplanes)
        else:
            self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(dimension=3,
                                            nIn=outplanes,
                                            nOut=outplanes,
                                            filter_size=3,
                                            bias=False)
    if self.batch_norm:
        self.bn2 = scn.BatchNormalization(outplanes)
    self.residual = scn.Identity()
    if self.leaky_relu:
        self.relu = scn.LeakyReLU()
    else:
        self.relu = scn.ReLU()
    self.add = scn.AddTable()
def block(self, n_in, n_out):
    m = scn.Sequential()
    if self.residual_blocks:  # ResNet style blocks
        m.add(scn.ConcatTable()
              .add(scn.Identity() if n_in == n_out
                   else scn.NetworkInNetwork(n_in, n_out, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(n_in,
                                               leakiness=self.leakiness))
                   .add(scn.SubmanifoldConvolution(
                        self.dimension, n_in, n_out, 3, False))
                   .add(scn.BatchNormLeakyReLU(n_out,
                                               leakiness=self.leakiness))
                   .add(scn.SubmanifoldConvolution(
                        self.dimension, n_out, n_out, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG style blocks
        m.add(scn.BatchNormLeakyReLU(n_in, leakiness=self.leakiness))
        m.add(scn.SubmanifoldConvolution(self.dimension, n_in, n_out,
                                         3, False))
    return m
def __init__(self, cfg, name='yresnet_decoder'):
    super(YResNetDecoder, self).__init__(cfg, name='network_base')
    self.model_config = cfg[name]
    # Conv block repetition factor
    self.reps = self.model_config.get('reps', 2)
    self.kernel_size = self.model_config.get('kernel_size', 2)
    self.num_strides = self.model_config.get('num_strides', 5)
    self.num_filters = self.model_config.get('filters', 16)
    self.nPlanes = [
        i * self.num_filters for i in range(1, self.num_strides + 1)
    ]
    self.downsample = [self.kernel_size, 2]  # [filter size, filter stride]
    self.concat = scn.JoinTable()
    self.add = scn.AddTable()
    dropout_prob = self.model_config.get('dropout_prob', 0.5)
    self.encoder_num_filters = self.model_config.get(
        'encoder_num_filters', None)
    if self.encoder_num_filters is None:
        self.encoder_num_filters = self.num_filters
    self.encoder_nPlanes = [
        i * self.encoder_num_filters for i in range(1, self.num_strides + 1)
    ]
    # Define Sparse YResNet Decoder.
    self.decoding_block = scn.Sequential()
    self.decoding_conv = scn.Sequential()
    for idx, i in enumerate(range(self.num_strides - 2, -1, -1)):
        if idx == 0:
            m = scn.Sequential().add(
                scn.BatchNormLeakyReLU(
                    self.encoder_nPlanes[i + 1],
                    leakiness=self.leakiness)).add(
                scn.Deconvolution(
                    self.dimension, self.encoder_nPlanes[i + 1],
                    self.nPlanes[i], self.downsample[0],
                    self.downsample[1], self.allow_bias))
        else:
            m = scn.Sequential().add(
                scn.BatchNormLeakyReLU(
                    self.nPlanes[i + 1],
                    leakiness=self.leakiness)).add(
                scn.Deconvolution(
                    self.dimension, self.nPlanes[i + 1],
                    self.nPlanes[i], self.downsample[0],
                    self.downsample[1], self.allow_bias)).add(
                scn.Dropout(p=dropout_prob))
        self.decoding_conv.add(m)
        m = scn.Sequential()
        for j in range(self.reps):
            self._resnet_block(
                m,
                self.nPlanes[i] + (self.encoder_nPlanes[i] if j == 0 else 0),
                self.nPlanes[i])
        self.decoding_block.add(m)
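# Hypothetical config sketch matching the keys read above; the key names and
# values are taken from the .get() calls and their defaults, not from any
# documented schema.
cfg = {'yresnet_decoder': {'reps': 2, 'kernel_size': 2, 'num_strides': 5,
                           'filters': 16, 'dropout_prob': 0.5,
                           'encoder_num_filters': 16}}
decoder = YResNetDecoder(cfg)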
def block(m, a, b):
    # ResNet style blocks
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b
               else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(self._dimension, a, b, 3, False))
               .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
               .add(scn.SubmanifoldConvolution(self._dimension, b, b, 3, False))))
    m.add(scn.AddTable())
def __init__(self, inplanes, kernel, dim=3):
    torch.nn.Module.__init__(self)
    self.bnr1 = scn.BatchNormReLU(inplanes)
    self.subconv1 = scn.SubmanifoldConvolution(dim, inplanes, inplanes,
                                               kernel, False)
    self.bnr2 = scn.BatchNormReLU(inplanes)
    self.subconv2 = scn.SubmanifoldConvolution(dim, inplanes, inplanes,
                                               kernel, False)
    self.add = scn.AddTable()
def res(m, dimension, a, b):
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b
               else scn.NetworkInNetwork(a, b, False))
          .add(scn.Sequential()
               .add(scn.BatchNormReLU(a))
               .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
               .add(scn.BatchNormReLU(b))
               .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
    m.add(scn.AddTable())
def SparseResNet(dimension, nInputPlanes, layers):
    """
    Pre-activated ResNet.
    e.g. layers = [(16, 2, 1), (32, 2, 2)]  # (planes, reps, stride)
    """
    import sparseconvnet as scn
    nPlanes = nInputPlanes
    m = scn.Sequential()

    def residual(nIn, nOut, stride):
        if stride > 1:
            return scn.Convolution(dimension, nIn, nOut, 2, stride, False)
        elif nIn != nOut:
            return scn.NetworkInNetwork(nIn, nOut, False)
        else:
            return scn.Identity()

    for n, reps, stride in layers:
        for rep in range(reps):
            if rep == 0:
                m.add(scn.BatchNormReLU(nPlanes))
                tab = scn.ConcatTable()
                tab_seq = scn.Sequential()
                if stride == 1:
                    tab_seq.add(scn.SubmanifoldConvolution(
                        dimension, nPlanes, n, 3, False))
                else:
                    tab_seq.add(scn.Convolution(
                        dimension, nPlanes, n, 2, stride, False))
                tab_seq.add(scn.BatchNormReLU(n))
                tab_seq.add(scn.SubmanifoldConvolution(
                    dimension, n, n, 3, False))
                tab.add(tab_seq)
                tab.add(residual(nPlanes, n, stride))
                m.add(tab)
            else:
                tab = scn.ConcatTable()
                tab_seq = scn.Sequential()
                tab_seq.add(scn.BatchNormReLU(nPlanes))
                tab_seq.add(scn.SubmanifoldConvolution(
                    dimension, nPlanes, n, 3, False))
                tab_seq.add(scn.BatchNormReLU(n))
                tab_seq.add(scn.SubmanifoldConvolution(
                    dimension, n, n, 3, False))
                tab.add(tab_seq)
                tab.add(scn.Identity())
                m.add(tab)
            nPlanes = n
            m.add(scn.AddTable())
    m.add(scn.BatchNormReLU(nPlanes))
    return m
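# Usage sketch (hypothetical layer spec): each entry is (planes, reps, stride),
# matching the tuple unpacking in the loop above.
net = SparseResNet(dimension=2, nInputPlanes=16,
                   layers=[(16, 2, 1), (32, 2, 2), (64, 2, 2)])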
def baz(depth, nPlanes):
    if depth == 1:
        return scn.Sequential().add(foo(nPlanes)).add(bar(nPlanes, True))
    else:
        return scn.Sequential().add(foo(nPlanes)).add(
            scn.ConcatTable()
            .add(bar(nPlanes, False))
            .add(scn.Sequential()
                 .add(scn.BatchNormReLU(nPlanes))
                 .add(scn.Convolution(dimension, nPlanes, l(nPlanes),
                                      2, 2, False))
                 .add(baz(depth - 1, l(nPlanes)))
                 .add(scn.UnPooling(dimension, 2, 2)))
        ).add(scn.AddTable())
def f(m, a, b):
    m.add(scn.ConcatTable()
          .add(scn.Identity() if a == b
               else scn.NetworkInNetwork(a, b, self.allow_bias))
          .add(scn.Sequential()
               .add(norm_layer(a, leakiness=self.leakiness))
               .add(scn.SubmanifoldConvolution(
                    self.dimension, a, b, 3, self.allow_bias))
               .add(norm_layer(b, leakiness=self.leakiness))
               .add(scn.SubmanifoldConvolution(
                    self.dimension, b, b, 3, self.allow_bias))))
    m.add(scn.AddTable())
    return m
def decoder_block(self, nPlanes, n, reps, stride):
    m = scn.Sequential()
    for rep in range(reps):
        m.add(scn.ConcatTable()
              .add(scn.Sequential()
                   .add(scn.BatchNormReLU(nPlanes))
                   .add(scn.SubmanifoldConvolution(
                        self.dimension, nPlanes, n, 3, False))
                   # .add(scn.BatchNormReLU(n))
                   # .add(scn.SubmanifoldConvolution(dimension, n, n, 3, False))
                   )
              .add(scn.Identity()))
        m.add(scn.AddTable())
        nPlanes = n
    return m
def block(m, a, b):
    if residual_blocks:  # ResNet style blocks
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b
                   else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormReLU(a))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                   .add(scn.BatchNormReLU(b))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG style blocks
        m.add(scn.Sequential()
              .add(scn.BatchNormReLU(a))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
def foo(m, np):
    for _ in range(reps):
        if residual:  # ResNet style blocks
            m.add(scn.ConcatTable()
                  .add(scn.Identity())
                  .add(scn.Sequential()
                       .add(scn.BatchNormLeakyReLU(np, leakiness=leakiness))
                       .add(scn.SubmanifoldConvolution(dimension, np, np,
                                                       3, False))
                       .add(scn.BatchNormLeakyReLU(np, leakiness=leakiness))
                       .add(scn.SubmanifoldConvolution(dimension, np, np,
                                                       3, False))))
            m.add(scn.AddTable())
        else:  # VGG style blocks
            m.add(scn.BatchNormLeakyReLU(np, leakiness=leakiness))
            m.add(scn.SubmanifoldConvolution(dimension, np, np, 3, False))
def block(self, m, a, b, dimension=3, residual_blocks=False, leakiness=0):
    # default using residual_block
    if residual_blocks:  # ResNet style blocks
        m.add(scn.ConcatTable()
              .add(scn.Identity() if a == b
                   else scn.NetworkInNetwork(a, b, False))
              .add(scn.Sequential()
                   .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
                   .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False))
                   .add(scn.BatchNormLeakyReLU(b, leakiness=leakiness))
                   .add(scn.SubmanifoldConvolution(dimension, b, b, 3, False))))
        m.add(scn.AddTable())
    else:  # VGG style blocks
        m.add(scn.Sequential()
              .add(scn.BatchNormLeakyReLU(a, leakiness=leakiness))
              .add(scn.SubmanifoldConvolution(dimension, a, b, 3, False)))
def baz(nPlanes):
    m = scn.Sequential()
    foo(m, nPlanes[0])
    if len(nPlanes) == 1:
        bar(m, nPlanes[0], True)
    else:
        a = scn.Sequential()
        bar(a, nPlanes[0], False)  # bar operates on a channel count, not the list
        b = scn.Sequential(
            scn.BatchNormLeakyReLU(nPlanes[0], leakiness=leakiness),
            scn.Convolution(dimension, nPlanes[0], nPlanes[1],
                            downsample[0], downsample[1], False),
            baz(nPlanes[1:]),
            scn.UnPooling(dimension, downsample[0], downsample[1]))
        m.add(scn.ConcatTable(a, b))
        m.add(scn.AddTable())
    return m  # the recursive baz(nPlanes[1:]) call relies on this module being returned
def SparseResNet(dimension, nInputPlanes, layers, mom=0.99):
    """
    Pre-activated ResNet.
    e.g. layers = [('basic', 16, 2, 1), ('basic', 32, 2, 2)]
    """
    nPlanes = nInputPlanes
    m = scn.Sequential()

    def residual(nIn, nOut, stride):
        if stride > 1:
            return scn.Convolution(dimension, nIn, nOut, 3, stride, False)
        elif nIn != nOut:
            return scn.NetworkInNetwork(nIn, nOut, False)
        else:
            return scn.Identity()

    for blockType, n, reps, stride in layers:
        for rep in range(reps):
            if blockType[0] == 'b':  # basic block
                if rep == 0:
                    m.add(scn.BatchNormReLU(nPlanes, momentum=mom, eps=1e-5))
                    m.add(scn.ConcatTable()
                          .add(scn.Sequential()
                               .add(scn.SubmanifoldConvolution(
                                        dimension, nPlanes, n, 3, False)
                                    if stride == 1 else
                                    scn.Convolution(dimension, nPlanes, n,
                                                    3, stride, False))
                               .add(scn.BatchNormReLU(n, momentum=mom,
                                                      eps=1e-5))
                               .add(scn.SubmanifoldConvolution(
                                    dimension, n, n, 3, False)))
                          .add(residual(nPlanes, n, stride)))
                else:
                    m.add(scn.ConcatTable()
                          .add(scn.Sequential()
                               .add(scn.BatchNormReLU(nPlanes, momentum=mom,
                                                      eps=1e-5))
                               .add(scn.SubmanifoldConvolution(
                                    dimension, nPlanes, n, 3, False))
                               .add(scn.BatchNormReLU(n, momentum=mom,
                                                      eps=1e-5))
                               .add(scn.SubmanifoldConvolution(
                                    dimension, n, n, 3, False)))
                          .add(scn.Identity()))
            nPlanes = n
            m.add(scn.AddTable())
    m.add(scn.BatchNormReLU(nPlanes, momentum=mom, eps=1e-5))
    return m
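# Usage sketch (hypothetical spec): each entry is (blockType, planes, reps,
# stride); only 'basic' blocks are handled by the loop above.
net = SparseResNet(dimension=3, nInputPlanes=16,
                   layers=[('basic', 16, 2, 1), ('basic', 32, 2, 2)], mom=0.99)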
def __init__(self, inplanes, kernel, stride, bias=False, dim=3):
    torch.nn.Module.__init__(self)
    outplanes = inplanes // 2
    # f1: normalize, upsample, then refine
    self.bnr1 = scn.BatchNormReLU(inplanes)
    self.deconv1 = scn.Deconvolution(dim, inplanes, outplanes,
                                     kernel, stride, bias)
    self.bnr2 = scn.BatchNormReLU(outplanes)
    self.subconv = scn.SubmanifoldConvolution(dim, outplanes, outplanes,
                                              kernel, bias)
    # f2: direct upsampling shortcut
    self.deconv2 = scn.Deconvolution(dim, inplanes, outplanes,
                                     kernel, stride, bias)
    self.add = scn.AddTable()
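# Hedged forward() sketch (assumption, not the original code): branch f1
# normalizes, upsamples, and refines; branch f2 upsamples directly; the two
# halves are then summed.
def forward(self, x):
    f1 = self.subconv(self.bnr2(self.deconv1(self.bnr1(x))))
    f2 = self.deconv2(x)
    return self.add([f1, f2])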
def __init__(self, inplanes, planes, stride=1, upsample=None, **kwargs):
    super(TransBasicBlockSparse, self).__init__()
    self.conv1 = conv3x3_sparse(inplanes, inplanes)
    self.bn1 = scn.BatchNormReLU(inplanes)
    self.relu = scn.ReLU()
    if upsample is not None and stride != 1:
        # the transposed conv needs a dense tensor: convert, upsample, re-sparsify
        self.conv2 = scn.Sequential(
            scn.SparseToDense(2, inplanes),
            nn.ConvTranspose2d(inplanes, planes,
                               kernel_size=2, stride=stride,
                               padding=0, output_padding=0, bias=False),
            scn.DenseToSparse(2))
    else:
        self.conv2 = conv3x3_sparse(inplanes, planes, stride)
    self.bn2 = scn.BatchNormalization(planes)
    self.add = scn.AddTable()
    self.upsample = upsample
    self.stride = stride
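# Hedged forward() sketch (assumption, mirroring the dense TransBasicBlock
# pattern this class is named after):
def forward(self, x):
    residual = self.upsample(x) if self.upsample is not None else x
    out = self.bn1(self.conv1(x))    # BatchNormReLU: norm + activation
    out = self.bn2(self.conv2(out))  # plain BatchNormalization before the add
    out = self.add([out, residual])
    return self.relu(out)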
def __init__(self):
    super(Model, self).__init__()
    self.inputLayer = scn.InputLayer(dimension, spatial_size=512, mode=3)
    self.initialconv = scn.SubmanifoldConvolution(dimension, nPlanes,
                                                  64, 7, False)
    self.residual = scn.Identity()
    self.add = scn.AddTable()
    self.sparsebl11 = scn.Sequential().add(
        scn.SubmanifoldConvolution(dimension, 64, 64, 3, False)).add(
        scn.BatchNormLeakyReLU(64)).add(
        scn.SubmanifoldConvolution(dimension, 64, 64, 3, False))
    self.sparsebl12 = scn.Sequential().add(
        scn.SubmanifoldConvolution(dimension, 64, 64, 3, False)).add(
        scn.BatchNormLeakyReLU(64)).add(
        scn.SubmanifoldConvolution(dimension, 64, 64, 3, False))
    self.sparsebl21 = scn.Sequential().add(
        scn.SubmanifoldConvolution(dimension, 128, 128, 3, False)).add(
        scn.BatchNormLeakyReLU(128)).add(
        scn.SubmanifoldConvolution(dimension, 128, 128, 3, False))
    self.sparsebl22 = scn.Sequential().add(
        scn.SubmanifoldConvolution(dimension, 128, 128, 3, False)).add(
        scn.BatchNormLeakyReLU(128)).add(
        scn.SubmanifoldConvolution(dimension, 128, 128, 3, False))
    # scn.LeakyReLU takes a leakiness value, not a channel count
    self.relu1 = scn.LeakyReLU()
    self.relu2 = scn.LeakyReLU()
    self.downsample1 = scn.Sequential().add(
        scn.Convolution(dimension, 64, 64, [2, 2, 2], [2, 2, 2],
                        False)).add(scn.BatchNormLeakyReLU(64))
    self.downsample2 = scn.Sequential().add(
        scn.Convolution(dimension, 64, 128, [2, 2, 2], [2, 2, 2],
                        False)).add(scn.BatchNormLeakyReLU(128))
    self.downsample3 = scn.Sequential().add(
        scn.Convolution(dimension, 128, 64, [4, 4, 4], [4, 4, 4],
                        False)).add(scn.BatchNormLeakyReLU(64))
    self.downsample4 = scn.Sequential().add(
        scn.Convolution(dimension, 64, 2, [4, 4, 4], [4, 4, 4],
                        False)).add(scn.BatchNormLeakyReLU(2))
    self.sparsetodense = scn.SparseToDense(dimension, 2)
    self.dropout1 = nn.Dropout(0.5)
    self.dropout2 = nn.Dropout(0.5)
    self.linear2 = nn.Linear(2 * 8 * 8 * 8, 2)
    self.linear3 = nn.Linear(2, 1)
def resnet_block(dimension, n_in, n_out, kernel, leakiness=0,
                 computation='convolution'):
    '''Build and return ResNet block
    '''
    assert computation in [
        'submanifoldconvolution', 'convolution', 'fullconvolution',
        'deconvolution'
    ]
    if computation == 'convolution':
        computation = lambda n_in, n_out: scn.Convolution(
            dimension, n_in, n_out, kernel[0], kernel[1], False)
    elif computation == 'submanifoldconvolution':
        assert type(kernel) == int, \
            f"`kernel` must be int, {type(kernel)} was provided"
        computation = lambda n_in, n_out: scn.SubmanifoldConvolution(
            dimension, n_in, n_out, kernel, False)
    elif computation == 'deconvolution':
        assert type(kernel) == int, \
            f"`kernel` must be int, {type(kernel)} was provided"
        computation = lambda n_in, n_out: scn.Deconvolution(
            dimension, n_in, n_out, kernel, kernel, False)
    else:  # 'fullconvolution'
        computation = lambda n_in, n_out: scn.FullConvolution(
            dimension, n_in, n_out, kernel[0], kernel[1], False)
    block = scn.Sequential()
    block.add(scn.ConcatTable()
              .add(scn.NetworkInNetwork(n_in, n_out, False))
              .add(scn.Sequential()
                   # scn.BatchNormLeakyReLU(n_in, leakiness=leakiness)
                   .add(scn.LeakyReLU(leakiness))
                   .add(computation(n_in, n_out))
                   # scn.BatchNormLeakyReLU(n_out, leakiness=leakiness)
                   .add(scn.LeakyReLU(leakiness))
                   .add(computation(n_out, n_out))))
    block.add(scn.AddTable())
    return block
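# Usage sketch for resnet_block (assumed arguments): a strided downsampling
# block; `kernel` is [size, stride] for 'convolution'/'fullconvolution' and a
# plain int for the submanifold/deconvolution variants.
down = resnet_block(3, 32, 64, kernel=[2, 2], leakiness=0.01,
                    computation='convolution')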
def _resnet_block(self, module, a, b):
    '''
    Utility method for attaching ResNet-style blocks.

    INPUTS:
        - module (scn Module): network module to attach the ResNet block to.
        - a (int): number of input feature dimensions
        - b (int): number of output feature dimensions

    RETURNS:
        None (operation is in-place)
    '''
    module.add(scn.ConcatTable()
               .add(scn.Identity() if a == b
                    else scn.NetworkInNetwork(a, b, self.allow_bias))
               .add(scn.Sequential()
                    .add(scn.BatchNormLeakyReLU(a, leakiness=self.leakiness))
                    .add(scn.SubmanifoldConvolution(
                         self.dimension, a, b, 3, self.allow_bias))
                    .add(scn.BatchNormLeakyReLU(b, leakiness=self.leakiness))
                    .add(scn.SubmanifoldConvolution(
                         self.dimension, b, b, 3, self.allow_bias))))
    module.add(scn.AddTable())