def __init__(self, inplanes, outplanes, bias, batch_norm):
    """Residual block of two 3x3 submanifold convolutions plus a skip path.

    Each convolution is followed by a fused BatchNormReLU (momentum 0.5)
    when ``batch_norm`` is set, otherwise by a plain ReLU.
    """
    nn.Module.__init__(self)

    # First convolution and its activation.
    self.conv1 = scn.SubmanifoldConvolution(
        dimension=3, nIn=inplanes, nOut=outplanes, filter_size=3, bias=bias)
    self.activation1 = (scn.BatchNormReLU(outplanes, momentum=0.5)
                        if batch_norm else scn.ReLU())

    # Second convolution and its activation.
    self.conv2 = scn.SubmanifoldConvolution(
        dimension=3, nIn=outplanes, nOut=outplanes, filter_size=3, bias=bias)
    self.activation2 = (scn.BatchNormReLU(outplanes, momentum=0.5)
                        if batch_norm else scn.ReLU())

    # Identity skip connection and the elementwise merge for the residual sum.
    self.residual = scn.Identity()
    self.add = scn.AddTable()
def __init__(self, inplanes, outplanes, nplanes=1):
    """Residual block built from two [nplanes, 3, 3] submanifold convolutions.

    NOTE(review): the old ``FLAGS.BATCH_NORM`` guard was commented out, so
    the batch-norm layers are created unconditionally here — ``bn1`` is a
    fused BatchNormReLU, ``bn2`` is a plain BatchNormalization.
    """
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(
        dimension=3, nIn=inplanes, nOut=outplanes,
        filter_size=[nplanes, 3, 3], bias=False)
    self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(
        dimension=3, nIn=outplanes, nOut=outplanes,
        filter_size=[nplanes, 3, 3], bias=False)
    self.bn2 = scn.BatchNormalization(outplanes)
    # Skip path and residual merge.
    self.residual = scn.Identity()
    self.relu = scn.ReLU()
    self.add = scn.AddTable()
def __init__(self, inplanes, outplanes, batch_norm, leaky_relu):
    """Residual block with optional batch norm and a selectable ReLU flavor.

    When ``batch_norm`` is set, ``bn1`` is a fused norm+activation and
    ``bn2`` is a plain normalization; otherwise neither is created and
    forward() is expected to rely on ``self.relu`` alone.
    """
    nn.Module.__init__(self)
    self.batch_norm = batch_norm
    self.leaky_relu = leaky_relu

    self.conv1 = scn.SubmanifoldConvolution(
        dimension=3, nIn=inplanes, nOut=outplanes, filter_size=3, bias=False)
    if self.batch_norm:
        # Fused norm + activation after the first convolution.
        fused = scn.BatchNormLeakyReLU if self.leaky_relu else scn.BatchNormReLU
        self.bn1 = fused(outplanes)

    self.conv2 = scn.SubmanifoldConvolution(
        dimension=3, nIn=outplanes, nOut=outplanes, filter_size=3, bias=False)
    if self.batch_norm:
        self.bn2 = scn.BatchNormalization(outplanes)

    # Skip path, standalone activation, and residual merge.
    self.residual = scn.Identity()
    self.relu = scn.LeakyReLU() if self.leaky_relu else scn.ReLU()
    self.add = scn.AddTable()
def __init__(self, nInputFeatures, nClasses, dropout_depth=False,
             dropout_width=False, dropout_p=0.5, wide_model=False,
             old_version=False):
    """Sparse ResNet-UNet classifier head.

    Args:
        nInputFeatures: channel count of the raw sparse input.
        nClasses: number of output classes.
        dropout_depth / dropout_width: if either is set, use the
            dropout-enabled UNet variant.
        dropout_p: dropout probability for that variant.
        wide_model: add a "wide" linear path on the raw input features
            whose single output is concatenated with the 64 deep features
            (hence the 65-input linear layer).
        old_version: create legacy layers so old checkpoints still load;
            they are not used in forward().

    BUG FIX: replaced the Python-2 ``print "Using wide model"`` statement
    with the ``print(...)`` function so the module is valid Python 3
    (consistent with the ``print(...)`` call used elsewhere in this file).
    """
    nn.Module.__init__(self)
    self.sparseModel = scn.Sequential().add(
        scn.ValidConvolution(3, nInputFeatures, 64, 3, False)).add(
        ResNetUNetDropout(3, 64, 2, 4,
                          dropout_depth=dropout_depth,
                          dropout_width=dropout_width,
                          dropout_p=dropout_p)
        if dropout_depth or dropout_width else scn.ResNetUNet(3, 64, 2, 4))
    self.use_wide_model = wide_model
    if wide_model:
        self.wide = nn.Linear(nInputFeatures, 1)
        # self.wide_and_deep = scn.JoinTable()
        self.linear = nn.Linear(65, nClasses)
        print("Using wide model")
    else:
        self.linear = nn.Linear(64, nClasses)
    self.act = nn.Softmax(dim=1)
    # Some older models still have this in, but it's not called.
    if old_version:
        self.final = scn.ValidConvolution(3, 64, nClasses, 1, False)
        self.relu = scn.ReLU()
def __init__(self, *, inplanes, outplanes, nplanes=1, params):
    """Residual-style block of two [nplanes, 3, 3] submanifold convolutions.

    ``params.batch_norm`` controls whether bn1/bn2 exist;
    ``self.do_batch_norm`` records that choice for forward().
    """
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(
        dimension=3, nIn=inplanes, nOut=outplanes,
        filter_size=[nplanes, 3, 3], bias=params.use_bias)
    # Remember whether batch norm is active so forward() can branch on it.
    self.do_batch_norm = bool(params.batch_norm)
    if self.do_batch_norm:
        self.bn1 = scn.BatchNormReLU(outplanes)
    self.conv2 = scn.SubmanifoldConvolution(
        dimension=3, nIn=outplanes, nOut=outplanes,
        filter_size=[nplanes, 3, 3], bias=False)
    if self.do_batch_norm:
        self.bn2 = scn.BatchNormalization(outplanes)
    # Skip path and residual merge.
    self.residual = scn.Identity()
    self.relu = scn.ReLU()
    self.add = scn.AddTable()
def decoder(self, in_channels, out_channels, filter_size, filter_stride=1, bias=True):
    """Build one decoder stage: a 3-D deconvolution followed by ReLU."""
    deconv = scn.Deconvolution(3, in_channels, out_channels,
                               filter_size, filter_stride, bias)
    return scn.Sequential(deconv, scn.ReLU())
def __init__(self, inplanes, outplanes, nplanes=1):
    """Downsampling block: [nplanes, 2, 2] convolution with stride [1, 2, 2].

    NOTE(review): the old ``FLAGS.BATCH_NORM`` guard was commented out, so
    batch normalization is applied unconditionally here.
    """
    nn.Module.__init__(self)
    self.conv = scn.Convolution(dimension=3, nIn=inplanes, nOut=outplanes,
                                filter_size=[nplanes, 2, 2],
                                filter_stride=[1, 2, 2], bias=False)
    self.bn = scn.BatchNormalization(outplanes)
    self.relu = scn.ReLU()
def __init__(self, inplanes, outplanes, bias, batch_norm):
    """Downsampling block: strided 2x2x2 convolution plus activation.

    The activation is a fused BatchNormReLU (momentum 0.5) when
    ``batch_norm`` is set, otherwise a plain ReLU. (The old
    ``FLAGS.BATCH_NORM`` global was replaced by the ``batch_norm`` argument.)
    """
    nn.Module.__init__(self)
    self.conv = scn.Convolution(dimension=3, nIn=inplanes, nOut=outplanes,
                                filter_size=2, filter_stride=2, bias=bias)
    self.activation = (scn.BatchNormReLU(outplanes, momentum=0.5)
                       if batch_norm else scn.ReLU())
def __init__(self, *, inplanes, outplanes, nplanes=1, params):
    """Upsampling block: [nplanes, 2, 2] deconvolution with stride [1, 2, 2].

    ``self.do_batch_norm`` records whether ``self.bn`` exists so that
    forward() can branch accordingly.
    """
    nn.Module.__init__(self)
    self.conv = scn.Deconvolution(dimension=3, nIn=inplanes, nOut=outplanes,
                                  filter_size=[nplanes, 2, 2],
                                  filter_stride=[1, 2, 2],
                                  bias=params.use_bias)
    self.do_batch_norm = bool(params.batch_norm)
    if self.do_batch_norm:
        self.bn = scn.BatchNormalization(outplanes)
    self.relu = scn.ReLU()
def __init__(self, *, inplanes, outplanes, nplanes=1, params):
    """Single [nplanes, 3, 3] submanifold convolution block.

    When ``params.batch_norm`` is set, a fused BatchNormReLU follows the
    convolution and ``self.do_batch_norm`` records that for forward();
    ``self.relu`` is always available as the fallback activation.

    BUG FIX: removed a stray debug ``print(nplanes)`` left in the
    constructor, which spammed stdout every time a block was built.
    """
    nn.Module.__init__(self)
    self.conv1 = scn.SubmanifoldConvolution(
        dimension=3, nIn=inplanes, nOut=outplanes,
        filter_size=[nplanes, 3, 3], bias=params.use_bias)
    self.do_batch_norm = False
    if params.batch_norm:
        self.do_batch_norm = True
        self.bn1 = scn.BatchNormReLU(outplanes)
    self.relu = scn.ReLU()
def encoder(self, in_channels, out_channels, filter_size=3, filter_stride=1,
            bias=True, batchnorm=True, submanifold=True, dropout=0.0):
    """Build one encoder stage: convolution + (BN)ReLU + optional dropout.

    ``submanifold`` selects between a submanifold convolution (no stride)
    and a regular strided convolution; ``batchnorm`` selects between a
    fused BatchNormReLU and a plain ReLU.
    """
    if submanifold:
        conv = scn.SubmanifoldConvolution(3, in_channels, out_channels,
                                          filter_size, bias)
    else:
        conv = scn.Convolution(3, in_channels, out_channels,
                               filter_size, filter_stride, bias)
    act = scn.BatchNormReLU(out_channels) if batchnorm else scn.ReLU()
    layer = scn.Sequential(conv, act)
    if dropout > 0.0:
        layer.add(Dropout(dropout))
    return layer
def forward(self, x):
    """Forward pass: sparse conv stack -> dense torch head -> log-probabilities.

    BUG FIX: removed a leftover ``pdb.set_trace()`` breakpoint that halted
    execution on every forward pass.
    """
    x = self.inputLayer(x)
    x = self.sscn1(x)
    x = scn.ReLU()(x)
    x = self.sscn2(x)
    # Convert the sparse tensor (2-D, 64 channels) to a dense tensor for
    # the standard torch layers below.
    x = scn.SparseToDense(2, 64)(x)
    x = F.relu(x)
    x = F.max_pool2d(x, 2)
    x = self.dropout1(x)
    x = torch.flatten(x, 1)
    x = self.fc1(x)
    x = F.relu(x)
    x = self.dropout2(x)
    x = self.fc2(x)
    return F.log_softmax(x, dim=1)
def __init__(self, inplanes, outplanes, batch_norm, leaky_relu, nplanes=1):
    """Downsampling block: [nplanes, 2, 2] convolution with stride [1, 2, 2].

    ``self.bn`` exists only when ``batch_norm`` is set; the activation is
    LeakyReLU or ReLU depending on ``leaky_relu``.
    """
    nn.Module.__init__(self)
    self.batch_norm = batch_norm
    self.leaky_relu = leaky_relu
    self.conv = scn.Convolution(dimension=3, nIn=inplanes, nOut=outplanes,
                                filter_size=[nplanes, 2, 2],
                                filter_stride=[1, 2, 2], bias=False)
    if self.batch_norm:
        self.bn = scn.BatchNormalization(outplanes)
    self.relu = scn.LeakyReLU() if self.leaky_relu else scn.ReLU()
def __init__(self, inplanes, outplanes, batch_norm, leaky_relu, nplanes=1):
    """Single [nplanes, 3, 3] submanifold convolution block.

    NOTE(review): exactly one of ``self.bn1`` (fused norm + activation,
    when ``batch_norm`` is set) or ``self.relu`` (otherwise) is created —
    forward() must branch on ``self.batch_norm`` the same way.
    """
    nn.Module.__init__(self)
    self.batch_norm = batch_norm
    self.leaky_relu = leaky_relu
    self.conv1 = scn.SubmanifoldConvolution(
        dimension=3, nIn=inplanes, nOut=outplanes,
        filter_size=[nplanes, 3, 3], bias=False)
    if self.batch_norm:
        fused = scn.BatchNormLeakyReLU if self.leaky_relu else scn.BatchNormReLU
        self.bn1 = fused(outplanes)
    else:
        activation = scn.LeakyReLU if self.leaky_relu else scn.ReLU
        self.relu = activation()
def __init__(self, inplanes, planes, stride=1, upsample=None, **kwargs):
    """Transposed basic block for the sparse decoder path.

    When both an upsample module is supplied and stride != 1, the second
    convolution goes dense, applies a 2x2 transposed convolution, and
    returns to sparse; otherwise it stays a sparse 3x3 convolution.
    """
    super(TransBasicBlockSparse, self).__init__()
    self.upsample = upsample
    self.stride = stride
    self.conv1 = conv3x3_sparse(inplanes, inplanes)
    self.bn1 = scn.BatchNormReLU(inplanes)
    self.relu = scn.ReLU()
    if stride != 1 and upsample is not None:
        # Upsampling path: dense transposed convolution sandwiched between
        # sparse/dense converters.
        self.conv2 = scn.Sequential(
            scn.SparseToDense(2, inplanes),
            nn.ConvTranspose2d(inplanes, planes,
                               kernel_size=2, stride=stride,
                               padding=0, output_padding=0, bias=False),
            scn.DenseToSparse(2),
        )
    else:
        self.conv2 = conv3x3_sparse(inplanes, planes, stride)
    self.bn2 = scn.BatchNormalization(planes)
    self.add = scn.AddTable()
def get_relu(num_dims, sparse, input_channels, inplace=True):
    """Return a ``(sparse, stride, channels, layer)`` tuple for a ReLU stage.

    The stride is an all-ones vector (ReLU never downsamples) and the
    channel count passes through unchanged. The layer is ``scn.ReLU`` for
    sparse tensors, ``nn.ReLU`` otherwise.
    """
    if sparse:
        layer = scn.ReLU()
    else:
        layer = nn.ReLU(inplace=inplace)
    unit_stride = np.full(num_dims, 1)
    return sparse, unit_stride, input_channels, layer