def initialize(self, fmap, stride=2):
    """Build layers for a down-sampling residual conv block.

    Args:
        fmap: number of output feature maps for every conv in the block.
        stride: spatial stride of the first 3x3 conv and of the 1x1
            shortcut conv (default 2 — presumably halves the resolution;
            confirm against the forward pass, which is outside this view).
    """
    # Pre-activation ordering (BN -> ReLU -> conv), shared with the other
    # blocks in this file.
    self.bn0 = L.batch_norm()
    self.activ = L.activation(M.PARAM_RELU)
    # NOTE(review): pad='VALID' on 3x3 convs shrinks spatial size; the
    # shortcut path below has no such crop — assumed handled elsewhere, verify.
    self.c1 = L.conv2D(3, fmap, stride=stride, pad='VALID', usebias=False)
    self.bn1 = L.batch_norm()
    self.c2 = L.conv2D(3, fmap, pad='VALID', usebias=False)
    # shortcut conv
    self.c3 = L.conv2D(1, fmap, stride=stride, usebias=False)
def initialize(self):
    """Build layers for a dilated conv block with fixed widths (512 -> 1024).

    Both convs use dilation_rate=2; channel counts are hard-coded, so this
    block is tied to one specific position in the network.
    """
    self.bn0 = L.batch_norm()
    self.activ = L.activation(M.PARAM_RELU)
    self.c1 = L.conv2D(3, 512, dilation_rate=2, pad='VALID', usebias=False)
    self.bn1 = L.batch_norm()
    self.c2 = L.conv2D(3, 1024, dilation_rate=2, pad='VALID', usebias=False)
def initialize(self, fmaps, dilation_rate):
    """Build a bottleneck-style block: 1x1 -> dilated 3x3 -> 1x1, plus shortcut.

    Args:
        fmaps: sequence of three channel counts — fmaps[0] for the reducing
            1x1 conv, fmaps[1] for the dilated 3x3 conv, fmaps[2] for the
            expanding 1x1 conv and the 1x1 shortcut conv.
        dilation_rate: dilation applied to the middle 3x3 conv only
            (also stored on the instance for use elsewhere).
    """
    self.dilation_rate = dilation_rate
    self.bn0 = L.batch_norm()
    self.activ = L.activation(M.PARAM_RELU)
    self.c1 = L.conv2D(1, fmaps[0], pad='VALID', usebias=False)
    self.bn1 = L.batch_norm()
    self.c2 = L.conv2D(3, fmaps[1], pad='VALID', usebias=False, dilation_rate=dilation_rate)
    self.bn2 = L.batch_norm()
    self.c3 = L.conv2D(1, fmaps[2], pad='VALID', usebias=False)
    # shortcut
    self.c4 = L.conv2D(1, fmaps[2], pad='VALID', usebias=False)
def initialize(self): self.c0 = L.conv2D(3, 64, usebias=False) # c0 self.r1_0 = ConvBlocks(128) self.r1_1 = IdentityBlock(128) self.r1_2 = IdentityBlock(128) # c1 self.r2_0 = ConvBlocks(256) self.r2_1 = IdentityBlock(256) self.r2_2 = IdentityBlock(256) # c2 self.r3_0 = ConvBlocks(256) self.r3_1 = IdentityBlock(256) self.r3_2 = IdentityBlock(256) self.r3_3 = IdentityBlock(256) self.r3_4 = IdentityBlock(256) self.r3_5 = IdentityBlock(256) # c3 self.r4_0 = Block5() self.r4_1 = ID5Block() self.r4_2 = ID5Block() self.r4_3 = LastBlock([512, 1024, 2048], 4) self.r4_4 = LastBlock([1024, 2048, 4096], 4) self.bn4 = L.batch_norm() self.c4 = L.conv2D(3, 512, dilation_rate=12, pad='VALID')
def initialize(self, size, outchn, dilation_rate=1, stride=1, pad='SAME', activation=-1, batch_norm=False, usebias=True, kernel_data=None, bias_data=None, weight_norm=False):
    """Wrapper layer: 1-D conv with optional batch-norm and activation.

    Args:
        size: kernel size of the conv.
        outchn: number of output channels.
        dilation_rate, stride, pad, usebias, weight_norm: forwarded to
            L.conv1D unchanged.
        activation: activation id (e.g. M.PARAM_RELU); -1 means no
            activation layer is created.
        batch_norm: when True, a batch-norm layer is created after the conv.
        kernel_data, bias_data: optional pre-set weights, forwarded to
            L.conv1D.
    """
    self.conv = L.conv1D(size, outchn, stride=stride, pad=pad, usebias=usebias, kernel_data=kernel_data, bias_data=bias_data, dilation_rate=dilation_rate, weight_norm=weight_norm)
    # Flags are stored so the forward pass (outside this view) can branch on them.
    self.batch_norm = batch_norm
    self.activation_ = activation
    # NOTE: self.bn / self.activation only exist when the corresponding
    # option is enabled — the forward pass must guard on the flags above.
    if batch_norm:
        self.bn = L.batch_norm()
    if activation != -1:
        self.activation = L.activation(activation)
def initialize(self, outsize, usebias=True, batch_norm=False, activation=-1):
    """Wrapper layer: fully-connected layer with optional batch-norm and activation.

    Args:
        outsize: output dimensionality of the fc layer.
        usebias: forwarded to L.fcLayer.
        batch_norm: when True, a batch-norm layer is created after the fc.
        activation: activation id; -1 means no activation layer is created.
    """
    self.fclayer = L.fcLayer(outsize, usebias=usebias)
    # Flags stored for the forward pass to branch on.
    self.batch_norm = batch_norm
    self.activation_ = activation
    # self.bn / self.activation exist only when the options are enabled.
    if batch_norm:
        self.bn = L.batch_norm()
    if activation != -1:
        self.activation = L.activation(activation)
def initialize(self, size, outchn, activation=-1, stride=1, usebias=True, pad='SAME', batch_norm=False):
    """Wrapper layer: 2-D transposed conv with optional batch-norm and activation.

    Args:
        size: kernel size of the deconv.
        outchn: number of output channels.
        stride, usebias, pad: forwarded to L.deconv2D unchanged.
        activation: activation id; -1 means no activation layer is created.
        batch_norm: when True, a batch-norm layer is created after the deconv.
    """
    # name=None is passed explicitly; presumably lets the framework
    # auto-generate a layer name — confirm against L.deconv2D.
    self.deconv = L.deconv2D(size, outchn, stride=stride, usebias=usebias, pad=pad, name=None)
    # Flags stored for the forward pass to branch on.
    self.batch_norm = batch_norm
    self.activation_ = activation
    # self.bn / self.activation exist only when the options are enabled.
    if batch_norm:
        self.bn = L.batch_norm()
    if activation != -1:
        self.activation = L.activation(activation)
def initialize(self, outsize, adj_mtx=None, adj_fn=None, usebias=True, activation=-1, batch_norm=False):
    """Wrapper layer: graph convolution with optional batch-norm and activation.

    Args:
        outsize: output dimensionality of the graph conv.
        adj_mtx: optional fixed adjacency matrix, forwarded to L.graphConvLayer.
        adj_fn: optional adjacency-producing callable, forwarded as-is
            (semantics defined by L.graphConvLayer — not visible here).
        usebias: forwarded to L.graphConvLayer.
        activation: activation id; -1 means no activation layer is created.
        batch_norm: when True, a batch-norm layer is created after the conv.
    """
    self.GCL = L.graphConvLayer(outsize, adj_mtx=adj_mtx, adj_fn=adj_fn, usebias=usebias)
    # Flags stored for the forward pass to branch on.
    self.batch_norm = batch_norm
    self.activation_ = activation
    # self.bn / self.activation exist only when the options are enabled.
    if batch_norm:
        self.bn = L.batch_norm()
    if activation != -1:
        self.activation = L.activation(activation)
def initialize(self, outchn, stride=1, ratio=4, activation=PARAM_RELU): self.outchn = outchn # self.stride = stride self.activ = L.activation(activation) self.bn = L.batch_norm() self.l1 = ConvLayer(1, outchn//ratio, activation=PARAM_RELU, batch_norm=True) self.l2 = ConvLayer(3, outchn//ratio, activation=PARAM_RELU, batch_norm=True, stride=stride) self.l3 = ConvLayer(1, outchn) self.shortcut_conv = ConvLayer(1, outchn, activation=PARAM_RELU, stride=stride) self.shortcut_pool = L.maxpoolLayer(stride)
def initialize(self, fmap):
    """Build layers for a plain (non-strided) two-conv residual-style block.

    Args:
        fmap: number of output feature maps for both 3x3 convs.
    """
    # Pre-activation ordering (BN -> ReLU -> conv), matching the other
    # blocks in this file.
    self.bn0 = L.batch_norm()
    self.activ = L.activation(M.PARAM_RELU)
    self.c1 = L.conv2D(3, fmap, pad='VALID', usebias=False)
    self.bn1 = L.batch_norm()
    self.c2 = L.conv2D(3, fmap, pad='VALID', usebias=False)