Code example #1
 def initialize(self,
                size,
                outchn,
                dilation_rate=1,
                stride=1,
                pad='SAME',
                activation=-1,
                batch_norm=False,
                usebias=True,
                kernel_data=None,
                bias_data=None,
                weight_norm=False):
     # 1-D convolution; batch norm and activation are attached conditionally below
     self.conv = L.conv1D(size,
                          outchn,
                          stride=stride,
                          pad=pad,
                          usebias=usebias,
                          kernel_data=kernel_data,
                          bias_data=bias_data,
                          dilation_rate=dilation_rate,
                          weight_norm=weight_norm)
     self.batch_norm = batch_norm
     self.activation_ = activation
     if batch_norm:
         self.bn = L.batch_norm()
     if activation != -1:
         self.activation = L.activation(activation)
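The pattern above (a wrapped convolution, then an optional batch norm, then an optional activation) recurs in the fully-connected, deconvolution, and graph-convolution wrappers below. A minimal sketch of the matching forward pass, assuming the framework hands the input tensor to a forward method (the method name and call convention are assumptions, not shown in these snippets):

 def forward(self, x):
     # hypothetical forward: apply the layers created in initialize, in order
     x = self.conv(x)
     if self.batch_norm:
         x = self.bn(x)
     if self.activation_ != -1:
         x = self.activation(x)
     return x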
Code example #2
 def initialize(self, fmap, stride=2):
     # downsampling residual block (stride applied in c1 and the 1x1 shortcut c3)
     self.bn0 = L.batch_norm()
     self.activ = L.activation(M.PARAM_RELU)
     self.c1 = L.conv2D(3, fmap, stride=stride, pad='VALID', usebias=False)
     self.bn1 = L.batch_norm()
     self.c2 = L.conv2D(3, fmap, pad='VALID', usebias=False)
     # shortcut conv: 1x1 strided projection to match the main branch
     self.c3 = L.conv2D(1, fmap, stride=stride, usebias=False)
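How this block is wired at call time is not shown. A plausible sketch, assuming a pre-activation ordering (batch norm and ReLU before each conv) and glossing over the explicit padding that the pad='VALID' 3x3 convolutions would need for the two branches to match in shape:

 def forward(self, x):
     # hypothetical pre-activation residual forward
     branch = self.c1(self.activ(self.bn0(x)))      # strided 3x3, downsamples
     branch = self.c2(self.activ(self.bn1(branch))) # second 3x3
     shortcut = self.c3(x)                          # 1x1 strided projection
     return branch + shortcut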
Code example #3
	def initialize(self, outsize, usebias=True, batch_norm=False, activation=-1):
		self.fclayer = L.fcLayer(outsize, usebias=usebias)
		self.batch_norm = batch_norm
		self.activation_ = activation
		if batch_norm:
			self.bn = L.batch_norm()
		if activation != -1:
			self.activation = L.activation(activation)
Code example #4
	def initialize(self, size, outchn, activation=-1, stride=1, usebias=True, pad='SAME', batch_norm=False):
		self.deconv = L.deconv2D(size, outchn, stride=stride, usebias=usebias, pad=pad, name=None)
		self.batch_norm = batch_norm
		self.activation_ = activation
		if batch_norm:
			self.bn = L.batch_norm()
		if activation != -1:
			self.activation = L.activation(activation)
Code example #5
	def initialize(self, outsize, adj_mtx=None, adj_fn=None, usebias=True, activation=-1, batch_norm=False):
		self.GCL = L.graphConvLayer(outsize, adj_mtx=adj_mtx, adj_fn=adj_fn, usebias=usebias)
		self.batch_norm = batch_norm
		self.activation_ = activation
		if batch_norm:
			self.bn = L.batch_norm()
		if activation != -1:
			self.activation = L.activation(activation)
Code example #6
 def initialize(self):
     # two dilated 3x3 convolutions (rate 2), 512 then 1024 channels
     self.bn0 = L.batch_norm()
     self.activ = L.activation(M.PARAM_RELU)
     self.c1 = L.conv2D(3, 512, dilation_rate=2, pad='VALID', usebias=False)
     self.bn1 = L.batch_norm()
     self.c2 = L.conv2D(3,
                        1024,
                        dilation_rate=2,
                        pad='VALID',
                        usebias=False)
Code example #7
	def initialize(self, outchn, stride=1, ratio=4, activation=PARAM_RELU):
		self.outchn = outchn
		self.activ = L.activation(activation)
		self.bn = L.batch_norm()
		# bottleneck branch: 1x1 reduce -> 3x3 (strided) -> 1x1 expand
		self.l1 = ConvLayer(1, outchn//ratio, activation=PARAM_RELU, batch_norm=True)
		self.l2 = ConvLayer(3, outchn//ratio, activation=PARAM_RELU, batch_norm=True, stride=stride)
		self.l3 = ConvLayer(1, outchn)
		# two shortcut variants: a strided 1x1 conv and a max-pool
		self.shortcut_conv = ConvLayer(1, outchn, activation=PARAM_RELU, stride=stride)
		self.shortcut_pool = L.maxpoolLayer(stride)
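The call-time wiring is again absent from the snippet. A hedged sketch: the 1x1-3x3-1x1 branch is summed with one of the two shortcuts; the selection condition, the channels-last layout, and the placement of self.bn and self.activ are all assumptions:

	def forward(self, x):
		# hypothetical bottleneck forward: 1x1 reduce -> 3x3 -> 1x1 expand
		branch = self.bn(self.l3(self.l2(self.l1(x))))
		# assumed: conv shortcut when the channel count changes, max-pool otherwise
		if x.shape[-1] != self.outchn:
			shortcut = self.shortcut_conv(x)
		else:
			shortcut = self.shortcut_pool(x)
		return self.activ(branch + shortcut)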
Code example #8
    def initialize(self, fmaps, dilation_rate):
        # bottleneck: 1x1 reduce -> dilated 3x3 -> 1x1 expand
        self.dilation_rate = dilation_rate
        self.bn0 = L.batch_norm()
        self.activ = L.activation(M.PARAM_RELU)
        self.c1 = L.conv2D(1, fmaps[0], pad='VALID', usebias=False)
        self.bn1 = L.batch_norm()
        self.c2 = L.conv2D(3,
                           fmaps[1],
                           pad='VALID',
                           usebias=False,
                           dilation_rate=dilation_rate)
        self.bn2 = L.batch_norm()
        self.c3 = L.conv2D(1, fmaps[2], pad='VALID', usebias=False)

        # shortcut: 1x1 conv projecting the input to fmaps[2] channels
        self.c4 = L.conv2D(1, fmaps[2], pad='VALID', usebias=False)
Code example #9
 def initialize(self, fmap):
     # two 3x3 convolutions plus batch norm and ReLU layers
     self.bn0 = L.batch_norm()
     self.activ = L.activation(M.PARAM_RELU)
     self.c1 = L.conv2D(3, fmap, pad='VALID', usebias=False)
     self.bn1 = L.batch_norm()
     self.c2 = L.conv2D(3, fmap, pad='VALID', usebias=False)
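Unlike code example #2, no shortcut convolution is created here, which suggests an identity skip connection. A sketch under that assumption (again ignoring the padding the 'VALID' convolutions imply):

 def forward(self, x):
     # hypothetical identity-shortcut residual forward
     branch = self.c1(self.activ(self.bn0(x)))
     branch = self.c2(self.activ(self.bn1(branch)))
     return branch + x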