Example 1
0
 def initialize(self,
                outsize,
                batch_norm=False,
                affine=True,
                activation=-1,
                usebias=True,
                norm=False):
     """Build a fully-connected layer with optional batch-norm and activation.

     Args:
         outsize: output feature width of the fc layer (also the channel
             count for a per-channel PReLU).
         batch_norm: when True, attach an `L.BatchNorm` as `self.bn`.
         affine: passed through to `L.BatchNorm` (learnable scale/shift).
         activation: activation selector; only `PARAM_PRELU` /
             `PARAM_PRELU1` create an `self.act` module here.
         usebias: forwarded to `L.fclayer`.
         norm: forwarded to `L.fclayer` — semantics defined by that
             helper; presumably weight normalization (TODO confirm).
     """
     self.fc = L.fclayer(outsize, usebias, norm)
     self.batch_norm = batch_norm
     self.activation = activation
     if self.activation == PARAM_PRELU:
         # BUGFIX: original read undefined name `outchn` (NameError at
         # runtime). This layer's width is `outsize`, so the per-channel
         # PReLU must be sized by it.
         self.act = torch.nn.PReLU(num_parameters=outsize)
     elif self.activation == PARAM_PRELU1:
         # Single shared PReLU parameter across all channels.
         self.act = torch.nn.PReLU(num_parameters=1)
     if batch_norm:
         self.bn = L.BatchNorm(affine=affine)
Example 2
0
 def initialize(self,
                size,
                outchn,
                stride=1,
                pad='SAME_LEFT',
                dilation_rate=1,
                activation=-1,
                batch_norm=False,
                affine=True,
                usebias=True,
                groups=1):
     """Assemble a 2-D convolution with optional batch-norm and activation.

     Args:
         size: kernel size, forwarded to `L.conv2D`.
         outchn: number of output channels.
         stride: convolution stride.
         pad: padding mode string, forwarded to `L.conv2D`.
         dilation_rate: kernel dilation.
         activation: selector consumed by `L.Activation`.
         batch_norm: when True, attach an `L.BatchNorm` as `self.bn`.
         affine: learnable scale/shift flag for the batch-norm.
         usebias: whether the convolution carries a bias term.
         groups: grouped-convolution count.
     """
     # Record configuration flags used later by the forward pass.
     self.batch_norm = batch_norm
     self.activation = activation
     # Sub-modules: convolution, activation, and (optionally) batch-norm.
     self.conv = L.conv2D(size, outchn, stride, pad, dilation_rate,
                          usebias, groups)
     self.act = L.Activation(activation)
     if batch_norm:
         self.bn = L.BatchNorm(affine=affine)
Example 3
0
 def initialize(self,
                size,
                outchn,
                stride=1,
                pad='SAME_LEFT',
                dilation_rate=1,
                activation=-1,
                batch_norm=False,
                affine=True,
                usebias=True,
                groups=1):
     """Assemble a 3-D convolution with optional batch-norm and PReLU.

     Args:
         size: kernel size, forwarded to `L.conv3D`.
         outchn: number of output channels (sizes a per-channel PReLU).
         stride: convolution stride.
         pad: padding mode string, forwarded to `L.conv3D`.
         dilation_rate: kernel dilation.
         activation: selector; only `PARAM_PRELU` / `PARAM_PRELU1`
             create an `self.act` module here.
         batch_norm: when True, attach an `L.BatchNorm` as `self.bn`.
         affine: learnable scale/shift flag for the batch-norm.
         usebias: whether the convolution carries a bias term.
         groups: grouped-convolution count.
     """
     self.conv = L.conv3D(size, outchn, stride, pad, dilation_rate,
                          usebias, groups)
     if batch_norm:
         self.bn = L.BatchNorm(affine=affine)
     self.batch_norm = batch_norm
     self.activation = activation
     # Both PReLU variants share one constructor; they differ only in
     # whether the slope is learned per channel or shared.
     if self.activation in (PARAM_PRELU, PARAM_PRELU1):
         n_slopes = outchn if self.activation == PARAM_PRELU else 1
         self.act = torch.nn.PReLU(num_parameters=n_slopes)