# Example #1
 def __init__(self, cin, comp_graph):
     """Build a GepBlock: parallel GepNetLayer paths fused by a 1x1 projection.

     Registers one ``path_<i>`` sub-module per entry of *comp_graph*, then a
     1x1 conv that maps the concatenated path outputs back to ``cin`` channels.
     """
     super(GepBlock, self).__init__()
     self.paths = len(comp_graph)
     self.relu = relu(True)
     # One GepNetLayer per sub-graph description.
     for idx, sub_graph in enumerate(comp_graph):
         setattr(self, 'path_%d' % idx, GepNetLayer(cin, sub_graph))
     # No ReLU here: activation is applied by the surrounding forward pass.
     self.convproj = conv2d(cin * self.paths, cin, ksize=1, use_relu=False)
# Example #2
def conv2d(cin,
           cout=None,
           ksize=3,
           stride=1,
           padding=None,
           dilation=None,
           groups=None,
           use_relu=True,
           use_bn=True,
           bn=NormType.Batch,
           bias=False):
    """Return an ``nn.Sequential`` of Conv2d -> optional BN -> optional ReLU.

    Unset arguments fall back to sensible defaults: ``cout=cin``, "same"
    padding for odd kernels (``ksize // 2``), ``dilation=1``, ``groups=1``.
    The conv weights are initialized with Kaiming-normal via ``init_default``.
    """
    cout = cin if cout is None else cout
    padding = ksize // 2 if padding is None else padding
    dilation = 1 if dilation is None else dilation
    groups = 1 if groups is None else groups

    conv = nn.Conv2d(cin, cout, ksize,
                     stride=stride,
                     padding=padding,
                     dilation=dilation,
                     groups=groups,
                     bias=bias)
    modules = [init_default(conv, nn.init.kaiming_normal_)]
    if use_bn:
        modules.append(batchnorm_2d(cout, norm_type=bn))
    if use_relu:
        modules.append(relu(True))
    return nn.Sequential(*modules)
# Example #3
 def __init__(self, cin, comp_graph):
     """Build a Cell of parallel Layer branches fused by a 1x1 projection.

     Registers one ``branch_<i>`` sub-module per entry of *comp_graph*, then
     a 1x1 conv that maps the concatenated branch channels back to ``cin``.
     """
     super(Cell, self).__init__()
     self.n_branch = len(comp_graph)
     self.relu = relu(True)
     # One Layer per branch description in the computation graph.
     for b, spec in enumerate(comp_graph):
         setattr(self, 'branch_%d' % b, Layer(cin, spec))
     # No ReLU on the projection; activation is handled elsewhere.
     self.convproj = conv2d(cin * self.n_branch, cin, ksize=1, use_relu=False)
# Example #4
 def __init__(self, up_in_c:int, final_div:bool=True, blur:bool=False, leaky:float=None,
              self_attention:bool=False, **kwargs):
     """Upsample with PixelShuffle_ICNR, then refine with two conv layers.

     The shuffle halves the channel count (``up_in_c -> up_in_c // 2``); the
     two conv layers keep that width when ``final_div`` is True, otherwise
     they halve it once more. ``leaky`` and ``**kwargs`` are forwarded to the
     fastai layer constructors.
     """
     super().__init__()
     half_c = up_in_c // 2
     self.shuf = PixelShuffle_ICNR(up_in_c, half_c, blur=blur, leaky=leaky, **kwargs)
     # Width after refinement: keep half_c, or halve again for non-final blocks.
     out_c = half_c if final_div else half_c // 2
     self.conv1 = conv_layer(half_c, out_c, leaky=leaky, **kwargs)
     self.conv2 = conv_layer(out_c, out_c, leaky=leaky, self_attention=self_attention, **kwargs)
     self.relu = relu(leaky=leaky)
# Example #5
def stem_blk(cin,
             cout=None,
             ksize=3,
             stride=1,
             use_relu=True,
             use_bn=True,
             bn=NormType.Batch,
             bias=False,
             pool='avg'):
    """Two-stage network stem: (Conv -> [BN] -> [ReLU] -> [pool]) x 2.

    The first stage maps ``cin -> cout`` channels and the second
    ``cout -> cout * 2``; both share ``ksize``/``stride``/``bias`` and
    "same" padding for odd kernels. ``pool`` selects the downsampling op:
    ``'max'`` for MaxPool2d, ``'avg'`` (default) for AvgPool2d, anything
    else for no pooling. Conv weights use Kaiming-normal init.

    The original body duplicated the stage construction verbatim; it is
    factored into a single local helper so both stages stay in sync.
    """
    if cout is None: cout = cin
    padding = ksize // 2

    def _stage(c_in, c_out):
        # Build one conv/bn/relu/pool stage mapping c_in -> c_out channels.
        mods = [
            init_default(
                nn.Conv2d(c_in, c_out, ksize,
                          stride=stride,
                          padding=padding,
                          bias=bias), nn.init.kaiming_normal_)
        ]
        if use_bn: mods.append(batchnorm_2d(c_out, norm_type=bn))
        if use_relu: mods.append(relu(True))
        # 'max' and 'avg' are mutually exclusive, so elif matches the
        # original pair of independent ifs exactly.
        if pool == 'max':
            mods.append(nn.MaxPool2d(2, stride=2))
        elif pool == 'avg':
            mods.append(
                nn.AvgPool2d(2, stride=2, ceil_mode=True,
                             count_include_pad=False))
        return mods

    return nn.Sequential(*(_stage(cin, cout) + _stage(cout, cout * 2)))