Example #1
def __init__(self, ni, nf, upsample=None):
    # Two 3x3 convs (fv.conv2d defaults to ks=3), each followed by BatchNorm, plus a shared
    # in-place ReLU; the optional `upsample` module is forwarded to the parent block.
    super().__init__(upsample)
    self.conv1 = fv.conv2d(ni, ni)
    self.bn1 = nn.BatchNorm2d(ni)
    self.conv2 = fv.conv2d(ni, nf)
    self.bn2 = nn.BatchNorm2d(nf)
    self.relu = nn.ReLU(inplace=True)
Example #2
def __init__(self, in_channels, n_objects, parts_sections):
    # A shared conv_layer followed by 1x1 classifier heads: one head per parts section,
    # plus a final head over the n_objects object classes.
    super().__init__()
    self.conv = nnlayers.conv_layer(in_channels, in_channels)
    module_list = [
        fv.conv2d(in_channels, n_parts, ks=1, bias=True)
        for n_parts in parts_sections
    ]
    module_list.append(fv.conv2d(in_channels, n_objects, ks=1, bias=True))
    self.classifier = nn.ModuleList(module_list)
Example #3
def __init__(self, ni, nf, upsample=None):
    # Bottleneck-style block: 1x1 reduce to `width`, 3x3 conv, 1x1 expand to `nf`, each
    # followed by BatchNorm; `self.expansion` is expected to be defined on the class.
    super().__init__(upsample)
    width = ni // self.expansion
    self.conv1 = fv.conv2d(ni, width, ks=1)
    self.bn1 = nn.BatchNorm2d(width)
    self.conv2 = fv.conv2d(width, width)
    self.bn2 = nn.BatchNorm2d(width)
    self.conv3 = fv.conv2d(width, nf, ks=1)
    self.bn3 = nn.BatchNorm2d(nf)
    self.relu = nn.ReLU(inplace=True)
Example #4
def __init__(self, hooks: Collection[Hook], nc: Collection[int] = None):
    # Hypercolumns over stored hook activations; when channel counts `nc` are given, each
    # hooked feature map gets a two-conv "factorization" projecting it to nc[-1] channels.
    super(Hcolumns, self).__init__()
    self.hooks = hooks
    self.n = len(self.hooks)
    self.factorization = None
    if nc is not None:
        self.factorization = nn.ModuleList()
        for i in range(self.n):
            self.factorization.append(
                nn.Sequential(
                    conv2d(nc[i], nc[-1], 3, padding=1, bias=True),
                    conv2d(nc[-1], nc[-1], 3, padding=1, bias=True),
                ))
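The forward pass of Hcolumns is not shown here. For orientation only: a typical hypercolumns pattern upsamples each stored hook activation to a common resolution and merges them (by concatenation or summation); the projection convs above map each map to a shared channel count nc[-1]. The standalone sketch below is hypothetical (not taken from this codebase) and uses plain PyTorch; the name `hypercolumns` and the bilinear upsampling choice are assumptions.

import torch
import torch.nn.functional as F

def hypercolumns(features, size):
    # `features`: feature maps of shape [N, C_i, H_i, W_i] (e.g. tensors stored by hooks).
    # Upsample each to `size` and concatenate along channels -> [N, sum(C_i), *size].
    ups = [F.interpolate(f, size=size, mode='bilinear', align_corners=False)
           for f in features]
    return torch.cat(ups, dim=1)

# Three maps at different resolutions merged at 64x64 -> torch.Size([1, 448, 64, 64])
maps = [torch.randn(1, c, s, s) for c, s in [(256, 16), (128, 32), (64, 64)]]
print(hypercolumns(maps, size=(64, 64)).shape)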
Example #5
def __init__(self, tree, weights_encoder='', weights_decoder='', hidden=2):
    # FPN backbone plus: an embedding over objects-with-parts, top-down (td) conv layers,
    # per-section 1x1 prediction heads, and bottom-up (bu) convs with lateral connections.
    super().__init__()
    self.fpn = get_fpn(tree,
                       weights_encoder=weights_encoder,
                       weights_decoder=weights_decoder)
    fpn_dim = 512
    self.embedding = fv.embedding(tree.n_obj_with_parts + 1, fpn_dim)
    self.td = nn.ModuleList(
        [nnlayers.conv_layer(fpn_dim, fpn_dim) for _ in range(hidden)])
    dims = tree.sections + [tree.n_obj]
    self.heads = nn.ModuleList(
        [fv.conv2d(fpn_dim, dim, ks=1, bias=True) for dim in dims])
    self.bu_start = nn.ModuleList(
        [fv.conv2d(dim, fpn_dim // 2) for dim in dims])
    self.bu_lateral = nn.ModuleList([
        nnlayers.conv_layer(fpn_dim, fpn_dim // 2) for _ in range(hidden)
    ])
    self.bu = nn.ModuleList([
        nnlayers.conv_layer(fpn_dim, fpn_dim // 2)
        for _ in range(hidden - 1)
    ] + [nnlayers.conv_layer(fpn_dim, fpn_dim)])
    self.obj_inst = tree.n_obj_with_parts
    self.tree = tree
Example #6
def __init__(self,
             instructor,
             tree,
             weights_encoder='',
             weights_decoder='',
             emb_op=torch.mul):
    # FPN backbone plus a top-down head that reduces fpn_dim to a single-channel map;
    # an embedding over the tree's objects is meant to be combined with features via
    # `emb_op` (default: element-wise multiplication, torch.mul).
    super().__init__()
    self.fpn = get_fpn(tree,
                       weights_encoder=weights_encoder,
                       weights_decoder=weights_decoder)
    fpn_dim = 512
    self.td = nn.Sequential(
        nnlayers.conv_layer(fpn_dim, fpn_dim // 4),
        nnlayers.conv_layer(fpn_dim // 4, fpn_dim // 8),
        fv.conv2d(fpn_dim // 8, 1, ks=1, bias=True))
    self.embedding = fv.embedding(tree.n_obj, fpn_dim)
    self.instructor = instructor
    self.emb_op = emb_op
Example #7
def __init__(self, ni, nf):
    # A two-stage head (the class presumably subclasses nn.Sequential): a 3x3 conv_layer
    # followed by a 1x1 conv with bias.
    super().__init__(nnlayers.conv_layer(ni, ni),
                     fv.conv2d(ni, nf, ks=1, bias=True))
Example #8
def conv_layer(ni, nf, ks=3):
    # Conv -> BatchNorm -> ReLU; the conv bias is disabled because BatchNorm adds its own shift.
    return nn.Sequential(fv.conv2d(ni, nf, ks=ks, bias=False),
                         nn.BatchNorm2d(nf), nn.ReLU(inplace=True))
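All of the examples assume fastai imported as `fv` (used for `fv.conv2d` and `fv.embedding`) alongside a project-local `nnlayers.conv_layer` like the one in Example #8. Assuming fastai v1, `conv2d(ni, nf, ks=3, stride=1, padding=None, bias=False)` builds an `nn.Conv2d` padded with `ks//2`, so stride-1 layers preserve spatial size. A minimal usage sketch of the Example #8 helper under that assumption:

import torch
import torch.nn as nn
import fastai.vision as fv  # assumption: `fv` aliases the fastai v1 vision namespace

def conv_layer(ni, nf, ks=3):
    # Conv -> BatchNorm -> ReLU; conv bias is off because BatchNorm supplies its own shift.
    return nn.Sequential(fv.conv2d(ni, nf, ks=ks, bias=False),
                         nn.BatchNorm2d(nf), nn.ReLU(inplace=True))

x = torch.randn(2, 64, 32, 32)
print(conv_layer(64, 128)(x).shape)  # expected: torch.Size([2, 128, 32, 32])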