Example #1
 def forward(self, x):
     print(x.shape, "is the shape of the input")
     x = self.first_cn(x)
     x = M.ConvLayer(1,
                     int(x.shape[-1] / 2),
                     activation=M.PARAM_LRELU,
                     usebias=False,
                     batch_norm=True)(x)
     x = self.dense_1(x)
     print(x.shape, "is the shape after the dense_1")
     x = M.ConvLayer(1,
                     int(x.shape[-1] / 2),
                     activation=M.PARAM_LRELU,
                     usebias=False,
                     batch_norm=True)(x)
     print(x.shape, "is the shape after the transition conv")
     x = self.dense_2(x)
     x = M.ConvLayer(1,
                     int(x.shape[-1] / 2),
                     activation=M.PARAM_LRELU,
                     usebias=False,
                     batch_norm=True)(x)
     x = self.dense_3(x)
     x = M.ConvLayer(1,
                     int(x.shape[-1] / 2),
                     activation=M.PARAM_LRELU,
                     usebias=False,
                     batch_norm=True)(x)
     x = self.dense_4(x)
     x = self.first_cn(x)
     x = self.bn(x)
     x = M.flatten(x)
     x = tf.nn.dropout(x, 0.4)
     x = self.embedding(x)
     return x
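A note on this forward pass: the four 1x1 transition convolutions are constructed inline with M.ConvLayer(...) on every call, so (assuming M.ConvLayer owns its weights like the persistent layers in the other examples) each call would create fresh, untrained parameters; the second self.first_cn(x) near the end may also be standing in for the unused self.transition defined in Example #5, though the snippet alone does not show that. A minimal sketch with persistent transition layers follows; the transition_1 name and the fixed channel widths are illustrative assumptions, not the author's code.

 # Sketch only: assumes the same M wrapper and Dense_block as the examples above,
 # and picks fixed transition widths instead of reading x.shape at call time.
 def initialize(self, embedding_size, embedding_bn=True):
     self.first_cn = M.ConvLayer(7, 32, activation=M.PARAM_LRELU, usebias=False, batch_norm=True)
     self.dense_1 = Dense_block(4, 32)
     self.transition_1 = M.ConvLayer(1, 64, activation=M.PARAM_LRELU, usebias=False, batch_norm=True)  # hypothetical width
     self.dense_2 = Dense_block(4, 64)
     # ... remaining dense blocks and transitions defined the same way ...
     self.bn = M.BatchNorm()
     self.embedding = M.Dense(embedding_size, batch_norm=embedding_bn)

 def forward(self, x):
     x = self.first_cn(x)
     x = self.dense_1(x)
     x = self.transition_1(x)  # reused layer, so its weights persist across calls
     # ... remaining blocks ...
     x = self.bn(x)
     x = M.flatten(x)
     x = tf.nn.dropout(x, 0.4)
     return self.embedding(x)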
Example #2
    def initialize(self):

        self.branch3x3 = M.ConvLayer(
            3,
            384,
            activation=M.PARAM_LRELU,
            usebias=False,
            batch_norm=True,
            stride=2
        )  #M.ConvLayer(3, 384,activation=M.PARAM_LRELU, usebias=False, batch_norm=True)

        self.branch3x3dbl_1 = M.ConvLayer(1,
                                          64,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch3x3dbl_2 = M.ConvLayer(3,
                                          96,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch3x3dbl_3 = M.ConvLayer(
            3,
            96,
            activation=M.PARAM_LRELU,
            usebias=False,
            batch_norm=True,
            stride=2)  #__init__(self, size, stride, pad='SAME')
        self.pool = M.MaxPool(3, 2)
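For reference, this layer list mirrors the grid-reduction block of Inception-v3 (torchvision's InceptionB). A hedged sketch of how the branches are usually combined follows; only initialize is shown in the example, so the forward method and the tf.concat call are assumptions, not the author's code.

    # Sketch only: assumes `import tensorflow as tf` and that the three branches
    # are concatenated along the channel axis, as in the standard Inception-v3 block.
    def forward(self, x):
        b3x3 = self.branch3x3(x)                  # strided 3x3 branch
        b3x3dbl = self.branch3x3dbl_1(x)
        b3x3dbl = self.branch3x3dbl_2(b3x3dbl)
        b3x3dbl = self.branch3x3dbl_3(b3x3dbl)    # strided double-3x3 branch
        bpool = self.pool(x)                      # strided max-pool branch
        return tf.concat([b3x3, b3x3dbl, bpool], axis=-1)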
Example #3
    def initialize(self, channels_7x7):

        self.branch1x1 = M.ConvLayer(
            1, 192, activation=M.PARAM_LRELU, usebias=False, batch_norm=True
        )  #M.ConvLayer(1, 192,activation=M.PARAM_LRELU, usebias=False, batch_norm=True)

        c7 = channels_7x7
        self.branch7x7_1 = M.ConvLayer(1,
                                       c7,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.branch7x7_2 = M.ConvLayer([1, 7],
                                       c7,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.branch7x7_3 = M.ConvLayer([7, 1],
                                       192,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)

        self.branch7x7dbl_1 = M.ConvLayer(1,
                                          c7,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch7x7dbl_2 = M.ConvLayer([7, 1],
                                          c7,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch7x7dbl_3 = M.ConvLayer([1, 7],
                                          c7,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch7x7dbl_4 = M.ConvLayer([7, 1],
                                          c7,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch7x7dbl_5 = M.ConvLayer([1, 7],
                                          192,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)

        self.branch_pool = M.ConvLayer(1,
                                       192,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.pool = M.MaxPool(3, 1)  #__init__(self, size, stride, pad='SAME')
Example #4
 def initialize(self, outchn):
     self.c1 = M.ConvLayer(1,
                           4 * outchn,
                           activation=M.PARAM_LRELU,
                           usebias=False,
                           batch_norm=True)
     self.c2 = M.ConvLayer(3,
                           outchn,
                           activation=M.PARAM_LRELU,
                           usebias=False,
                           batch_norm=True)
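This 1x1-then-3x3 pair looks like the bottleneck unit driven by the Dense_block used in the other examples. A hedged sketch of how such a unit is typically applied, assuming the usual DenseNet wiring where the unit's output is concatenated with its input along the channel axis; the forward method and tf.concat are assumptions, not the author's code.

 # Sketch only: assumes `import tensorflow as tf` and DenseNet-style concatenation.
 def forward(self, x):
     out = self.c1(x)                     # 1x1 conv expands to 4 * outchn channels
     out = self.c2(out)                   # 3x3 conv reduces back to outchn channels
     return tf.concat([x, out], axis=-1)  # feature map grows by outchn channels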
Example #5
 def initialize(self, embedding_size, embedding_bn=True, outchn=0):
     self.first_cn = M.ConvLayer(7,
                                 32,
                                 activation=M.PARAM_LRELU,
                                 usebias=False,
                                 batch_norm=True)
     self.dense_1 = Dense_block(4, 32)
     self.dense_2 = Dense_block(4, 64)
     self.dense_3 = Dense_block(4, 128)
     self.dense_4 = Dense_block(4, 256)
     self.bn = M.BatchNorm()
     self.transition = M.ConvLayer(1,
                                   256,
                                   activation=M.PARAM_LRELU,
                                   usebias=False,
                                   batch_norm=True)
     #self.transition=M.ConvLayer(6, 2*outchn, activation=M.PARAM_LRELU, usebias=False, batch_norm=True)
     self.embedding = M.Dense(embedding_size, batch_norm=embedding_bn)
Example #6
 def initialize(self, embedding_size, embedding_bn=True, outchn=0):
     #super(Inception3, self).__init__()
     #self.aux_logits = aux_logits
     #self.transform_input = transform_input
     #M.ConvLayer(7, 32, stride=2, activation=M.PARAM_LRELU, usebias=False, batch_norm=True)
     self.Conv2d_1a_3x3 = M.ConvLayer(3,
                                      32,
                                      activation=M.PARAM_LRELU,
                                      usebias=False,
                                      batch_norm=True,
                                      stride=2)
     self.Conv2d_2a_3x3 = M.ConvLayer(3,
                                      32,
                                      activation=M.PARAM_LRELU,
                                      usebias=False,
                                      batch_norm=True)
     self.Conv2d_2b_3x3 = M.ConvLayer(3,
                                      64,
                                      activation=M.PARAM_LRELU,
                                      usebias=False,
                                      batch_norm=True)
     self.Conv2d_3b_1x1 = M.ConvLayer(1,
                                      80,
                                      activation=M.PARAM_LRELU,
                                      usebias=False,
                                      batch_norm=True)
     self.Conv2d_4a_3x3 = M.ConvLayer(3,
                                      192,
                                      activation=M.PARAM_LRELU,
                                      usebias=False,
                                      batch_norm=True)
     self.Mixed_5b = InceptionA(pool_features=32)  #192
     self.Mixed_5c = InceptionA(pool_features=64)  #256
     self.Mixed_5d = InceptionA(pool_features=64)  #288
     self.Mixed_6a = InceptionB()  #288
     self.Mixed_6b = InceptionC(channels_7x7=128)  #768
     self.Mixed_6c = InceptionC(channels_7x7=160)
     self.Mixed_6d = InceptionC(channels_7x7=160)
     self.Mixed_6e = InceptionC(channels_7x7=192)
     #if aux_logits:
     #self.AuxLogits = InceptionAux(768, num_classes)
     self.Mixed_7a = InceptionD(768)
     self.Mixed_7b = InceptionE()
     self.Mixed_7c = InceptionE()
     self.fc = M.Dense(embedding_size, batch_norm=embedding_bn)
Example #7
 def initialize(self, out, stride, shortcut=False):
     self.shortcut = shortcut
     self.c1 = M.ConvLayer(3,
                           out,
                           pad='SAME_LEFT',
                           usebias=False,
                           activation=M.PARAM_RELU,
                           batch_norm=True)
     self.c2 = M.ConvLayer(3,
                           out,
                           pad='SAME_LEFT',
                           usebias=False,
                           batch_norm=True)
     # Note: stride is applied only to the 1x1 shortcut conv below; the two 3x3
     # convs on the main path run at the default stride of 1.
     if shortcut:
         self.sc = M.ConvLayer(1,
                               out,
                               usebias=False,
                               stride=stride,
                               batch_norm=True)
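Only initialize is shown, so how the two paths are combined is not visible here. A hedged sketch of the usual residual wiring for such a block, assuming the branch and shortcut are summed and passed through a final ReLU; the forward method is an assumption, not the author's code.

 # Sketch only: assumes `import tensorflow as tf`; standard post-activation residual block.
 def forward(self, x):
     branch = self.c1(x)        # 3x3 conv + BN + ReLU
     branch = self.c2(branch)   # 3x3 conv + BN, no activation before the add
     skip = self.sc(x) if self.shortcut else x
     return tf.nn.relu(branch + skip)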
Example #8
    def initialize(self):
        self.branch1x1 = M.ConvLayer(
            1, 320, activation=M.PARAM_LRELU, usebias=False, batch_norm=True
        )  #M.ConvLayer(1,320,activation=M.PARAM_LRELU, usebias=False, batch_norm=True)

        self.branch3x3_1 = M.ConvLayer(1,
                                       384,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.branch3x3_2a = M.ConvLayer([1, 3],
                                        384,
                                        activation=M.PARAM_LRELU,
                                        usebias=False,
                                        batch_norm=True)
        self.branch3x3_2b = M.ConvLayer([3, 1],
                                        384,
                                        activation=M.PARAM_LRELU,
                                        usebias=False,
                                        batch_norm=True)

        self.branch3x3dbl_1 = M.ConvLayer(1,
                                          448,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch3x3dbl_2 = M.ConvLayer(3,
                                          384,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch3x3dbl_3a = M.ConvLayer([1, 3],
                                           384,
                                           activation=M.PARAM_LRELU,
                                           usebias=False,
                                           batch_norm=True)
        self.branch3x3dbl_3b = M.ConvLayer([3, 1],
                                           384,
                                           activation=M.PARAM_LRELU,
                                           usebias=False,
                                           batch_norm=True)

        self.branch_pool = M.ConvLayer(1,
                                       192,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.avg_pool = M.AvgPool(3, 1)
Example #9
 def initialize(self, steps, inp, o):
     self.mods = []
     for i in range(steps):
         if i == (steps - 1):
             self.mods.append(
                 M.ConvLayer(3,
                             o,
                             stride=2,
                             pad='SAME_LEFT',
                             batch_norm=True,
                             usebias=False))
         else:
             self.mods.append(
                 M.ConvLayer(3,
                             inp,
                             stride=2,
                             pad='SAME_LEFT',
                             activation=M.PARAM_RELU,
                             batch_norm=True,
                             usebias=False))
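A hedged usage sketch: the loop above builds a stack of stride-2 convolutions, with only the last one switching to o output channels and carrying no activation. Assuming the layers are simply chained in forward (the forward method itself is not part of the example):

 # Sketch only: each stride-2 conv halves the spatial resolution.
 def forward(self, x):
     for layer in self.mods:
         x = layer(x)
     return x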
Example #10
 def initialize(self, outchns, strides):
     self.trans = []
     for i, (o, s) in enumerate(zip(outchns, strides)):
         if o is None or s is None:
             self.trans.append(None)
         else:
             # The stride-1 and strided cases build the same 3x3 conv,
             # so a single branch covers both.
             self.trans.append(
                 M.ConvLayer(3,
                             o,
                             stride=s,
                             pad='SAME_LEFT',
                             activation=M.PARAM_RELU,
                             usebias=False,
                             batch_norm=True))
Example #11
 def initialize(self):
     self.c1 = M.ConvLayer(3,
                           64,
                           pad='SAME_LEFT',
                           stride=2,
                           activation=M.PARAM_RELU,
                           usebias=False,
                           batch_norm=True)
     self.c2 = M.ConvLayer(3,
                           64,
                           pad='SAME_LEFT',
                           stride=2,
                           activation=M.PARAM_RELU,
                           usebias=False,
                           batch_norm=True)
     self.layer1 = ResBlock(256, 1, 4)
     self.stage1 = Stage([32, 64], [1, 2], 1, 4, 1)
     self.stage2 = Stage([32, 64, 128], [None, None, 2], 2, 4, 4)
     self.stage3 = Stage([32, 64, 128, 256], [None, None, None, 2],
                         3,
                         4,
                         3,
                         d=True)
     self.lastfuse = FuseLast([32, 64, 128, 256])
Example #12
    def initialize(self, pool_features):

        self.branch1x1 = M.ConvLayer(1,
                                     64,
                                     activation=M.PARAM_LRELU,
                                     usebias=False,
                                     batch_norm=True)

        self.branch5x5_1 = M.ConvLayer(1,
                                       48,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.branch5x5_2 = M.ConvLayer(5,
                                       64,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)

        self.branch3x3dbl_1 = M.ConvLayer(1,
                                          64,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        # Note: this second conv of the double-3x3 branch uses a 1x1 kernel here;
        # in the Inception-v3 layout these layers mirror, it is normally 3x3.
        self.branch3x3dbl_2 = M.ConvLayer(1,
                                          96,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)
        self.branch3x3dbl_3 = M.ConvLayer(3,
                                          96,
                                          activation=M.PARAM_LRELU,
                                          usebias=False,
                                          batch_norm=True)

        self.branch_pool = M.ConvLayer(
            1,
            pool_features,
            activation=M.PARAM_LRELU,
            usebias=False,
            batch_norm=True
        )  #__init__(self, size, stride, pad='SAME')
        self.avg_pool = M.AvgPool(3, 1)
Example #13
    def initialize(self, channels_7x7):
        #M.ConvLayer(1, 192,activation=M.PARAM_LRELU, usebias=False, batch_norm=True)
        self.branch3x3_1 = M.ConvLayer(1,
                                       192,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)
        self.branch3x3_2 = M.ConvLayer(3,
                                       320,
                                       stride=2,
                                       activation=M.PARAM_LRELU,
                                       usebias=False,
                                       batch_norm=True)

        self.branch7x7x3_1 = M.ConvLayer(1,
                                         192,
                                         activation=M.PARAM_LRELU,
                                         usebias=False,
                                         batch_norm=True)
        self.branch7x7x3_2 = M.ConvLayer([1, 7],
                                         192,
                                         activation=M.PARAM_LRELU,
                                         usebias=False,
                                         batch_norm=True)
        self.branch7x7x3_3 = M.ConvLayer([7, 1],
                                         192,
                                         activation=M.PARAM_LRELU,
                                         usebias=False,
                                         batch_norm=True)
        self.branch7x7x3_4 = M.ConvLayer(3,
                                         192,
                                         stride=2,
                                         activation=M.PARAM_LRELU,
                                         usebias=False,
                                         batch_norm=True)
        self.max_pool = M.MaxPool(3, 2)  #__init__(self, size, stride, pad='SAME')
Example #14
 def initialize(self, num_pts):
     self.backbone = hrnet.ResNet()
     self.lastconv = M.ConvLayer(1, num_pts)
Example #15
 def initialize(self, o):
     self.c1 = M.ConvLayer(1, o, batch_norm=True, usebias=False)