Example #1
    def xceptionv3_exit(self, sc1, sc2, x):
        """

        :param sc1:
        :param sc2:
        :param x:
        :return:
        """
        sc_1 = keras.layers.SeparableConv2D(filters=sc1,
                                            kernel_size=3,
                                            strides=1,
                                            padding='same',
                                            depth_multiplier=1)
        ac1 = Mish()
        bn1 = keras.layers.BatchNormalization()

        sc_2 = keras.layers.SeparableConv2D(filters=sc2,
                                            kernel_size=3,
                                            strides=1,
                                            padding='same',
                                            depth_multiplier=1)
        ac2 = Mish()
        bn2 = keras.layers.BatchNormalization()

        # maxpool 3×3
        mp_3 = keras.layers.MaxPool2D(pool_size=3, strides=2, padding='same')

        shortcut_conv = keras.layers.Conv2D(filters=sc2,
                                            kernel_size=1,
                                            strides=2,
                                            padding='same')
        p1 = mp_3(ac2(bn2(sc_2(ac1(bn1(sc_1(x)))))))

        return Mish()(p1 + shortcut_conv(x))
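
A note on the `Mish` layer used throughout these examples: it is a custom Keras layer, not a built-in activation. Below is a minimal sketch of such a layer, assuming the standard definition Mish(x) = x * tanh(softplus(x)); the original project's implementation may differ in details such as config serialization.

import tensorflow as tf
from tensorflow import keras

class Mish(keras.layers.Layer):
    """Mish activation: x * tanh(softplus(x))."""
    def call(self, inputs):
        return inputs * tf.math.tanh(tf.math.softplus(inputs))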
Example #2
    def xceptionv3_middle(self, x):
        """Middle-flow block: three 728-filter separable convolutions with a
        residual connection back to the input `x`."""
        sc_1 = keras.layers.SeparableConv2D(filters=728,
                                            kernel_size=3,
                                            strides=1,
                                            padding='same',
                                            depth_multiplier=1)
        ac1 = Mish()
        bn1 = keras.layers.BatchNormalization()

        sc_2 = keras.layers.SeparableConv2D(filters=728,
                                            kernel_size=3,
                                            strides=1,
                                            padding='same',
                                            depth_multiplier=1)
        ac2 = Mish()
        bn2 = keras.layers.BatchNormalization()

        sc_3 = keras.layers.SeparableConv2D(filters=728,
                                            kernel_size=3,
                                            strides=1,
                                            padding='same',
                                            depth_multiplier=1)

        p1 = sc_3(ac2(bn2(sc_2(ac1(bn1(sc_1(x)))))))

        return Mish()(p1 + x)
Example #3
def InesNet(c_out):
    """Build InesNet: a downscaling stem, two residual stages, and a
    Mish/BatchNorm/Linear classifier head with `c_out` outputs."""
    initial = nn.Sequential(NiceDownscale(3,64,k=6,activation=False))

    blockA = nn.Sequential(concat_downscale(64,128),
                           ResBlock(128),
                           NiceDownscale(128,256),
                           ResBlock(256,g=2),
                           ResBlock(256,bottle=192),
                          )
    
    blockB = nn.Sequential(concat_downscale(256,512),
                           ResBlock(512,bottle=256,g=4),
                           ResBlock(512,bottle=384,g=2),
                           ResBlock(512,g=4),
                           ResBlock(512,g=2),
                           concat_downscale(512,768),
                           ResBlock(768,bottle=512,g=4),
                           ResBlock(768,g=2),
                           ResBlock(768,bottle=384,g=3),
                           ResBlock(768))
                     
    
    classifier = nn.Sequential(nn.AdaptiveAvgPool2d(1),
                               fv.Flatten(),
                               Mish(),
                               nn.BatchNorm1d(768),
                               nn.Linear(768,c_out))
    
    return nn.Sequential(initial, blockA, blockB, classifier)
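
Example #3 is PyTorch rather than Keras and relies on project helpers (`NiceDownscale`, `concat_downscale`, `ResBlock`) and fastai's `fv.Flatten()` that are not shown here. A minimal sketch of the `Mish` module it assumes, equivalent to `nn.Mish` in PyTorch 1.9+:

import torch
import torch.nn as nn
import torch.nn.functional as F

class Mish(nn.Module):
    """Mish activation: x * tanh(softplus(x))."""
    def forward(self, x):
        return x * torch.tanh(F.softplus(x))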
Example #4
    def alexnet(self):
        """
        定义AlexNet的结构
        :return:
        """
        output_num = 3
        net = keras.models.Sequential()

        net.add(
            keras.layers.Conv2D(filters=96,
                                kernel_size=11,
                                strides=4,
                                padding='same',
                                input_shape=[width, height, channels]))
        net.add(keras.layers.BatchNormalization())
        net.add(Mish())
        net.add(keras.layers.MaxPool2D(pool_size=3, strides=2))

        net.add(
            keras.layers.Conv2D(filters=256,
                                kernel_size=5,
                                strides=1,
                                padding='same'))
        net.add(keras.layers.BatchNormalization())
        net.add(Mish())
        net.add(keras.layers.MaxPool2D(pool_size=3, strides=2))

        net.add(keras.layers.Conv2D(filters=384, kernel_size=3,
                                    padding='same'))
        net.add(keras.layers.BatchNormalization())
        net.add(Mish())
        net.add(keras.layers.Conv2D(filters=384, kernel_size=3,
                                    padding='same'))
        net.add(keras.layers.BatchNormalization())
        net.add(Mish())
        net.add(keras.layers.Conv2D(filters=256, kernel_size=3,
                                    padding='same'))
        net.add(keras.layers.BatchNormalization())
        net.add(Mish())
        net.add(keras.layers.MaxPool2D(pool_size=2, strides=2))

        net.add(keras.layers.Flatten())
        net.add(keras.layers.Dense(1024))
        net.add(Mish())

        net.add(keras.layers.Dropout(0.5))
        net.add(keras.layers.Dense(512))
        net.add(Mish())

        net.add(keras.layers.Dropout(0.5))
        net.add(keras.layers.Dense(output_num, activation='softmax'))

        net.compile(loss='categorical_crossentropy',
                    optimizer='adam',
                    metrics=['accuracy'])
        net.summary()
        return net
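
A hypothetical smoke test for the compiled model; `Trainer` stands in for the unshown enclosing class, and the image size is an assumption (the real values come from the module-level `width`, `height`, `channels`):

import numpy as np
from tensorflow import keras

model = Trainer().alexnet()   # hypothetical class name, not from the original snippet
x_dummy = np.random.rand(8, 224, 224, 3).astype('float32')   # assumes width = height = 224, channels = 3
y_dummy = keras.utils.to_categorical(np.random.randint(0, 3, size=8), num_classes=3)
model.fit(x_dummy, y_dummy, epochs=1, batch_size=4)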
Example #5
    def xception(self):
        """Build an Xception-style model with Mish activations: two stem
        convolutions, three entry blocks, eight middle-flow blocks, an exit
        block, and a softmax classifier."""
        output_num = 3
        inpt = keras.layers.Input(shape=[width, height, channels])

        # block 1
        x = keras.layers.Conv2D(filters=32,
                                kernel_size=3,
                                strides=2,
                                padding='valid')(inpt)
        x = keras.layers.BatchNormalization()(x)
        x = Mish()(x)
        x = keras.layers.Conv2D(filters=64,
                                kernel_size=3,
                                strides=1,
                                padding='same')(x)
        x = keras.layers.BatchNormalization()(x)
        x = Mish()(x)

        # block 2
        x = self.xceptionv3_entry(sc1=128, sc2=128, x=x)
        # block 3
        x = self.xceptionv3_entry(sc1=256, sc2=256, x=x)
        # block 4
        x = self.xceptionv3_entry(sc1=728, sc2=728, x=x)

        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)
        x = self.xceptionv3_middle(x)

        x = self.xceptionv3_exit(sc1=728, sc2=1024, x=x)

        x = keras.layers.SeparableConv2D(filters=1536,
                                         kernel_size=3,
                                         strides=1,
                                         padding='same',
                                         depth_multiplier=1)(x)
        x = keras.layers.BatchNormalization()(x)
        x = Mish()(x)

        x = keras.layers.SeparableConv2D(filters=2048,
                                         kernel_size=3,
                                         strides=1,
                                         padding='same',
                                         depth_multiplier=1)(x)
        x = keras.layers.BatchNormalization()(x)
        x = Mish()(x)

        x = keras.layers.GlobalAveragePooling2D()(x)

        # classifier head
        x = keras.layers.Flatten()(x)
        x = keras.layers.Dropout(0.5)(x)
        x = keras.layers.Dense(output_num, activation='softmax')(x)

        model = keras.Model(inputs=inpt, outputs=x)
        model.summary()
        model.compile(loss='categorical_crossentropy',
                      optimizer='adam',
                      metrics=['accuracy'])
        return model
    def inceptionv2(self):
        """Build an Inception-style model from inceptionv2_module_v1 blocks,
        using Mish activations and a softmax classifier."""
        output_num = 3
        inpt = keras.layers.Input(shape=[width, height, channels])

        # stage 1
        x = keras.layers.SeparableConv2D(filters=64,
                                         kernel_size=7,
                                         strides=2,
                                         padding='same')(inpt)
        x = keras.layers.BatchNormalization()(x)
        x = Mish()(x)
        x = keras.layers.MaxPool2D(pool_size=3, strides=2, padding='same')(x)

        # stage 2
        x = keras.layers.Conv2D(filters=64,
                                kernel_size=1,
                                strides=1,
                                padding='same')(x)
        x = Mish()(x)
        x = keras.layers.BatchNormalization()(x)

        x = keras.layers.Conv2D(filters=192,
                                kernel_size=3,
                                strides=1,
                                padding='same')(x)
        x = Mish()(x)
        x = keras.layers.BatchNormalization()(x)
        x = keras.layers.MaxPool2D(pool_size=3, strides=2, padding='same')(x)

        # stage 3
        x = self.inceptionv2_module_v1(n1_1=64,
                                       n2_1=64,
                                       n2_3=64,
                                       n3_1=64,
                                       n3_5=96,
                                       n4_1=32,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=64,
                                       n2_1=64,
                                       n2_3=96,
                                       n3_1=64,
                                       n3_5=96,
                                       n4_1=64,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=0,
                                       n2_1=128,
                                       n2_3=160,
                                       n3_1=64,
                                       n3_5=96,
                                       n4_1=0,
                                       x=x,
                                       is_max=True,
                                       strides=2)

        # stage 4
        x = self.inceptionv2_module_v1(n1_1=224,
                                       n2_1=64,
                                       n2_3=96,
                                       n3_1=96,
                                       n3_5=128,
                                       n4_1=128,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=192,
                                       n2_1=96,
                                       n2_3=128,
                                       n3_1=96,
                                       n3_5=128,
                                       n4_1=128,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=128,
                                       n2_1=128,
                                       n2_3=160,
                                       n3_1=128,
                                       n3_5=160,
                                       n4_1=128,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=96,
                                       n2_1=128,
                                       n2_3=160,
                                       n3_1=160,
                                       n3_5=192,
                                       n4_1=128,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=0,
                                       n2_1=128,
                                       n2_3=192,
                                       n3_1=192,
                                       n3_5=256,
                                       n4_1=0,
                                       x=x,
                                       is_max=True,
                                       strides=2)

        # stage 5
        x = self.inceptionv2_module_v1(n1_1=352,
                                       n2_1=192,
                                       n2_3=320,
                                       n3_1=160,
                                       n3_5=224,
                                       n4_1=128,
                                       x=x)
        x = self.inceptionv2_module_v1(n1_1=352,
                                       n2_1=192,
                                       n2_3=320,
                                       n3_1=192,
                                       n3_5=224,
                                       n4_1=128,
                                       x=x,
                                       is_max=True)
        x = keras.layers.AvgPool2D(pool_size=7, strides=7, padding='same')(x)

        # stage 6: classifier head
        x = keras.layers.Flatten()(x)
        x = keras.layers.Dropout(0.7)(x)
        x = keras.layers.Dense(output_num, activation='softmax')(x)

        model = keras.Model(inputs=inpt, outputs=x)
        model.summary()
        model.compile(loss='categorical_crossentropy',
                      optimizer='adam',
                      metrics=['accuracy'])
        return model
    def inceptionv2_module_v1(self,
                              n1_1,
                              n2_1,
                              n2_3,
                              n3_1,
                              n3_5,
                              n4_1,
                              x,
                              is_max=False,
                              strides=1):
        """
        Inception module with four parallel branches: a 1x1 convolution (n1_1),
        1x1 -> 3x3 convolutions (n2_1, n2_3), 1x1 -> 3x3 -> 3x3 convolutions
        (n3_1, n3_5), and pooling followed by a 1x1 convolution (n4_1).
        When n1_1 is 0 the first branch is dropped from the concatenation, and
        when n4_1 is 0 the pooling branch skips its 1x1 convolution; `is_max`
        switches the pooling branch between max and average pooling.
        """
        p1_conv_1 = keras.layers.Conv2D(filters=n1_1,
                                        kernel_size=1,
                                        strides=strides,
                                        padding='same')
        p1_ac1 = Mish()

        p2_conv_1 = keras.layers.Conv2D(filters=n2_1,
                                        kernel_size=1,
                                        strides=1,
                                        padding='same')
        p2_ac1 = Mish()
        p2_conv_3 = keras.layers.Conv2D(filters=n2_3,
                                        kernel_size=3,
                                        strides=strides,
                                        padding='same')
        p2_ac2 = Mish()

        p3_conv_1 = keras.layers.Conv2D(filters=n3_1,
                                        kernel_size=1,
                                        strides=strides,
                                        padding='same')
        p3_ac1 = Mish()
        p3_conv_3_1 = keras.layers.Conv2D(filters=n3_5,
                                          kernel_size=3,
                                          strides=1,
                                          padding='same')
        p3_ac2 = Mish()
        p3_conv_3_2 = keras.layers.Conv2D(filters=n3_5,
                                          kernel_size=3,
                                          strides=1,
                                          padding='same')
        p3_ac3 = Mish()

        if is_max:
            p4_maxpool_3 = keras.layers.MaxPool2D(pool_size=3,
                                                  strides=strides,
                                                  padding='same')
        else:
            p4_maxpool_3 = keras.layers.AvgPool2D(pool_size=3,
                                                  strides=strides,
                                                  padding='same')

        p4_conv_1 = keras.layers.Conv2D(filters=n4_1,
                                        kernel_size=1,
                                        strides=1,
                                        padding='same')
        p4_ac1 = Mish()

        p1 = p1_ac1(p1_conv_1(x))
        p2 = p2_ac2(p2_conv_3(p2_ac1(p2_conv_1(x))))
        p3 = p3_ac3(p3_conv_3_2(p3_ac2(p3_conv_3_1(p3_ac1(p3_conv_1(x))))))
        if n4_1 > 0:
            p4 = p4_ac1(p4_conv_1(p4_maxpool_3(x)))
        else:
            p4 = p4_maxpool_3(x)

        if n1_1 > 0:
            return keras.layers.concatenate([p1, p2, p3, p4], 3)
        else:
            return keras.layers.concatenate([p2, p3, p4], 3)
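
A hypothetical shape check for the module above, using the first stage-3 configuration from `inceptionv2` (`trainer` stands in for an instance of the unshown enclosing class). The output channel count is the sum of the active branches:

dummy = keras.layers.Input(shape=[28, 28, 192])
out = trainer.inceptionv2_module_v1(n1_1=64, n2_1=64, n2_3=64,
                                    n3_1=64, n3_5=96, n4_1=32, x=dummy)
print(out.shape)  # (None, 28, 28, 256): 64 + 64 + 96 + 32 concatenated channels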
Example #8
# Residual connection: add the c3 shortcut, then pool from the 128-point set
# (xyz128) down to the 32-point set (xyz32).
x = tf.add(x, c3)
x = PointPooling(batch_sample_xyz=xyz128, sampling=xyz32, poolN=9)(x)
x = BatchNormalization()(x)

# 1x1 Conv1D shortcut around the 1024-channel FPAC layer, then pool down to
# the 8-point set (xyz8).
c4 = Conv1D(1024, 1)(x)
x = FPAC_Layer(xyz=xyz32, cin=512, cout=1024, m1=[3, 9, 1], m2=[524288, 64, 32],
               mr=[16384, 64], mid=32, maxn=32, framepoints=framepoints4,
               numframe=num_framepoint, N=2048, l2=l2, dtype=dtype)(x)
x = tf.add(x, c4)
x = PointPooling(batch_sample_xyz=xyz32, sampling=xyz8, poolN=8)(x)
x = BatchNormalization()(x)

# Max-pool over the remaining 8 points to get a global feature vector.
x = MaxPooling1D(pool_size=8)(x)

# Classification head: three Dense -> Mish -> BatchNorm -> Dropout blocks,
# followed by a 40-way softmax.
x = Dense(512)(x)
x = Mish()(x)
x = BatchNormalization()(x)
x = Dropout(rate=0.3)(x)
x = Dense(256)(x)
x = Mish()(x)
x = BatchNormalization()(x)
x = Dropout(rate=0.3)(x)
x = Dense(128)(x)
x = Mish()(x)
x = BatchNormalization()(x)
x = Dropout(rate=0.3)(x)
x = Dense(40, activation = 'softmax')(x)
prediction = Flatten()(x)


model = Model(inputs=[inputs, idx512, idx128, idx32, idx8], outputs=prediction)
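
A hypothetical compile step for the assembled model; the optimizer, loss, and metrics below are assumptions, since the original training code is not part of the snippet:

model.compile(optimizer=tf.keras.optimizers.Adam(1e-3),
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.summary()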