import tensorflow as tf

import layers as L  # assumed import: `L` is the project's layer-helper module


def network2(self, is_training=True, reuse=False):
    # with tf.variable_scope(self.name, reuse=reuse, initializer=self.init, regularizer=self.reg):
    with tf.variable_scope(self.name, reuse=reuse, initializer=self.init):
        conv1 = self.block('conv1', self.inputs, [32], is_training)   # 64x64
        conv2 = self.block('conv2', conv1, [128] * 4, is_training)    # 64x64
        ccat1 = L.concat('ccat1', [conv1, conv2])                     # 64x64
        pool1 = L.pool('max_pool1', ccat1, (2, 2), (2, 2))            # 32x32
        conv3 = self.block('conv3', pool1, [256] * 4, is_training)    # 32x32
        ccat2 = L.concat('ccat2', [pool1, conv3])                     # 32x32
        pool2 = L.pool('max_pool2', ccat2, (2, 2), (2, 2))            # 16x16
        conv4 = self.block('conv4', pool2, [512] * 4, is_training)    # 16x16
        ccat3 = L.concat('ccat3', [pool2, conv4])                     # 16x16
        pool3 = L.pool('max_pool3', ccat3, (2, 2), (2, 2))            # 8x8
        conv5 = L.conv_bn('conv5', pool3, self.num_labels, (1, 1),    # 8x8
                          is_training=is_training, regularizer=self.reg)
        pool4 = L.global_avg_pool('global_avg_pool', conv5)           # 1x1
        logits = tf.squeeze(pool4, [1, 2])
        return logits
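
# NOTE: `self.block` is called by both networks but is not defined in this
# excerpt. A minimal sketch of what it presumably does, assuming one
# 3x3 conv + batch norm + ReLU per entry in `filters`, built from the same
# `L.conv_bn` helper used above; kernel size and activation are assumptions.
def block(self, name_scope, x, filters, is_training):
    with tf.variable_scope(name_scope):
        for i, num_filters in enumerate(filters):
            x = L.conv_bn('conv{}'.format(i + 1), x, num_filters, (3, 3),
                          is_training=is_training)
            x = tf.nn.relu(x)
        return x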
def network1(self, is_training=True, reuse=False):
    with tf.variable_scope(self.name, reuse=reuse, initializer=self.init):
        # input's shape = (n, 64, 64, 3)
        # block1
        conv1 = self.block('conv1', self.inputs, [32, 64, 128, 256, 512], is_training)  # 64x64
        pool1 = L.pool('max_pool1', conv1, (2, 2), (2, 2))                              # 32x32
        # block2
        conv2 = self.block('conv2', pool1, [64, 128, 256, 512, 1024], is_training)      # 32x32
        pool2 = L.pool('max_pool2', conv2, (2, 2), (2, 2))                              # 16x16
        skip1 = self.space_to_depth_x2(pool1)                                           # 16x16
        ccat1 = L.concat('concat1', [skip1, pool2])                                     # 16x16
        # block3
        conv3 = self.block('conv3', ccat1, [32, 128, 256, 512, 1024], is_training)      # 16x16
        pool3 = L.pool('max_pool3', conv3, (2, 2), (2, 2))                              # 8x8
        skip2 = self.space_to_depth_x2(ccat1)                                           # 8x8
        ccat2 = L.concat('concat2', [skip2, pool3])                                     # 8x8
        # conv classification
        conv4 = L.conv_bn('conv4', ccat2, self.num_labels, (1, 1),
                          is_training=is_training)                                      # 8x8
        pool4 = L.global_avg_pool('global_avg_pool', conv4)                             # 1x1
        logits = tf.squeeze(pool4, [1, 2])
        return logits
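
# NOTE: `space_to_depth_x2` is not defined in this excerpt. Given how it is
# used above (a 32x32 tensor becomes a 16x16 skip connection), it is
# presumably the YOLOv2-style passthrough that trades each 2x2 spatial block
# for 4x the channels. A minimal sketch using the stock TF op:
def space_to_depth_x2(self, x):
    # (n, h, w, c) -> (n, h/2, w/2, 4c); the halved resolution matches the
    # output of a stride-2 pool, so the two tensors can be concatenated.
    return tf.space_to_depth(x, block_size=2)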
def squeeze_excite(name_scope, x, reduction=4):
    with tf.variable_scope(name_scope):
        in_size = x.get_shape()[-1]
        gap = L.global_avg_pool('gap', x)
        conv1_relu1 = L.conv_relu('squeeze', gap, in_size // reduction, (1, 1), (1, 1))
        conv2 = L.conv('excite', conv1_relu1, in_size, (1, 1), (1, 1))
        hsigmoid1 = L.hsigmoid(conv2)
        return x * hsigmoid1
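
# NOTE: `L.hsigmoid` and `L.hswish` are assumed to be the hard activations
# from the MobileNetV3 paper: h-sigmoid(x) = ReLU6(x + 3) / 6, a cheap
# piecewise-linear stand-in for the sigmoid gate, and h-swish(x) =
# x * h-sigmoid(x). Standalone sketches (the project's L versions presumably
# wrap these; L.hswish above also takes a name argument):
def hsigmoid(x):
    return tf.nn.relu6(x + 3.0) / 6.0


def hswish(x):
    return x * tf.nn.relu6(x + 3.0) / 6.0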
def large(self, inputs, is_training=True, reuse=False):
    with tf.variable_scope(self.name + '_large', reuse=reuse,
                           initializer=self.weight_init, regularizer=self.reg):
        # ksize, exp_size, out_size, SE, NL, strides
        # stem: 224x224x3 -> 112x112x16                       16
        BNETS = [
            [(3, 3), 16,  16,  'F', 'RE', 1],  # 112x112x16 -> 112x112x16  16
            [(3, 3), 64,  24,  'F', 'RE', 2],  # 112x112x16 -> 56x56x24     8
            [(3, 3), 72,  24,  'F', 'RE', 1],  # 56x56x24   -> 56x56x24     8
            [(5, 5), 72,  40,  'T', 'RE', 2],  # 56x56x24   -> 28x28x40     4
            [(5, 5), 120, 40,  'T', 'RE', 1],  # 28x28x40   -> 28x28x40     4
            [(5, 5), 120, 40,  'T', 'RE', 1],  # 28x28x40   -> 28x28x40     4
            [(3, 3), 240, 80,  'F', 'HS', 2],  # 28x28x40   -> 14x14x80     2
            [(3, 3), 200, 80,  'F', 'HS', 1],  # 14x14x80   -> 14x14x80     2
            [(3, 3), 184, 80,  'F', 'HS', 1],  # 14x14x80   -> 14x14x80     2
            [(3, 3), 184, 80,  'F', 'HS', 1],  # 14x14x80   -> 14x14x80     2
            [(3, 3), 480, 112, 'T', 'HS', 1],  # 14x14x80   -> 14x14x112    2
            [(3, 3), 672, 112, 'T', 'HS', 1],  # 14x14x112  -> 14x14x112    2
            [(5, 5), 672, 160, 'T', 'HS', 2],  # 14x14x112  -> 7x7x160      1
            [(5, 5), 960, 160, 'T', 'HS', 1],  # 7x7x160    -> 7x7x160      1
            [(5, 5), 960, 160, 'T', 'HS', 1],  # 7x7x160    -> 7x7x160      1
        ]
        x = conv_bn_relu6('conv1', inputs, 16, (3, 3), (2, 2),
                          is_training=is_training, nl='HS')
        for idx, (ksize, exp_size, out_size, se, nl, strides) in enumerate(BNETS):
            name = 'bneck{}'.format(idx + 1)
            x = bneck(name, x, ksize, exp_size, out_size, se, nl, strides, is_training)
        x = conv_bn_relu6('conv2', x, 960, (1, 1), (1, 1),
                          is_training=is_training, nl='HS')
        x = L.global_avg_pool('gap', x)
        x = L.conv('conv3', x, 1280, (1, 1), (1, 1))
        x = L.hswish('conv3/hswish', x)
        x = L.dropout('dropout', x, 0.2, is_training=is_training)
        x = L.conv('conv4', x, self.num_labels, (1, 1), (1, 1))
        x = tf.squeeze(x, [1, 2])
        return x
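
# NOTE: `bneck` is not defined in this excerpt. Below is a minimal sketch of
# the MobileNetV3 bottleneck it appears to implement: 1x1 expansion ->
# depthwise conv -> optional squeeze-and-excite -> linear 1x1 projection, with
# a residual shortcut when the stride is 1 and channel counts match. The
# depthwise and BN steps here use stock tf.keras/tf.layers ops rather than the
# project's `L` module, so treat this as an illustration, not the original.
def bneck(name, x, ksize, exp_size, out_size, se, nl, strides, is_training):
    with tf.variable_scope(name):
        in_size = int(x.get_shape()[-1])
        # 1x1 pointwise expansion with the block's nonlinearity
        y = conv_bn_relu6('expand', x, exp_size, (1, 1), (1, 1),
                          is_training=is_training, nl=nl)
        # depthwise convolution (this is where the spatial stride happens)
        y = tf.keras.layers.DepthwiseConv2D(ksize, strides=(strides, strides),
                                            padding='same', use_bias=False)(y)
        y = tf.layers.batch_normalization(y, training=is_training)
        y = tf.nn.relu6(y) if nl == 'RE' else y * tf.nn.relu6(y + 3.0) / 6.0
        # optional squeeze-and-excite gate ('T' in the BNETS tables)
        if se == 'T':
            y = squeeze_excite('se', y)
        # linear 1x1 projection (no nonlinearity, per the paper)
        y = tf.layers.conv2d(y, out_size, (1, 1), use_bias=False, name='project')
        y = tf.layers.batch_normalization(y, training=is_training, name='project_bn')
        # residual shortcut when the shapes allow it
        if strides == 1 and in_size == out_size:
            y = x + y
        return y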
def small(self, inputs, is_training=True, reuse=False):
    with tf.variable_scope(self.name + '_small', reuse=reuse,
                           initializer=self.weight_init, regularizer=self.reg):
        # k, e, o, SE, NL, s ('+' marks blocks with a residual shortcut)
        # stem: 224x224x3 -> 112x112x16                      16  index
        BNETS = [
            [(3, 3), 16,  16, 'T', 'RE', 2],  # 112x112x16 -> 56x56x16   8   0
            [(3, 3), 72,  24, 'F', 'RE', 2],  # 56x56x16   -> 28x28x24   4   1
            [(3, 3), 88,  24, 'F', 'RE', 1],  # 28x28x24   -> 28x28x24   4   2  +
            [(5, 5), 96,  40, 'T', 'HS', 2],  # 28x28x24   -> 14x14x40   2   3
            [(5, 5), 240, 40, 'T', 'HS', 1],  # 14x14x40   -> 14x14x40   2   4  +
            [(5, 5), 240, 40, 'T', 'HS', 1],  # 14x14x40   -> 14x14x40   2   5  +
            [(5, 5), 120, 48, 'T', 'HS', 1],  # 14x14x40   -> 14x14x48   2   6
            [(5, 5), 144, 48, 'T', 'HS', 1],  # 14x14x48   -> 14x14x48   2   7  +
            [(5, 5), 288, 96, 'T', 'HS', 2],  # 14x14x48   -> 7x7x96     1   8
            [(5, 5), 576, 96, 'T', 'HS', 1],  # 7x7x96     -> 7x7x96     1   9  +
            [(5, 5), 576, 96, 'T', 'HS', 1],  # 7x7x96     -> 7x7x96     1   10 +
        ]
        x = conv_bn_relu6('conv1', inputs, 16, (3, 3), (2, 2),
                          is_training=is_training, nl='HS')
        for idx, (ksize, exp_size, out_size, se, nl, strides) in enumerate(BNETS):
            name = 'bneck{}'.format(idx + 1)
            x = bneck(name, x, ksize, exp_size, out_size, se, nl, strides, is_training)
        x = conv_bn_relu6('conv2', x, 576, (1, 1), (1, 1),
                          is_training=is_training, nl='HS')
        x = L.global_avg_pool('gap', x)
        x = L.conv('conv3', x, 1024, (1, 1), (1, 1))
        # h-swish before dropout, matching the order used in large()
        x = L.hswish('conv3/hswish', x)
        x = L.dropout('dropout', x, 0.2, is_training=is_training)
        x = L.conv('conv4', x, self.num_labels, (1, 1), (1, 1))
        x = tf.squeeze(x, [1, 2])
        return x
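
# Hypothetical usage sketch. The wrapper class name `MobileNetV3` and its
# constructor arguments are assumptions (the class holding these methods is
# not shown in this excerpt):
#
#   inputs = tf.placeholder(tf.float32, [None, 224, 224, 3])
#   model = MobileNetV3(name='mnv3', num_labels=1000)
#   logits = model.small(inputs, is_training=True)   # shape (None, 1000)
#   probs = tf.nn.softmax(logits)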