def __init__(self, channels, in_channels=3, hidden_size=4096, dropout_rate=.5,
             num_classes=1000, data_format='channels_last', **kwargs):
    super(VGG, self).__init__()
    # Convolutional stages: one 3x3 ConvBlock per entry in `channels`,
    # each stage closed by a 2x2 max-pool.
    features = tf.keras.Sequential(name='feature_extractor')
    for i, channels_per_stage in enumerate(channels):
        for j, out_channels in enumerate(channels_per_stage):
            features.add(
                ConvBlock(in_channels=in_channels,
                          out_channels=out_channels,
                          kernel_size=3,
                          strides=1,
                          padding=1,
                          data_format=data_format,
                          name='features/stage{}/conv{}'.format(i, j)))
            in_channels = out_channels
        features.add(
            MaxPool2d(pool_size=2,
                      strides=2,
                      padding=0,
                      ceil_mode=False,
                      data_format=data_format,
                      name='features/stage{}/pool'.format(i)))
    features.add(Flatten(data_format=data_format))
    self.features = features
    # Fully connected classifier head (Classifier is defined elsewhere in this repo).
    self.classifier = Classifier(hidden_size=hidden_size,
                                 dropout_rate=dropout_rate,
                                 num_classes=num_classes)
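# Usage sketch (illustrative, not part of the original source): the nested `channels`
# list fixes the depth of each stage; the layout below corresponds to VGG-16
# (configuration D). Assumes the ConvBlock, MaxPool2d and Classifier helpers used
# above are importable from this repo.
vgg16_channels = [[64, 64], [128, 128], [256, 256, 256], [512, 512, 512], [512, 512, 512]]
vgg16 = VGG(channels=vgg16_channels, num_classes=1000)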
def __init__(self, channels, residuals, init_kernel_size, init_channels, in_channels=3,
             input_shape=(224, 224), num_classes=1000, dropout_rate=.5,
             data_format='channels_last', name='squeezenet'):
    super(SqueezeNet, self).__init__()
    self.features = tf.keras.Sequential(name=name + '/features')
    # init conv
    self.features.add(
        Conv2d(in_channels=in_channels,
               out_channels=init_channels,
               kernel_size=init_kernel_size,
               strides=2,
               use_bias=True,
               data_format=data_format,
               name=name + '/features/init_conv'))
    self.features.add(ReLU(name=name + '/features/init_activ'))
    # stages of fire blocks
    in_channels = init_channels
    for i, channels_per_stage in enumerate(channels):
        self.features.add(
            MaxPool2d(pool_size=3,
                      strides=2,
                      ceil_mode=True,
                      data_format=data_format,
                      name=name + '/features/stage{}/pool'.format(i)))
        for j, out_channels in enumerate(channels_per_stage):
            squeeze_channels = out_channels // 8
            expand_channels = out_channels // 2
            self.features.add(
                Fire(in_channels=in_channels,
                     squeeze_channels=squeeze_channels,
                     expand1x1_channels=expand_channels,
                     expand3x3_channels=expand_channels,
                     residual=(residuals is not None) and (residuals[i][j] == 1),
                     data_format=data_format,
                     name=name + '/features/stage{}/fire{}'.format(i, j)))
            in_channels = out_channels
    # classifier head
    self.classifier = tf.keras.Sequential([
        Dropout(rate=dropout_rate, name=name + '/classifier/dropout'),
        Conv2d(in_channels=in_channels,
               out_channels=num_classes,
               kernel_size=1,
               data_format=data_format,
               name=name + '/classifier/final_conv'),
        ReLU(name=name + '/classifier/activ'),
        GlobalAveragePooling2D(name=name + '/classifier/pool'),
        Flatten(name=name + '/classifier/flatten')
    ])
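# Usage sketch (illustrative, not part of the original source): a SqueezeNet v1.1-style
# layout, with fire-module output widths grouped per pooling stage. The configs and
# residual masks used by this repo's actual factory functions are not shown here and
# may differ.
squeezenet_v1_1 = SqueezeNet(channels=[[128, 128], [256, 256], [384, 384, 512, 512]],
                             residuals=None,
                             init_kernel_size=3,
                             init_channels=64)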
def __init__(self, bn_size=4, growth_rate=32, blocks=(6, 12, 24, 16), in_channels=3,
             init_channels=64, dropout_rate=.1, num_classes=1000,
             data_format='channels_last', name='densenet'):
    super(DenseNet, self).__init__(name=name)
    # Stem: 7x7 strided conv, batch norm, ReLU and 3x3 max-pool.
    self.init = tf.keras.Sequential(
        layers=[
            Conv2d(in_channels=in_channels, out_channels=init_channels, kernel_size=7,
                   strides=2, padding=3, use_bias=False, data_format=data_format,
                   name=name + '/init/conv0'),
            BatchNormalization(axis=-1 if data_format == 'channels_last' else 1,
                               name=name + '/init/bn'),
            ReLU(name=name + '/init/activ'),
            MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format,
                      name=name + '/init/pool')
        ],
        name=name + '/init'
    )
    # Dense blocks, each followed by a transition block that halves the channel count
    # (except after the last dense block).
    self.dense_blocks = tf.keras.Sequential(name=name + '/dense_blocks')
    num_channels = init_channels
    for i, num_layers in enumerate(blocks, 1):
        dense_block = DenseBlock(
            in_channels=num_channels,
            num_layers=num_layers,
            bn_size=bn_size,
            growth_rate=growth_rate,
            dropout_rate=dropout_rate,
            data_format=data_format,
            name=name + '/dense_blocks/dense_block{}'.format(i)
        )
        self.dense_blocks.add(dense_block)
        num_channels += num_layers * growth_rate
        if i != len(blocks):
            transit_block = TransitBlock(
                in_channels=num_channels,
                out_channels=num_channels // 2,
                data_format=data_format,
                name=name + '/dense_blocks/transit_block{}'.format(i)
            )
            num_channels = num_channels // 2
            self.dense_blocks.add(transit_block)
    # Final BN-ReLU-global-pool before the linear classifier.
    self.features = tf.keras.Sequential(
        layers=[
            BatchNormalization(axis=-1 if data_format == 'channels_last' else 1,
                               name=name + '/features/bn'),
            ReLU(name=name + '/features/activ'),
            GlobalAveragePooling2D(data_format=data_format,
                                   name=name + '/features/final_pool')
        ],
        name=name + '/features'
    )
    self.classifier = Dense(units=num_classes, name=name + '/classifier')
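# Usage sketch (illustrative, not part of the original source): the default arguments
# (blocks=(6, 12, 24, 16), growth_rate=32, init_channels=64) match DenseNet-121, while
# blocks=(6, 12, 32, 32) would give a DenseNet-169-style network.
densenet121 = DenseNet(num_classes=1000)
densenet169 = DenseNet(blocks=(6, 12, 32, 32), num_classes=1000)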
def __init__(self, in_channels, ch1x1, ch3x3red, ch3x3, ch5x5red, ch5x5, pool_proj,
             data_format='channels_last', name='inception'):
    super(InceptionBlock, self).__init__()
    # Branch 1: 1x1 convolution.
    self.branch1 = ConvBlock(in_channels, ch1x1, 1, data_format, name=name + '/1x1conv')
    # Branch 2: 1x1 reduction followed by 3x3 convolution.
    self.branch2 = tf.keras.Sequential([
        ConvBlock(in_channels, ch3x3red, 1, data_format, name=name + '/3x3reduction'),
        ConvBlock(ch3x3red, ch3x3, 3, data_format, name=name + '/3x3conv', padding=1)
    ])
    # Branch 3: 1x1 reduction followed by 5x5 convolution.
    self.branch3 = tf.keras.Sequential([
        ConvBlock(in_channels, ch5x5red, 1, data_format, name=name + '/5x5reduction'),
        ConvBlock(ch5x5red, ch5x5, 5, data_format, name=name + '/5x5conv', padding=2)
    ])
    # Branch 4: 3x3 max-pool followed by a 1x1 projection.
    self.branch4 = tf.keras.Sequential([
        MaxPool2d(pool_size=3, strides=1, padding=1, ceil_mode=True,
                  data_format=data_format, name=name + '/3x3pool'),
        ConvBlock(in_channels, pool_proj, 1, data_format, name=name + '/pool_projection')
    ])
    # Branch outputs are concatenated along the channel axis.
    self.concat = tf.keras.layers.Concatenate(
        axis=-1 if data_format == 'channels_last' else 1, name=name + '/concat')
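# Usage sketch (illustrative, not part of the original source): the 'inception3a' block
# used by GoogleNet below takes 192 input channels and, after concatenating the four
# branches, emits 64 + 128 + 32 + 32 = 256 channels.
inception3a = InceptionBlock(192, 64, 96, 128, 16, 32, 32, name='inception3a')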
def __init__(self, in_channels=3, data_format='channels_last', aux_logits=True,
             num_classes=1000):
    super(GoogleNet, self).__init__()
    self.aux_logits = aux_logits
    # Stem: conv-pool-conv-conv-pool.
    self.conv1 = ConvBlock(in_channels, 64, 7, data_format, name='conv1', strides=2, padding=3)
    self.maxpool1 = MaxPool2d(3, strides=2, ceil_mode=True, data_format=data_format, name='pool1')
    self.conv2 = ConvBlock(64, 64, 1, data_format, name='conv2')
    self.conv3 = ConvBlock(64, 192, 3, data_format, name='conv3')
    self.maxpool2 = MaxPool2d(3, strides=2, ceil_mode=True, data_format=data_format, name='pool2')
    # Inception stages 3-5 with intermediate max-pools.
    self.inception3a = InceptionBlock(192, 64, 96, 128, 16, 32, 32, data_format, name='inception3a')
    self.inception3b = InceptionBlock(256, 128, 128, 192, 32, 96, 64, data_format, name='inception3b')
    self.maxpool3 = MaxPool2d(3, strides=2, ceil_mode=True, data_format=data_format, name='pool3')
    self.inception4a = InceptionBlock(480, 192, 96, 208, 16, 48, 64, data_format, name='inception4a')
    self.inception4b = InceptionBlock(512, 160, 112, 224, 24, 64, 64, data_format, name='inception4b')
    self.inception4c = InceptionBlock(512, 128, 128, 256, 24, 64, 64, data_format, name='inception4c')
    self.inception4d = InceptionBlock(512, 112, 144, 288, 32, 64, 64, data_format, name='inception4d')
    self.inception4e = InceptionBlock(528, 256, 160, 320, 32, 128, 128, data_format, name='inception4e')
    self.maxpool4 = MaxPool2d(2, strides=2, ceil_mode=True, data_format=data_format, name='pool4')
    self.inception5a = InceptionBlock(832, 256, 160, 320, 32, 128, 128, data_format, name='inception5a')
    self.inception5b = InceptionBlock(832, 384, 192, 384, 48, 128, 128, data_format, name='inception5b')
    self.avgpool = tf.keras.layers.GlobalAveragePooling2D(data_format, name='avgpool')
    # Auxiliary classifiers fed from the outputs of inception4a (512 ch) and inception4d (528 ch).
    self.aux_classifier1 = InceptionAux(512, num_classes, data_format, name='aux1')
    self.aux_classifier2 = InceptionAux(528, num_classes, data_format, name='aux2')
    # Main classifier head.
    self.classifier = tf.keras.Sequential([
        tf.keras.layers.Dropout(rate=.2, name='classifier/dropout'),
        tf.keras.layers.Dense(num_classes, name='classifier/fc')
    ])
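# Usage sketch (illustrative, not part of the original source): both auxiliary heads are
# built unconditionally in __init__; the aux_logits flag presumably controls whether they
# are used at call time, which depends on code not shown here.
googlenet = GoogleNet(in_channels=3, aux_logits=True, num_classes=1000)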
def __init__(self, in_channels=3, num_classes=1000, n_groups=2, se_reduction=None,
             residual=False, init_channels=INIT_CHANNELS, out_channels=OUT_CHANNELS,
             c=C, n=N, data_format='channels_last', name='ShuffleNetV2'):
    super(ShuffleNetV2, self).__init__(name=name)
    self.init_conv = tf.keras.Sequential([
        Conv2d(in_channels, init_channels, kernel_size=3, strides=2, padding=1,
               use_bias=False, data_format=data_format, name=name + '/init/conv0'),
        BatchNormalization(axis=-1 if data_format == 'channels_last' else 1,
                           name=name + '/init/bn0'),
        ReLU(name=name + '/init/activ0'),
        MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format,
                  name=name + '/init/pool0')
    ])
    self.features = tf.keras.Sequential()
    input_channels = init_channels
    for i, (nn, nc) in enumerate(zip(n, c)):
        stage = tf.keras.Sequential(name=name + '/stage{}'.format(i))
        for j in range(nn):
            if j == 0:
                stage.add(
                    ShuffleNetV2DownsampleUnit(input_channels, nc, n_groups,
                                               se_reduction, data_format))
            else:
                stage.add(
                    ShuffleNetV2BasicUnit(nc, n_groups, se_reduction, residual,
                                          data_format))
            input_channels = nc
        self.features.add(stage)
    final_conv = tf.keras.Sequential([
        Conv2d(input_channels, out_channels, kernel_size=1, strides=1, padding=0,
               use_bias=False, data_format=data_format, name=name + '/final_conv'),
        BatchNormalization(axis=-1 if data_format == 'channels_last' else 1,
                           name=name + '/final_bn'),
        ReLU(name=name + '/final_activ')
    ])
    self.features.add(final_conv)
    self.features.add(
        AveragePooling2D(pool_size=7, data_format=data_format, name='global_avg_pool'))
    self.classifier = tf.keras.Sequential([
        Conv2d(out_channels, num_classes, kernel_size=1, strides=1, padding=0,
               use_bias=True, data_format=data_format, name='classifier'),
        Flatten()
    ])
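# Usage sketch (illustrative, not part of the original source): INIT_CHANNELS,
# OUT_CHANNELS, C and N are module-level constants not shown here. For the standard
# ShuffleNetV2 1.0x variant the corresponding values would be roughly init_channels=24,
# out_channels=1024, c=(116, 232, 464) and n=(4, 8, 4); this repo's actual constants
# may differ.
shufflenet_v2_1x = ShuffleNetV2(c=(116, 232, 464), n=(4, 8, 4),
                                init_channels=24, out_channels=1024)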
def __init__(self, res_block, layers, in_channels, num_classes=1000, groups=1,
             width_per_group=64, dilation_for_stride=None, se_reduction=0,
             data_format='channels_last', name='resnet'):
    super(ResNet, self).__init__()
    self.channels = 64
    self.dilation = 1
    if dilation_for_stride is None:
        dilation_for_stride = [False, False, False]
    self.se_reduction = se_reduction
    self.groups = groups
    self.base_width = width_per_group
    # Stem: 7x7 strided conv, batch norm, ReLU and 3x3 max-pool.
    self.stage0 = tf.keras.Sequential([
        Conv2d(in_channels, self.channels, kernel_size=7, strides=2, padding=3,
               use_bias=False, name=name + '/stage0/conv1'),
        BatchNormalization(axis=-1 if data_format == 'channels_last' else 1,
                           name=name + '/stage0/bn1'),
        ReLU(name=name + '/stage0/relu'),
        MaxPool2d(pool_size=3, strides=2, padding=1, data_format=data_format,
                  name=name + '/stage0/maxpool')
    ])
    # Four residual stages; stages 2-4 downsample with stride 2 unless dilation
    # is requested via dilation_for_stride.
    self.stage1 = self._make_layer(block=res_block, channels=64, blocks=layers[0],
                                   se_reduction=se_reduction)
    self.stage2 = self._make_layer(block=res_block, channels=128, blocks=layers[1],
                                   strides=2, dilation=dilation_for_stride[0],
                                   se_reduction=se_reduction)
    self.stage3 = self._make_layer(block=res_block, channels=256, blocks=layers[2],
                                   strides=2, dilation=dilation_for_stride[1],
                                   se_reduction=se_reduction)
    self.stage4 = self._make_layer(block=res_block, channels=512, blocks=layers[3],
                                   strides=2, dilation=dilation_for_stride[2],
                                   se_reduction=se_reduction)
    self.globalavgpool = GlobalAveragePooling2D(data_format=data_format, name='features')
    self.fc = Dense(units=num_classes, name='last_linear')
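# Usage sketch (illustrative, not part of the original source): `res_block` is the
# residual block class defined elsewhere in this repo; the name `Bottleneck` below is
# hypothetical. With a bottleneck block, layers=[3, 4, 6, 3] yields a ResNet-50-style
# network and layers=[3, 4, 23, 3] a ResNet-101-style one.
resnet50 = ResNet(res_block=Bottleneck, layers=[3, 4, 6, 3], in_channels=3)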