def LeNetModel(input_tensor):
    """Build a LeNet-style CNN head on top of *input_tensor*.

    Three conv/pool stages followed by three fully-connected layers,
    ending in a 10-unit layer (presumably 10 classes).

    Args:
        input_tensor: 4-D image tensor, channels-last
            (batch, height, width, channels) — assumed; TODO confirm
            against the caller.

    Returns:
        The 10-unit output tensor of the final Dense layer.
    """
    x = Conv2D(filters=6, kernel_size=(5, 5), strides=(1, 1),
               activation='relu', padding='SAME')(input_tensor)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='SAME')(x)
    x = Conv2D(filters=6, kernel_size=(5, 5), strides=(1, 1),
               activation='relu', padding='SAME')(x)
    x = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='SAME')(x)
    x = Conv2D(filters=16, kernel_size=(5, 5), strides=(1, 1),
               activation='relu', padding='SAME')(x)
    # Flatten infers the static flattened dimension itself; the previous
    # get_shape()/set_shape() round-trip was redundant and raised TypeError
    # (None * None) whenever a spatial dimension was dynamic.
    flatten = Flatten()(x)
    x = Dense(units=120, activation='relu')(flatten)
    x = Dense(units=84, activation='relu')(x)
    # NOTE(review): a 10-unit head with 'relu' is unusual for classification;
    # 'softmax' (or linear + from_logits loss) is the common choice.
    # Left unchanged to preserve behavior — confirm with the training code.
    return Dense(units=10, activation='relu')(x)
def VGG16Model(input_tensor):
    """Build a VGG16-shaped convolutional network on *input_tensor*.

    Five conv blocks (each a stack of 3x3 convs followed by 2x2 max
    pooling), then three fully-connected layers ending in a 10-unit
    output (presumably 10 classes).

    NOTE(review): the filter counts per block are (64, 64, 128, 256, 512),
    not the canonical VGG16 (64, 128, 256, 512, 512). This may be a
    deliberate slim variant or a copy error — confirm before loading
    pretrained VGG16 weights. Left unchanged to preserve behavior.

    Args:
        input_tensor: 4-D image tensor, channels-last
            (batch, height, width, channels) — assumed; TODO confirm.

    Returns:
        The 10-unit output tensor of the final Dense layer.
    """
    # Block 1
    x = Conv2D(64, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block1_conv1')(input_tensor)
    x = Conv2D(64, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block1_conv2')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same',
                     name='block1_pooling')(x)
    # Block 2
    x = Conv2D(64, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block2_conv1')(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block2_conv2')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same',
                     name='block2_pooling')(x)
    # Block 3
    x = Conv2D(128, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block3_conv1')(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block3_conv2')(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block3_conv3')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same',
                     name='block3_pooling')(x)
    # Block 4
    x = Conv2D(256, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block4_conv1')(x)
    x = Conv2D(256, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block4_conv2')(x)
    x = Conv2D(256, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block4_conv3')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same',
                     name='block4_pooling')(x)
    # Block 5
    x = Conv2D(512, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block5_conv1')(x)
    x = Conv2D(512, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block5_conv2')(x)
    x = Conv2D(512, (3, 3), strides=(1, 1), activation='relu',
               padding='same', name='block5_conv3')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), padding='same',
                     name='block5_pooling')(x)
    # Flatten infers the static flattened dimension itself; the previous
    # get_shape()/set_shape() round-trip was redundant and raised TypeError
    # (None * None) whenever a spatial dimension was dynamic.
    flatten = Flatten()(x)
    x = Dense(units=1024, activation='relu')(flatten)
    x = Dense(units=512, activation='relu')(x)
    # NOTE(review): 'relu' on the final 10-unit layer is unusual for a
    # classifier head ('softmax' is typical). Preserved as-is — confirm
    # against the loss used at training time.
    return Dense(units=10, activation='relu')(x)