def build_conv6_lottery(args):
    return SequentialNetwork([
        Conv2D(64, 3, kernel_initializer=glorot_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        Activation('relu'),
        Conv2D(64, 3, kernel_initializer=glorot_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(128, 3, kernel_initializer=glorot_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_3'),
        Activation('relu'),
        Conv2D(128, 3, kernel_initializer=glorot_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_4'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(256, 3, kernel_initializer=glorot_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_5'),
        Activation('relu'),
        Conv2D(256, 3, kernel_initializer=glorot_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_6'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        # Dropout(0.5),
        Dense(256, kernel_initializer=glorot_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_1'),
        # Dropout(0.5),
        Dense(256, kernel_initializer=glorot_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_2'),
        # Dropout(0.5),
        Dense(10, kernel_initializer=glorot_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])
def build_fc_lottery(args):
    return SequentialNetwork([
        Flatten(),
        # BatchNormalization(momentum=0, name='batch_norm_1'),
        Dense(300, kernel_initializer=glorot_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dense(100, kernel_initializer=glorot_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_2'),
        Dense(10, kernel_initializer=glorot_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])
def build_network_fc(args):
    return SequentialNetwork([
        Flatten(),
        Dense(100, kernel_initializer=he_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dense(50, kernel_initializer=he_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_2'),
        Dense(5, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_3')
        # can also try kernel_initializer=tfkeras.initializers.TruncatedNormal(mean=0.0, stddev=0.1)
    ])
def build_linknet_2(args):
    layers = conv_bn_relu(32, 3, stride=1, name="block1_conv1")
    layers.extend(conv_bn_relu(32, 3, stride=1, name="block1_conv2"))
    layers.append(MaxPooling2D((2, 2), strides=(2, 2), padding="same", name="block1_pool"))
    layers.append(Activation('relu'))
    layers.append(Flatten())
    layers.append(Dense(400, kernel_initializer=he_normal, activation=relu,
                        kernel_regularizer=l2reg(0), name='fc_1'))
    layers.append(Dense(10, kernel_initializer=he_normal, activation=None,
                        kernel_regularizer=l2reg(0), name='fc_2'))
    return SequentialNetwork(layers)
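# build_linknet_2 relies on a conv_bn_relu helper that is not defined in this section.
# The sketch below illustrates the contract it is assumed to satisfy: return a plain
# list of layers (Conv2D -> BatchNormalization -> ReLU) so the results can be
# concatenated into one layer list. It is a hypothetical stand-in; the repository's
# own conv_bn_relu may differ in initializer, regularizer, or naming scheme.
def _conv_bn_relu_sketch(filters, kernel_size, stride=1, name=None):
    # Illustrative only, not the repository's conv_bn_relu.
    return [
        Conv2D(filters, kernel_size, strides=stride, padding='same',
               kernel_initializer=he_normal, name=name),
        BatchNormalization(name=None if name is None else name + '_bn'),
        Activation('relu')
    ]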
def build_network_fc_special(args):
    return SequentialNetwork([
        Flatten(),
        Dense(100, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_1'),
        BatchNormalization(momentum=0, name='batch_norm_1'),
        Activation('relu'),
        Dense(50, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_2'),
        BatchNormalization(momentum=0, name='batch_norm_2'),
        Activation('relu'),
        Dense(5, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])
def build_vgg_mini(args):
    return SequentialNetwork([
        Conv2D(64, (3, 3), kernel_initializer=he_normal, padding='same', activation=relu,
               kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(128, (3, 3), kernel_initializer=he_normal, padding='same', activation=relu,
               kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(256, (3, 3), kernel_initializer=he_normal, padding='same', activation=relu,
               kernel_regularizer=l2reg(args.l2), name='conv2D_3'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        Dense(512, kernel_initializer=he_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dropout(0.5),
        Dense(10, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_2')
    ])
def build_lenet_conv(args):
    # a slightly modified LeNet
    return SequentialNetwork([
        Conv2D(20, 5, kernel_initializer=he_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        # BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(40, 5, kernel_initializer=he_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        # BatchNormalization(momentum=0.0, name='batch_norm_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        Dropout(0.25),
        Dense(400, kernel_initializer=he_normal, activation=relu,
              kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dropout(0.5),
        Dense(10, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2), name='fc_2')
    ])
def build_basic_model(args):
    return SequentialNetwork([
        Conv2D(16, 2, padding='same', name='conv2D_1'),
        # BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(32, 2, padding='same', name='conv2D_2'),
        # BatchNormalization(momentum=0.0, name='batch_norm_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(64, 2, padding='same', name='conv2D_3'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        GlobalAveragePooling2D(),
        Dense(1000, activation=relu),
        Dropout(0.2),
        Dense(1000, activation=relu, name='fc_1'),
        Dropout(0.2),
        Dense(5, activation=None, name='fc_2')
    ])
def build_resnet(args):
    return SequentialNetwork([
        # pre-blocks
        Conv2D(16, 3, kernel_initializer=he_normal, padding='same',
               kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        # set 1
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1A_', identity=True,
                      resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1B_', identity=True,
                      resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 16, first_stride=(1, 1), name_prefix='1C_', identity=True,
                      resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # set 2
        ResidualBlock(3, 32, first_stride=(2, 2), name_prefix='2A_', identity=False,
                      resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2B_', identity=True,
                      resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 32, first_stride=(1, 1), name_prefix='2C_', identity=True,
                      resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        # set 3
        ResidualBlock(3, 64, first_stride=(2, 2), name_prefix='3A_', identity=False,
                      resize=args.resize_less, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3B_', identity=True,
                      resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        ResidualBlock(3, 64, first_stride=(1, 1), name_prefix='3C_', identity=True,
                      resize=args.resize_more, l2=args.l2, l2_shortcut=args.l2),
        # post-blocks
        GlobalAveragePooling2D(),
        Dense(10, kernel_initializer=he_normal, activation=None,
              kernel_regularizer=l2reg(args.l2_special), name='fc_last')
    ])
def build_fc_adjustable(args):
    if args.num_layers == 3:
        return SequentialNetwork([
            Flatten(),
            Dense(455, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_1'),
            Dense(67, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_2'),
            Dense(10, kernel_initializer=he_normal, activation=None,
                  kernel_regularizer=l2reg(args.l2), name='fc_3')
        ])
    elif args.num_layers == 4:
        return SequentialNetwork([
            Flatten(),
            Dense(734, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_1'),
            Dense(175, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_2'),
            Dense(42, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_3'),
            Dense(10, kernel_initializer=he_normal, activation=None,
                  kernel_regularizer=l2reg(args.l2), name='fc_4')
        ])
    elif args.num_layers == 5:
        return SequentialNetwork([
            Flatten(),
            Dense(977, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_1'),
            Dense(311, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_2'),
            Dense(99, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_3'),
            Dense(31, kernel_initializer=he_normal, activation=relu,
                  kernel_regularizer=l2reg(args.l2), name='fc_4'),
            Dense(10, kernel_initializer=he_normal, activation=None,
                  kernel_regularizer=l2reg(args.l2), name='fc_5')
        ])
    else:
        # Fail loudly instead of silently returning None for unsupported depths.
        raise ValueError('build_fc_adjustable supports num_layers in {3, 4, 5}, '
                         'got {}'.format(args.num_layers))
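# A minimal usage sketch. Each builder above reads its hyperparameters from an `args`
# object: every network uses args.l2, build_fc_adjustable additionally reads
# args.num_layers, and build_resnet reads args.resize_more, args.resize_less, and
# args.l2_special. The Namespace below only mimics that interface with illustrative
# values; the repository's actual argument parser may define more or different flags.
if __name__ == '__main__':
    from argparse import Namespace

    # Hypothetical settings chosen only for illustration.
    example_args = Namespace(l2=1e-4, num_layers=3)
    model = build_fc_adjustable(example_args)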