def build_fc_adjustable(args):
    if args.num_layers == 3:
        return SequentialNetwork([
            Flatten(),
            Dense(455, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
            Dense(67, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
            Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
        ])
    elif args.num_layers == 4:
        return SequentialNetwork([
            Flatten(),
            Dense(734, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
            Dense(175, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
            Dense(42, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_3'),
            Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_4')
        ])
    elif args.num_layers == 5:
        return SequentialNetwork([
            Flatten(),
            Dense(977, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
            Dense(311, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
            Dense(99, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_3'),
            Dense(31, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_4'),
            Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_5')
        ])
    else:
        # fail loudly instead of silently returning None for unsupported depths
        raise ValueError('build_fc_adjustable supports num_layers in {3, 4, 5}, got {}'.format(args.num_layers))

def build_fc_supermask(args):
    kwargs = {}
    if args.signed_constant:
        kwargs['signed_constant'] = True
        kwargs['const_multiplier'] = args.signed_constant_multiplier
    if args.dynamic_scaling:
        kwargs['dynamic_scaling'] = True
    return SequentialNetwork([
        Flatten(),
        MaskedDense(300, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1', **kwargs),
        MaskedDense(100, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2', **kwargs),
        MaskedDense(10, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3', **kwargs)
    ])

def build_frozen_conv6_lottery(args, init_values, mask_values):
    return SequentialNetwork([
        FreezeConv2D(64, 3, init_values[0], init_values[1], mask_values[0], mask_values[1], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        Activation('relu'),
        FreezeConv2D(64, 3, init_values[2], init_values[3], mask_values[2], mask_values[3], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        FreezeConv2D(128, 3, init_values[4], init_values[5], mask_values[4], mask_values[5], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_3'),
        Activation('relu'),
        FreezeConv2D(128, 3, init_values[6], init_values[7], mask_values[6], mask_values[7], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_4'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        FreezeConv2D(256, 3, init_values[8], init_values[9], mask_values[8], mask_values[9], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_5'),
        Activation('relu'),
        FreezeConv2D(256, 3, init_values[10], init_values[11], mask_values[10], mask_values[11], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_6'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        # Dropout(0.5),
        FreezeDense(256, init_values[12], init_values[13], mask_values[12], mask_values[13], kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        # Dropout(0.5),
        FreezeDense(256, init_values[14], init_values[15], mask_values[14], mask_values[15], kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        # Dropout(0.5),
        FreezeDense(10, init_values[16], init_values[17], mask_values[16], mask_values[17], kernel_initializer=glorot_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])

def build_conv6_lottery(args):
    return SequentialNetwork([
        Conv2D(64, 3, kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        Activation('relu'),
        Conv2D(64, 3, kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(128, 3, kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_3'),
        Activation('relu'),
        Conv2D(128, 3, kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_4'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(256, 3, kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_5'),
        Activation('relu'),
        Conv2D(256, 3, kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_6'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        # Dropout(0.5),
        Dense(256, kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        # Dropout(0.5),
        Dense(256, kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        # Dropout(0.5),
        Dense(10, kernel_initializer=glorot_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])

def build_fc_lottery(args):
    return SequentialNetwork([
        Flatten(),
        # BatchNormalization(momentum=0, name='batch_norm_1'),
        Dense(300, kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dense(100, kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        Dense(10, kernel_initializer=glorot_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])

def build_network_fc(args):
    return SequentialNetwork([
        Flatten(),
        Dense(100, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dense(50, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        Dense(5, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
        # can also try kernel_initializer=tfkeras.initializers.TruncatedNormal(mean=0.0, stddev=0.1)
    ])

def build_conv2_supermask(args):
    kwargs = {}
    if args.signed_constant:
        kwargs['signed_constant'] = True
        kwargs['const_multiplier'] = args.signed_constant_multiplier
    if args.dynamic_scaling:
        kwargs['dynamic_scaling'] = True
    return SequentialNetwork([
        MaskedConv2D(64, 3, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1', **kwargs),
        Activation('relu'),
        MaskedConv2D(64, 3, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_2', **kwargs),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        MaskedDense(256, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1', **kwargs),
        MaskedDense(256, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2', **kwargs),
        MaskedDense(10, kernel_initializer=glorot_normal, sigmoid_bias=args.sigmoid_bias, round_mask=args.round_mask, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3', **kwargs)
    ])

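# build_fc_supermask and build_conv2_supermask above assemble the same optional-kwargs dict
# from the command-line flags. A minimal sketch of how that duplicated block could be factored
# out, using only the attributes those builders already read (signed_constant,
# signed_constant_multiplier, dynamic_scaling). The helper name `supermask_layer_kwargs` is
# hypothetical and not currently used by the builders; it is shown for illustration only.
def supermask_layer_kwargs(args):
    '''Collect the optional MaskedDense/MaskedConv2D kwargs driven by the supermask flags.'''
    kwargs = {}
    if args.signed_constant:
        kwargs['signed_constant'] = True
        kwargs['const_multiplier'] = args.signed_constant_multiplier
    if args.dynamic_scaling:
        kwargs['dynamic_scaling'] = True
    return kwargs
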
def build_linknet_2(args):
    layers = conv_bn_relu(32, 3, stride=1, name="block1_conv1")
    for layer in conv_bn_relu(32, 3, stride=1, name="block1_conv2"):
        layers.append(layer)
    layers.append(MaxPooling2D((2, 2), strides=(2, 2), padding="same", name="block1_pool"))
    layers.append(Activation('relu'))
    layers.append(Flatten())
    layers.append(Dense(400, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(0), name='fc_1'))
    layers.append(Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(0), name='fc_2'))
    return SequentialNetwork(layers)

def build_frozen_fc_lottery(args, init_values, mask_values):
    return SequentialNetwork([
        Flatten(),
        # BatchNormalization(momentum=0, name='batch_norm_1'),
        FreezeDense(300, init_values[0], init_values[1], mask_values[0], mask_values[1], kernel_initializer=glorot_normal, activation=relu, name='fc_1'),
        FreezeDense(100, init_values[2], init_values[3], mask_values[2], mask_values[3], kernel_initializer=glorot_normal, activation=relu, name='fc_2'),
        FreezeDense(10, init_values[4], init_values[5], mask_values[4], mask_values[5], kernel_initializer=glorot_normal, activation=None, name='fc_3')
    ])

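# A minimal sketch of the `init_values` / `mask_values` layout that build_frozen_fc_lottery
# indexes: one (kernel, bias) pair per FreezeDense layer, in layer order, so each list has
# six entries for the 300-100-10 network. The 784-dim input (flattened 28x28 images) and the
# all-ones masks are assumptions for illustration only; in the actual experiments these arrays
# presumably come from a saved initialization and a pruning mask. `example_frozen_fc_inputs`
# is a hypothetical helper, not part of the existing code.
def example_frozen_fc_inputs():
    import numpy as np
    shapes = [(784, 300), (300, 100), (100, 10)]
    init_values, mask_values = [], []
    for n_in, n_out in shapes:
        init_values += [np.random.randn(n_in, n_out).astype(np.float32) * 0.05,  # kernel init
                        np.zeros(n_out, dtype=np.float32)]                       # bias init
        mask_values += [np.ones((n_in, n_out), dtype=np.float32),                # kernel mask (keep everything)
                        np.ones(n_out, dtype=np.float32)]                        # bias mask
    return init_values, mask_values
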
def build_network_fc_special(args):
    return SequentialNetwork([
        Flatten(),
        Dense(100, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        BatchNormalization(momentum=0, name='batch_norm_1'),
        Activation('relu'),
        Dense(50, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        BatchNormalization(momentum=0, name='batch_norm_2'),
        Activation('relu'),
        Dense(5, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])

def build_masked_conv2_lottery(args, mask_values):
    return SequentialNetwork([
        MaskedConv2D(64, 3, mask_values[0], mask_values[1], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        Activation('relu'),
        MaskedConv2D(64, 3, mask_values[2], mask_values[3], kernel_initializer=glorot_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        MaskedDense(256, mask_values[4], mask_values[5], kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        MaskedDense(256, mask_values[6], mask_values[7], kernel_initializer=glorot_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_2'),
        MaskedDense(10, mask_values[8], mask_values[9], kernel_initializer=glorot_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_3')
    ])

def build_vgg_mini(args):
    return SequentialNetwork([
        Conv2D(64, (3, 3), kernel_initializer=he_normal, padding='same', activation=relu, kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(128, (3, 3), kernel_initializer=he_normal, padding='same', activation=relu, kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(256, (3, 3), kernel_initializer=he_normal, padding='same', activation=relu, kernel_regularizer=l2reg(args.l2), name='conv2D_3'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        Dense(512, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dropout(0.5),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2')
    ])

def build_lenet_conv(args):
    # a slightly modified LeNet: two conv/pool blocks, then a 400-unit FC layer, with dropout
    return SequentialNetwork([
        Conv2D(20, 5, kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_1'),
        # BatchNormalization(momentum=0.0, name='batch_norm_1'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Conv2D(40, 5, kernel_initializer=he_normal, padding='same', kernel_regularizer=l2reg(args.l2), name='conv2D_2'),
        # BatchNormalization(momentum=0.0, name='batch_norm_2'),
        Activation('relu'),
        MaxPooling2D((2, 2), (2, 2)),
        Flatten(),
        Dropout(0.25),
        Dense(400, kernel_initializer=he_normal, activation=relu, kernel_regularizer=l2reg(args.l2), name='fc_1'),
        Dropout(0.5),
        Dense(10, kernel_initializer=he_normal, activation=None, kernel_regularizer=l2reg(args.l2), name='fc_2')
    ])

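# A hedged usage sketch: construct a couple of the networks above from a hand-rolled
# argparse-style namespace. The attribute names (l2, num_layers) are exactly the ones those
# builders read; how the networks are compiled and trained is handled elsewhere in the repo
# and is not assumed here. This also assumes SequentialNetwork can be instantiated without
# any extra session/graph setup.
if __name__ == '__main__':
    from argparse import Namespace
    demo_args = Namespace(l2=1e-4, num_layers=4)
    fc_lottery_net = build_fc_lottery(demo_args)      # 300-100-10 fully connected net
    adjustable_net = build_fc_adjustable(demo_args)   # 4-layer variant: 734-175-42-10
    print(type(fc_lottery_net), type(adjustable_net))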