Example #1
import tensorflow as tf
from tensorflow.keras.models import load_model

# IMG_SHAPE and work_dir are assumed to be defined earlier in the script
# (e.g. IMG_SHAPE = (224, 224, 3)).

# Prepare model
base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,
                                            include_top=False,
                                            weights='imagenet',
                                            pooling='avg')
# base_model.summary()


def dprelu_layer_factory():
    # shared_axes=[1, 2] shares the learned slopes across the spatial
    # dimensions, leaving one slope pair per channel.
    return DPReLU(shared_axes=[1, 2], name='dprelu')


# Replace every ReLU activation layer with a DPReLU layer
base_model = insert_layer_nonseq(base_model,
                                 '.*relu.*',
                                 dprelu_layer_factory,
                                 position='replace')
# Save and reload to rebuild a clean graph after the layer surgery
base_model.save(work_dir + '/temp2.h5')
base_model = load_model(work_dir + '/temp2.h5',
                        custom_objects={'DPReLU': DPReLU})

base_model.summary()  # summary() already prints; print() would only show None

model = tf.keras.Sequential(
    [base_model, tf.keras.layers.Dense(10, activation='softmax')])

model.summary()

base_learning_rate = 0.0001
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=base_learning_rate),
    # the snippet is truncated here; loss and metrics are assumed
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'])
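
These examples assume a DPReLU layer class is already in scope. A minimal sketch of such a layer is below: a dual-parametric ReLU with a learnable slope on each side of zero. The exact formulation, the initializer values, and the parameter names alpha/beta are assumptions; only the shared_axes argument and the need for get_config (so that load_model(..., custom_objects={'DPReLU': DPReLU}) can round-trip) follow from the snippets.

import tensorflow as tf

class DPReLU(tf.keras.layers.Layer):
    """Dual-parametric ReLU: f(x) = beta * x for x >= 0, alpha * x for x < 0."""

    def __init__(self, shared_axes=None, **kwargs):
        super().__init__(**kwargs)
        self.shared_axes = list(shared_axes) if shared_axes else None

    def build(self, input_shape):
        # One (alpha, beta) pair per channel; axes listed in shared_axes
        # (e.g. [1, 2], the H and W of an NHWC feature map) share a value.
        param_shape = list(input_shape)[1:]
        if self.shared_axes:
            for axis in self.shared_axes:
                param_shape[axis - 1] = 1
        self.alpha = self.add_weight(
            name='alpha', shape=param_shape,
            initializer=tf.keras.initializers.Constant(0.25))  # assumed init
        self.beta = self.add_weight(
            name='beta', shape=param_shape,
            initializer=tf.keras.initializers.Constant(1.0))   # assumed init
        super().build(input_shape)

    def call(self, inputs):
        # min(x, 0) written as (x - |x|) / 2 to stay elementwise.
        return (self.beta * tf.nn.relu(inputs)
                + self.alpha * (inputs - tf.abs(inputs)) * 0.5)

    def get_config(self):
        # Required so save()/load_model(custom_objects={'DPReLU': DPReLU})
        # can round-trip the layer.
        config = super().get_config()
        config.update({'shared_axes': self.shared_axes})
        return config
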
Example #2
# Prepare model
base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,
                                            include_top=False,
                                            weights='imagenet',
                                            pooling='avg')
# base_model.summary()


def normal_layer_factory():
    # A bare tf.keras.layers.Layer passes its input through unchanged,
    # so it serves as an identity replacement.
    return tf.keras.layers.Layer(name='nl')


# Replace each batch normalization layer with an identity layer
base_model = insert_layer_nonseq(base_model,
                                 '.*bn',
                                 normal_layer_factory,
                                 position='replace')
# Save and reload to rebuild a clean graph after the layer surgery
base_model.save(work_dir + '/temp1.h5')
base_model = load_model(work_dir + '/temp1.h5')

base_model.summary()  # summary() already prints; print() would only show None

model = tf.keras.Sequential(
    [base_model, tf.keras.layers.Dense(10, activation='softmax')])

model.summary()

base_learning_rate = 0.0001
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=base_learning_rate),
    # the snippet is truncated here; loss and metrics are assumed
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'])
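
The identity trick above works because the base Layer class implements call() as a pass-through; that is also why this model reloads without any custom_objects. A quick standalone check:

import tensorflow as tf

# The base Layer's call() returns its input unchanged, so swapping it in
# for each BatchNormalization layer effectively removes normalization.
x = tf.random.normal([2, 4])
identity = tf.keras.layers.Layer(name='nl')
print(bool(tf.reduce_all(identity(x) == x)))  # True
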
Example #3

def normal_layer_factory():
    # A bare tf.keras.layers.Layer passes its input through unchanged,
    # so it serves as an identity replacement.
    return tf.keras.layers.Layer(name='nl')


# Prepare the model
base_model = tf.keras.applications.ResNet50(input_shape=IMG_SHAPE,
                                            include_top=False,
                                            weights=None,
                                            pooling='avg')

# Replace the ReLU activation layers when DPReLU is requested
# (dprelu_layer_factory is the factory defined in Example #1)
if args.activation == 'dprelu':
    base_model = insert_layer_nonseq(base_model,
                                     '.*relu.*',
                                     dprelu_layer_factory,
                                     position='replace')
    # Save and reload to rebuild a clean graph after the layer surgery
    base_model.save(work_dir + '/temp.h5')
    base_model = load_model(work_dir + '/temp.h5',
                            custom_objects={'DPReLU': DPReLU})

    # ResNet50's residual-block outputs ('..._out') are also ReLU
    # activations, so replace those as well
    base_model = insert_layer_nonseq(base_model,
                                     '.*out.*',
                                     dprelu_layer_factory,
                                     position='replace')
    # Save and reload to rebuild a clean graph after the layer surgery
    base_model.save(work_dir + '/temp.h5')
    base_model = load_model(work_dir + '/temp.h5',
                            custom_objects={'DPReLU': DPReLU})
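
All three examples call insert_layer_nonseq, which is not shown here. Below is a minimal sketch of one possible implementation, modeled on the widely shared Stack Overflow recipe for rewiring a functional Keras graph; the signature, the reliance on the private _outbound_nodes attribute, and the renaming scheme are assumptions rather than the original helper.

import re
import tensorflow as tf

def insert_layer_nonseq(model, layer_regex, insert_layer_factory,
                        position='after'):
    # Map each layer name to the names of the layers that feed it.
    input_layers_of = {}
    for layer in model.layers:
        for node in layer._outbound_nodes:  # private API, used by the recipe
            consumer = node.outbound_layer.name
            input_layers_of.setdefault(consumer, []).append(layer.name)

    # Rebuilt output tensor of each layer, seeded with the model input.
    new_output_of = {model.layers[0].name: model.input}

    model_outputs = []
    for layer in model.layers[1:]:
        layer_input = [new_output_of[name]
                       for name in input_layers_of[layer.name]]
        if len(layer_input) == 1:
            layer_input = layer_input[0]

        if re.match(layer_regex, layer.name):
            if position == 'replace':
                x = layer_input          # drop the matched layer entirely
            elif position == 'after':
                x = layer(layer_input)
            elif position == 'before':
                x = layer_input
            else:
                raise ValueError("position must be 'before', 'after' or 'replace'")
            new_layer = insert_layer_factory()
            # Prefix with the old layer's name so repeated factories
            # (e.g. every DPReLU named 'dprelu') stay unique.
            new_layer._name = '{}_{}'.format(layer.name, new_layer.name)
            x = new_layer(x)
            if position == 'before':
                x = layer(x)
        else:
            x = layer(layer_input)

        new_output_of[layer.name] = x
        if layer.name in model.output_names:
            model_outputs.append(x)

    return tf.keras.Model(inputs=model.inputs, outputs=model_outputs)

Graph surgery like this can leave stale node metadata behind, which is why each example immediately saves the rebuilt model and reloads it.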