Example #1
def block_basic(name, x, fmaps, strides, dropout_rate=0.0):
    with tf.compat.v1.variable_scope('Block_{}'.format(name)):
        if x.shape[-1] == fmaps:
            # Same width: pre-activate, keep an identity (or pooled) shortcut.
            r = layers.leaky_relu(batch_norm(x))
            s = x if strides == 1 else tf.nn.max_pool2d(
                x, ksize=1, strides=2, padding='SAME')
        else:
            # Width changes: pre-activate the input and project the shortcut
            # with a 1x1 convolution.
            x = layers.leaky_relu(batch_norm(x))
            with tf.compat.v1.variable_scope('Shortcut'):
                s = layers.conv2d(x,
                                  fmaps,
                                  kernel=1,
                                  strides=strides,
                                  regularizer_rate=regularizer_rate)
        with tf.compat.v1.variable_scope('Conv2D_0'):
            r = layers.leaky_relu(
                batch_norm(
                    layers.conv2d(r if x.shape[-1] == fmaps else x,
                                  fmaps=fmaps,
                                  kernel=3,
                                  strides=strides,
                                  regularizer_rate=regularizer_rate)))
        if dropout_rate > 0:
            # Apply dropout only while the `training` tensor is True.
            r = tf.cond(training,
                        lambda: tf.nn.dropout(r, rate=dropout_rate),
                        lambda: r,
                        name='use_dropout')
        with tf.compat.v1.variable_scope('Conv2D_1'):
            r = layers.conv2d(r,
                              fmaps=fmaps,
                              kernel=3,
                              regularizer_rate=regularizer_rate)
        return r + s

def dense_layer(x, fmaps, number):
    with tf.compat.v1.variable_scope('Dense_{}'.format(number)):
        x = layers.dense(x, fmaps=fmaps)
        x = layers.bias(x)
        if fmaps > 1:
            # No non-linearity on a single-logit output head.
            x = layers.leaky_relu(x)
    return x
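Note that `layers`, `batch_norm`, `training`, and `regularizer_rate` are free names in this snippet; they are bound inside the enclosing `wideresnet` function shown in Example #8 below. For orientation, here is a minimal self-contained sketch of the same pre-activation residual pattern using only stock `tf.compat.v1` ops rather than the project's `layers` module (names and defaults are assumptions, not the project API):

import tensorflow as tf

def preact_block(x, fmaps, strides, training):
    # Pre-activation: batch norm + leaky ReLU before every convolution.
    h = tf.nn.leaky_relu(
        tf.compat.v1.layers.batch_normalization(x, training=training))
    if x.shape[-1] == fmaps:
        # Identity shortcut (strided 1x1 max-pool when downsampling).
        s = x if strides == 1 else tf.nn.max_pool2d(
            x, ksize=1, strides=strides, padding='SAME')
    else:
        # Projection shortcut to the new channel count.
        s = tf.compat.v1.layers.conv2d(h, fmaps, 1, strides, padding='same')
    r = tf.compat.v1.layers.conv2d(h, fmaps, 3, strides, padding='same')
    r = tf.nn.leaky_relu(
        tf.compat.v1.layers.batch_normalization(r, training=training))
    r = tf.compat.v1.layers.conv2d(r, fmaps, 3, 1, padding='same')
    return r + s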
Example #3
def BAN(x, use_bias=True, use_act=True, use_norm=True):
    # Bias -> Activation -> Norm, each step optional.
    if use_bias: x = layers.bias(x, regularizer_rate=regularizer_rate)
    if use_act: x = layers.leaky_relu(x, alpha=0.2)
    # Commented-out alternative using the stock TF batch norm:
    # if use_norm:
    #     regularizer = tf.keras.regularizers.l2(regularizer_rate)
    #     x = tf.layers.batch_normalization(
    #         x, training=training, momentum=0.9,
    #         gamma_regularizer=regularizer, beta_regularizer=regularizer)
    if use_norm:
        x = layers.batch_norm(x,
                              training=training,
                              decay=0.99,
                              regularizer_rate=regularizer_rate)
    return x
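As in Example #1, `regularizer_rate` and `training` are assumed to be bound in an enclosing scope. A hypothetical call site, following the snippet's own API (the convolution parameters are illustrative, not from the source):

# Hypothetical usage (parameter values are illustrative):
x = layers.conv2d(x, fmaps=64, kernel=3, regularizer_rate=regularizer_rate)
x = BAN(x)                 # bias -> leaky ReLU -> batch norm
x = BAN(x, use_act=False)  # bias -> batch norm only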
Example #4
def block_res(name, x, fmaps, strides=1, use_conv=False):
    # Bottleneck residual block: 1x1 reduce, 3x3, then 1x1 expand (4x).
    f1, f2, f3 = fmaps, fmaps, 4 * fmaps
    with tf.compat.v1.variable_scope('Block_{}'.format(name)):
        if use_conv:  # conv block: project the shortcut to f3 channels
            s = conv_layer('shortcut',
                           x,
                           f3,
                           kernel=1,
                           strides=strides,
                           use_act=False)
        else:  # identity block: shortcut passes through, so no striding
            s = x
            strides = 1
        r = conv_layer(name + '_0', x, fmaps=f1, kernel=1, strides=strides)
        r = conv_layer(name + '_1', r, fmaps=f2, kernel=3, strides=1)
        r = conv_layer(name + '_2',
                       r,
                       fmaps=f3,
                       kernel=1,
                       strides=1,
                       use_act=False)
        return layers.leaky_relu(r + s)
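This is the classic bottleneck layout: the 1x1 convolution narrows to `fmaps` channels, the 3x3 convolution operates at that width, and the final 1x1 expands to `4 * fmaps`; `use_conv=True` adds a 1x1 projection so the shortcut matches the expanded width. A hypothetical stage built from it, ResNet-50 style (block names and widths are illustrative):

# One projection ("conv") block, then identity blocks at the same width.
x = block_res('2a', x, fmaps=64, strides=1, use_conv=True)
x = block_res('2b', x, fmaps=64)
x = block_res('2c', x, fmaps=64)  # output carries 4 * 64 = 256 channels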
Example #5
def BAN(x, use_bias=True, use_act=True, use_norm=True):
    # Bias -> batch Norm -> Activation, each step optional.
    if use_bias: x = layers.bias(x, regularizer_rate=regularizer_rate)
    if use_norm: x = batch_norm(x)
    if use_act: x = layers.leaky_relu(x, alpha=0.2)
    return x

def BA(x):
    # Bias -> Activation.
    x = layers.bias(x)
    x = layers.leaky_relu(x)
    return x

def BAN(x):
    # Variant of the BAN above: Bias -> Activation -> pixel Norm.
    x = layers.bias(x)
    x = layers.leaky_relu(x)
    x = layers.pixel_norm(x)
    return x
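`layers.pixel_norm` is not shown in these snippets. In ProGAN-style code the name usually denotes pixelwise feature normalization, which rescales each spatial position's feature vector to unit RMS across channels. A minimal sketch of what such a helper typically computes (an assumption, not this project's implementation):

import tensorflow as tf

def pixel_norm(x, eps=1e-8):
    # Divide each feature vector by its RMS over the channel axis
    # (pixelwise feature normalization, as in Progressive GANs).
    return x * tf.math.rsqrt(
        tf.reduce_mean(tf.square(x), axis=-1, keepdims=True) + eps)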
Example #8
def wideresnet(inputs,
               training,
               nbof_labels,
               regularizer_rate=0,
               fmaps=[160, 320, 640],
               nbof_unit=[4, 4, 4],
               strides=[1, 2, 2],
               dropouts=[0., 0., 0.]):
    def batch_norm(x):
        with tf.compat.v1.variable_scope('BN'):
            # x = layers.batch_norm(x, training=training, regularizer_rate=regularizer_rate)
            x = tf.compat.v1.layers.batch_normalization(
                x,
                training=training,
                momentum=0.99,
                gamma_regularizer=tf.keras.regularizers.l2(regularizer_rate),
                beta_regularizer=tf.keras.regularizers.l2(regularizer_rate))
            # x = tf.nn.l2_normalize(x, axis=-1)
            # if len(x.shape)>2: x = layers.pixel_norm(x)
            return x

    def block_basic(name, x, fmaps, strides, dropout_rate=0.0):
        with tf.compat.v1.variable_scope('Block_{}'.format(name)):
            if x.shape[-1] == fmaps:
                r = layers.leaky_relu(batch_norm(x))
                s = x if strides == 1 else tf.nn.max_pool2d(
                    x, ksize=1, strides=2, padding='SAME')
            else:
                x = layers.leaky_relu(batch_norm(x))
                with tf.compat.v1.variable_scope('Shortcut'):
                    s = layers.conv2d(x,
                                      fmaps,
                                      kernel=1,
                                      strides=strides,
                                      regularizer_rate=regularizer_rate)
            with tf.compat.v1.variable_scope('Conv2D_0'):
                r = layers.leaky_relu(
                    batch_norm(
                        layers.conv2d(r if x.shape[-1] == fmaps else x,
                                      fmaps=fmaps,
                                      kernel=3,
                                      strides=strides,
                                      regularizer_rate=regularizer_rate)))
            if dropout_rate > 0:
                r = tf.cond(training,
                            lambda: tf.nn.dropout(r, rate=dropout_rate),
                            lambda: r,
                            name='use_dropout')
            with tf.compat.v1.variable_scope('Conv2D_1'):
                r = layers.conv2d(r,
                                  fmaps=fmaps,
                                  kernel=3,
                                  regularizer_rate=regularizer_rate)
            return r + s

    # Inputs
    with tf.compat.v1.variable_scope('Conv2D_1'):
        x = layers.conv2d(inputs,
                          fmaps=fmaps[0],
                          kernel=3,
                          regularizer_rate=regularizer_rate)
    # Middle layers
    for i in range(len(fmaps)):
        x = block_basic('{}_{}'.format(i, 0),
                        x,
                        fmaps[i],
                        strides=strides[i],
                        dropout_rate=dropouts[i])
        for j in range(nbof_unit[i] - 1):
            x = block_basic('{}_{}'.format(i, j + 1),
                            x,
                            fmaps[i],
                            strides=1,
                            dropout_rate=dropouts[i])
    # Output
    with tf.compat.v1.variable_scope('Output'):
        x = layers.leaky_relu(batch_norm(x))
        x = layers.global_avg_pool(x)
        logit = layers.dense(x,
                             fmaps=nbof_labels,
                             regularizer_rate=regularizer_rate)
    return logit
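With the default widths [160, 320, 640] and four basic blocks per group, this matches a WRN-28-10-style wide residual network (widen factor 10), except that the stem convolution already uses fmaps[0] channels. A usage sketch in TF1-style graph mode (placeholder shapes and the weight-decay value are illustrative assumptions):

import tensorflow as tf

tf.compat.v1.disable_eager_execution()
images = tf.compat.v1.placeholder(tf.float32, [None, 32, 32, 3], name='images')
training = tf.compat.v1.placeholder(tf.bool, [], name='training')

logits = wideresnet(images, training, nbof_labels=10, regularizer_rate=5e-4)

# tf.compat.v1.layers.batch_normalization updates its moving statistics
# through UPDATE_OPS, so the train op should depend on these:
update_ops = tf.compat.v1.get_collection(tf.compat.v1.GraphKeys.UPDATE_OPS)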