Example no. 1
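
import tensorflow as tf


# ConvNorm and conv are used below but not defined in this example. The
# definitions here are assumptions inferred from the call sites (a sketch,
# not the original implementation): ConvNorm as convolution + normalization,
# conv as a plain 2-D convolution.
class ConvNorm(tf.keras.layers.Layer):
    """Assumed helper: 2-D convolution ('same' padding) followed by layer norm."""

    def __init__(self, filters, kernel_size=3, strides=1, group_size=None, **kwargs):
        super().__init__(**kwargs)
        # group_size is read as channels per group (RegNet-style group width).
        groups = 1 if group_size is None else max(filters // group_size, 1)
        self.conv = tf.keras.layers.Conv2D(filters,
                                           kernel_size,
                                           strides=strides,
                                           padding='same',
                                           groups=groups,
                                           use_bias=False)
        self.norm = tf.keras.layers.LayerNormalization()

    def call(self, x):
        return self.norm(self.conv(x))


def conv(x, filters, kernel_size=3, strides=1):
    """Assumed helper: plain 2-D convolution with 'same' padding."""
    return tf.keras.layers.Conv2D(filters, kernel_size,
                                  strides=strides, padding='same')(x)

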
def resblock(x_init, stride=1, channel_mult=1):
    """Pre-activation bottleneck residual block: 1x1 reduce, strided spatial conv,
    1x1 expand to channel_in * channel_mult."""
    channel_in = x_init.get_shape().as_list()[-1]

    x = ConvNorm(channel_in // 2, kernel_size=1)(tf.nn.leaky_relu(x_init, 0.2))

    x = tf.nn.leaky_relu(x, 0.2)
    x = ConvNorm(channel_in // 2, strides=stride)(x)

    x = tf.nn.leaky_relu(x, 0.2)
    x = ConvNorm(int(channel_in * channel_mult), kernel_size=1)(x)

    # Project the shortcut whenever the output shape differs from the input.
    if channel_mult != 1 or stride != 1:
        x_init = ConvNorm(int(channel_in * channel_mult),
                          kernel_size=1,
                          strides=stride)(tf.nn.leaky_relu(x_init, 0.2))

    return x + x_init
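
# Minimal usage sketch (assumption: a 4-D feature map and the ConvNorm sketch
# above); with stride=2 and channel_mult=2 the block halves the spatial size
# and doubles the channel count.
_x = tf.random.normal([2, 32, 32, 64])
_y = resblock(_x, stride=2, channel_mult=2)
print(_y.shape)  # (2, 16, 16, 128)
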
def Att_Classifier(in_channels, out_channels=128, h=3, n=64):
    """Multi-head attention pooling head: each of the h heads gets one query
    derived from globally pooled features and attends over all spatial
    positions (with normalized coordinates appended as extra channels)."""
    x_init = tf.keras.layers.Input(shape=[None, None, in_channels])
    b = tf.shape(x_init)[0]

    x = tf.nn.leaky_relu(x_init, 0.2)
    x = ConvNorm(in_channels, kernel_size=1)(x)
    x = tf.nn.leaky_relu(x, 0.2)
    x = ConvNorm(in_channels, kernel_size=1)(x)

    # One query of width n per head, computed from the globally pooled features.
    c = int(h * n)
    q = tf.keras.layers.Dense(units=c)(tf.math.reduce_mean(x, axis=(1, 2)))
    q = tf.split(q, h, axis=1)

    # Append normalized (row, col) coordinates as two extra channels so the
    # keys and values carry positional information.
    h0 = tf.shape(x)[1]
    w0 = tf.shape(x)[2]
    rows = tf.repeat(tf.expand_dims(tf.linspace(0.0, 1.0, h0), 1), w0, axis=1)
    cols = tf.repeat(tf.expand_dims(tf.linspace(0.0, 1.0, w0), 0), h0, axis=0)
    loc = tf.stack([rows, cols], axis=2)                  # (h0, w0, 2)
    loc = tf.tile(tf.expand_dims(loc, 0), (b, 1, 1, 1))   # (b, h0, w0, 2)
    x = tf.concat([x, loc], axis=3)

    # Per-head keys and values from 1x1 convolutions, flattened over space.
    k = tf.split(conv(x, c, 1), h, axis=3)
    k = [tf.reshape(i, (b, -1, n)) for i in k]
    v = tf.split(conv(x, c, 1), h, axis=3)
    v = [tf.reshape(i, (b, -1, n)) for i in v]

    # Scaled dot-product attention, one query per head; q[i] is (b, n), so add a
    # length-1 query axis to keep the matmul batched per example.
    k = [
        tf.nn.softmax(
            tf.matmul(tf.expand_dims(q[i], 1), tf.transpose(k[i], (0, 2, 1))) /
            tf.math.sqrt(tf.cast(n, tf.float32))) for i in range(len(k))
    ]
    x = [tf.matmul(k[i], v[i]) for i in range(len(k))]
    x = tf.reshape(tf.reduce_mean(tf.concat(x, axis=2), axis=1), (-1, h * n))
    #x = tf.reduce_mean(tf.concat(x, axis=1), axis=1)
    x = tf.keras.layers.Dense(units=out_channels)(x)

    return tf.keras.Model(inputs=x_init, outputs=x)
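
# Shape sketch of the per-head attention step above (assumptions: batch of 2,
# a 4x4 feature map and head width n=64). Each head has a single query, so the
# softmax yields one weight per spatial position and the final matmul pools
# the values into one n-dimensional vector per example.
_q = tf.random.normal([2, 1, 64])       # one query per head (after expand_dims)
_k = tf.random.normal([2, 4 * 4, 64])   # one key per spatial position
_v = tf.random.normal([2, 4 * 4, 64])   # one value per spatial position
_att = tf.nn.softmax(tf.matmul(_q, tf.transpose(_k, (0, 2, 1))) / tf.math.sqrt(64.0))
_out = tf.matmul(_att, _v)
print(_out.shape)  # (2, 1, 64)
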
def regnet_block(x_init, channels, g, stride=1):
    """RegNet-style block: 1x1 conv, grouped 3x3 conv (group width g), 1x1 conv,
    plus residual shortcut."""
    channel_in = x_init.get_shape().as_list()[-1]

    x_init = tf.nn.leaky_relu(x_init, 0.2)
    #x_init = tf.nn.relu(x_init)
    x = ConvNorm(channels, kernel_size=1)(x_init)

    x = tf.nn.leaky_relu(x, 0.2)
    #x = tf.nn.relu(x)
    x = ConvNorm(channels, kernel_size=3, group_size=g, strides=stride)(x)

    x = tf.nn.leaky_relu(x, 0.2)
    #x = tf.nn.relu(x)
    x = ConvNorm(channels, kernel_size=1)(x)

    # Project the shortcut whenever the output shape differs from the input
    # (channel count or spatial stride), otherwise the addition below fails.
    if channel_in != channels or stride != 1:
        x_init = ConvNorm(channels, kernel_size=1, strides=stride)(x_init)

    return x + x_init
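
# Minimal usage sketch (assumption: g divides the block width so the grouped
# 3x3 convolution is well formed); the 1x1 projection on the shortcut kicks in
# because the channel count changes from 64 to 128.
_x = tf.random.normal([2, 32, 32, 64])
_y = regnet_block(_x, channels=128, g=32, stride=2)
print(_y.shape)  # (2, 16, 16, 128)
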
def resblock(x_init, stride=1):
    """Variant of resblock above without channel_mult: the channel count doubles
    only when downsampling (stride > 1)."""
    channel_in = x_init.get_shape().as_list()[-1]
    n = 2 if stride > 1 else 1

    x_init = tf.nn.leaky_relu(x_init, 0.2)
    #x = conv(x_init, channel_middle, stride=stride)
    x = ConvNorm(channel_in // 2, kernel_size=1)(x_init)

    x = tf.nn.leaky_relu(x, 0.2)
    #x = conv(x, channel_middle)
    x = ConvNorm(channel_in // 2, strides=stride)(x)

    x = tf.nn.leaky_relu(x, 0.2)
    #x = conv(x, channels)
    x = ConvNorm(channel_in * n, kernel_size=1)(x)

    if stride > 1:
        #x_init = conv(x_init, channels, kernel=1, stride=stride)
        x_init = ConvNorm(channel_in * n, kernel_size=1,
                          strides=stride)(x_init)

    return x + x_init


def Att_Classifier(in_channels, out_channels=128, h=3, n=64):
    """Simplified variant of Att_Classifier above: no positional channels, and
    all h heads share a single set of keys and values."""
    x_init = tf.keras.layers.Input(shape=[None, None, in_channels])
    b = tf.shape(x_init)[0]

    x = tf.nn.leaky_relu(x_init, 0.2)
    x = ConvNorm(in_channels, kernel_size=1)(x)

    q = tf.keras.layers.Dense(units=h * n)(tf.nn.leaky_relu(
        tf.math.reduce_mean(x, axis=(1, 2)), 0.2))
    q = tf.nn.relu(tf.reshape(q, (-1, h, n)))
    x = tf.nn.leaky_relu(x, 0.2)
    k = tf.reshape(conv(x, n, 1), (b, -1, n))
    v = tf.reshape(conv(x, n, 1), (b, -1, n))

    k = tf.nn.softmax(
        tf.matmul(q, tf.transpose(k, (0, 2, 1))) /
        tf.math.sqrt(tf.cast(n, tf.float32)))
    x = tf.reshape(tf.matmul(k, v), (-1, h * n))
    x = tf.keras.layers.Dense(units=out_channels)(x)

    return tf.keras.Model(inputs=x_init, outputs=x)
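
# Minimal usage sketch (assumption: 128-channel feature maps from a conv
# backbone, and a TF 2.x setup where raw tf ops are allowed on Keras Inputs,
# which the definitions above already require).
_clf = Att_Classifier(in_channels=128, out_channels=10)
_feats = tf.random.normal([2, 8, 8, 128])
print(_clf(_feats).shape)  # (2, 10)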