# Shared tf.keras imports assumed by every example below; `initial` (a seeded
# kernel-initializer factory), `seed`, `_bn_relu`, `_bn_relu_conv`,
# `_normal_block`, and `_bottleneck_block` are defined elsewhere in the
# original modules.
from math import ceil

from tensorflow.keras import backend as K
from tensorflow.keras.layers import (Activation, Add, BatchNormalization,
                                     Conv2D, Dense, Dot, GlobalAvgPool2D,
                                     Input, Lambda, MaxPool1D, Reshape)
from tensorflow.keras.models import Model
from tensorflow.keras.regularizers import l2

Example #1
def f(input):
    # conv -> BN -> ReLU (post-activation ordering)
    conv = Conv2D(
        filter,
        k,
        strides=s,
        padding=pad,
        kernel_regularizer=l2(w_decay),
        kernel_initializer=initial(seed),
    )(input)
    return _bn_relu()(conv)
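Examples #1 and #2 are the inner closures of builder factories that the listing does not show. A minimal sketch of the enclosing `_conv_bn_relu`, reconstructed from the call `_conv_bn_relu(filter=filter, k=3, s=1, w_decay=w_decay)` in `nonlocal_resnet` below; the default values are assumptions, and `_bn_relu_conv` presumably wraps Example #2 the same way:

def _conv_bn_relu(filter, k, s=1, pad='same', w_decay=1e-4):
    # returns the closure from Example #1; the hyper-parameters are captured
    # by `f` rather than passed to it
    def f(input):
        conv = Conv2D(filter, k, strides=s, padding=pad,
                      kernel_regularizer=l2(w_decay),
                      kernel_initializer=initial(seed))(input)
        return _bn_relu()(conv)
    return f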
Example #2
def f(input):
    # BN -> ReLU -> conv (pre-activation ordering)
    activation = _bn_relu()(input)
    conv = Conv2D(
        filter,
        k,
        strides=s,
        padding=pad,
        kernel_regularizer=l2(w_decay),
        kernel_initializer=initial(seed),
    )(activation)
    return conv
Example #3
def f(input):
    # basic residual block, post-activation style: conv-BN-ReLU, conv-BN,
    # shortcut add, then a final ReLU
    conv = _conv_bn_relu(filter, (3, 3), s=strides, w_decay=w_decay)(input)
    residual = Conv2D(
        filter,
        (3, 3),
        strides=1,
        padding='same',
        kernel_regularizer=l2(w_decay),
        kernel_initializer=initial(seed),
    )(conv)
    residual = BatchNormalization(axis=3)(residual)
    merged = _shortcut(input, residual, w_decay)  # renamed from `sum` to avoid shadowing the built-in
    return Activation('relu')(merged)
Example #4
def f(input):
    # pre-activation residual block; the very first block of the first stage
    # skips the leading BN-ReLU because it directly follows the stem conv
    if fst_layer_fst_stage:
        conv = Conv2D(
            filter,
            (3, 3),
            strides=1,
            padding='same',
            kernel_regularizer=l2(w_decay),
            kernel_initializer=initial(seed),
        )(input)
    else:
        conv = _bn_relu_conv(filter, 3, s=strides, w_decay=w_decay)(input)
    residual = _bn_relu_conv(filter, 3, 1, w_decay=w_decay)(conv)
    return _shortcut(input, residual, w_decay)
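`nonlocal_resnet` below calls `_resblock(block_function, filter, r, i, w_decay, i == 0)`, but the helper itself is missing from the listing. A hypothetical sketch under those assumptions; the argument names and the stride-2 downsampling rule are guesses modeled on common keras-resnet stage builders:

def _resblock(block_function, filter, repetition, stage, w_decay, is_first_stage):
    # repeat the block `repetition` times; downsample once at the start of
    # every stage except the first (assumed block_function signature)
    def f(input):
        for i in range(repetition):
            strides = 1
            if i == 0 and not is_first_stage:
                strides = 2  # downsample at the first block of each later stage
            input = block_function(filter, strides, w_decay,
                                   fst_layer_fst_stage=(is_first_stage and i == 0))(input)
        return input
    return f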
def _shortcut(input, residual, w_decay):
    # Project the identity branch with a strided 1x1 convolution whenever the
    # spatial size or channel count of the two branches differs
    # (channels_last layout assumed).
    input_shape = K.int_shape(input)
    res_shape = K.int_shape(residual)
    stride = int(round(input_shape[1] / res_shape[1]))
    channels_equal = input_shape[3] == res_shape[3]
    x = input
    if stride > 1 or not channels_equal:
        x = Conv2D(
            res_shape[3],
            (1, 1),
            strides=(stride, stride),
            padding='same',
            kernel_regularizer=l2(w_decay),
            kernel_initializer=initial(seed),
        )(input)
    return Add()([x, residual])
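Since `_shortcut` compares `K.int_shape` of the two branches, a quick check makes the projection behaviour concrete. The shapes here are illustrative only, and `initial` and `seed` must exist in scope:

inp = Input((32, 32, 16))
res = Input((16, 16, 32))  # stand-in for a real residual branch
merged = _shortcut(inp, res, w_decay=1e-4)
print(K.int_shape(merged))  # 1x1, stride-2 projection yields (None, 16, 16, 32)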
def nonlocal_resnet(input_shape=(32, 32, 3),
                    num_classes=10,
                    w_decay=1e-4,
                    depth=34,
                    stage=None,
                    filter_num=16):
    # `filter_num` (the stem width) was a free variable in the original
    # listing; exposing it as a parameter with a default of 16 is an
    # assumption.
    # ResNet-50 and deeper use bottleneck blocks, shallower nets basic blocks.
    if depth >= 50:
        block_function = _bottleneck_block
    else:
        block_function = _normal_block
    if stage is None:
        stage = [3, 4, 6, 3]

    # network begins
    filter = filter_num
    input = Input(input_shape)
    conv1 = _conv_bn_relu(filter=filter, k=3, s=1, w_decay=w_decay)(input)

    # stack the residual stages, doubling the width after each one; a
    # non-local block is appended after the final stage
    block = conv1
    for i, r in enumerate(stage):
        block = _resblock(block_function, filter, r, i, w_decay, i == 0)(block)
        filter *= 2
        if i == len(stage) - 1:  # original hardcoded i == 3 (the default last stage)
            block = non_local(block)

    # classifier
    avg = GlobalAvgPool2D()(block)
    output = Dense(
        num_classes,
        activation='softmax',
        kernel_initializer=initial(seed),
    )(avg)

    model = Model(input, output)
    model.summary()
    return model
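A minimal usage sketch; the compile settings are illustrative, not from the source, and the block helpers above must be importable:

model = nonlocal_resnet(input_shape=(32, 32, 3), num_classes=10, depth=34)
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])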
def non_local(input,
              bottle_dim=None,
              compression=2,
              mode='embedded',
              residual=True):
    if mode not in ['gaussian', 'embedded', 'dot', 'concatenate']:
        raise ValueError("mode must be one of 'gaussian', 'embedded', "
                         "'dot' or 'concatenate'.")

    shape = K.int_shape(input)
    batch, dim1, dim2, channel = shape  # channels_last: (N, H, W, C)

    # default the bottleneck width to half the input channels
    if bottle_dim is None:
        bottle_dim = ceil(channel / 2)
    else:
        bottle_dim = int(bottle_dim)
        if bottle_dim < 1:
            raise ValueError('bottle_dim must be >= 1')

    # compute the affinity map f
    if mode == 'gaussian':
        # no embedding convs in gaussian mode, so the reshape keeps all
        # `channel` features (the original reshaped to `bottle_dim`, which
        # breaks whenever bottle_dim != channel)
        xi = Reshape((-1, channel))(input)
        xj = Reshape((-1, channel))(input)
        f = Dot(axes=2)([xi, xj])
        f = Activation('softmax')(f)

    if mode == 'embedded':
        theta = Conv2D(bottle_dim, (1, 1),
                       padding='same',
                       kernel_initializer=initial())(input)
        theta = Reshape((-1, bottle_dim))(theta)
        phi = Conv2D(bottle_dim, (1, 1),
                     padding='same',
                     kernel_initializer=initial())(input)
        phi = Reshape((-1, bottle_dim))(phi)
        if compression > 1:
            phi = MaxPool1D(compression)(phi)
        f = Dot(axes=2)([theta, phi])
        f = Activation('softmax')(f)

    if mode == 'dot':
        theta = Conv2D(bottle_dim, (1, 1),
                       padding='same',
                       kernel_initializer=initial())(input)
        theta = Reshape((-1, bottle_dim))(theta)
        phi = Conv2D(bottle_dim, (1, 1),
                     padding='same',
                     kernel_initializer=initial())(input)
        phi = Reshape((-1, bottle_dim))(phi)
        f = Dot(axes=2)([theta, phi])
        size = K.int_shape(f)
        # dot mode normalises by 1/N instead of a softmax
        f = Lambda(lambda out: out / float(size[-1]))(f)

    if mode == 'concatenate':
        # Paper's concatenation variant: score each pair (i, j) with
        # relu(w^T [theta_i, phi_j]), normalised by 1/N. The pairwise concat
        # is built in a Lambda; the original channel-wise Concatenate call was
        # malformed and could not produce a (positions x positions) affinity.
        theta = Conv2D(bottle_dim, (1, 1), padding='same',
                       kernel_initializer=initial())(input)
        theta = Reshape((-1, bottle_dim))(theta)
        phi = Conv2D(bottle_dim, (1, 1), padding='same',
                     kernel_initializer=initial())(input)
        phi = Reshape((-1, bottle_dim))(phi)

        def pairwise_concat(tensors):
            t, p = tensors                      # each (N, HW, bottle_dim)
            n = K.shape(t)[1]
            t = K.tile(K.expand_dims(t, 2), K.stack([1, 1, n, 1]))
            p = K.tile(K.expand_dims(p, 1), K.stack([1, n, 1, 1]))
            return K.concatenate([t, p])        # (N, HW, HW, 2 * bottle_dim)

        f = Lambda(pairwise_concat)([theta, phi])
        f = Dense(1, activation='relu', kernel_initializer=initial())(f)
        f = Reshape((dim1 * dim2, dim1 * dim2))(f)
        f = Lambda(lambda out: out / float(dim1 * dim2))(f)

    # compute g (the value embedding)
    g = Conv2D(bottle_dim, (1, 1),
               padding='same',
               kernel_initializer=initial())(input)
    g = Reshape((-1, bottle_dim))(g)
    # only the embedded branch pools phi, so only there may g be pooled too;
    # the original pooled g in every mode, breaking the Dot shapes otherwise
    if compression > 1 and mode == 'embedded':
        g = MaxPool1D(compression)(g)

    # compute y
    y = Dot(axes=[2, 1])([f, g])
    y = Reshape((dim1, dim2, bottle_dim))(y)
    y = Conv2D(channel, (1, 1), padding='same',
               kernel_initializer=initial())(y)

    # residual connection back onto the input
    if residual:
        y = Add()([y, input])

    return y
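A standalone shape check of the block, assuming a 16x16x64 feature map:

inp = Input((16, 16, 64))
out = non_local(inp, compression=2, mode='embedded')
print(K.int_shape(out))  # residual add keeps the input shape: (None, 16, 16, 64)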