# Example #1
def preresnet_basicblock(l, ch_out, stride, preact, block_type='B'):
    """Pre-activation ResNet basic block built from quantized convolutions.

    Applies the pre-activation (via ``apply_preactivation``), then two 3x3
    quantized convs, and adds the result to a shortcut produced by
    ``resnet_shortcut`` (which projects/pads when channels change).

    Args:
        l: input tensor.
        ch_out: number of output channels of each 3x3 conv.
        stride: stride of the first conv (and of the shortcut).
        preact: pre-activation mode forwarded to ``apply_preactivation``.
        block_type: shortcut flavor forwarded to ``resnet_shortcut``.

    Returns:
        The block output tensor (residual + shortcut).
    """
    l, shortcut = apply_preactivation(l, preact, 'basic')
    residual = Conv2DQuant('conv1', l, ch_out, 3, stride=stride, nl=BNReLU)
    residual = QuantizedActiv('quant2', residual)
    # second conv's BN is not zero-initialized here
    residual = Conv2DQuant('conv2', residual, ch_out, 3,
                           nl=get_bn(zero_init=False))
    skip = resnet_shortcut(shortcut, ch_out, stride,
                           nl=get_bn(zero_init=False),
                           block_type=block_type)
    return residual + skip
# Example #2
def add_layer(name, l):
    """DenseNet composite layer: 1x1 bottleneck conv followed by a 3x3 conv.

    Produces ``GROWTH_RATE`` new feature maps and concatenates them in front
    of the input along axis 1.

    NOTE(review): the concat axis is hard-coded to 1 (NCHW), while sibling
    blocks in this file query the data_format from the arg scope — confirm
    this module is only ever used with NCHW.

    Args:
        name: variable-scope name for the layer.
        l: input feature tensor.

    Returns:
        Tensor with the new features concatenated onto the input channels.
    """
    # Removed unused locals (shape/in_channel) and the unused scope binding.
    with tf.variable_scope(name):
        c = Conv2DQuant('conv1x1', l, 4 * GROWTH_RATE, 1)
        c = BNReLU('bnrelu_2', c)
        c = QuantizedActiv('quant2', c)
        c = Conv2DQuant('conv3x3', c, GROWTH_RATE, 3)
        c = BNReLU('bnrelu_3', c)
        c = QuantizedActiv('quant3', c)
        l = tf.concat([c, l], 1)
    return l
# Example #3
def preresnet_bottleneck(l, ch_out, stride, preact, block_type='A'):
    """Pre-activation ResNet bottleneck (1x1 -> 3x3 -> 1x1) with quantization.

    The stride is applied on the second (3x3) conv, following
    fb.resnet.torch. The final 1x1 conv expands to ``4 * ch_out`` channels,
    and the result is added to a matching shortcut.

    Args:
        l: input tensor.
        ch_out: bottleneck width; the block outputs ``4 * ch_out`` channels.
        stride: stride of the middle conv (and of the shortcut).
        preact: pre-activation mode forwarded to ``apply_preactivation``.
        block_type: shortcut flavor forwarded to ``resnet_shortcut``.

    Returns:
        The block output tensor (residual + shortcut).
    """
    l, shortcut = apply_preactivation(l, preact, 'basic')
    expanded = ch_out * 4
    residual = Conv2DQuant('conv1', l, ch_out, 1, nl=BNReLU)
    residual = QuantizedActiv('quant2', residual)
    residual = Conv2DQuant('conv2', residual, ch_out, 3,
                           stride=stride, nl=BNReLU)
    residual = QuantizedActiv('quant3', residual)
    residual = Conv2DQuant('conv3', residual, expanded, 1,
                           nl=get_bn(zero_init=False))
    skip = resnet_shortcut(shortcut, expanded, stride,
                           nl=get_bn(zero_init=False),
                           block_type=block_type)
    return residual + skip
# Example #4
def add_transition(name, l):
    """DenseNet transition layer: channel compression then 2x2 average pool.

    Reduces the channel count to ``floor(in_channel * REDUCTION)`` with a
    1x1 conv and halves the spatial resolution.

    NOTE(review): channels are read from axis 1, i.e. NCHW layout is
    assumed — confirm against the model's data_format.

    Args:
        name: variable-scope name for the layer.
        l: input feature tensor.

    Returns:
        The downsampled, channel-compressed tensor.
    """
    in_channel = l.get_shape().as_list()[1]
    out_channel = math.floor(in_channel * REDUCTION)
    # Removed the unused `as scope` binding.
    with tf.variable_scope(name):
        l = Conv2DQuant('conv1', l, out_channel, 1, stride=1, use_bias=False)
        l = AvgPooling('pool', l, 2)
    return l
# Example #5
def resnet_bottleneck(l, ch_out, stride, stride_first=False):
    """Post-activation ResNet bottleneck (1x1 -> 3x3 -> 1x1) with quantization.

    stride_first: original resnet put stride on first conv. fb.resnet.torch
    put stride on second conv.
    """
    shortcut = l
    # Place the stride on conv1 or conv2 depending on the variant.
    stride1, stride2 = (stride, 1) if stride_first else (1, stride)
    residual = Conv2DQuant('conv1', l, ch_out, 1, stride=stride1, nl=BNReLU)
    residual = Conv2DQuant('conv2', residual, ch_out, 3,
                           stride=stride2, nl=BNReLU)
    # Last BN is zero-initialized so the block starts as (almost) identity.
    residual = Conv2DQuant('conv3', residual, ch_out * 4, 1,
                           nl=get_bn(zero_init=True))
    skip = resnet_shortcut(shortcut, ch_out * 4, stride,
                           nl=get_bn(zero_init=False))
    return residual + skip
# Example #6
def resnet_shortcut(l, n_out, stride, nl=tf.identity, block_type='B'):
    """Build the shortcut branch of a residual block.

    If the channel count already matches, the input passes through
    unchanged. Otherwise:
      * type 'B': learned 1x1 projection conv (with ``nl``);
      * type 'A': parameter-free — average-pool by ``stride`` and zero-pad
        the missing channels (``nl`` is not applied on this path).

    Args:
        l: input tensor.
        n_out: required output channel count.
        stride: spatial stride the shortcut must match.
        nl: nonlinearity/normalization for the projection conv.
        block_type: 'B' (projection) or anything else for type 'A' (pad).

    Returns:
        The shortcut tensor with ``n_out`` channels.
    """
    data_format = get_arg_scope()['Conv2DQuant']['data_format']
    channel_axis = 1 if data_format == 'NCHW' else 3
    n_in = l.get_shape().as_list()[channel_axis]

    if n_in == n_out:
        # Channel counts already agree: identity shortcut.
        return l
    if block_type == 'B':
        return Conv2DQuant('convshortcut', l, n_out, 1, stride=stride, nl=nl)
    # Type 'A': downsample, then zero-pad the extra channels.
    l = AvgPooling('poolshortcut', l, stride, stride, padding='VALID')
    if data_format == 'NCHW':
        paddings = [[0, 0], [0, n_out - n_in], [0, 0], [0, 0]]
    else:
        paddings = [[0, 0], [0, 0], [0, 0], [0, n_out - n_in]]
    return tf.pad(l, paddings, 'CONSTANT')
# Example #7
def _downsample_branch(tag, branch):
    """Downsample one inception branch: 3x3/2 max-pool, BN+ReLU, quantized activation."""
    branch = MaxPooling('pool_' + tag, branch, shape=3, stride=2,
                        padding='SAME')
    branch = BNReLU('conv%s_bn' % tag, branch)
    return QuantizedActiv('conv%s_quant' % tag, branch)


def inception_block(l,
                    name,
                    ch_1x1,
                    ch_3x3,
                    ch_5x5,
                    is_last_block=False,
                    is_last=False):
    """Quantized GoogLeNet-style inception block with three branches.

    Branches: 1x1 conv; 1x1-reduce -> 3x3 conv; 1x1-reduce -> 5x5 conv.
    Outputs are concatenated along the channel axis.

    Args:
        l: input tensor.
        name: variable-scope name for the block.
        ch_1x1, ch_3x3, ch_5x5: channel counts of the three branches.
        is_last_block: if True, branch output convs use ``tf.identity``
            instead of the fused BN+ReLU+quant nonlinearity, and (unless
            ``is_last``) each branch is downsampled before concat.
        is_last: if True, a final BN+ReLU is applied after the concat and
            no downsampling happens.

    Returns:
        The concatenated (and possibly post-activated) block output.
    """
    data_format = get_arg_scope()['Conv2DQuant']['data_format']
    # Last block defers activation: branch convs end with identity.
    branch_nl = tf.identity if is_last_block else getBNReLUQuant
    with tf.variable_scope(name):
        conv1x1 = Conv2DQuant('1x1', l, ch_1x1, 1, nl=branch_nl)
        conv3x3 = Conv2DQuant('3x3_reduce', l, ch_3x3, 1, nl=getBNReLUQuant)
        conv3x3 = Conv2DQuant('3x3', conv3x3, ch_3x3, 3, nl=branch_nl)
        conv5x5 = Conv2DQuant('5x5_reduce', l, ch_5x5, 1, nl=getBNReLUQuant)
        conv5x5 = Conv2DQuant('5x5', conv5x5, ch_5x5, 5, nl=branch_nl)
        if is_last_block and not is_last:
            # Transition between stacks: downsample every branch.
            conv1x1 = _downsample_branch('1x1', conv1x1)
            conv3x3 = _downsample_branch('3x3', conv3x3)
            conv5x5 = _downsample_branch('5x5', conv5x5)
        l = tf.concat([conv1x1, conv3x3, conv5x5],
                      1 if data_format == 'NCHW' else 3,
                      name='concat')
        if is_last:
            l = BNReLU('output_bn', l)
    return l
# Example #8
def resnet_basicblock(l, ch_out, stride):
    """Post-activation ResNet basic block (two 3x3 quantized convs).

    The second conv's BN is zero-initialized so the residual branch starts
    near zero; the shortcut is projected by ``resnet_shortcut`` when the
    channel count or stride changes.
    """
    shortcut = l
    residual = Conv2DQuant('conv1', l, ch_out, 3, stride=stride, nl=BNReLU)
    residual = Conv2DQuant('conv2', residual, ch_out, 3,
                           nl=get_bn(zero_init=True))
    skip = resnet_shortcut(shortcut, ch_out, stride,
                           nl=get_bn(zero_init=False))
    return residual + skip