Example #1
0
def bottleneck_block(data, name, filters, stride=1, dilate=1,
                     identity_map=True, shortcut_kernel=1, dropout_rate=0.):
    """Residual bottleneck: 1x1 reduce -> 3x3 -> 1x1 expand, plus shortcut.

    The main path narrows to `filters` channels, convolves at 3x3 with
    optional dilation, then expands to `filters * 4` channels.  When
    `identity_map` is true the input itself is the shortcut; otherwise a
    projection conv (`shortcut_kernel` of 1 or 2) adapts the input.
    """
    out = res_conv_stage(data, '{}_branch2a'.format(name), filters,
                         kernel=1, stride=stride)
    out = res_conv_stage(out, '{}_branch2b'.format(name), filters,
                         kernel=3, dilate=dilate)
    out = res_conv_stage(out, '{}_branch2c'.format(name), filters * 4,
                         kernel=1, has_relu=False, dropout_rate=dropout_rate)
    if identity_map:
        # Input shape already matches the expanded output: plain skip.
        return relu(data + out, 'res{}_relu'.format(name))
    assert shortcut_kernel in (1, 2)
    # Projection shortcut; an even kernel gets explicit zero padding,
    # an odd one keeps the default (-1) padding convention.
    shortcut = res_conv_stage(data, '{}_branch1'.format(name), filters * 4,
                              kernel=shortcut_kernel, stride=stride,
                              dilate=dilate,
                              pad=-1 if shortcut_kernel % 2 else 0,
                              has_relu=False)
    return relu(shortcut + out, 'res{}_relu'.format(name))
Example #2
0
def conv_stage(data,
               names,
               filters,
               kernel=3,
               stride=1,
               dilate=1,
               pad=-1,
               groups=1,
               no_bias=True,
               dropout_rate=0.):
    """Pre-activation conv stage: BN -> ReLU -> (optional dropout) -> conv.

    `names` supplies four layer names in order: bn, relu, dropout, conv.
    The dropout slot (names[2]) is reserved even when `dropout_rate` is
    zero so the conv name always comes from names[3].
    Returns the pair (relu output, conv output).
    """
    normed = bn(data, names[0])
    activated = relu(normed, names[1])
    # Dropout sits between the activation and the conv only when enabled.
    if dropout_rate > 0.:
        conv_input = dropout(activated, names[2], p=dropout_rate)
    else:
        conv_input = activated
    top = conv(conv_input,
               names[3],
               filters,
               kernel=kernel,
               stride=stride,
               dilate=dilate,
               pad=pad,
               groups=groups,
               no_bias=no_bias)
    return activated, top
Example #3
0
def conv_stage(data,
               name,
               filters,
               kernel=3,
               stride=1,
               dilate=1,
               pad=-1,
               groups=1,
               no_bias=False,
               has_bn=False,
               dropout_rate=0.,
               has_relu=True):
    """Post-activation conv stage: conv -> [BN] -> [dropout] -> [ReLU].

    Each optional layer is named by suffixing `name` ('_bn', '_do',
    '_relu'); the flags and `dropout_rate` control which are appended.
    """
    out = conv(data, name, filters,
               kernel=kernel, stride=stride, dilate=dilate,
               pad=pad, groups=groups, no_bias=no_bias)
    if has_bn:
        out = bn(out, name='{}_bn'.format(name))
    if dropout_rate > 0.0:
        out = dropout(out, '{}_do'.format(name), p=dropout_rate)
    if has_relu:
        out = relu(out, '{}_relu'.format(name))
    return out
Example #4
0
def res_conv_stage(data,
                   name,
                   filters,
                   kernel=3,
                   stride=1,
                   dilate=1,
                   pad=-1,
                   groups=1,
                   no_bias=True,
                   has_relu=True,
                   dropout_rate=0.):
    """ResNet-style conv stage: conv -> BN -> [dropout] -> [ReLU].

    Layers follow the ResNet naming scheme: 'res{name}', 'bn{name}',
    'res{name}_do', 'res{name}_relu'.  BN is always applied; dropout
    and the final ReLU are optional.
    """
    out = conv(data, 'res{}'.format(name), filters,
               kernel=kernel, stride=stride, dilate=dilate,
               pad=pad, groups=groups, no_bias=no_bias)
    out = bn(data=out, name='bn{}'.format(name))
    if dropout_rate > 0.0:
        out = dropout(out, 'res{}_do'.format(name), p=dropout_rate)
    return relu(out, 'res{}_relu'.format(name)) if has_relu else out
Example #5
0
def bottleneck_block(data,
                     name,
                     filters,
                     stride=1,
                     dilate=1,
                     identity_map=True,
                     shortcut_kernel=1,
                     dropout_rate=0.):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block.

    The trunk compresses to `filters`, applies a (possibly dilated)
    3x3, then expands to four times `filters`.  `identity_map` selects
    a plain skip connection; otherwise a projection conv with
    `shortcut_kernel` (1 or 2) adapts the input before the residual sum.
    """
    trunk = res_conv_stage(data,
                           '{}_branch2a'.format(name),
                           filters,
                           kernel=1,
                           stride=stride)
    trunk = res_conv_stage(trunk,
                           '{}_branch2b'.format(name),
                           filters,
                           kernel=3,
                           dilate=dilate)
    trunk = res_conv_stage(trunk,
                           '{}_branch2c'.format(name),
                           filters * 4,
                           kernel=1,
                           has_relu=False,
                           dropout_rate=dropout_rate)
    if identity_map:
        shortcut = data
    else:
        assert shortcut_kernel in (1, 2)
        # Even shortcut kernels use explicit zero padding; odd ones keep
        # the default (-1) padding convention.
        shortcut = res_conv_stage(data,
                                  '{}_branch1'.format(name),
                                  filters * 4,
                                  kernel=shortcut_kernel,
                                  stride=stride,
                                  dilate=dilate,
                                  pad=-1 if shortcut_kernel % 2 else 0,
                                  has_relu=False)
    return relu(shortcut + trunk, 'res{}_relu'.format(name))
Example #6
0
def res_conv_stage(data, name, filters, kernel=3, stride=1, dilate=1, pad=-1,
                   groups=1, no_bias=True, has_relu=True, dropout_rate=0.):
    """conv -> BN -> optional dropout -> optional ReLU, ResNet naming.

    Produces layers named 'res{name}', 'bn{name}', 'res{name}_do' and
    'res{name}_relu' according to which stages are enabled.
    """
    stage = conv(data,
                 'res{}'.format(name),
                 filters,
                 kernel=kernel,
                 stride=stride,
                 dilate=dilate,
                 pad=pad,
                 groups=groups,
                 no_bias=no_bias)
    stage = bn(data=stage, name='bn{}'.format(name))
    if dropout_rate > 0.0:
        stage = dropout(stage, 'res{}_do'.format(name), p=dropout_rate)
    if has_relu:
        stage = relu(stage, 'res{}_relu'.format(name))
    return stage
Example #7
0
def conv_stage(data, name, filters, kernel=3, stride=1, dilate=1, pad=-1,
               groups=1, no_bias=False, has_bn=False, dropout_rate=0.,
               has_relu=True):
    """conv followed by optional BN, dropout and ReLU layers.

    The optional layers are named '{name}_bn', '{name}_do' and
    '{name}_relu', appended in that order when enabled.
    """
    net = conv(data,
               name,
               filters,
               kernel=kernel,
               stride=stride,
               dilate=dilate,
               pad=pad,
               groups=groups,
               no_bias=no_bias)
    if has_bn:
        net = bn(net, name='{}_bn'.format(name))
    if dropout_rate > 0.0:
        net = dropout(net, '{}_do'.format(name), p=dropout_rate)
    return relu(net, '{}_relu'.format(name)) if has_relu else net
Example #8
0
def conv_stage(data, names, filters, kernel=3, stride=1, dilate=1, pad=-1,
               groups=1, no_bias=True, dropout_rate=0.):
    """BN -> ReLU -> (optional dropout) -> conv, pre-activation order.

    `names` holds the four layer names (bn, relu, dropout, conv); the
    dropout name slot is reserved even when dropout is disabled, so the
    conv is always named by names[3].  Returns (relu output, conv output).
    """
    act = relu(bn(data, names[0]), names[1])
    if dropout_rate > 0.:
        conv_input = dropout(act, names[2], p=dropout_rate)
    else:
        conv_input = act
    top = conv(conv_input, names[3],
               filters,
               kernel=kernel,
               stride=stride,
               dilate=dilate,
               pad=pad,
               groups=groups,
               no_bias=no_bias)
    return act, top
Example #9
0
def rna_feat(conv1_layers, level_blocks):
    '''Build the RNA (ResNet-like) feature network, levels 0 through 7.

    ``conv1_layers`` configures the plain conv stack at level 1 (each
    entry supplies a ``.kernel``); ``level_blocks`` configures, per
    level, the residual block count (``.num``), width multiplier
    (``.width``), downsampling mode (``.downsample``: 'p' pool or 'c'
    conv), dilation flag (``.dilate``) and dropout rate (``.dropout``).
    Returns the final BN+ReLU feature symbol.

    NOTE(review): Python 2 code (print statements, xrange); the symbol
    API is presumably MXNet (mx.sym.Variable, infer_shape) -- confirm.
    '''
    def _widen(filters, max_filters, min_filters=-1):
        # Scale `filters` by the CURRENT level's width multiplier.
        # Reads the enclosing `level` variable at call time -- `level`
        # is reassigned below as the network is built, so call order
        # matters.  Result is clamped into [min_filters, max_filters].
        filters = min(max_filters, int(filters * level_blocks[level].width))
        if min_filters > 0:
            filters = max(min_filters, filters)
        return filters

    # TODO: these may only work for b33
    def _ds_by_pl(data, name, level, dilate):
        # Downsample by pooling when this level's mode is 'p'; pass data
        # through unchanged otherwise.  Returns (symbol, updated dilate).
        assert level > 0
        level_block = level_blocks[level]
        if not level_block.downsample == 'p':
            return data, dilate
        pool_stride = stride = 2
        inc_dilate = level_block.dilate
        pad = -1
        if inc_dilate:
            # Keep spatial resolution: stride-1 pool, dilation grows
            # by the nominal stride instead.
            pool_stride = 1
            pad = dilate
        print 'Pooling stride: {}, dilate: {}, pad: {}'.format(
            pool_stride, dilate, pad)
        top = pool(data,
                   name,
                   stride=pool_stride,
                   dilate=dilate,
                   pad=pad,
                   pool_type=pool_type)
        if inc_dilate:
            dilate *= stride
        return top, dilate

    def _ds_by_cv(data,
                  name,
                  filters,
                  level,
                  dilate,
                  kernel=3,
                  dropout_rate=0.):
        # First (projection) residual block of a level.  It strides by 2
        # only when the PREVIOUS level's downsample mode is 'c'.
        assert level > 1
        level_block = level_blocks[level - 1]
        if not level_block.downsample == 'c':
            print 'First block on level {}, dilate: {}'.format(level, dilate)
            top = res_block(data,
                            name,
                            filters,
                            kernel=kernel,
                            dilate=dilate,
                            identity_map=False,
                            dropout_rate=dropout_rate)
        else:
            stride = 2
            inc_dilate = level_block.dilate
            print 'First block on level {}, stride: {}, dilate: {}'.format(
                level, stride, dilate)
            top = res_block(data,
                            name,
                            filters,
                            kernel=kernel,
                            stride=stride,
                            dilate=dilate,
                            inc_dilate=inc_dilate,
                            identity_map=False,
                            dropout_rate=dropout_rate)
            if inc_dilate:
                dilate *= stride
        return top, dilate

    dilate = 1
    pool_type = 'max'
    crop_size = 224  # reference input size used only for shape printouts

    data = mx.sym.Variable('data')
    # 224^2 3

    level = 0
    print 'Level {}'.format(level)
    conv0 = data
    # 224^2 3
    print conv0.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    # mirror_stage marks memory-mirroring checkpoints -- presumably for
    # MXNet's memonger memory optimization; confirm.
    conv0._set_attr(mirror_stage='True')

    # Level 1: conv1a plus a pre-activation conv1b* stack, then residual
    # blocks and optional pool downsampling.
    level = 1
    print 'Level {}'.format(level)
    filters = _widen(64, 1024)
    res1 = conv(conv0,
                'conv1a',
                filters,
                kernel=conv1_layers[0].kernel,
                stride=1,
                no_bias=True)
    for i, conv1_layer in enumerate(conv1_layers[1:]):
        names = [
            'conv1b{}_bn'.format(i),
            'conv1b{}_relu'.format(i),
            'conv1b{}_do'.format(i),
            'conv1b{}'.format(i),
        ]
        res1 = conv_stage(res1,
                          names,
                          filters,
                          kernel=conv1_layer.kernel,
                          stride=1,
                          no_bias=True)
    for i in xrange(level_blocks[level].num):
        res1 = res_block(res1, '1b{}'.format(i + 1), filters)
    # pass dilate from now on
    assert dilate == 1
    res1, dilate = _ds_by_pl(res1, 'pool1', level, dilate)
    # 112^2 64
    print res1.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res1._set_attr(mirror_stage='True')

    # Levels 2-4: projection first block, identity blocks, optional pool.
    level = 2
    print 'Level {}'.format(level)
    filters = _widen(128, 1024)
    res2, dilate = _ds_by_cv(res1, '2a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res2 = res_block(res2, '2b{}'.format(i), filters, dilate=dilate)
        res2._set_attr(mirror_stage='True')
    res2, dilate = _ds_by_pl(res2, 'pool2', level, dilate)
    # 56^2 128
    print res2.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res2._set_attr(mirror_stage='True')

    level = 3
    print 'Level {}'.format(level)
    filters = _widen(256, 1024)
    res3, dilate = _ds_by_cv(res2, '3a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res3 = res_block(res3, '3b{}'.format(i), filters, dilate=dilate)
        res3._set_attr(mirror_stage='True')
    res3, dilate = _ds_by_pl(res3, 'pool3', level, dilate)
    # 28^2 256
    print res3.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res3._set_attr(mirror_stage='True')

    level = 4
    print 'Level {}'.format(level)
    filters = _widen(512, 1024)
    res4, dilate = _ds_by_cv(res3, '4a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res4 = res_block(res4, '4b{}'.format(i), filters, dilate=dilate)
        res4._set_attr(mirror_stage='True')
    res4, dilate = _ds_by_pl(res4, 'pool4', level, dilate)
    # 14^2 512
    print res4.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res4._set_attr(mirror_stage='True')

    # Level 5 uses a two-element filters list (the two widths below).
    level = 5
    print 'Level {}'.format(level)
    # wd = 0.5 : (512, 512)
    # wd = 1.0 : (512, 1024)
    # wd = 2.0 : (1024, 1024)
    filters_l1 = _widen(512, 1024, filters)
    filters = [filters_l1, _widen(1024, 1024, filters_l1)]
    res5, dilate = _ds_by_cv(res4, '5a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res5 = res_block(res5, '5b{}'.format(i), filters, dilate=dilate)
        res5._set_attr(mirror_stage='True')
    res5, dilate = _ds_by_pl(res5, 'pool5', level, dilate)
    # 7^2 1024
    print res5.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res5._set_attr(mirror_stage='True')

    # Levels 6-7: single wide bottleneck blocks with per-level dropout.
    level = 6
    print 'Level {}'.format(level)
    res6, dilate = _ds_by_cv(res5,
                             '6a', [512, 1024, 2048],
                             level,
                             dilate,
                             kernel=[1, 3, 1],
                             dropout_rate=level_blocks[level].dropout)
    # 7^2 2048
    print res6.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res6._set_attr(mirror_stage='True')

    level = 7
    print 'Level {}'.format(level)
    res7 = res_block(res6,
                     '7a', [1024, 2048, 4096],
                     kernel=[1, 3, 1],
                     dilate=dilate,
                     identity_map=False,
                     dropout_rate=level_blocks[level].dropout)
    # 7^2 4096
    print res7.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    res7._set_attr(mirror_stage='True')

    # Final pre-activation head: BN + ReLU over the last residual output.
    bn7 = bn(res7, 'bn7')
    relu7 = relu(bn7, 'relu7')
    relu7._set_attr(mirror_stage='True')
    return relu7
Example #10
0
def rna_feat(conv1_layers, level_blocks):
    '''Build the RNA (ResNet-like) feature network, levels 0 through 7.

    ``conv1_layers`` configures the plain conv stack at level 1;
    ``level_blocks`` gives per-level block count (``.num``), width
    multiplier (``.width``), downsample mode (``.downsample``: 'p' pool
    or 'c' conv), dilation flag (``.dilate``) and dropout rate
    (``.dropout``).  Returns the final BN+ReLU feature symbol.

    NOTE(review): Python 2 code (print statements, xrange); the symbol
    API is presumably MXNet (mx.sym.Variable, infer_shape) -- confirm.
    '''

    def _widen(filters, max_filters, min_filters=-1):
        # Scale by the CURRENT level's width multiplier.  Reads the
        # enclosing `level` variable at call time (it is reassigned
        # below), clamped into [min_filters, max_filters].
        filters = min(max_filters, int(filters * level_blocks[level].width))
        if min_filters > 0:
            filters = max(min_filters, filters)
        return filters

    # TODO: these may only work for b33
    def _ds_by_pl(data, name, level, dilate):
        # Pool-downsample when this level's mode is 'p'; no-op otherwise.
        assert level > 0
        level_block = level_blocks[level]
        if not level_block.downsample == 'p':
            return data, dilate
        pool_stride = stride = 2
        inc_dilate = level_block.dilate
        pad = -1
        if inc_dilate:
            # Keep resolution: stride-1 pool, grow dilation instead.
            pool_stride = 1
            pad = dilate
        print 'Pooling stride: {}, dilate: {}, pad: {}'.format(pool_stride, dilate, pad)
        top = pool(data, name, stride=pool_stride, dilate=dilate, pad=pad, pool_type=pool_type)
        if inc_dilate:
            dilate *= stride
        return top, dilate

    def _ds_by_cv(data, name, filters, level, dilate, kernel=3, dropout_rate=0.):
        # First (projection) residual block of a level; strides by 2
        # only when the PREVIOUS level's downsample mode is 'c'.
        assert level > 1
        level_block = level_blocks[level-1]
        if not level_block.downsample == 'c':
            print 'First block on level {}, dilate: {}'.format(level, dilate)
            top = res_block(data, name, filters, kernel=kernel,
                            dilate=dilate, identity_map=False,
                            dropout_rate=dropout_rate)
        else:
            stride = 2
            inc_dilate = level_block.dilate
            print 'First block on level {}, stride: {}, dilate: {}'.format(level, stride, dilate)
            top = res_block(data, name, filters, kernel=kernel, stride=stride,
                            dilate=dilate, inc_dilate=inc_dilate, identity_map=False,
                            dropout_rate=dropout_rate)
            if inc_dilate:
                dilate *= stride
        return top, dilate

    dilate = 1
    pool_type = 'max'
    crop_size = 224  # reference input size used only for shape printouts

    data = mx.sym.Variable('data')
    # 224^2 3

    level = 0; print 'Level {}'.format(level)
    conv0 = data
    # 224^2 3
    print conv0.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    # Level 1: conv1a plus a pre-activation conv1b* stack, residual
    # blocks, then optional pool downsampling.
    level = 1; print 'Level {}'.format(level)
    filters = _widen(64, 1024)
    res1 = conv(conv0, 'conv1a',
                filters,
                kernel=conv1_layers[0].kernel,
                stride=1,
                no_bias=True)
    for i, conv1_layer in enumerate(conv1_layers[1:]):
        names = ['conv1b{}_bn'.format(i),
                 'conv1b{}_relu'.format(i),
                 'conv1b{}_do'.format(i),
                 'conv1b{}'.format(i),]
        res1 = conv_stage(res1, names,
                          filters,
                          kernel=conv1_layer.kernel,
                          stride=1,
                          no_bias=True)
    for i in xrange(level_blocks[level].num):
        res1 = res_block(res1, '1b{}'.format(i+1), filters)
    # pass dilate from now on
    assert dilate == 1
    res1, dilate = _ds_by_pl(res1, 'pool1', level, dilate)
    # 112^2 64
    print res1.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    # Levels 2-4: projection first block, identity blocks, optional pool.
    level = 2; print 'Level {}'.format(level)
    filters = _widen(128, 1024)
    res2, dilate = _ds_by_cv(res1, '2a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res2 = res_block(res2, '2b{}'.format(i), filters, dilate=dilate)
    res2, dilate = _ds_by_pl(res2, 'pool2', level, dilate)
    # 56^2 128
    print res2.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    level = 3; print 'Level {}'.format(level)
    filters = _widen(256, 1024)
    res3, dilate = _ds_by_cv(res2, '3a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res3 = res_block(res3, '3b{}'.format(i), filters, dilate=dilate)
    res3, dilate = _ds_by_pl(res3, 'pool3', level, dilate)
    # 28^2 256
    print res3.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    level = 4; print 'Level {}'.format(level)
    filters = _widen(512, 1024)
    res4, dilate = _ds_by_cv(res3, '4a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res4 = res_block(res4, '4b{}'.format(i), filters, dilate=dilate)
    res4, dilate = _ds_by_pl(res4, 'pool4', level, dilate)
    # 14^2 512
    print res4.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    # Level 5 uses a two-element filters list (the two widths below).
    level = 5; print 'Level {}'.format(level)
    # wd = 0.5 : (512, 512)
    # wd = 1.0 : (512, 1024)
    # wd = 2.0 : (1024, 1024)
    filters_l1 = _widen(512, 1024, filters)
    filters = [filters_l1, _widen(1024, 1024, filters_l1)]
    res5, dilate = _ds_by_cv(res4, '5a', filters, level, dilate)
    for i in xrange(1, level_blocks[level].num):
        res5 = res_block(res5, '5b{}'.format(i), filters, dilate=dilate)
    res5, dilate = _ds_by_pl(res5, 'pool5', level, dilate)
    # 7^2 1024
    print res5.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    # Levels 6-7: single wide bottleneck blocks with per-level dropout.
    level = 6; print 'Level {}'.format(level)
    res6, dilate = _ds_by_cv(res5, '6a', [512, 1024, 2048],
                             level, dilate, kernel=[1, 3, 1],
                             dropout_rate=level_blocks[level].dropout)
    # 7^2 2048
    print res6.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    level = 7; print 'Level {}'.format(level)
    res7 = res_block(res6, '7a', [1024, 2048, 4096], kernel=[1, 3, 1],
                     dilate=dilate, identity_map=False,
                     dropout_rate=level_blocks[level].dropout)
    # 7^2 4096
    print res7.infer_shape(data=(64, 3, crop_size, crop_size))[1]

    # Final pre-activation head: BN + ReLU over the last residual output.
    bn7 = bn(res7, 'bn7')
    relu7 = relu(bn7, 'relu7')
    return relu7