예제 #1
0
def resnet_conv(input, num_filters):
    """ResNet block: two 3x3 convs merged with a 1x1 conv projection branch."""
    # Projection branch: 1x1 conv-bn-relu applied to the raw input.
    shortcut = conv_bn_relu(input, (1, 1), num_filters)
    # Main branch: stacked 3x3 convolutions.
    main = conv_bn_relu(input, (3, 3), num_filters)
    main = conv_bn(main, (3, 3), num_filters, bn_init_scale=1)
    return relu(main + shortcut)
예제 #2
0
def resnet_basic_constant(input, num_filters):
    """Basic residual block where both branches are scaled by 0.5 before the add."""
    out = conv_bn_relu(input, (3, 3), num_filters)
    out = conv_bn(out, (3, 3), num_filters, bn_init_scale=1)
    # Halve the residual and the identity contribution equally.
    out = out * 0.5
    input = input * 0.5
    return relu(out + input)
예제 #3
0
def conv_bn_relu(input,
                 filter_size,
                 num_filters,
                 strides=(1, 1),
                 init=he_normal()):
    """Convolution + batch norm (bn scale initialized to 1) capped with ReLU."""
    return relu(conv_bn(input, filter_size, num_filters, strides, init, 1))
예제 #4
0
def resnet_basic(input, out_feature_map_count, bn_time_const):
    """Identity-shortcut residual block built from two 3x3 conv-bn layers."""
    out = conv_bn_relu_layer(input, out_feature_map_count, [3, 3], [1, 1],
                             bn_time_const)
    out = conv_bn_layer(out, out_feature_map_count, [3, 3], [1, 1],
                        bn_time_const)
    # Merge with the identity shortcut, then apply the final non-linearity.
    return relu(out + input)
예제 #5
0
def resnet_bottleneck_inc(input, out_num_filters, inter_out_num_filters, stride1x1, stride3x3):
    """Bottleneck residual block (1x1 reduce, 3x3, 1x1 expand) with a strided shortcut."""
    reduced = conv_bn_relu(input, (1, 1), inter_out_num_filters, strides=stride1x1)
    mid = conv_bn_relu(reduced, (3, 3), inter_out_num_filters, strides=stride3x3)
    # bn_init_scale=0 lets the block start close to an identity mapping.
    expanded = conv_bn(mid, (1, 1), out_num_filters, bn_init_scale=0)
    # Shortcut must downsample by the combined stride of both strided convs.
    shortcut = conv_bn(input, (1, 1), out_num_filters,
                       strides=np.multiply(stride1x1, stride3x3))
    return relu(expanded + shortcut)
예제 #6
0
def resnet_basic_inc(input, num_filters):
    """Downsampling residual block (stride 2) with a 1x1 projection shortcut."""
    # Projection shortcut matches the main path's downsampled shape.
    shortcut = convolution_bn(input, (1, 1), num_filters, strides=(2, 2), activation=None)
    main = convolution_bn(input, (3, 3), num_filters, strides=(2, 2))
    main = convolution_bn(main, (3, 3), num_filters, activation=None)
    return relu(main + shortcut)
예제 #7
0
def resnet_bottleneck_inc(input, out_num_filters, inter_out_num_filters, stride1x1, stride3x3):
    """1x1-3x3-1x1 bottleneck whose shortcut is a strided 1x1 projection."""
    combined_stride = np.multiply(stride1x1, stride3x3)
    out = conv_bn_relu(input, (1, 1), inter_out_num_filters, strides=stride1x1)
    out = conv_bn_relu(out, (3, 3), inter_out_num_filters, strides=stride3x3)
    # bn scale 0 so the new layers initially contribute little to the sum.
    out = conv_bn(out, (1, 1), out_num_filters, bn_init_scale=0)
    projection = conv_bn(input, (1, 1), out_num_filters, strides=combined_stride)
    return relu(out + projection)
예제 #8
0
def resnet_drop(input, num_filters):
    """Residual block whose shortcut is the input passed through Dropout(0.5)."""
    conv_branch = conv_bn_relu(input, (3, 3), num_filters)
    conv_branch = conv_bn(conv_branch, (3, 3), num_filters, bn_init_scale=1)
    # Stochastic shortcut: drop half the identity activations during training.
    drop_branch = Dropout(0.5)(input)
    return relu(drop_branch + conv_branch)
예제 #9
0
def resnet_exclusive(input, num_filters):
    """Residual variant gating the identity path by 1 - sigmoid of a 1x1 branch."""
    main = conv_bn_relu(input, (3, 3), num_filters)
    main = conv_bn(main, (3, 3), num_filters, bn_init_scale=1)
    # Gate: suppress the identity where the 1x1 branch activates strongly.
    gate = 1 - C.sigmoid(conv_bn_relu(input, (1, 1), num_filters))
    gated_input = input * gate
    return relu(gated_input + main)
예제 #10
0
def Res_C(input, n, m):
    """Res-C block: a 1x1 branch and a factorized 1x1->1x3->3x1 branch, spliced."""
    branch_a = conv_bn(input, (1, 1), n, bn_init_scale=1)

    branch_b = conv_bn(input, (1, 1), n, bn_init_scale=1)
    branch_b = conv_bn(branch_b, (1, 3), n, bn_init_scale=1)
    branch_b = conv_bn(branch_b, (3, 1), n, bn_init_scale=1)

    # Concatenate on the channel axis, then project back to m feature maps.
    merged = splice(branch_a, branch_b, axis=0)
    projected = conv_bn(merged, (1, 1), m, bn_init_scale=1)
    return relu(projected + input)
예제 #11
0
def Res_C(input, a1, b1, b2, b3, c1):
    """Res-C block with per-layer filter counts; residual added after a 1x1 projection."""
    left = conv_bn(input, (1, 1), a1, bn_init_scale=1)

    # Factorized spatial conv: 1x3 followed by 3x1.
    right = conv_bn(input, (1, 1), b1, bn_init_scale=1)
    right = conv_bn(right, (1, 3), b2, bn_init_scale=1)
    right = conv_bn(right, (3, 1), b3, bn_init_scale=1)

    # Channel-wise concat of both branches, projected down to c1 maps.
    fused = conv_bn(splice(left, right, axis=0), (1, 1), c1, bn_init_scale=1)
    return relu(fused + input)
예제 #12
0
def conv_dw(input, fillter_size, num_filters, strides=(1, 1),
            init=he_normal()):
    """Padded, bias-free convolution followed by a ReLU activation.

    Args:
        input: tensor / CNTK function to convolve.
        fillter_size: spatial kernel size, e.g. (3, 3). (Parameter name kept
            misspelled for backward compatibility with keyword callers.)
        num_filters: number of output feature maps.
        strides: convolution strides; defaults to (1, 1).
        init: weight initializer; defaults to he_normal().

    Returns:
        The ReLU of the convolution output.
    """
    # NOTE(review): the name suggests a depthwise convolution, but groups=1 is
    # a plain convolution — confirm whether groups should equal the number of
    # input channels.
    r = Convolution(fillter_size,
                    num_filters,
                    activation=None,
                    init=init,
                    pad=True,
                    strides=strides,
                    bias=False,
                    groups=1)(input)
    # Removed leftover debug print of r.shape.
    return relu(r)
예제 #13
0
def Res_A(input, a1, b1, c1, c2, c3, d1):
    """Res-A block: three parallel branches, spliced, projected, and added back."""
    # Branch 1: single 1x1 conv-bn-relu.
    first = conv_bn_relu(input, (1, 1), a1)

    # Branch 2: 1x1 followed by a 3x3.
    second = conv_bn(input, (1, 1), b1, bn_init_scale=1)
    second = conv_bn(second, (3, 3), b1, bn_init_scale=1)

    # Branch 3: 1x1 followed by two 3x3 convs.
    third = conv_bn(input, (1, 1), c1, bn_init_scale=1)
    third = conv_bn(third, (3, 3), c2, bn_init_scale=1)
    third = conv_bn(third, (3, 3), c3, bn_init_scale=1)

    # Concatenate on channels, project to d1 maps, add the residual.
    merged = conv_bn(splice(first, second, third, axis=0),
                     (1, 1), d1, bn_init_scale=1)
    return relu(merged + input)
예제 #14
0
    def relu(cntk_layer, inputs):
        """Instantiate a CNTK ReLU op for the given layer definition.

        Args:
            cntk_layer (:class:`~cntk.contrib.crosstalkcaffe.unimodel.cntkmodel.CntkLayersDefinition`):
                the layer definition of the ReLU op
            inputs (list): list of :class:`~cntk.ops.functions.Function` or
                :class:`~cntk.input`; only the first entry is used

        Return:
            :func:`~cntk.ops.functions.Function`: the instantiated CNTK ReLU op
        """
        # Only the first input feeds the op; sanitize it into a CNTK variable.
        operand = internal.sanitize_input(inputs[0])
        return ops.relu(operand, name=cntk_layer.op_name)
예제 #15
0
    def relu(cntk_layer, inputs):
        """Build the CNTK ReLU op described by *cntk_layer*.

        Args:
            cntk_layer (:class:`~cntk.contrib.crosstalkcaffe.unimodel.cntkmodel.CntkLayersDefinition`):
                the layer definition of the ReLU op
            inputs (list): list of :class:`~cntk.ops.functions.Function` or
                :class:`~cntk.input` nodes feeding this op; only the first
                is consumed

        Return:
            :func:`~cntk.ops.functions.Function`: the instantiated CNTK ReLU op
        """
        return ops.relu(internal.sanitize_input(inputs[0]),
                        name=cntk_layer.op_name)
예제 #16
0
def resnet_basic(layer_input, filter_size, num_filters, strides, prefix):
    """
    ResNet basic block: conv-bn-relu, conv-bn, residual add, final ReLU.
    Node names are derived from *prefix* so the graph stays inspectable.
    """
    out = conv_bn_relu(layer_input, filter_size, num_filters, strides,
                       name='{}_1'.format(prefix))
    out = conv_bn(out, filter_size, num_filters, strides,
                  name='{}_2'.format(prefix))
    residual = plus(out, layer_input, name='{}_res'.format(prefix))
    return relu(residual, name='{}_relu'.format(prefix))
예제 #17
0
def conv_bn_relu(layer_input,
                 filter_size,
                 num_filters,
                 strides,
                 init=he_normal(),
                 name=''):
    """
    Convolution plus batch normalization capped with ReLU; the ReLU node is
    named '<name>_relu' for graph inspection.
    """
    pre_activation = conv_bn(layer_input, filter_size, num_filters,
                             strides, init, name=name)
    return relu(pre_activation, name='{}_relu'.format(name))
예제 #18
0
def Res_A(input, n, m):
    """Three-branch Res-A block with shared width *n*, projected to *m* maps."""
    # Branch A: single 1x1.
    branch_a = conv_bn(input, (1, 1), n, bn_init_scale=1)

    # Branch B: 1x1 followed by a 3x3.
    branch_b = conv_bn(conv_bn(input, (1, 1), n, bn_init_scale=1),
                       (3, 3), n, bn_init_scale=1)

    # Branch C: 1x1 followed by two 3x3 convs.
    branch_c = conv_bn(input, (1, 1), n, bn_init_scale=1)
    branch_c = conv_bn(branch_c, (3, 3), n, bn_init_scale=1)
    branch_c = conv_bn(branch_c, (3, 3), n, bn_init_scale=1)

    # Concatenate on the channel axis and project back down to m filters.
    projected = conv_bn(splice(branch_a, branch_b, branch_c, axis=0),
                        (1, 1), m, bn_init_scale=1)
    return relu(projected + input)
예제 #19
0
def conv_bn_relu_layer(input,
                       num_filters,
                       filter_size,
                       strides=(1, 1),
                       pad=True,
                       bnTimeConst=4096,
                       init=he_normal()):
    """Bias-free convolution, batch normalization, then a ReLU activation."""
    # The convolution carries no bias or activation: BN supplies the shift.
    convolved = Convolution(filter_size, num_filters, activation=None,
                            init=init, pad=pad, strides=strides,
                            bias=False)(input)
    normalized = BatchNormalization(map_rank=1,
                                    normalization_time_constant=bnTimeConst,
                                    use_cntk_engine=False)(convolved)
    return relu(normalized)
예제 #20
0
def resnet_basic_inc(layer_input, filter_size, num_filters, strides, prefix):
    """
    ResNet basic block with a projection shortcut.
    Use when the number of channels (or spatial resolution) changes between
    input and output; node names derive from *prefix*.
    """
    unit_strides = np.ones_like(strides)
    # Main path: strided conv-bn-relu, then a stride-1 conv-bn.
    out = conv_bn_relu(layer_input, filter_size, num_filters, strides,
                       name='{}_1'.format(prefix))
    out = conv_bn(out, filter_size, num_filters, unit_strides,
                  name='{}_2'.format(prefix))
    # Projection shortcut with the same stride as the main path.
    shortcut = conv_bn(layer_input, unit_strides, num_filters, strides,
                       name='{}_3'.format(prefix))
    merged = plus(out, shortcut, name='{}_res'.format(prefix))
    return relu(merged, name='{}_relu'.format(prefix))
예제 #21
0
def Res_B(input, n, m):
    """Two-branch Res-B block: 1x1 branch plus a factorized 1x1->1x3->3x1 branch."""
    branch_a = conv_bn(input, (1, 1), n, bn_init_scale=1)

    # Factorized spatial conv: 1x3 followed by 3x1.
    branch_b = conv_bn(input, (1, 1), n, bn_init_scale=1)
    branch_b = conv_bn(branch_b, (1, 3), n, bn_init_scale=1)
    branch_b = conv_bn(branch_b, (3, 1), n, bn_init_scale=1)

    # Concatenate on channels and project to m maps before the residual add.
    merged = conv_bn(splice(branch_a, branch_b, axis=0),
                     (1, 1), m, bn_init_scale=1)
    return relu(merged + input)
예제 #22
0
def conv_bn_relu_layer(input, num_filters, filter_size, strides=(1,1), pad=True, bnTimeConst=4096, init=he_normal()):
    """Bias-free convolution followed by batch normalization and ReLU."""
    convolve = Convolution(filter_size, num_filters, activation=None, init=init,
                           pad=pad, strides=strides, bias=False)
    normalize = BatchNormalization(map_rank=1,
                                   normalization_time_constant=bnTimeConst,
                                   use_cntk_engine=False)
    return relu(normalize(convolve(input)))
예제 #23
0
def resnet_basic_inc(input, out_feature_map_count, strides, bn_time_const):
    """Downsampling residual block: strided main path plus 1x1 projection shortcut."""
    shortcut = conv_bn_layer(input, out_feature_map_count, [1, 1], strides, bn_time_const)
    out = conv_bn_relu_layer(input, out_feature_map_count, [3, 3], strides, bn_time_const)
    out = conv_bn_layer(out, out_feature_map_count, [3, 3], [1, 1], bn_time_const)
    return relu(out + shortcut)
예제 #24
0
def resnet_basic_inc(input, num_filters, strides=(2, 2)):
    """Residual block that downsamples (default stride 2) with a projected shortcut."""
    out = conv_bn_relu(input, (3, 3), num_filters, strides)
    out = conv_bn(out, (3, 3), num_filters)
    # 1x1 strided projection so the shortcut matches the main path's shape.
    return relu(out + conv_bn(input, (1, 1), num_filters, strides))
예제 #25
0
def resnet_basic(input, num_filters):
    """Identity-shortcut residual block: two 3x3 conv-bn layers plus the input."""
    residual = conv_bn(conv_bn_relu(input, (3, 3), num_filters),
                       (3, 3), num_filters)
    return relu(residual + input)
예제 #26
0
파일: CifarResNet.py 프로젝트: Soukiy/CNTK
def resnet_basic(input, num_filters):
    """Basic residual block built from two 3x3 convolution_bn layers."""
    out = convolution_bn(input, (3, 3), num_filters)
    # Second conv has no activation: ReLU is applied after the residual add.
    out = convolution_bn(out, (3, 3), num_filters, activation=None)
    return relu(out + input)
def resnet_basic_inc(input, num_filters, strides=(2, 2)):
    """Strided residual block with a 1x1 conv projection shortcut."""
    main = conv_bn_relu(input, (3, 3), num_filters, strides)
    main = conv_bn(main, (3, 3), num_filters, bn_init_scale=1)
    projection = conv_bn(input, (1, 1), num_filters, strides)  # Shortcut
    return relu(main + projection)
예제 #28
0
def resnet_basic_inc(input, num_filters, strides=(2,2)):
    """Downsampling residual block; a strided 1x1 conv-bn forms the shortcut."""
    shortcut = conv_bn(input, (1, 1), num_filters, strides)
    main = conv_bn(conv_bn_relu(input, (3, 3), num_filters, strides),
                   (3, 3), num_filters)
    return relu(main + shortcut)
예제 #29
0
def resnet_basic(input, num_filters):
    """Identity-shortcut residual block of two 3x3 conv-bn layers."""
    out = conv_bn_relu(input, (3, 3), num_filters)
    out = conv_bn(out, (3, 3), num_filters)
    return relu(out + input)
예제 #30
0
def conv_bn_relu(input, filter_size, num_filters, strides=(1,1), init=he_normal()):
    """conv_bn followed by a ReLU activation."""
    return relu(conv_bn(input, filter_size, num_filters, strides, init))
예제 #31
0
def resnet_bottleneck(input, out_num_filters, inter_out_num_filters):
    """Bottleneck residual block: 1x1 reduce, 3x3, 1x1 expand, identity shortcut."""
    out = conv_bn_relu(input, (1, 1), inter_out_num_filters)
    out = conv_bn_relu(out, (3, 3), inter_out_num_filters)
    # bn_init_scale=0 makes the block start close to an identity mapping.
    out = conv_bn(out, (1, 1), out_num_filters, bn_init_scale=0)
    return relu(out + input)
예제 #32
0
def resnet_bottleneck(input, out_num_filters, inter_out_num_filters):
    """1x1-3x3-1x1 bottleneck whose shortcut is the unmodified input."""
    narrow = conv_bn_relu(input, (1, 1), inter_out_num_filters)
    narrow = conv_bn_relu(narrow, (3, 3), inter_out_num_filters)
    wide = conv_bn(narrow, (1, 1), out_num_filters, bn_init_scale=0)
    return relu(wide + input)
예제 #33
0
def resnet_basic(input, num_filters):
    """Basic residual block; the second conv has no activation before the add."""
    residual = convolution_bn(
        convolution_bn(input, (3, 3), num_filters),
        (3, 3), num_filters, activation=None)
    return relu(residual + input)
예제 #34
0
def resnet_basic_inc(input, num_filters, strides=(2, 2)):
    """Strided basic block; the shortcut is a strided 1x1 conv-bn projection."""
    main = conv_bn_relu(input, (3, 3), num_filters, strides)
    main = conv_bn(main, (3, 3), num_filters, bn_init_scale=1)
    shortcut = conv_bn(input, (1, 1), num_filters, strides) # Shortcut
    return relu(main + shortcut)