Example #1
def conv_stage(data,
               names,
               filters,
               kernel=3,
               stride=1,
               dilate=1,
               pad=-1,
               groups=1,
               no_bias=True,
               dropout_rate=0.):
    """Pre-activation stage: BN -> ReLU -> optional Dropout -> Conv.

    `names` supplies one layer name per sub-layer, in order.
    Returns the post-ReLU activation together with the convolution output.
    """
    i = 0
    bn1 = bn(data, names[i])
    i += 1
    relu1 = relu(bn1, names[i])
    i += 1
    if dropout_rate > 0.:
        dropout1 = dropout(relu1, names[i], p=dropout_rate)
    i += 1  # advance past the dropout name even when dropout is disabled
    top = conv(dropout1 if dropout_rate > 0. else relu1,
               names[i],
               filters,
               kernel=kernel,
               stride=stride,
               dilate=dilate,
               pad=pad,
               groups=groups,
               no_bias=no_bias)
    return relu1, top
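
The helper functions bn, relu, dropout and conv are not defined in these snippets. Below is a minimal sketch of what such wrappers might look like, assuming they are thin wrappers over MXNet's symbol API and assuming that pad=-1 stands for "same"-style padding derived from the kernel size and dilation; neither assumption is confirmed by the examples.

import mxnet as mx


def bn(data, name):
    # Batch-normalization wrapper (assumed thin wrapper over mx.sym.BatchNorm).
    return mx.sym.BatchNorm(data=data, name=name)


def relu(data, name):
    # ReLU activation wrapper.
    return mx.sym.Activation(data=data, act_type='relu', name=name)


def dropout(data, name, p=0.5):
    # Dropout wrapper; p is the drop probability.
    return mx.sym.Dropout(data=data, p=p, name=name)


def conv(data, name, filters, kernel=3, stride=1, dilate=1, pad=-1,
         groups=1, no_bias=True):
    # Convolution wrapper. pad=-1 is interpreted here (an assumption) as
    # "same"-style padding derived from the kernel size and dilation.
    if pad < 0:
        pad = ((kernel - 1) * dilate) // 2
    return mx.sym.Convolution(data=data, name=name, num_filter=filters,
                              kernel=(kernel, kernel), stride=(stride, stride),
                              dilate=(dilate, dilate), pad=(pad, pad),
                              num_group=groups, no_bias=no_bias)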
Example #2
def conv_stage(data,
               name,
               filters,
               kernel=3,
               stride=1,
               dilate=1,
               pad=-1,
               groups=1,
               no_bias=False,
               has_bn=False,
               dropout_rate=0.,
               has_relu=True):
    """Post-activation stage: Conv -> optional BN -> optional Dropout -> optional ReLU.

    Sub-layer names are derived from `name` with `_bn`, `_do` and `_relu` suffixes.
    """
    top = conv(data,
               name,
               filters,
               kernel=kernel,
               stride=stride,
               dilate=dilate,
               pad=pad,
               groups=groups,
               no_bias=no_bias)
    if has_bn:
        top = bn(top, name='{}_bn'.format(name))
    if dropout_rate > 0.0:
        top = dropout(top, '{}_do'.format(name), p=dropout_rate)
    if has_relu:
        top = relu(top, '{}_relu'.format(name))
    return top
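
For reference, a hypothetical usage sketch of this post-activation conv_stage, assuming the MXNet-style helper wrappers sketched under Example #1 (the layer names and filter counts are illustrative only):

import mxnet as mx

# Hypothetical usage: stack two post-activation stages on an input symbol.
data = mx.sym.Variable('data')
c1 = conv_stage(data, 'conv1', filters=64, kernel=3, stride=2,
                has_bn=True, has_relu=True)
c2 = conv_stage(c1, 'conv2', filters=128, kernel=3, stride=1,
                has_bn=True, dropout_rate=0.1, has_relu=True)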
Example #3
def res_conv_stage(data,
                   name,
                   filters,
                   kernel=3,
                   stride=1,
                   dilate=1,
                   pad=-1,
                   groups=1,
                   no_bias=True,
                   has_relu=True,
                   dropout_rate=0.):
    """Residual-style conv stage: Conv -> BN -> optional Dropout -> optional ReLU.

    Layer names are prefixed with 'res' and 'bn' (ResNet-style naming).
    """
    top = conv(data,
               'res{}'.format(name),
               filters,
               kernel=kernel,
               stride=stride,
               dilate=dilate,
               pad=pad,
               groups=groups,
               no_bias=no_bias)
    top = bn(data=top, name='bn{}'.format(name))
    if dropout_rate > 0.0:
        top = dropout(top, 'res{}_do'.format(name), p=dropout_rate)
    if has_relu:
        top = relu(top, 'res{}_relu'.format(name))
    return top
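
res_conv_stage builds only the convolutional branch; the shortcut addition would have to be done by the caller. A hedged sketch of such a residual block, again assuming MXNet symbols and a shape-compatible shortcut (the block name '2a' is purely illustrative):

import mxnet as mx

# Hypothetical residual block: two stages, ReLU applied after the shortcut add.
x = mx.sym.Variable('data')
branch = res_conv_stage(x, '2a_branch2a', filters=64, kernel=3)
branch = res_conv_stage(branch, '2a_branch2b', filters=64, kernel=3,
                        has_relu=False)
# Assumes the shortcut and branch shapes match (no projection shown).
out = mx.sym.Activation(data=x + branch, act_type='relu', name='res2a_out')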