Example #1
def get_symbol():
    # Inference symbol. Assumes `import mxnet as mx` and an external `get_symbol_data`
    # that returns a ResNet-101 classification symbol built on `data`.
    data = mx.sym.var(name='data')
    resnet101 = get_symbol_data(data, num_classes=1000, num_layers=101, image_shape=(3,500,500))
    internals = resnet101.get_internals()
    resnet101_22b = internals['_plus29_output']  # last res4 (stride-16) residual block
    resnet101_3c = internals['_plus6_output']    # last res3 (stride-8) residual block

    # 1x1 score convolution on the stride-16 feature, upsampled x2 with fixed bilinear weights (lr_mult=0),
    # cropped to the stride-8 feature size and fused with a 1x1 score map computed on res3.
    score_res4 = mx.sym.Convolution(data=resnet101_22b, num_filter=150, kernel=(1,1), stride=(1,1), pad=(0,0), name='score_res4')
    bilinear_weight = mx.sym.Variable(name='bilinear_weight', init=mx.init.Bilinear(), attr={'lr_mult':'0.0'})
    score4_up = mx.sym.UpSampling(*[score_res4, bilinear_weight], num_filter=150, scale=2, sample_type='bilinear', name='score4_up', num_args=2)
    score4 = mx.sym.Crop(*[score4_up, resnet101_3c], name='score4')

    score_res3 = mx.sym.Convolution(data=resnet101_3c, num_filter=150, kernel=(1,1), stride=(1,1), name='score_res3')
    score_fused = score4 + score_res3

    # First 50 of the 150 fused channels are classification scores (25 anchors x 2 classes);
    # reshape so the class axis has size 2 and apply a softmax over it.
    cls_map = mx.sym.slice_axis(data=score_fused, axis=1, begin=0, end=50)
    cls_map = mx.sym.reshape(data=cls_map, name='reshape_cls_map', shape=(0,2,-1,0))

    cls_map = mx.sym.softmax(data=cls_map, name='cls_map_prob', axis=1)

    # Remaining 100 channels are box-regression outputs (25 anchors x 4 coordinates).
    reg_map = mx.sym.slice_axis(data=score_fused, axis=1, begin=50, end=150)

    net = mx.sym.Group([cls_map, reg_map])

    return net
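
A minimal usage sketch (assuming MXNet 1.x is importable as mx and that get_symbol_data resolves): the inference symbol above can be checked with infer_shape before binding, which propagates the 500x500 input shape through the graph and infers all weight shapes, including the bilinear upsampling kernel.

import mxnet as mx

net = get_symbol()
# infer_shape returns (arg_shapes, out_shapes, aux_shapes); for a 500x500 input the fused
# score map should come out around 63x63, so expect shapes close to (1, 2, 25*63, 63) for
# the class probabilities and (1, 100, 63, 63) for the regression map.
arg_shapes, out_shapes, aux_shapes = net.infer_shape(data=(1, 3, 500, 500))
print(net.list_outputs())
print(out_shapes)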
Example #2
def get_symbol_focal_loss():
    # Training symbol: same backbone and detection head as get_symbol(), with a focal classification loss.
    data = mx.sym.var(name='data')
    labels = mx.sym.var(name='labels')
    bbox_targets = mx.sym.var(name='bbox_targets')
    bbox_inside_weights = mx.sym.var(name='bbox_inside_weights')

    resnet101 = get_symbol_data(data, num_classes=1000, num_layers=101, image_shape=(3,500,500))
    internals = resnet101.get_internals()
    resnet101_22b = internals['_plus29_output']
    resnet101_3c = internals['_plus6_output']

    score_res4 = mx.sym.Convolution(data=resnet101_22b, num_filter=150, kernel=(1,1), stride=(1,1), pad=(0,0), name='score_res4')
    bilinear_weight = mx.sym.Variable(name='bilinear_weight', init=mx.init.Bilinear(), attr={'lr_mult':'0.0'})
    score4_up = mx.sym.UpSampling(*[score_res4, bilinear_weight], num_filter=150, scale=2, sample_type='bilinear', name='score4_up', num_args=2)
    score4 = mx.sym.Crop(*[score4_up, resnet101_3c], name='score4')

    score_res3 = mx.sym.Convolution(data=resnet101_3c, num_filter=150, kernel=(1,1), stride=(1,1), name='score_res3')
    score_fused = score4 + score_res3

    cls_map = mx.sym.slice_axis(data=score_fused, axis=1, begin=0, end=50)
    cls_map = mx.sym.reshape(data=cls_map, name='reshape_cls_map', shape=(0,2,-1,0))

    cls_map = mx.sym.softmax(data=cls_map, name='cls_map_prob', axis=1)
    # Reshape labels to the (N, 25*63, 63) layout expected by pick() below (63x63 feature map, 25 anchors).
    labels = mx.sym.reshape(data=labels, name='reshape_label', shape=(-1,25*63,63))

    # pick() selects the predicted probability of the ground-truth class at every anchor position.
    cls_prob = mx.sym.pick(data=cls_map, index=labels, axis=1)
    cls_prob = mx.sym.reshape(data=cls_prob, shape=(-1,))
    # Focal loss: alpha * (1 - p_t)^gamma * (-log p_t) with alpha=0.25, gamma=2; the probability is
    # clamped at 1e-10 for numerical stability, and the mean is scaled by 100.
    focal_loss_ = 100 * mx.sym.mean(- 0.25 * mx.sym.pow(1 - cls_prob, 2) * mx.sym.log(mx.sym.maximum(cls_prob, 1e-10)))
    #focal_loss_ = 1000 * mx.sym.mean(- 0.25 * mx.sym.pow(1 - cls_prob, 10) * mx.sym.log(mx.sym.maximum(cls_prob, 1e-10)))
    focal_loss = mx.sym.MakeLoss(data=focal_loss_, grad_scale=1.0, name='focal_loss')

    reg_map = mx.sym.slice_axis(data=score_fused, axis=1, begin=50, end=150)
    # Smooth-L1 regression loss, masked by bbox_inside_weights so only assigned anchor positions contribute.
    loc_loss_ = mx.symbol.smooth_l1(name="loc_loss_", \
                data=bbox_inside_weights * (reg_map - bbox_targets), scalar=1.0)
    loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1., \
                normalization='valid', name="loc_loss")

    # Zero-gradient pass-through outputs (useful for metrics); note they are not included in the group below.
    cls_label = mx.sym.MakeLoss(data=labels, grad_scale=0, name="cls_label")
    cls_pred = mx.sym.MakeLoss(data=cls_map, grad_scale=0, name='cls_pred')

    net = mx.sym.Group([focal_loss, loc_loss])

    return net
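
The classification term above is the focal loss alpha * (1 - p_t)^gamma * (-log p_t) with alpha=0.25 and gamma=2, averaged over all anchor positions and scaled by 100. A small NDArray check of the same expression (the probabilities are made-up values, purely to illustrate how confident predictions are down-weighted):

import mxnet as mx

p_t = mx.nd.array([0.9, 0.6, 0.1])   # picked probability of the ground-truth class
fl = -0.25 * (1 - p_t) ** 2 * mx.nd.log(mx.nd.maximum(p_t, 1e-10))
print(fl.asnumpy())                  # the well-classified 0.9 example contributes far less than the 0.1 one
print((100 * fl.mean()).asscalar())  # same scaling as focal_loss_ above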
Example #3
def get_symbol_softmax_loss(is_train=True):
    # Variant that trains the classification head with SoftmaxOutput (cross-entropy) instead of the focal loss.
    data = mx.sym.var(name='data')
    labels = mx.sym.var(name='labels')
    bbox_targets = mx.sym.var(name='bbox_targets')
    bbox_inside_weights = mx.sym.var(name='bbox_inside_weights')

    resnet101 = get_symbol_data(data, num_classes=1000, num_layers=101, image_shape=(3,500,500))
    internals = resnet101.get_internals()
    resnet101_22b = internals['_plus29_output']
    resnet101_3c = internals['_plus6_output']

    score_res4 = mx.sym.Convolution(data=resnet101_22b, num_filter=150, kernel=(1,1), stride=(1,1), pad=(0,0), name='score_res4')
    bilinear_weight = mx.sym.Variable(name='bilinear_weight', init=mx.init.Bilinear(), attr={'lr_mult':'0.0'})
    score4_up = mx.sym.UpSampling(*[score_res4, bilinear_weight], num_filter=150, scale=2, sample_type='bilinear', name='score4_up', num_args=2)
    score4 = mx.sym.Crop(*[score4_up, resnet101_3c], name='score4')

    score_res3 = mx.sym.Convolution(data=resnet101_3c, num_filter=150, kernel=(1,1), stride=(1,1), name='score_res3')
    score_fused = score4 + score_res3

    # Same 50/100 channel split as in get_symbol(); labels are reshaped to line up with the class map.
    cls_map = mx.sym.slice_axis(data=score_fused, axis=1, begin=0, end=50)
    labels = mx.sym.reshape(data=labels, name='reshape_label', shape=(0,1,25*63,63))
    cls_map = mx.sym.reshape(data=cls_map, name='reshape_cls_map', shape=(0,2,-1,0))
    if not is_train:
        # At inference time apply the softmax explicitly; during training SoftmaxOutput below applies it internally.
        cls_map = mx.sym.softmax(data=cls_map, axis=1, name='cls_map_prob')
    reg_map = mx.sym.slice_axis(data=score_fused, axis=1, begin=50, end=150)

    # Per-position 2-way softmax cross-entropy; positions labelled -1 are ignored.
    cls_prob = mx.symbol.SoftmaxOutput(data=cls_map, label=labels, \
                ignore_label=-1, use_ignore=True, grad_scale=1, multi_output=True, \
                normalization='valid', name="cls_prob")

    # Smooth-L1 regression loss (scalar=3.0), masked by bbox_inside_weights.
    loc_loss_ = mx.symbol.smooth_l1(name="loc_loss_", \
                data=bbox_inside_weights * (reg_map - bbox_targets), scalar=3.0)
    loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1., \
                normalization='valid', name="loc_loss")

    # Zero-gradient outputs so labels and raw class scores are available for metric computation.
    cls_label = mx.sym.MakeLoss(data=labels, grad_scale=0, name="cls_label")
    cls_pred = mx.sym.MakeLoss(data=cls_map, grad_scale=0, name='cls_pred')

    out = mx.symbol.Group([cls_prob, loc_loss, cls_label, cls_pred])
    #out = mx.symbol.Group([score4_up, labels, bbox_targets, bbox_inside_weights])
    return out
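
A hedged sketch of how a multi-input training symbol like this is typically driven through a Module: the extra label-like inputs have to be declared explicitly in label_names, and the shapes below are assumptions that follow from the 63x63 feature map implied by the reshapes above (in practice the ResNet weights would be loaded from a pretrained checkpoint rather than init_params alone):

import mxnet as mx

sym = get_symbol_softmax_loss(is_train=True)
mod = mx.mod.Module(symbol=sym,
                    data_names=('data',),
                    label_names=('labels', 'bbox_targets', 'bbox_inside_weights'),
                    context=mx.cpu())
mod.bind(data_shapes=[('data', (1, 3, 500, 500))],
         label_shapes=[('labels', (1, 25, 63, 63)),
                       ('bbox_targets', (1, 100, 63, 63)),
                       ('bbox_inside_weights', (1, 100, 63, 63))],
         for_training=True)
mod.init_params()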