def fcn_top_1(feat, classifier1, fc_name, classifier2, fc_name2, classes, bootstrapping=False): crop_size = 224 top = feat for j, layer in enumerate(classifier1[:-1]): # This naming (conv6) is derived from the ResNets (with five levels), # which is not accurate for our networks (with seven levels). top = conv_stage_v1(top, 'conv6{}'.format(chr(j+97)), layer.channels, kernel=layer.kernel, dilate=layer.dilate, dropout_rate=0.) layer = classifier1[-1] scores = conv(top, fc_name, layer.channels, kernel=layer.kernel, dilate=layer.dilate) print 'Scores' print scores.infer_shape(data=(64, 3, crop_size, crop_size))[1] top2 = feat for j, layer in enumerate(classifier2[:-1]): # This naming (conv6) is derived from the ResNets (with five levels), # which is not accurate for our networks (with seven levels). top2 = conv_stage_v1(top2, 'conv6{}'.format(chr(j+97)), layer.channels, kernel=layer.kernel, dilate=layer.dilate, dropout_rate=0.) layer2 = classifier2[-1] scores2 = conv(top2, fc_name2, layer2.channels, kernel=layer2.kernel, dilate=layer2.dilate) print 'Scores' #print scores.infer_shape(data=(64, 3, crop_size, crop_size))[1] classifier21 = rn_top_1(scores2, 'linear{}'.format(classes), classes) if not bootstrapping: return softmax_out(scores, multi_output=True), softmax_out(scores) else: from layer import OhemSoftmax, OhemSoftmaxProp return mx.sym.Custom(data=scores, name='softmax', op_type='ohem_softmax', ignore_label=255, # ignore_label=65, thresh=0.6, min_kept=256, margin=-1), softmax_out(scores)
def fcn_top(feat, classifier, fc_name, bootstrapping=False): crop_size = 224 top = feat for j, layer in enumerate(classifier[:-1]): # This naming (conv6) is derived from the ResNets (with five levels), # which is not accurate for our networks (with seven levels). top = conv_stage_v1(top, 'conv6{}'.format(chr(j+97)), layer.channels, kernel=layer.kernel, dilate=layer.dilate, dropout_rate=0.) layer = classifier[-1] scores = conv(top, fc_name, layer.channels, kernel=layer.kernel, dilate=layer.dilate) print 'Scores' print scores.infer_shape(data=(64, 3, crop_size, crop_size))[1] if not bootstrapping: return softmax_out(scores, multi_output=True) else: from layer import OhemSoftmax, OhemSoftmaxProp return mx.sym.Custom(data=scores, name='softmax', op_type='ohem_softmax', ignore_label=255, thresh=0.6, min_kept=256, margin=-1)
def fcn_top(feat, classifier, fc_name):
    """Attach a fully-convolutional classification head on top of `feat`.

    All but the last entry of `classifier` become intermediate conv stages;
    the last entry parameterizes the final scoring conv named `fc_name`.
    Returns a multi-output softmax over the resulting score map.
    """
    top = feat
    for j, layer in enumerate(classifier[:-1]):
        # BUGFIX: was `conv_state_v1`, a typo for `conv_stage_v1` — the helper
        # every other fcn_top variant in this file calls.
        top = conv_stage_v1(top, 'conv6{}'.format(chr(j + 97)), layer.channels,
                            kernel=layer.kernel, dilate=layer.dilate, dropout_rate=0.)
    layer = classifier[-1]
    scores = conv(top, fc_name, layer.channels, kernel=layer.kernel, dilate=layer.dilate)
    return softmax_out(scores, multi_output=True)
def fcn_top(feat, classifier, fc_name):
    """Attach a fully-convolutional classification head on top of `feat`.

    All but the last entry of `classifier` become intermediate conv stages;
    the last entry parameterizes the final scoring conv named `fc_name`.
    Returns a multi-output softmax over the resulting score map.
    """
    top = feat
    for j, layer in enumerate(classifier[:-1]):
        # This naming (conv6) is derived from the ResNets (with five levels),
        # which is not accurate for our networks (with seven levels).
        # BUGFIX: was `conv_state_v1`, a typo for `conv_stage_v1` — the helper
        # every other fcn_top variant in this file calls.
        top = conv_stage_v1(top, 'conv6{}'.format(chr(j + 97)), layer.channels,
                            kernel=layer.kernel, dilate=layer.dilate, dropout_rate=0.)
    layer = classifier[-1]
    scores = conv(top, fc_name, layer.channels, kernel=layer.kernel, dilate=layer.dilate)
    return softmax_out(scores, multi_output=True)
def rn_top(feat, fc_name, classes):
    """Image-classification top: global average pool, FC to `classes`, softmax."""
    pooled = pool(feat, 'pool7', pool_type='avg', global_pool=True)
    logits = fc(pooled, fc_name, classes)
    return softmax_out(logits)
def fcn_top_1(feat, classifier1, fc_name, classifier2, fc_name2, classes, bootstrapping=False):
    """Build a two-branch top and group both outputs into one symbol.

    Branch 1 (dense prediction): conv stages from `classifier1[:-1]` plus a
    scoring conv named `fc_name`, trained with either a plain multi-output
    softmax or, when `bootstrapping` is set, an OHEM softmax.
    Branch 2 (auxiliary image-level classifier): conv stages from
    `classifier2[:-1]` (named 'conv61*' to avoid clashing with branch 1's
    'conv6*' names), a scoring conv named `fc_name2`, then an `rn_top_1`
    head with `classes` outputs.

    Returns an mx.symbol.Group of [dense-prediction softmax, auxiliary head].
    The bare `print` statements are Python-2 debug output of inferred shapes
    for a fixed 64x3x224x224 input.
    """
    crop_size = 224  # only used for the debug shape-inference prints
    top = feat
    for j, layer in enumerate(classifier1[:-1]):
        # This naming (conv6) is derived from the ResNets (with five levels),
        # which is not accurate for our networks (with seven levels).
        top = conv_stage_v1(top, 'conv6{}'.format(chr(j + 97)), layer.channels,
                            kernel=layer.kernel, dilate=layer.dilate, dropout_rate=0.)
    layer = classifier1[-1]
    scores = conv(top, fc_name, layer.channels, kernel=layer.kernel, dilate=layer.dilate)
    print 'Scores'
    print scores.infer_shape(data=(64, 3, crop_size, crop_size))[1]
    # Auxiliary branch grows from the same backbone feature.
    top2 = feat
    for j, layer2 in enumerate(classifier2[:-1]):
        # This naming (conv6) is derived from the ResNets (with five levels),
        # which is not accurate for our networks (with seven levels).
        top2 = conv_stage_v1(top2, 'conv61{}'.format(chr(j + 97)), layer2.channels,
                             kernel=layer2.kernel, dilate=layer2.dilate, dropout_rate=0.)
    layer2 = classifier2[-1]
    scores2 = conv(top2, fc_name2, layer2.channels, kernel=layer2.kernel, dilate=layer2.dilate)
    print 'Scores'
    print scores2.infer_shape(data=(64, 3, crop_size, crop_size))[1]
    #print ('I am here')
    # Image-level classifier head over the auxiliary branch's score map.
    classifier21 = rn_top_1(scores2, 'bCls0', classes)
    print classifier21.infer_shape(data=(64, 3, crop_size, crop_size))[1]
    #mx.symbol.Group([sm1, sm2])
    #print 'I am here'
    print bootstrapping, classes, classifier21
    if not bootstrapping:
        #print 'I am here'
        return mx.symbol.Group(
            [softmax_out(scores, multi_output=True), classifier21])
    else:
        print 'I am here'
        # OhemSoftmax/OhemSoftmaxProp register the 'ohem_softmax' custom op
        # that mx.sym.Custom resolves by op_type below.
        from layer import OhemSoftmax, OhemSoftmaxProp
        return mx.symbol.Group([
            mx.sym.Custom(
                data=scores, name='softmax', op_type='ohem_softmax',
                ignore_label=255,
                # ignore_label=65,
                thresh=0.6, min_kept=256, margin=-1),
            classifier21
        ])
#def fcrna_model_a1_1(classes, inv_resolution=8, bootstrapping=False):
'''FCRNA Model A1_1'''
'''feat = rna_feat_a1(inv_resolution, dropout=True)