def get_test_symbol(self, num_classes, cfg): """ get symbol for training :param num_classes: num of classes :return: the symbol for training """ # shared convolutional symbols data_shape = (cfg.TEST.BATCH_IMAGES, 3, cfg.SCALES[0][0], cfg.SCALES[0][1]) data = mx.sym.Variable('data', shape=data_shape) seg_cls_gt = mx.symbol.Variable(name='label') # shared convolutional symbols P_feature = resnet38_v5_dcn.get_conv_feature( data, is_train=False, workspace=self.workspace, fix_gamma=self.fix_gamma, use_global_stats=self.use_global_stats, eps=self.eps) rnn_feature_list = [] scale_name = ['a', 'b', 'c'] if cfg.network.scale_list == [1, 2, 4]: scale_name = ['', '', ''] if cfg.network.use_weight: self.num_hidden = 512 for idx, i in enumerate(cfg.network.scale_list): if cfg.network.use_weight: num_hidden = self.num_hidden / i else: num_hidden = self.num_hidden print "num_hidden", num_hidden rnn_feature = RnnMap_cudnn_v2(P_feature[3], name="RNNRelation_" + str(i) + scale_name[idx], type=self.rnn_type, PatchSize=self.patch_size, num_hidden=num_hidden, use_memory=True, old_type=False, skip_step=i, parallel=True) rnn_feature_list.append(rnn_feature) rnn_feature = mx.symbol.concat(*rnn_feature_list) fcn_fusion = self.get_fcn_top(rnn_feature, seg_cls_gt, 'FUSION', num_classes=num_classes, cfg=cfg, is_train=False) return mx.symbol.Group([fcn_fusion, rnn_feature])
def get_train_symbol(self, num_classes, cfg): """ get symbol for training :param num_classes: num of classes :return: the symbol for training """ if cfg.TRAIN.enable_crop: data_shape = (cfg.TRAIN.BATCH_IMAGES, 3, cfg.TRAIN.crop_size[0], cfg.TRAIN.crop_size[1]) else: data_shape = (cfg.TRAIN.BATCH_IMAGES, 3, cfg.SCALES[0][0], cfg.SCALES[0][1]) data = mx.sym.Variable('data', shape=data_shape) seg_cls_gt = mx.symbol.Variable(name='label') num_instance = data_shape[0] * data_shape[2] * data_shape[3] # shared convolutional symbols array = cfg.network.use_mult_label_weight P_feature = resnet38_v5_dcn.get_conv_feature(data, is_train=True, workspace=self.workspace, fix_gamma=self.fix_gamma, use_global_stats=self.use_global_stats, eps=self.eps) rnn_feature_list = [] metric_top_list = [] scale_name = ['a','b','c'] if cfg.network.scale_list == [1,2,4]: scale_name=['','',''] metric_grad_scale = float(array[1]) / num_instance / 9 print "metric grad scale", metric_grad_scale if cfg.network.use_weight: self.num_hidden= 512 for idx, i in enumerate(cfg.network.scale_list): if cfg.network.use_weight: num_hidden = self.num_hidden/i else: num_hidden = self.num_hidden print "num_hidden", num_hidden rnn_feature = RnnMap_cudnn_v2(P_feature[3], name="RNNRelation_"+str(i)+scale_name[idx], type=self.rnn_type, PatchSize=self.patch_size, num_hidden=num_hidden, use_memory=True,old_type=False,skip_step=i) metric_gt = mx.symbol.Variable(name='metric_label_'+str(i)+scale_name[idx]) metric_top_list.append(self.get_metric_top(conv_feature=rnn_feature, metric_label=metric_gt, grad_scale=metric_grad_scale,skip_step=i,scale_name=scale_name[idx])) rnn_feature_list.append(rnn_feature) rnn_feature = mx.symbol.concat(*rnn_feature_list) if cfg.network.use_origin: rnn_feature = mx.symbol.concat(rnn_feature,P_feature[3]) fcn_fusion = self.get_fcn_top(rnn_feature, seg_cls_gt, 'FUSION', num_classes=num_classes, cfg=cfg,is_train=True,grad_scale=array[0]) loss = [fcn_fusion, 
metric_top_list[0],metric_top_list[1],metric_top_list[2]] sym = mx.symbol.Group(loss) return sym
def get_train_symbol(self, num_classes, cfg):
    """
    Build the training symbol (single-scale RnnMap_cudnn variant).

    The backbone feature passes through one RnnMap_cudnn relation module,
    then the FUSION segmentation head; the head's metric feature feeds a
    single metric loss.

    :param num_classes: number of segmentation classes
    :param cfg: experiment configuration
    :return: symbol group of (fusion loss, metric loss)
    """
    if cfg.TRAIN.enable_crop:
        data_shape = (cfg.TRAIN.BATCH_IMAGES, 3, cfg.TRAIN.crop_size[0], cfg.TRAIN.crop_size[1])
    else:
        data_shape = (cfg.TRAIN.BATCH_IMAGES, 3, cfg.SCALES[0][0], cfg.SCALES[0][1])
    data = mx.sym.Variable('data', shape=data_shape)
    seg_cls_gt = mx.symbol.Variable(name='label')
    metric_gt = mx.symbol.Variable(name='metric_label')
    # array[0]: seg-loss grad scale, array[1]: metric-loss grad scale
    array = cfg.network.use_mult_label_weight
    # total pixel count; used to normalize the metric-loss gradient
    num_instance = data_shape[0] * data_shape[2] * data_shape[3]
    # divide by 9 — presumably the metric samples a 3x3 neighborhood; TODO confirm
    metric_grad_scale = float(array[1]) / num_instance / 9

    # shared convolutional backbone
    P_feature = resnet38_v5_dcn.get_conv_feature(
        data, is_train=True, workspace=self.workspace,
        fix_gamma=self.fix_gamma, use_global_stats=self.use_global_stats,
        eps=self.eps)
    rnn_feature = RnnMap_cudnn(P_feature[3], name="RNNRelation",
                               type=self.rnn_type, PatchSize=self.patch_size,
                               num_hidden=self.num_hidden, use_memory=True)
    # Fix: removed a redundant re-assignment of `array` that duplicated the
    # one above; no behavioral change.
    fcn_fusion, metric_feature = self.get_fcn_top(rnn_feature, seg_cls_gt,
                                                  'FUSION',
                                                  num_classes=num_classes,
                                                  cfg=cfg, is_train=True,
                                                  grad_scale=array[0])
    metric_top = self.get_metric_top(conv_feature=metric_feature,
                                     metric_label=metric_gt,
                                     grad_scale=metric_grad_scale)
    sym = mx.symbol.Group([fcn_fusion, metric_top])
    return sym
def get_test_symbol(self, num_classes, cfg):
    """
    Build the inference (test-time) symbol (single-scale RnnMap_cudnn variant).

    :param num_classes: number of segmentation classes
    :param cfg: experiment configuration
    :return: symbol group of (fusion head output, backbone feature, RNN feature)
    """
    data_shape = (cfg.TEST.BATCH_IMAGES, 3, cfg.SCALES[0][0], cfg.SCALES[0][1])
    data = mx.sym.Variable('data', shape=data_shape)
    seg_cls_gt = mx.symbol.Variable(name='label')

    # shared convolutional backbone.
    # Fix: was is_train=True in this *test* symbol — inconsistent with the
    # multi-scale get_test_symbol, which builds the backbone with
    # is_train=False for inference.
    P_feature = resnet38_v5_dcn.get_conv_feature(
        data, is_train=False, workspace=self.workspace,
        fix_gamma=self.fix_gamma, use_global_stats=self.use_global_stats,
        eps=self.eps)
    rnn_feature = RnnMap_cudnn(P_feature[3], name="RNNRelation",
                               type=self.rnn_type, PatchSize=self.patch_size,
                               num_hidden=self.num_hidden, use_memory=True)
    fcn_fusion, _ = self.get_fcn_top(rnn_feature, seg_cls_gt, 'FUSION',
                                     num_classes=num_classes, cfg=cfg,
                                     is_train=False)
    return mx.sym.Group([fcn_fusion, P_feature[3], rnn_feature])