import tensorflow as tf

# ssd_resnet_50, evalUtil and multiclass_non_max_suppression are
# project-local helpers; import them from the corresponding modules.


def model_fn(features, labels, mode, params, config):
    """Model function for the Estimator.

    :param features: dict of input tensors; expects features['image'].
    :param labels: ground-truth targets consumed by ssdLoss / evaluation.
    :param mode: a tf.estimator.ModeKeys value.
    :param params: hyper-parameters: class_num, weight_decay, is_training,
        alpha, learning_rate.
    :param config: the estimator's RunConfig.
    :return: a tf.estimator.EstimatorSpec for the given mode.
    """
    image = features['image']

    # Init network.
    ssdnet = ssd_resnet_50.init(params['class_num'],
                                params['weight_decay'],
                                params['is_training'])

    # Compute output.
    logits, locations, endpoints = ssdnet(image)

    if mode == tf.estimator.ModeKeys.TRAIN:
        # Compute SSD loss and add it to the global loss collection.
        ssd_resnet_50.ssdLoss(logits, locations, labels, params['alpha'])
        total_loss = tf.losses.get_total_loss()

        # Create train op.
        optimizer = tf.train.GradientDescentOptimizer(
            learning_rate=params['learning_rate'])
        train_op = optimizer.minimize(
            total_loss, global_step=tf.train.get_or_create_global_step())

        return tf.estimator.EstimatorSpec(mode, loss=total_loss,
                                          train_op=train_op)

    if mode == tf.estimator.ModeKeys.EVAL:
        # EVAL also needs a loss for its EstimatorSpec.
        ssd_resnet_50.ssdLoss(logits, locations, labels, params['alpha'])
        total_loss = tf.losses.get_total_loss()

        # Per-image class probabilities: softmax over the class axis of
        # each unstacked [num_anchors, num_classes] tensor.
        plogits = tf.unstack(logits, axis=0)
        probs = [tf.nn.softmax(l, axis=-1) for l in plogits]
        pbboxes = tf.unstack(locations, axis=0)

        # Remove all background bboxes.
        pbboxes, probs = evalUtil.rmBackgroundBox(pbboxes, probs)

        # Apply non maximum suppression.
        # TODO: pass the remaining NMS parameters (score / IoU thresholds).
        pbboxes_list = multiclass_non_max_suppression(pbboxes, probs)

        eval_metrics = {}
        eval_metrics.update(
            evalUtil.get_evaluate_ops(probs, pbboxes_list, labels,
                                      categories=labels['category']))

        return tf.estimator.EstimatorSpec(mode, loss=total_loss,
                                          eval_metric_ops=eval_metrics)

    if mode == tf.estimator.ModeKeys.PREDICT:
        predictions = {'logits': logits, 'location': locations}
        return tf.estimator.EstimatorSpec(mode, predictions=predictions)
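# --- Usage sketch (assumption, not from the original code): wiring the
# model_fn above into tf.estimator.Estimator. The params keys mirror what
# model_fn reads; the concrete values and model_dir are only example
# placeholders, and the training input_fn is assumed to yield
# (features, labels) in whatever format ssdLoss and evalUtil expect.
def build_estimator(model_dir='/tmp/ssd_resnet_50', is_training=True):
    params = {
        'class_num': 50,           # example value, matches test_predict below
        'weight_decay': 0.0005,    # example value
        'is_training': is_training,
        'alpha': 1.0,              # example SSD localization-loss weight
        'learning_rate': 0.001,    # example value
    }
    return tf.estimator.Estimator(model_fn=model_fn,
                                  model_dir=model_dir,
                                  params=params)

# Typical training call (train_input_fn is hypothetical):
#   estimator = build_estimator()
#   estimator.train(input_fn=train_input_fn, steps=10000)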
def test_predict():
    # Test passed.
    image = tf.zeros([1, 300, 300, 3], dtype=tf.float32)
    class_num = 50
    weight_decay = 0.9

    ssd = ssd_resnet_50.init(class_num, weight_decay, False)
    logits, locations, end_feats = ssd(image)

    init = tf.global_variables_initializer()
    with tf.Session() as ss:
        ss.run(init)
        out = ss.run(locations)
        print(out.shape)
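# --- Input-fn sketch (assumption, not from the original code): model_fn
# reads features['image'], so a PREDICT-mode input_fn only has to produce
# that dict. The dummy 300x300 tensors mirror test_predict above; swap in
# real decoded images in practice.
def predict_input_fn():
    images = tf.zeros([4, 300, 300, 3], dtype=tf.float32)
    dataset = tf.data.Dataset.from_tensor_slices({'image': images})
    return dataset.batch(1)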
def model_fn(features, labels, mode, params, config):
    """Model function for the Estimator.

    :param features: dict of input tensors; expects features['image'].
    :param labels: ground-truth targets consumed by ssdLoss.
    :param mode: a tf.estimator.ModeKeys value.
    :param params: hyper-parameters: class_num, weight_decay, is_training,
        alpha, learning_rate.
    :param config: the estimator's RunConfig.
    :return: a tf.estimator.EstimatorSpec for the given mode.
    """
    image = features['image']

    # Init network.
    ssdnet = ssd_resnet_50.init(params['class_num'],
                                params['weight_decay'],
                                params['is_training'])

    # Compute output.
    logits, locations, endpoints = ssdnet(image)

    if mode != tf.estimator.ModeKeys.PREDICT:
        # Compute SSD loss and add it to the global loss collection;
        # labels are only available in TRAIN / EVAL.
        ssd_resnet_50.ssdLoss(logits, locations, labels, params['alpha'])
        total_loss = tf.losses.get_total_loss()

    if mode == tf.estimator.ModeKeys.TRAIN:
        # Create train op.
        optimizer = tf.train.GradientDescentOptimizer(
            learning_rate=params['learning_rate'])
        train_op = optimizer.minimize(
            total_loss, global_step=tf.train.get_or_create_global_step())
        return tf.estimator.EstimatorSpec(mode, loss=total_loss,
                                          train_op=train_op)

    if mode == tf.estimator.ModeKeys.EVAL:
        # TODO: add detection metrics (see the EVAL branch above).
        return tf.estimator.EstimatorSpec(mode, loss=total_loss)

    if mode == tf.estimator.ModeKeys.PREDICT:
        # Class probabilities over the last (class) axis of the logits.
        prob_pred = tf.nn.softmax(logits, axis=-1)
        predictions = {'prob': prob_pred, 'location': locations}
        return tf.estimator.EstimatorSpec(mode, predictions=predictions)
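# --- Predict sketch (assumption, not from the original code): runs the
# PREDICT branch above through the Estimator and consumes the 'prob' /
# 'location' entries of each prediction dict. build_estimator and
# predict_input_fn are the hypothetical helpers sketched earlier.
def run_predict():
    estimator = build_estimator(is_training=False)
    for pred in estimator.predict(input_fn=predict_input_fn):
        print(pred['prob'].shape, pred['location'].shape)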