  def testMultiClassificationWithWeights(self):
    labels = [0, 1, 2, 3]
    predictions = [
        [0.7, 0.2, 0.1, 0.0],  # Predicted label = 0
        [0.2, 0.4, 0.2, 0.2],  # Predicted label = 1
        [0.0, 0.0, 0.0, 1.0],  # Predicted label = 3
        [0.1, 0.1, 0.7, 0.1],  # Predicted label = 2
    ]
    # Weights mask out examples 0 and 2, so only example 1 (correct) and
    # example 3 (label 3, predicted 2: incorrect) contribute.
    weights = [0, 1, 0, 1]
    batch_losses = [0, 0, 4, 2]
    model = _MockModel(labels, predictions, weights, batch_losses)
    metric_map = metrics.create_metrics(model)
    value_ops, update_ops = _unpack_metric_map(metric_map)
    initializer = tf.local_variables_initializer()

    with self.test_session() as sess:
      sess.run(initializer)

      sess.run(update_ops)
      self.assertAllClose({
          "num_examples": 2,
          "accuracy/num_correct": 1,
          "accuracy/accuracy": 0.5,
          "losses/weighted_cross_entropy": 1,
      }, sess.run(value_ops))

      # Running the update ops again on the same batch doubles the counts;
      # the streaming means are unchanged.
      sess.run(update_ops)
      self.assertAllClose({
          "num_examples": 4,
          "accuracy/num_correct": 2,
          "accuracy/accuracy": 0.5,
          "losses/weighted_cross_entropy": 1,
      }, sess.run(value_ops))
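# The tests in this file rely on two helpers that are not shown in this
# excerpt. Below is a minimal sketch of what they might look like, defined at
# module level in the test file. The attribute names (labels, predictions,
# weights, batch_losses, output_dim) are inferred from the call sites above
# and from what metrics.create_metrics appears to consume; the real helpers
# may differ. Assumes the file's usual `import tensorflow as tf`.
class _MockModel(object):
  """Stand-in model exposing only the tensors the metrics consume."""

  def __init__(self, labels, predictions, weights, batch_losses,
               output_dim=None):
    self.labels = tf.constant(labels, dtype=tf.int32)
    self.predictions = tf.constant(predictions, dtype=tf.float32)
    self.weights = (
        None if weights is None else tf.constant(weights, dtype=tf.float32))
    self.batch_losses = tf.constant(batch_losses, dtype=tf.float32)
    # If not given, infer the number of output classes from a prediction row.
    self.output_dim = (
        output_dim if output_dim is not None else len(predictions[0]))


def _unpack_metric_map(metric_map):
  """Splits {name: (value_op, update_op)} into two parallel dicts of ops."""
  value_ops = {name: ops[0] for name, ops in metric_map.items()}
  update_ops = {name: ops[1] for name, ops in metric_map.items()}
  return value_ops, update_ops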
  def testBinaryClassificationWithWeights(self):
    labels = [0, 1, 1, 0]
    predictions = [
        [0.4],  # Predicted label = 0
        [0.6],  # Predicted label = 1
        [0.0],  # Predicted label = 0
        [1.0],  # Predicted label = 1
    ]
    # Weights mask out examples 0 and 2. The one weighted (positive, negative)
    # score pair is (0.6, 1.0), which is mis-ranked, hence AUC = 0.
    weights = [0, 1, 0, 1]
    batch_losses = [0, 0, 4, 2]
    model = _MockModel(
        labels, predictions, weights, batch_losses, output_dim=1)
    metric_map = metrics.create_metrics(model)
    value_ops, update_ops = _unpack_metric_map(metric_map)
    initializer = tf.local_variables_initializer()

    with self.session() as sess:
      sess.run(initializer)

      sess.run(update_ops)
      self.assertAllClose(
          {
              "num_examples": 2,
              "accuracy/num_correct": 1,
              "accuracy/accuracy": 0.5,
              "losses/weighted_cross_entropy": 1,
              "auc": 0,
              "confusion_matrix/label_0_pred_0": 0,
              "confusion_matrix/label_0_pred_1": 1,
              "confusion_matrix/label_1_pred_0": 0,
              "confusion_matrix/label_1_pred_1": 1,
          }, sess.run(value_ops))

      sess.run(update_ops)
      self.assertAllClose(
          {
              "num_examples": 4,
              "accuracy/num_correct": 2,
              "accuracy/accuracy": 0.5,
              "losses/weighted_cross_entropy": 1,
              "auc": 0,
              "confusion_matrix/label_0_pred_0": 0,
              "confusion_matrix/label_0_pred_1": 2,
              "confusion_matrix/label_1_pred_0": 0,
              "confusion_matrix/label_1_pred_1": 2,
          }, sess.run(value_ops))
  def __call__(self, features, labels, mode, params):
    """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
    hparams = copy.deepcopy(self._base_hparams)
    if "batch_size" in params:
      hparams.batch_size = params["batch_size"]

    # Allow labels to be passed in the features dictionary.
    if "labels" in features:
      if labels is not None and labels is not features["labels"]:
        raise ValueError(
            "Conflicting labels: features['labels'] = {}, labels = {}".format(
                features["labels"], labels))
      labels = features.pop("labels")

    model = self._model_class(features, labels, hparams, mode)
    model.build()

    # Possibly create train_op.
    use_tpu = self._use_tpu
    train_op = None
    if mode == tf.estimator.ModeKeys.TRAIN:
      learning_rate = training.create_learning_rate(hparams, model.global_step)
      optimizer = training.create_optimizer(hparams, learning_rate, use_tpu)
      train_op = training.create_train_op(model, optimizer)

    # Possibly create evaluation metrics.
    eval_metrics = None
    if mode == tf.estimator.ModeKeys.EVAL:
      eval_metrics = (
          metrics.create_metric_fn(model)
          if use_tpu else metrics.create_metrics(model))

    if use_tpu:
      estimator = tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode,
          predictions=model.predictions,
          loss=model.total_loss,
          train_op=train_op,
          eval_metrics=eval_metrics)
    else:
      estimator = tf.estimator.EstimatorSpec(
          mode=mode,
          predictions=model.predictions,
          loss=model.total_loss,
          train_op=train_op,
          eval_metric_ops=eval_metrics)

    return estimator
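# The __call__ above implies a small callable wrapper class that an Estimator
# can use directly as its model_fn. A minimal sketch of the constructor, with
# the class name `ModelFn` assumed; only the attribute names come from the
# method above.
class ModelFn(object):
  """Callable that builds the model and returns an (TPU)EstimatorSpec."""

  def __init__(self, model_class, base_hparams, use_tpu=False):
    self._model_class = model_class    # Builds the graph from features/labels.
    self._base_hparams = base_hparams  # Deep-copied per call, so the per-core
                                       # batch_size override stays local.
    self._use_tpu = use_tpu            # Selects TPUEstimatorSpec vs. EstimatorSpec.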
def model_fn(features, labels, mode, params):
  """Builds the model and returns an EstimatorSpec or TPUEstimatorSpec."""
  # For TPUEstimator, params contains the batch size per TPU core.
  if "batch_size" in params:
    hparams.batch_size = params["batch_size"]

  # Allow labels to be passed in the features dictionary.
  if "labels" in features:
    if labels is not None and labels is not features["labels"]:
      raise ValueError("Conflicting labels: features['labels'] = %s, "
                       "labels = %s" % (features["labels"], labels))
    labels = features.pop("labels")

  model = model_class(features, labels, hparams, mode)
  model.build()

  # Possibly create train_op.
  train_op = None
  if mode == tf.estimator.ModeKeys.TRAIN:
    learning_rate = training.create_learning_rate(hparams, model.global_step)
    optimizer = training.create_optimizer(hparams, learning_rate, use_tpu)
    train_op = training.create_train_op(model, optimizer)

  # Possibly create evaluation metrics.
  eval_metrics = None
  if mode == tf.estimator.ModeKeys.EVAL:
    eval_metrics = (
        metrics.create_metric_fn(model)
        if use_tpu else metrics.create_metrics(model))

  if use_tpu:
    estimator = tf.compat.v1.estimator.tpu.TPUEstimatorSpec(
        mode=mode,
        predictions=model.predictions,
        loss=model.total_loss,
        train_op=train_op,
        eval_metrics=eval_metrics)
  else:
    estimator = tf.estimator.EstimatorSpec(
        mode=mode,
        predictions=model.predictions,
        loss=model.total_loss,
        train_op=train_op,
        eval_metric_ops=eval_metrics)

  return estimator
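# A minimal sketch of how a model_fn like the one above is typically wired
# into an estimator. The names `create_model_fn` and `make_estimator` and the
# config values are illustrative assumptions, not taken from the source;
# `create_model_fn` is assumed to bind model_class, hparams, and use_tpu into
# the closure shown above and return it.
import tensorflow as tf


def make_estimator(model_class, hparams, model_dir, use_tpu=False,
                   master=None):
  """Wraps the model_fn above in a TPUEstimator or a plain Estimator."""
  model_fn = create_model_fn(model_class, hparams, use_tpu)
  if use_tpu:
    run_config = tf.compat.v1.estimator.tpu.RunConfig(
        master=master, model_dir=model_dir)
    # TPUEstimator injects the per-core batch size into params["batch_size"],
    # which the model_fn above reads.
    return tf.compat.v1.estimator.tpu.TPUEstimator(
        model_fn=model_fn,
        config=run_config,
        use_tpu=True,
        train_batch_size=hparams.batch_size,
        eval_batch_size=hparams.batch_size)
  return tf.estimator.Estimator(
      model_fn=model_fn, config=tf.estimator.RunConfig(model_dir=model_dir))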
  def testBinaryClassificationWithoutWeights(self):
    labels = [0, 1, 1, 0]
    predictions = [
        [0.4],  # Predicted label = 0
        [0.6],  # Predicted label = 1
        [0.0],  # Predicted label = 0
        [1.0],  # Predicted label = 1
    ]
    weights = None
    # Unweighted loss is mean(batch_losses) = (0 + 0 + 4 + 2) / 4 = 1.5. For
    # AUC, only one of the four (positive, negative) score pairs, (0.6, 0.4),
    # is ranked correctly, hence AUC = 0.25.
    batch_losses = [0, 0, 4, 2]
    model = _MockModel(labels, predictions, weights, batch_losses)
    metric_map = metrics.create_metrics(model)
    value_ops, update_ops = _unpack_metric_map(metric_map)
    initializer = tf.compat.v1.local_variables_initializer()

    with self.test_session() as sess:
      sess.run(initializer)

      sess.run(update_ops)
      self.assertAllClose(
          {
              "num_examples": 4,
              "accuracy/num_correct": 2,
              "accuracy/accuracy": 0.5,
              "losses/weighted_cross_entropy": 1.5,
              "auc": 0.25,
              "confusion_matrix/true_positives": 1,
              "confusion_matrix/true_negatives": 1,
              "confusion_matrix/false_positives": 1,
              "confusion_matrix/false_negatives": 1,
          }, sess.run(value_ops))

      sess.run(update_ops)
      self.assertAllClose(
          {
              "num_examples": 8,
              "accuracy/num_correct": 4,
              "accuracy/accuracy": 0.5,
              "losses/weighted_cross_entropy": 1.5,
              "auc": 0.25,
              "confusion_matrix/true_positives": 2,
              "confusion_matrix/true_negatives": 2,
              "confusion_matrix/false_positives": 2,
              "confusion_matrix/false_negatives": 2,
          }, sess.run(value_ops))
  def testMultiClassificationWithoutWeights(self):
    labels = [0, 1, 2, 3]
    predictions = [
        [0.7, 0.2, 0.1, 0.0],  # Predicted label = 0
        [0.2, 0.4, 0.2, 0.2],  # Predicted label = 1
        [0.0, 0.0, 0.0, 1.0],  # Predicted label = 3
        [0.1, 0.1, 0.7, 0.1],  # Predicted label = 2
    ]
    weights = None
    batch_losses = [0, 0, 4, 2]
    model = _MockModel(
        labels, predictions, weights, batch_losses, output_dim=4)
    metric_map = metrics.create_metrics(model)
    value_ops, update_ops = _unpack_metric_map(metric_map)
    initializer = tf.local_variables_initializer()

    with self.test_session() as sess:
      sess.run(initializer)

      sess.run(update_ops)
      self.assertAllClose({
          "num_examples": 4,
          "accuracy/num_correct": 2,
          "accuracy/accuracy": 0.5,
          "losses/weighted_cross_entropy": 1.5,
          "confusion_matrix/label_0_pred_0": 1,
          "confusion_matrix/label_0_pred_1": 0,
          "confusion_matrix/label_0_pred_2": 0,
          "confusion_matrix/label_0_pred_3": 0,
          "confusion_matrix/label_1_pred_0": 0,
          "confusion_matrix/label_1_pred_1": 1,
          "confusion_matrix/label_1_pred_2": 0,
          "confusion_matrix/label_1_pred_3": 0,
          "confusion_matrix/label_2_pred_0": 0,
          "confusion_matrix/label_2_pred_1": 0,
          "confusion_matrix/label_2_pred_2": 0,
          "confusion_matrix/label_2_pred_3": 1,
          "confusion_matrix/label_3_pred_0": 0,
          "confusion_matrix/label_3_pred_1": 0,
          "confusion_matrix/label_3_pred_2": 1,
          "confusion_matrix/label_3_pred_3": 0,
      }, sess.run(value_ops))

      sess.run(update_ops)
      self.assertAllClose({
          "num_examples": 8,
          "accuracy/num_correct": 4,
          "accuracy/accuracy": 0.5,
          "losses/weighted_cross_entropy": 1.5,
          "confusion_matrix/label_0_pred_0": 2,
          "confusion_matrix/label_0_pred_1": 0,
          "confusion_matrix/label_0_pred_2": 0,
          "confusion_matrix/label_0_pred_3": 0,
          "confusion_matrix/label_1_pred_0": 0,
          "confusion_matrix/label_1_pred_1": 2,
          "confusion_matrix/label_1_pred_2": 0,
          "confusion_matrix/label_1_pred_3": 0,
          "confusion_matrix/label_2_pred_0": 0,
          "confusion_matrix/label_2_pred_1": 0,
          "confusion_matrix/label_2_pred_2": 0,
          "confusion_matrix/label_2_pred_3": 2,
          "confusion_matrix/label_3_pred_0": 0,
          "confusion_matrix/label_3_pred_1": 0,
          "confusion_matrix/label_3_pred_2": 2,
          "confusion_matrix/label_3_pred_3": 0,
      }, sess.run(value_ops))
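# These test methods are assumed to live in a tf.test.TestCase subclass (the
# class statement is not part of this excerpt), with the standard TensorFlow
# test entry point at module level:
if __name__ == "__main__":
  tf.test.main()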