def test_build_standardized_signature_def_classify_classes_only(self):
  """Tests classification with one output tensor."""
  input_tensors = {
      "input-1":
          array_ops.placeholder(dtypes.string, 1, name="input-tensor-1")
  }
  classes = array_ops.placeholder(dtypes.string, 1, name="output-tensor-1")

  export_output = export_output_lib.ClassificationOutput(classes=classes)
  actual_signature_def = export_output.as_signature_def(input_tensors)

  expected_signature_def = meta_graph_pb2.SignatureDef()
  shape = tensor_shape_pb2.TensorShapeProto(
      dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)])
  dtype_string = types_pb2.DataType.Value("DT_STRING")
  expected_signature_def.inputs[
      signature_constants.CLASSIFY_INPUTS].CopyFrom(
          meta_graph_pb2.TensorInfo(name="input-tensor-1:0",
                                    dtype=dtype_string,
                                    tensor_shape=shape))
  expected_signature_def.outputs[
      signature_constants.CLASSIFY_OUTPUT_CLASSES].CopyFrom(
          meta_graph_pb2.TensorInfo(name="output-tensor-1:0",
                                    dtype=dtype_string,
                                    tensor_shape=shape))

  expected_signature_def.method_name = (
      signature_constants.CLASSIFY_METHOD_NAME)

  self.assertEqual(actual_signature_def, expected_signature_def)
def test_build_all_signature_defs_explicit_default(self):
  receiver_tensor = constant_op.constant(["11"])
  output_1 = constant_op.constant([1.])
  output_2 = constant_op.constant(["2"])
  output_3 = constant_op.constant(["3"])
  export_outputs = {
      signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
          export_output.RegressionOutput(value=output_1),
      "head-2": export_output.ClassificationOutput(classes=output_2),
      "head-3": export_output.PredictOutput(outputs={
          "some_output_3": output_3
      }),
  }

  signature_defs = export.build_all_signature_defs(
      receiver_tensor, export_outputs)

  expected_signature_defs = {
      "serving_default":
          signature_def_utils.regression_signature_def(receiver_tensor,
                                                       output_1),
      "head-2":
          signature_def_utils.classification_signature_def(receiver_tensor,
                                                           output_2, None),
      "head-3":
          signature_def_utils.predict_signature_def(
              {"input": receiver_tensor}, {"output": output_3})
  }

  self.assertDictEqual(expected_signature_defs, signature_defs)
def test_classify_scores_must_be_float(self):
  scores = array_ops.placeholder(dtypes.string, 1, name="output-tensor-1")
  with self.assertRaises(ValueError) as e:
    export_output_lib.ClassificationOutput(scores=scores)
  self.assertEqual(
      'Classification scores must be a float32 Tensor; got '
      'Tensor("output-tensor-1:0", shape=(1,), dtype=string)',
      str(e.exception))
def test_classify_classes_must_be_strings(self):
  classes = array_ops.placeholder(dtypes.float32, 1, name="output-tensor-1")
  with self.assertRaises(ValueError) as e:
    export_output_lib.ClassificationOutput(classes=classes)
  self.assertEqual(
      'Classification classes must be a string Tensor; got '
      'Tensor("output-tensor-1:0", shape=(1,), dtype=float32)',
      str(e.exception))
def testExportOutputsNoDict(self):
  with ops.Graph().as_default(), self.test_session():
    predictions = {'loss': constant_op.constant(1.)}
    classes = constant_op.constant('hello')
    with self.assertRaisesRegexp(TypeError, 'export_outputs must be dict'):
      model_fn.EstimatorSpec(
          mode=model_fn.ModeKeys.PREDICT,
          predictions=predictions,
          export_outputs=export_output.ClassificationOutput(classes=classes))
def _model_fn_for_export_tests(features, labels, mode):
  _, _ = features, labels
  variables.Variable(1., name='weight')
  scores = constant_op.constant([3.])
  classes = constant_op.constant(['wumpus'])
  return model_fn_lib.EstimatorSpec(
      mode,
      predictions=constant_op.constant(10.),
      loss=constant_op.constant(1.),
      train_op=constant_op.constant(2.),
      export_outputs={
          'test': export_output.ClassificationOutput(scores, classes)
      })
def _model_fn_scaffold(features, labels, mode):
  _, _ = features, labels
  variables.Variable(1., name='weight')
  real_saver = saver.Saver()
  self.mock_saver = test.mock.Mock(
      wraps=real_saver, saver_def=real_saver.saver_def)
  scores = constant_op.constant([3.])
  return model_fn_lib.EstimatorSpec(
      mode=mode,
      predictions=constant_op.constant([[1.]]),
      loss=constant_op.constant(0.),
      train_op=constant_op.constant(0.),
      scaffold=training.Scaffold(saver=self.mock_saver),
      export_outputs={'test': export_output.ClassificationOutput(scores)})
def testAllArgumentsSet(self):
  """Tests that no errors are raised when all arguments are set."""
  with ops.Graph().as_default(), self.test_session():
    loss = constant_op.constant(1.)
    predictions = {'loss': loss}
    classes = constant_op.constant('hello')
    model_fn.EstimatorSpec(
        mode=model_fn.ModeKeys.TRAIN,
        predictions=predictions,
        loss=loss,
        train_op=control_flow_ops.no_op(),
        eval_metric_ops={'loss': (control_flow_ops.no_op(), loss)},
        export_outputs={
            'head_name': export_output.ClassificationOutput(classes=classes)
        },
        training_chief_hooks=[_FakeHook()],
        training_hooks=[_FakeHook()],
        scaffold=monitored_session.Scaffold())
def testExportOutputsMultiheadWithDefault(self):
  with ops.Graph().as_default(), self.test_session():
    predictions = {'loss': constant_op.constant(1.)}
    output_1 = constant_op.constant([1.])
    output_2 = constant_op.constant(['2'])
    output_3 = constant_op.constant(['3'])
    export_outputs = {
        signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
            export_output.RegressionOutput(value=output_1),
        'head-2': export_output.ClassificationOutput(classes=output_2),
        'head-3': export_output.PredictOutput(
            outputs={'some_output_3': output_3})
    }
    estimator_spec = model_fn.EstimatorSpec(
        mode=model_fn.ModeKeys.PREDICT,
        predictions=predictions,
        export_outputs=export_outputs)
    self.assertEqual(export_outputs, estimator_spec.export_outputs)
def _model_fn_scaffold(features, labels, mode):
  _, _ = features, labels
  my_int = variables.Variable(
      1, name='my_int', collections=[ops.GraphKeys.LOCAL_VARIABLES])
  scores = constant_op.constant([3.])
  with ops.control_dependencies([
      variables.local_variables_initializer(),
      data_flow_ops.tables_initializer()
  ]):
    assign_op = state_ops.assign(my_int, 12345)

  # local_init_op must be an Operation, not a Tensor.
  custom_local_init_op = control_flow_ops.group(assign_op)
  return model_fn_lib.EstimatorSpec(
      mode=mode,
      predictions=constant_op.constant([[1.]]),
      loss=constant_op.constant(0.),
      train_op=constant_op.constant(0.),
      scaffold=training.Scaffold(local_init_op=custom_local_init_op),
      export_outputs={'test': export_output.ClassificationOutput(scores)})
def testExportOutputsMultiheadMissingDefault(self):
  with ops.Graph().as_default(), self.test_session():
    predictions = {'loss': constant_op.constant(1.)}
    output_1 = constant_op.constant([1.])
    output_2 = constant_op.constant(['2'])
    output_3 = constant_op.constant(['3'])
    export_outputs = {
        'head-1': export_output.RegressionOutput(value=output_1),
        'head-2': export_output.ClassificationOutput(classes=output_2),
        'head-3': export_output.PredictOutput(
            outputs={'some_output_3': output_3})
    }
    with self.assertRaisesRegexp(
        ValueError,
        'Multiple export_outputs were provided, but none of them is '
        'specified as the default. Do this by naming one of them with '
        'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.'):
      model_fn.EstimatorSpec(
          mode=model_fn.ModeKeys.PREDICT,
          predictions=predictions,
          export_outputs=export_outputs)
def test_classify_requires_classes_or_scores(self):
  with self.assertRaises(ValueError) as e:
    export_output_lib.ClassificationOutput()
  self.assertEqual("At least one of scores and classes must be set.",
                   str(e.exception))