Example #1
    def test_saved_model_iomap(self):
        with _make_temp_directory() as tmp_dir:
            saved_model_dir = os.path.join(tmp_dir, 'saved_model')
            graph = tf.Graph()
            with tf.Session(graph=graph) as sess, graph.as_default():
                _build_graph()
                _build_saved_model(sess, saved_model_dir)
                # Build the transformer from exported serving model
                # We are using signatures, thus must provide the keys
                gin = TFInputGraph.fromSavedModelWithSignature(saved_model_dir, _serving_tag,
                                                               _serving_sigdef_key)

                _input_mapping_with_sigdef = {'inputCol': _tensor_input_signature}
                # Input mapping for the Transformer
                _translated_input_mapping = gin.translateInputMapping(_input_mapping_with_sigdef)
                _expected_input_mapping = {'inputCol': tfx.tensor_name(_tensor_input_name)}
                # Output mapping for the Transformer
                _output_mapping_with_sigdef = {_tensor_output_signature: 'outputCol'}
                _translated_output_mapping = gin.translateOutputMapping(_output_mapping_with_sigdef)
                _expected_output_mapping = {tfx.tensor_name(_tensor_output_name): 'outputCol'}

                err_msg = "signature based input mapping {} and output mapping {} " + \
                          "must be translated correctly into tensor name based mappings"
                assert _translated_input_mapping == _expected_input_mapping \
                    and _translated_output_mapping == _expected_output_mapping, \
                    err_msg.format(_translated_input_mapping, _translated_output_mapping)
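The translated, tensor-name based mappings are exactly what the tensor-level transformer consumes. As a minimal sketch (the import path and the DataFrame `input_df` are assumptions; the TFTransformer keyword arguments mirror the `_transform` examples further below), the mappings could be wired up like this:

# Sketch only: reuses `gin` and the translated mappings from the test above.
from sparkdl.transformers.tf_tensor import TFTransformer  # import path is an assumption

transformer = TFTransformer(tfInputGraph=gin,
                            inputMapping=_translated_input_mapping,
                            outputMapping=_translated_output_mapping)
output_df = transformer.transform(input_df)  # input_df: any DataFrame with an 'inputCol' column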
Example #3
 def test_graph_novar(self):
     transformer = _build_transformer(
         lambda session: TFInputGraph.fromGraph(session.graph, session, [
             _tensor_input_name
         ], [_tensor_output_name]))
     gin = transformer.getTFInputGraph()
     local_features = _build_local_features()
     expected = _get_expected_result(gin, local_features)
     dataset = self.session.createDataFrame(local_features)
     _check_transformer_output(transformer, dataset, expected)
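The `_build_transformer` fixture itself is not part of these snippets. A hypothetical sketch of what such a helper might look like, assuming a TF 1.x graph with a single placeholder input and a variable-free output op (the tensor names, vector size, and import paths below are all assumptions):

import tensorflow as tf
import sparkdl.graph.utils as tfx                        # `tfx` alias as used above; path assumed
from sparkdl.graph.input import TFInputGraph             # import paths are assumptions
from sparkdl.transformers.tf_tensor import TFTransformer

_tensor_input_name = 'input_tensor'                      # hypothetical fixture values
_tensor_output_name = 'output_tensor'
_vec_size = 17

def _build_transformer(gin_function, tf_dtype=tf.float64):
    # Build a tiny graph, let `gin_function` wrap it into a TFInputGraph,
    # and return a transformer mapping 'inputCol' -> 'outputCol'.
    graph = tf.Graph()
    with tf.Session(graph=graph) as sess, graph.as_default():
        x = tf.placeholder(tf_dtype, shape=[None, _vec_size], name=_tensor_input_name)
        tf.reduce_mean(x, axis=1, name=_tensor_output_name)
        gin = gin_function(sess)
    return TFTransformer(tfInputGraph=gin,
                         inputMapping={'inputCol': tfx.tensor_name(_tensor_input_name)},
                         outputMapping={tfx.tensor_name(_tensor_output_name): 'outputCol'})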
Example #4
 def _transform(self, dataset):
     with KSessionWrap() as (sess, keras_graph):
         tfGraph, inputTensorName, outputTensorName = self._loadTFGraph(sess=sess,
                                                                        graph=keras_graph)
         inputGraph = TFInputGraph.fromGraph(graph=tfGraph, sess=sess,
                                             feed_names=[inputTensorName],
                                             fetch_names=[outputTensorName])
     # Create TFTransformer & use it to apply the loaded Keras model graph to our dataset
     transformer = TFTransformer(tfInputGraph=inputGraph,
                                 inputMapping={self.getInputCol(): inputTensorName},
                                 outputMapping={outputTensorName: self.getOutputCol()})
     return transformer.transform(dataset)
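`KSessionWrap` is not shown either; judging from its use it is a small context manager that yields an isolated graph/session pair and closes the session afterwards, so `TFInputGraph.fromGraph` must capture the graph (and variable values) before the block exits. A hypothetical stand-in:

from contextlib import contextmanager
import tensorflow as tf

@contextmanager
def KSessionWrap():
    # Hypothetical stand-in: a fresh session bound to its own graph, closed on exit.
    graph = tf.Graph()
    session = tf.Session(graph=graph)
    try:
        yield session, graph
    finally:
        session.close()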
Example #5
 def test_graph_array_types(self):
     test_dtypes = [(tf.int32, ArrayType(IntegerType()), np.int32),
                    (tf.int64, ArrayType(LongType()), np.int64),
                    (tf.float32, ArrayType(FloatType()), np.float32),
                    (tf.float64, ArrayType(DoubleType()), np.float64)]
     for tf_dtype, spark_dtype, np_type in test_dtypes:
         transformer = _build_transformer(lambda session:
                                          TFInputGraph.fromGraph(session.graph, session,
                                                                 [_tensor_input_name],
                                                                 [_tensor_output_name]),
                                          tf_dtype)
         gin = transformer.getTFInputGraph()
         local_features = _build_local_features(np_type)
         expected = _get_expected_result(gin, local_features)
         schema = StructType([StructField('inputCol', spark_dtype)])
         dataset = self.session.createDataFrame(local_features, schema)
         _check_transformer_output(transformer, dataset, expected)
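`_build_local_features` is another fixture that is not shown here. Given the `StructType([StructField('inputCol', spark_dtype)])` schema, it presumably produces rows whose 'inputCol' value is an array of the requested numpy dtype; a hypothetical sketch:

import numpy as np

def _build_local_features(np_type=np.float64, num_rows=10, vec_size=17):
    # Hypothetical fixture: each row carries a vec_size-long array cast to np_type.
    return [{'inputCol': np.arange(vec_size, dtype=np_type).tolist()}
            for _ in range(num_rows)]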
Example #7
 def gin_fun(session):
     _build_saved_model(session, saved_model_dir)
     # Build the transformer from exported serving model
     # We are using signatures, thus must provide the keys
     return TFInputGraph.fromSavedModelWithSignature(saved_model_dir, _serving_tag,
                                                     _serving_sigdef_key)
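`_build_saved_model` presumably exports the current graph and variables as a TF 1.x SavedModel carrying a prediction signature; its signature input/output keys would play the role of `_tensor_input_signature` and `_tensor_output_signature` in Example #1. A hedged sketch with the tf.saved_model builder API (tag, key, and tensor names are placeholders):

import tensorflow as tf

_serving_tag = 'serving_tag'                  # hypothetical fixture values
_serving_sigdef_key = 'prediction_signature'

def _build_saved_model(session, saved_model_dir):
    # Hedged sketch: export graph + variables with a named prediction signature.
    graph = session.graph
    sig_inputs = {'input_sig': tf.saved_model.utils.build_tensor_info(
        graph.get_tensor_by_name(_tensor_input_name + ':0'))}
    sig_outputs = {'output_sig': tf.saved_model.utils.build_tensor_info(
        graph.get_tensor_by_name(_tensor_output_name + ':0'))}
    sigdef = tf.saved_model.signature_def_utils.build_signature_def(
        inputs=sig_inputs, outputs=sig_outputs,
        method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME)
    builder = tf.saved_model.builder.SavedModelBuilder(saved_model_dir)
    builder.add_meta_graph_and_variables(
        session, [_serving_tag], signature_def_map={_serving_sigdef_key: sigdef})
    builder.save()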
Example #8
 def test_graphdef_novar(self):
     gin = _build_graph_input(lambda session:
                              TFInputGraph.fromGraphDef(session.graph.as_graph_def(),
                                                        [_tensor_input_name], [_tensor_output_name]))
     _check_input_novar(gin)
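`fromGraphDef` receives only a serialized GraphDef and no session, which is why these tests are the `_novar` (no-variables) variants. For a graph that does contain variables, one would typically freeze them into constants first; a hedged sketch using the TF 1.x freezing utility (`session` and the tensor names reuse the fixtures above):

frozen_graph_def = tf.graph_util.convert_variables_to_constants(
    session, session.graph.as_graph_def(), output_node_names=[_tensor_output_name])
gin = TFInputGraph.fromGraphDef(frozen_graph_def,
                                [_tensor_input_name], [_tensor_output_name])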
Example #9
 def test_graphdef_novar_2(self):
     gin = _build_graph_input_2(lambda session:
                                TFInputGraph.fromGraphDef(session.graph.as_graph_def(),
                                                          [_tensor_input_name], [_tensor_output_name]))
     _check_output_2(gin, np.array([1, 2, 3]), np.array([2, 2, 2]), 1)
Example #10
 def gin_fun(session):
     _build_checkpointed_model(session, tmp_dir)
     return TFInputGraph.fromGraph(session.graph, session,
                                   [_tensor_input_name], [_tensor_output_name])
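`_build_checkpointed_model` presumably writes the session's variables as a TF 1.x checkpoint (including the meta graph) into `tmp_dir`; for the `fromCheckpointWithSignature` variant used later it would additionally have to attach a SignatureDef, which is omitted here. A hedged sketch (file prefix is an assumption):

import os
import tensorflow as tf

def _build_checkpointed_model(session, tmp_dir):
    # Hedged sketch: initialize variables and write checkpoint + meta graph.
    session.run(tf.global_variables_initializer())
    saver = tf.train.Saver()
    # write_meta_graph=True (the default) also stores the graph structure,
    # presumably needed by the fromCheckpoint* constructors to rebuild the graph.
    saver.save(session, os.path.join(tmp_dir, 'model'))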
Example #11
 def gin_fun(session):
     _build_saved_model(session, saved_model_dir)
     return TFInputGraph.fromGraph(session.graph, session,
                                   [_tensor_input_name],
                                   [_tensor_output_name])
Example #12
 def test_graphdef_novar(self):
     gin = _build_graph_input(lambda session: TFInputGraph.fromGraphDef(
         session.graph.as_graph_def(), [_tensor_input_name],
         [_tensor_output_name]))
     _check_input_novar(gin)
Example #13
 def test_graphdef_novar_2(self):
     gin = _build_graph_input_2(lambda session: TFInputGraph.fromGraphDef(
         session.graph.as_graph_def(), [_tensor_input_name],
         [_tensor_output_name]))
     _check_output_2(gin, np.array([1, 2, 3]), np.array([2, 2, 2]), 1)
Example #14
 def gin_fun(session):
     _build_checkpointed_model(session, tmp_dir)
     return TFInputGraph.fromGraph(session.graph, session,
                                   [_tensor_input_name],
                                   [_tensor_output_name])
Example #15
 def gin_fun(session):
     _build_checkpointed_model(session, tmp_dir)
     return TFInputGraph.fromCheckpointWithSignature(
         tmp_dir, _serving_sigdef_key)
Example #16
 def gin_fun(session):
     _build_saved_model(session, saved_model_dir)
     return TFInputGraph.fromGraph(session.graph, session,
                                   [_tensor_input_name], [_tensor_output_name])
Example #17
 def gin_fun(session):
     _build_checkpointed_model(session, tmp_dir)
     return TFInputGraph.fromCheckpointWithSignature(tmp_dir, _serving_sigdef_key)
Example #18
 def gin_fun(session):
     _build_saved_model(session, saved_model_dir)
     # Build the transformer from exported serving model
     # We are using signatures, thus must provide the keys
     return TFInputGraph.fromSavedModelWithSignature(
         saved_model_dir, _serving_tag, _serving_sigdef_key)