def _format_example_as_numpy_dict(example, feature_shape_dict):
  """Decodes a serialized tf.Example and reshapes each decoded feature.

  Args:
    example: A serialized tf.train.Example proto.
    feature_shape_dict: Maps feature name to its target shape — a sequence of
      ints, empty for scalar features.

  Returns:
    Dict mapping feature name to a numpy array reshaped per
    `feature_shape_dict`; scalar features (empty shape) become 0-d arrays.
  """
  result = example_coder.ExampleToNumpyDict(example)
  for key, value in result.items():
    shape = feature_shape_dict[key]
    if shape:
      value = value.reshape(shape)
    else:
      # BUG FIX: the original reshaped first (yielding a 0-d array) and then
      # called squeeze(0); numpy raises AxisError when axis 0 is requested on
      # a 0-d array. Instead, squeeze the length-1 leading axis of the decoded
      # 1-D array directly to obtain the scalar.
      value = value.squeeze(0)
    result[key] = value
  return result
def test_decode_example_none_ref_count(self):
  # Regression test: decoding a feature whose `value` submessage is empty
  # must not leak references to the None singleton. The test expects exactly
  # one new reference to None after the call — presumably held inside the
  # returned dict for the valueless feature (TODO confirm against the coder
  # implementation).
  example = text_format.Parse(
      ''' features { feature { key: 'x' value { } } } ''', tf.train.Example())
  before_refcount = sys.getrefcount(None)
  # NOTE: binding the result to `_` keeps the returned dict (and whatever
  # None reference it holds) alive until after the refcount is re-read;
  # dropping the assignment would change the measured count.
  _ = example_coder.ExampleToNumpyDict(example.SerializeToString())
  after_refcount = sys.getrefcount(None)
  self.assertEqual(before_refcount + 1, after_refcount)
def _ParseExample(extracts: types.Extracts, eval_config: config.EvalConfig):
  """Decodes the serialized example in extracts into feature-based extracts.

  The serialized tf.train.Example stored under constants.INPUT_KEY is decoded
  into a feature dict; per model spec, the configured label, example-weight
  and prediction features are moved out of that dict into their own extract
  entries, and the remaining features are stored under
  constants.FEATURES_KEY.

  Args:
    extracts: Extracts containing a serialized example under
      constants.INPUT_KEY.
    eval_config: Eval config.

  Returns:
    Extracts with additional keys added for features, labels, and example
    weights.
  """
  features = example_coder.ExampleToNumpyDict(extracts[constants.INPUT_KEY])
  extracts = copy.copy(extracts)
  multi_model = len(eval_config.model_specs) > 1
  popped_keys = []
  for spec in eval_config.model_specs:
    # Each entry: (extract key, single-key field, multi-key map field).
    for extract_key, single_key, multi_keys in (
        (constants.LABELS_KEY, spec.label_key, spec.label_keys),
        (constants.EXAMPLE_WEIGHTS_KEY, spec.example_weight_key,
         spec.example_weight_keys),
        (constants.PREDICTIONS_KEY, spec.prediction_key, spec.prediction_keys),
    ):
      if not (single_key or multi_keys):
        continue
      keys, values = _keys_and_values(single_key or dict(multi_keys), features)
      # Only key the extract by model name when there are multiple models.
      if multi_model:
        extracts.setdefault(extract_key, {})[spec.name] = values
      else:
        extracts[extract_key] = values
      popped_keys.extend(keys)
  for key in popped_keys:
    if key in features:
      features.pop(key)
  extracts[constants.FEATURES_KEY] = features
  return extracts
def test_decode_example(self, example_proto_text, decoded_example):
  # Build the Example proto from its text representation, round-trip it
  # through serialization, and compare the decoded numpy dict against the
  # expected decoding.
  example_pb = tf.train.Example()
  text_format.Merge(example_proto_text, example_pb)
  serialized = example_pb.SerializeToString()
  decoded = example_coder.ExampleToNumpyDict(serialized)
  self._check_decoding_results(decoded, decoded_example)
def decode(self, serialized_example_proto: bytes) -> types.Example:
  """Decodes serialized tf.Example to tf data validation input dict."""
  decoded = example_coder.ExampleToNumpyDict(serialized_example_proto)
  return decoded