def _load_eval_run(
    output_path: Text
) -> Tuple[config.EvalConfig, Text, Text, Dict[Text, Text]]:
  """Returns eval config, data location, file format, and model locations.

  Args:
    output_path: Directory containing the serialized eval run — either the
      JSON ``EvalRun`` config file or, for legacy runs, an extensionless
      TFRecord holding a pickled config dict.

  Returns:
    Tuple of (EvalConfig, data location, file format, model locations).

  Raises:
    FileNotFoundError: If neither the JSON config file nor the legacy record
      exists under `output_path`.
  """
  path = os.path.join(output_path, _EVAL_CONFIG_FILE)
  if tf.io.gfile.exists(path):
    with tf.io.gfile.GFile(path, 'r') as f:
      pb = json_format.Parse(f.read(), config_pb2.EvalRun())
      _check_version(pb.version, output_path)
      return (pb.eval_config, pb.data_location, pb.file_format,
              pb.model_locations)

  # Legacy support (to be removed in future).
  # The previous version did not include a file extension and stored a
  # pickled config dict as the first record of a TFRecord file.
  path = os.path.splitext(path)[0]
  # Guard against a missing legacy file so callers get a clear error rather
  # than a cryptic failure from the record iterator.
  if not tf.io.gfile.exists(path):
    raise FileNotFoundError(
        'no eval config found under %s (checked JSON config and legacy '
        'record formats)' % output_path)
  serialized_record = six.next(
      tf.compat.v1.python_io.tf_record_iterator(path))
  # NOTE(review): pickle.loads on on-disk data — safe only if output_path is
  # trusted (written by this tool); do not point this at untrusted input.
  final_dict = pickle.loads(serialized_record)
  _check_version(final_dict, output_path)
  old_config = final_dict['eval_config']
  slicing_specs = None
  if old_config.slice_spec:
    slicing_specs = [s.to_proto() for s in old_config.slice_spec]
  options = config.Options()
  options.compute_confidence_intervals.value = (
      old_config.compute_confidence_intervals)
  # NOTE(review): load_eval_run maps this legacy field onto min_slice_size;
  # kept as k_anonymization_count here to preserve existing behavior —
  # confirm which Options field the current config proto defines.
  options.k_anonymization_count.value = old_config.k_anonymization_count
  return (config.EvalConfig(slicing_specs=slicing_specs, options=options),
          old_config.data_location, '', {'': old_config.model_location})
def _serialize_eval_run(eval_config: config.EvalConfig, data_location: Text,
                        file_format: Text,
                        model_locations: Dict[Text, Text]) -> Text:
  """Serializes the eval run metadata to a JSON string.

  Bundles the eval config, library version, data location, file format, and
  model locations into an EvalRun proto and renders it as JSON.
  """
  eval_run = config_pb2.EvalRun(
      eval_config=eval_config,
      version=tfma_version.VERSION_STRING,
      data_location=data_location,
      file_format=file_format,
      model_locations=model_locations)
  return json_format.MessageToJson(eval_run)
def load_eval_run(
    output_path: Text,
    output_file_format: Text = EVAL_CONFIG_FILE_FORMAT,
    filename: Optional[Text] = None
) -> Tuple[Optional[config.EvalConfig], Text, Text, Dict[Text, Text]]:
  """Returns eval config, data location, file format, and model locations.

  Args:
    output_path: Directory containing config file.
    output_file_format: Format of output file. Currently only 'json' is
      supported.
    filename: Name of output file (including extension if any).

  Returns:
    Tuple of (EvalConfig, data location, file format, model locations). If an
    EvalConfig is not found at the given path, None will be returned.
  """
  if filename is None:
    filename = EVAL_CONFIG_FILE + '.' + output_file_format
  config_path = os.path.join(output_path, filename)

  # Current format: a JSON-serialized EvalRun proto.
  if tf.io.gfile.exists(config_path):
    with tf.io.gfile.GFile(config_path, 'r') as f:
      run = json_format.Parse(f.read(), config_pb2.EvalRun())
    _check_version(run.version, output_path)
    return (run.eval_config, run.data_location, run.file_format,
            run.model_locations)

  # Legacy support (to be removed in future): the previous version wrote the
  # config as a pickled dict in an extensionless TFRecord file.
  legacy_path = os.path.splitext(config_path)[0]
  if tf.io.gfile.exists(legacy_path):
    record_iter = tf.compat.v1.python_io.tf_record_iterator(legacy_path)
    legacy_dict = pickle.loads(six.next(record_iter))
    _check_version(legacy_dict, output_path)
    legacy_config = legacy_dict['eval_config']
    specs = None
    if legacy_config.slice_spec:
      specs = [s.to_proto() for s in legacy_config.slice_spec]
    opts = config.Options()
    opts.compute_confidence_intervals.value = (
        legacy_config.compute_confidence_intervals)
    opts.min_slice_size.value = legacy_config.k_anonymization_count
    return (config.EvalConfig(slicing_specs=specs, options=opts),
            legacy_config.data_location, '',
            {'': legacy_config.model_location})

  # Neither format found.
  return (None, '', '', {})