def from_trackable(trackable, concrete_func, inputs, outputs, large_model):
    err_large_model = "model exceeds maximum protobuf size of 2GB. Try setting large_model."

    # Avoid errors due to bug in TF freezing
    removed_resource_to_placeholder, placeholder_to_resource, graph_captures_copy, func_captures_copy = \
        _remove_non_variable_resources_from_captures(concrete_func)

    try:
        frozen_graph = from_function(concrete_func, inputs, outputs, large_model)
    except ValueError as e:
        if any(msg in str(e) for msg in ["exceeds maximum protobuf size of 2GB", "string too long"]):
            raise ValueError(err_large_model)
        raise e

    # We might be returning the concrete_func so let's put it back in working order
    _restore_captured_resources(concrete_func, graph_captures_copy, func_captures_copy)

    table_info = get_hash_table_info(frozen_graph)
    placeholder_to_table_info = {}
    _get_hash_table_info_from_trackable(trackable, table_info,
                                        removed_resource_to_placeholder, placeholder_to_table_info)

    initialized_tables = {}
    for info in table_info:
        if info.shared_name is not None:
            h = lookup_ops.hash_table_v2(info.key_dtype, info.val_dtype, shared_name=info.shared_name)
            n = info.shared_name
        elif info.resource_input in placeholder_to_resource and info.resource_input not in placeholder_to_table_info:
            # We found a lookup op with no corresponding HashTable op, but we can associate the placeholder input
            # from the op with the resource handle from graph captures and make up a shared_name
            h = placeholder_to_resource[info.resource_input]
            n = str(uuid.uuid4()).encode()
            info.shared_name = n
            placeholder_to_table_info[info.resource_input] = info
        else:
            # Found a lookup op but the corresponding HashTable op has already been found and processed.
            continue
        try:
            k, v = lookup_ops.lookup_table_export_v2(h, info.key_dtype, info.val_dtype)
            initialized_tables[n] = (k.numpy(), v.numpy())
        except Exception:  # pylint: disable=broad-except
            logger.warning("Could not initialize table with shared_name = %r", n)

    for placeholder in removed_resource_to_placeholder.values():
        if placeholder not in placeholder_to_table_info:
            logger.error("Could not find table resource to replace placeholder %s", placeholder)

    replace_placeholders_with_tables(frozen_graph, placeholder_to_table_info)

    return frozen_graph, initialized_tables
def from_trackable(trackable, concrete_func, inputs, outputs, large_model):
    err_large_model = "model exceeds maximum protobuf size of 2GB. Try setting large_model."

    # Avoid errors due to bug in TF freezing
    removed_resource_to_placeholder, graph_captures_copy, func_captures_copy = \
        _remove_non_variable_resources_from_captures(concrete_func)

    try:
        frozen_graph = from_function(concrete_func, inputs, outputs, large_model)
    except ValueError as e:
        if any(msg in str(e) for msg in ["exceeds maximum protobuf size of 2GB", "string too long"]):
            raise ValueError(err_large_model)
        raise e

    # We might be returning the concrete_func so let's put it back in working order
    _restore_captured_resources(concrete_func, graph_captures_copy, func_captures_copy)

    table_names, key_dtypes, value_dtypes = get_hash_table_info(frozen_graph)
    placeholder_to_table_info = {}
    _get_hash_table_info_from_trackable(trackable, table_names, key_dtypes, value_dtypes,
                                        removed_resource_to_placeholder, placeholder_to_table_info)

    initialized_tables = {}
    for n, k_dtype, val_dtype in zip(table_names, key_dtypes, value_dtypes):
        h = lookup_ops.hash_table_v2(k_dtype, val_dtype, shared_name=n)
        try:
            k, v = lookup_ops.lookup_table_export_v2(h, k_dtype, val_dtype)
            initialized_tables[n] = (k.numpy(), v.numpy())
        except Exception:  # pylint: disable=broad-except
            logger.warning("Could not initialize table with shared_name = %r", n)

    for placeholder in removed_resource_to_placeholder.values():
        if placeholder not in placeholder_to_table_info:
            logger.error("Could not find table resource to replace placeholder %s", placeholder)

    replace_placeholders_with_tables(frozen_graph, placeholder_to_table_info)

    return frozen_graph, initialized_tables
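# --- Usage sketch (not part of the original module) ---
# A minimal, illustrative example of how from_trackable might be driven from a tf.Module
# that owns a StaticHashTable. The tensor names "x:0" and "Identity:0" are assumptions
# about the placeholder/output names TF typically assigns, not values taken from the source.
import tensorflow as tf

class _ExampleLookupModule(tf.Module):
    def __init__(self):
        super().__init__()
        keys = tf.constant(["a", "b"])
        values = tf.constant([0, 1], dtype=tf.int64)
        self.table = tf.lookup.StaticHashTable(
            tf.lookup.KeyValueTensorInitializer(keys, values), default_value=-1)

    @tf.function(input_signature=[tf.TensorSpec([None], tf.string, name="x")])
    def lookup(self, x):
        return self.table.lookup(x)

def _example_from_trackable():
    module = _ExampleLookupModule()
    concrete = module.lookup.get_concrete_function()
    # Resource tensors are excluded from inputs/outputs by the callers of from_trackable.
    frozen_graph, initialized_tables = from_trackable(
        module, concrete, inputs=["x:0"], outputs=["Identity:0"], large_model=False)
    return frozen_graph, initialized_tables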
def _from_saved_model_v2(model_path, input_names, output_names, tag, signature_def,
                         concrete_function_index, large_model):
    """Load tensorflow graph from saved_model."""

    wrn_no_tag = "'--tag' not specified for saved_model. Using --tag serve"
    wrn_empty_tag = "'--tag' value is empty string. Using tag =[[]]"
    wrn_sig_1 = "'--signature_def' not specified, using first signature: %s"
    err_many_sig = "Cannot load multiple signature defs in TF2.x: %s"
    err_no_call = "Model doesn't contain usable concrete functions under __call__. Try --signature-def instead."
    err_index = "Invalid concrete_function value: %i. Valid values are [0 to %i]"
    err_no_sig = "No signatures found in model. Try --concrete_function instead."
    err_sig_nomatch = "Specified signature not in model %s"
    err_large_model = "model exceeds maximum protobuf size of 2GB. Try running with --large_model flag."

    if tag is None:
        tag = ['serve']
        logger.warning(wrn_no_tag)

    if tag == '':
        tag = [[]]
        logger.warning(wrn_empty_tag)

    utils.make_sure(len(signature_def) < 2, err_many_sig, str(signature_def))
    imported = tf.saved_model.load(model_path, tags=tag)  # pylint: disable=no-value-for-parameter

    all_sigs = imported.signatures.keys()
    valid_sigs = [s for s in all_sigs if not s.startswith("_")]
    logger.info("Signatures found in model: %s", "[" + ",".join(valid_sigs) + "].")

    concrete_func = None
    if concrete_function_index is not None:
        utils.make_sure(hasattr(imported, "__call__"), err_no_call)
        utils.make_sure(concrete_function_index < len(imported.__call__.concrete_functions),
                        err_index, concrete_function_index, len(imported.__call__.concrete_functions) - 1)
        args, kwargs = imported.__call__.concrete_functions[concrete_function_index].structured_input_signature
        concrete_func = imported.__call__.get_concrete_function(*args, **kwargs)
    elif signature_def:
        utils.make_sure(signature_def[0] in valid_sigs, err_sig_nomatch, signature_def[0])
        concrete_func = imported.signatures[signature_def[0]]
    else:
        utils.make_sure(len(valid_sigs) > 0, err_no_sig)
        logger.warning(wrn_sig_1, valid_sigs[0])
        concrete_func = imported.signatures[valid_sigs[0]]

    tensors_to_rename = {}
    if input_names is None:
        inputs = [tensor.name for tensor in concrete_func.inputs if tensor.dtype != tf.dtypes.resource]
        if concrete_func.structured_input_signature is not None:
            args, kwargs = concrete_func.structured_input_signature
            structured_inputs = [t.name for t in args if isinstance(t, tf.TensorSpec)] + sorted(kwargs.keys())
            structured_inputs = set(inp + ":0" for inp in structured_inputs)
            if any(inp in structured_inputs for inp in inputs):
                inputs = [inp for inp in inputs if inp in structured_inputs]
    else:
        inputs = input_names

    if output_names is None:
        outputs = [tensor.name for tensor in concrete_func.outputs if tensor.dtype != tf.dtypes.resource]
        if isinstance(concrete_func.structured_outputs, dict):
            # outputs are sorted, sort structured_outputs the same way
            structured_outputs = sorted(concrete_func.structured_outputs.keys())
            tensors_to_rename.update(zip(outputs, structured_outputs))
            logger.info("Output names: %r", structured_outputs)
        else:
            logger.info("Output names: %r", outputs)
    else:
        outputs = output_names
        logger.info("Outputs not left as None; will use provided names not structured output names.")

    # Avoid errors due to bug in TF freezing
    removed_resource_to_placeholder, graph_captures_copy, func_captures_copy = \
        _remove_non_variable_resources_from_captures(concrete_func)

    try:
        frozen_graph = from_function(concrete_func, inputs, outputs, large_model)
    except ValueError as e:
        if any(msg in str(e) for msg in ["exceeds maximum protobuf size of 2GB", "string too long"]):
            raise ValueError(err_large_model)
        raise e

    # We might be returning the concrete_func so let's put it back in working order
    _restore_captured_resources(concrete_func, graph_captures_copy, func_captures_copy)

    table_names, key_dtypes, value_dtypes = get_hash_table_info(frozen_graph)
    placeholder_to_table_info = {}
    _get_hash_table_info_from_trackable(imported, table_names, key_dtypes, value_dtypes,
                                        removed_resource_to_placeholder, placeholder_to_table_info)

    initialized_tables = {}
    for n, k_dtype, val_dtype in zip(table_names, key_dtypes, value_dtypes):
        h = lookup_ops.hash_table_v2(k_dtype, val_dtype, shared_name=n)
        try:
            k, v = lookup_ops.lookup_table_export_v2(h, k_dtype, val_dtype)
            initialized_tables[n] = (k.numpy(), v.numpy())
        except Exception:  # pylint: disable=broad-except
            logger.warning("Could not initialize table with shared_name = %r", n)

    for placeholder in removed_resource_to_placeholder.values():
        if placeholder not in placeholder_to_table_info:
            logger.error("Could not find table resource to replace placeholder %s", placeholder)

    replace_placeholders_with_tables(frozen_graph, placeholder_to_table_info)

    return frozen_graph, inputs, outputs, concrete_func, imported, initialized_tables, tensors_to_rename
def _from_saved_model_v2(model_path, input_names, output_names, tag, signature_def,
                         concrete_function_index, large_model):
    """Load tensorflow graph from saved_model."""

    wrn_no_tag = "'--tag' not specified for saved_model. Using --tag serve"
    wrn_empty_tag = "'--tag' value is empty string. Using tag =[[]]"
    wrn_sig_1 = "'--signature_def' not specified, using first signature: %s"
    err_many_sig = "Cannot load multiple signature defs in TF2.x: %s"
    err_no_call = "Model doesn't contain usable concrete functions under __call__. Try --signature-def instead."
    err_index = "Invalid concrete_function value: %i. Valid values are [0 to %i]"
    err_no_sig = "No signatures found in model. Try --concrete_function instead."
    err_sig_nomatch = "Specified signature not in model %s"
    err_large_model = "model exceeds maximum protobuf size of 2GB. Try running with --large_model flag."

    if tag is None:
        tag = ['serve']
        logger.warning(wrn_no_tag)

    if tag == '':
        tag = [[]]
        logger.warning(wrn_empty_tag)

    utils.make_sure(len(signature_def) < 2, err_many_sig, str(signature_def))
    imported = tf.saved_model.load(model_path, tags=tag)  # pylint: disable=no-value-for-parameter

    all_sigs = imported.signatures.keys()
    valid_sigs = [s for s in all_sigs if not s.startswith("_")]
    logger.info("Signatures found in model: %s", "[" + ",".join(valid_sigs) + "].")

    concrete_func = None
    if concrete_function_index is not None:
        utils.make_sure(hasattr(imported, "__call__"), err_no_call)
        utils.make_sure(concrete_function_index < len(imported.__call__.concrete_functions),
                        err_index, concrete_function_index, len(imported.__call__.concrete_functions) - 1)
        sig = imported.__call__.concrete_functions[concrete_function_index].structured_input_signature[0]
        concrete_func = imported.__call__.get_concrete_function(*sig)
    elif signature_def:
        utils.make_sure(signature_def[0] in valid_sigs, err_sig_nomatch, signature_def[0])
        concrete_func = imported.signatures[signature_def[0]]
    else:
        utils.make_sure(len(valid_sigs) > 0, err_no_sig)
        logger.warning(wrn_sig_1, valid_sigs[0])
        concrete_func = imported.signatures[valid_sigs[0]]

    inputs = [tensor.name for tensor in concrete_func.inputs if tensor.dtype != tf.dtypes.resource]
    outputs = [tensor.name for tensor in concrete_func.outputs if tensor.dtype != tf.dtypes.resource]

    # filter by user specified inputs/outputs
    if input_names:
        inputs = list(set(input_names) & set(inputs))
    if output_names:
        outputs = list(set(output_names) & set(outputs))

    # Avoid errors due to bug in TF freezing
    removed_resource_to_placeholder, graph_captures_copy, func_captures_copy = \
        _remove_non_variable_resources_from_captures(concrete_func)

    try:
        frozen_graph = from_function(concrete_func, inputs, outputs, large_model)
    except ValueError as e:
        if any(msg in str(e) for msg in ["exceeds maximum protobuf size of 2GB", "string too long"]):
            raise ValueError(err_large_model)
        raise e

    # We might be returning the concrete_func so let's put it back in working order
    _restore_captured_resources(concrete_func, graph_captures_copy, func_captures_copy)

    table_names, key_dtypes, value_dtypes = get_hash_table_info(frozen_graph)
    placeholder_to_table_info = {}
    if hasattr(imported, '_table') and hasattr(imported._table, '_create_resource'):  # pylint: disable=protected-access
        # Add tables from saved_model table initializers
        # pylint: disable=protected-access
        initializer = imported._table._create_resource.concrete_functions[0].function_def
        new_names, new_k_dtypes, new_v_dtypes = get_hash_table_info(initializer.node_def)
        table_names.extend(new_names)
        key_dtypes.extend(new_k_dtypes)
        value_dtypes.extend(new_v_dtypes)
        table_handle = id(imported._table.resource_handle)
        if table_handle in removed_resource_to_placeholder and len(new_names) == 1:
            table_info = (new_names[0], new_k_dtypes[0], new_v_dtypes[0])
            placeholder_to_table_info[removed_resource_to_placeholder[table_handle]] = table_info

    initialized_tables = {}
    for n, k_dtype, val_dtype in zip(table_names, key_dtypes, value_dtypes):
        h = lookup_ops.hash_table_v2(k_dtype, val_dtype, shared_name=n)
        try:
            k, v = lookup_ops.lookup_table_export_v2(h, k_dtype, val_dtype)
            initialized_tables[n] = (k.numpy(), v.numpy())
        except Exception:  # pylint: disable=broad-except
            logger.warning("Could not initialize table with shared_name = %r", n)

    replace_placeholders_with_tables(frozen_graph, placeholder_to_table_info)

    return frozen_graph, inputs, outputs, concrete_func, imported, initialized_tables
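# --- Usage sketch (not part of the original module) ---
# A minimal, illustrative example of calling the loader above. The path
# "/tmp/my_saved_model" is a hypothetical placeholder; the argument values mirror the
# CLI defaults the loader expects (tag=None falls back to ['serve'], an empty
# signature_def list selects the first valid signature).
def _example_from_saved_model_v2():
    frozen_graph, inputs, outputs, concrete_func, imported, initialized_tables = \
        _from_saved_model_v2(
            model_path="/tmp/my_saved_model",  # hypothetical path
            input_names=None,                  # None: use non-resource inputs of the concrete function
            output_names=None,                 # None: use non-resource outputs of the concrete function
            tag=None,                          # None: defaults to ['serve'] with a warning
            signature_def=[],                  # empty: first valid signature is used
            concrete_function_index=None,
            large_model=False)
    return frozen_graph, inputs, outputs, initialized_tables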