def test_combine_graph_defs_src_gradient_func_non_unique(self):
    """Combining fails when the incoming GraphDef repeats a gradient_func name."""
    graph_def_a = GraphDef()
    text_format.Merge(
        '''
        library {
          gradient {
            function_name: "foo"
            gradient_func: "foo_grad"
          }
        }
        ''',
        graph_def_a)

    graph_def_b = GraphDef()
    text_format.Merge(
        '''
        library {
          gradient {
            function_name: "bar"
            gradient_func: "bar_grad"
          }
          gradient {
            function_name: "bar_baz"
            gradient_func: "bar_grad"
          }
        }
        ''',
        graph_def_b)

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        'A GraphDef contains non-unique gradient function names: bar_grad'
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def graph_impl(self, run, tag, is_conceptual, limit_attr_size=None, large_attrs_key=None):
    """Result of the form `(body, mime_type)`, or `None` if no graph exists."""
    if is_conceptual:
        # Conceptual graph: the tag's tensor payload holds a JSON-serialized
        # Keras model config, which is converted to a GraphDef for display.
        tensor_events = self._multiplexer.Tensors(run, tag)
        # Take the first event if there are multiple events written from different
        # steps.
        keras_model_config = json.loads(
            tensor_events[0].tensor_proto.string_val[0])
        graph = keras_util.keras_model_to_graph_def(keras_model_config)
    elif tag:
        # Op graph for a specific tag: the payload is a serialized RunMetadata
        # whose function graphs are merged into a single GraphDef.
        tensor_events = self._multiplexer.Tensors(run, tag)
        # Take the first event if there are multiple events written from different
        # steps.
        run_metadata = config_pb2.RunMetadata.FromString(
            tensor_events[0].tensor_proto.string_val[0])
        graph = graph_pb2.GraphDef()
        for func_graph in run_metadata.function_graphs:
            # Merges `pre_optimization_graph` into `graph`; may raise
            # ValueError on node/function name collisions.
            graph_util.combine_graph_defs(
                graph, func_graph.pre_optimization_graph)
    else:
        # No tag: fall back to the run-level graph from the multiplexer.
        graph = self._multiplexer.Graph(run)

    # This next line might raise a ValueError if the limit parameters
    # are invalid (size is negative, size present but key absent, etc.).
    process_graph.prepare_graph_for_ui(graph, limit_attr_size, large_attrs_key)
    return (str(graph), 'text/x-protobuf')  # pbtxt
def test_combine_graph_defs_gradient_collison(self):
    """Combining fails when one gradient_func maps to two different functions."""
    graph_def_a = GraphDef()
    text_format.Merge(
        '''
        library {
          gradient {
            function_name: "foo"
            gradient_func: "foo_grad"
          }
        }
        ''',
        graph_def_a)

    graph_def_b = GraphDef()
    text_format.Merge(
        '''
        library {
          gradient {
            function_name: "bar"
            gradient_func: "bar_grad"
          }
          gradient {
            function_name: "foo_1"
            gradient_func: "foo_grad"
          }
        }
        ''',
        graph_def_b)

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        ('share a gradient_func name but map to different functions: '
         'foo_grad')
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def graph_impl(
    self,
    run,
    tag,
    is_conceptual,
    experiment=None,
    limit_attr_size=None,
    large_attrs_key=None,
):
    """Result of the form `(body, mime_type)`, or `None` if no graph exists."""
    if self._data_provider:
        # Generic-data-provider path: fetch the graph as a blob sequence.
        graph_blob_sequences = self._data_provider.read_blob_sequences(
            experiment_id=experiment,
            plugin_name=metadata.PLUGIN_NAME,
            run_tag_filter=provider.RunTagFilter(runs=[run], tags=[tag]),
        )
        blob_datum_list = graph_blob_sequences.get(run, {}).get(tag, ())
        try:
            blob_ref = blob_datum_list[0].values[0]
        except IndexError:
            # No blob data for this run/tag: report "no graph".
            return None
        # Always use the blob_key approach for now, even if there is a direct url.
        graph_raw = self._data_provider.read_blob(blob_ref.blob_key)
        # This method ultimately returns pbtxt, but we have to deserialize and
        # later reserialize this anyway, because a) this way we accept binary
        # protobufs too, and b) below we run `prepare_graph_for_ui` on the graph.
        graph = graph_pb2.GraphDef.FromString(graph_raw)
    elif is_conceptual:
        # Conceptual graph: the tag's tensor payload holds a JSON-serialized
        # Keras model config, which is converted to a GraphDef for display.
        tensor_events = self._multiplexer.Tensors(run, tag)
        # Take the first event if there are multiple events written from different
        # steps.
        keras_model_config = json.loads(
            tensor_events[0].tensor_proto.string_val[0]
        )
        graph = keras_util.keras_model_to_graph_def(keras_model_config)
    elif tag:
        # Op graph for a specific tag: the payload is a serialized RunMetadata
        # whose function graphs are merged into a single GraphDef.
        tensor_events = self._multiplexer.Tensors(run, tag)
        # Take the first event if there are multiple events written from different
        # steps.
        run_metadata = config_pb2.RunMetadata.FromString(
            tensor_events[0].tensor_proto.string_val[0]
        )
        graph = graph_pb2.GraphDef()
        for func_graph in run_metadata.function_graphs:
            # Merges `pre_optimization_graph` into `graph`; may raise
            # ValueError on node/function name collisions.
            graph_util.combine_graph_defs(
                graph, func_graph.pre_optimization_graph
            )
    else:
        # No tag: fall back to the run-level graph from the multiplexer.
        graph = self._multiplexer.Graph(run)

    # This next line might raise a ValueError if the limit parameters
    # are invalid (size is negative, size present but key absent, etc.).
    process_graph.prepare_graph_for_ui(
        graph, limit_attr_size, large_attrs_key
    )
    return (str(graph), "text/x-protobuf")  # pbtxt
def test_combine_graph_defs_name_collided_different_content(self):
    """Combining fails when a node name is reused with different contents."""
    graph_def_a = GraphDef()
    text_format.Merge(
        """
        node {
          name: "X"
          op: "Input"
        }
        node {
          name: "W"
          op: "Input"
        }
        node {
          name: "Y"
          op: "MatMul"
          input: "X"
          input: "W"
        }
        versions {
          producer: 21
        }
        """,
        graph_def_a,
    )

    graph_def_b = GraphDef()
    text_format.Merge(
        """
        node {
          name: "X"
          op: "Input"
          device: "cpu:0"
        }
        node {
          name: "Z"
          op: "Input"
        }
        node {
          name: "Q"
          op: "MatMul"
          input: "X"
          input: "Z"
        }
        versions {
          producer: 21
        }
        """,
        graph_def_b,
    )

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        ("Cannot combine GraphDefs because nodes share a name but "
         "contents are different: X"),
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def test_combine_graph_defs_src_nodes_duplicate_keys(self):
    """Combining fails when the incoming GraphDef has duplicate node names."""
    graph_def_a = GraphDef()
    text_format.Merge(
        """
        node {
          name: "X"
          op: "Input"
        }
        node {
          name: "Y"
          op: "Input"
        }
        versions {
          producer: 21
        }
        """,
        graph_def_a,
    )

    graph_def_b = GraphDef()
    text_format.Merge(
        """
        node {
          name: "W"
          op: "Input"
          device: "cpu:0"
        }
        node {
          name: "W"
          op: "Input"
        }
        versions {
          producer: 21
        }
        """,
        graph_def_b,
    )

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        "A GraphDef contains non-unique node names: W",
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def test_combine_graph_defs_dst_nodes_duplicate_keys(self):
    """Combining fails when the destination GraphDef has duplicate node names."""
    graph_def_a = GraphDef()
    text_format.Merge(
        '''
        node {
          name: "X"
          op: "Input"
        }
        node {
          name: "X"
          op: "Input"
        }
        versions {
          producer: 21
        }
        ''',
        graph_def_a)

    graph_def_b = GraphDef()
    text_format.Merge(
        '''
        node {
          name: "X"
          op: "Input"
        }
        node {
          name: "Z"
          op: "Input"
        }
        versions {
          producer: 21
        }
        ''',
        graph_def_b)

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        'A GraphDef contains non-unique node names: X'
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def test_combine_graph_defs_dst_gradient_func_non_unique(self):
    """Combining fails when the destination GraphDef repeats a gradient_func name."""
    graph_def_a = GraphDef()
    text_format.Merge(
        """
        library {
          gradient {
            function_name: "foo"
            gradient_func: "foo_grad"
          }
          gradient {
            function_name: "foo_bar"
            gradient_func: "foo_grad"
          }
        }
        """,
        graph_def_a,
    )

    graph_def_b = GraphDef()
    text_format.Merge(
        """
        library {
          gradient {
            function_name: "bar"
            gradient_func: "bar_grad"
          }
        }
        """,
        graph_def_b,
    )

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        "A GraphDef contains non-unique gradient function names: foo_grad",
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def test_combine_graph_defs_gradient(self):
    """Gradients from both GraphDefs are merged; shared entries appear once."""

    def parse(text):
        # Build a GraphDef from its text-proto representation.
        graph = GraphDef()
        text_format.Merge(text, graph)
        return graph

    destination = parse("""
        library {
          gradient {
            function_name: "foo"
            gradient_func: "foo_grad"
          }
        }
    """)
    source = parse("""
        library {
          gradient {
            function_name: "foo"
            gradient_func: "foo_grad"
          }
          gradient {
            function_name: "bar"
            gradient_func: "bar_grad"
          }
        }
    """)

    combined = graph_util.combine_graph_defs(destination, source)

    # The duplicated "foo" gradient is kept once; "bar" is appended.
    self.assertProtoEquals(
        """
        library {
          gradient {
            function_name: "foo"
            gradient_func: "foo_grad"
          }
          gradient {
            function_name: "bar"
            gradient_func: "bar_grad"
          }
        }
        """,
        combined,
    )
def test_combine_graph_defs_src_function_duplicate_keys(self):
    """Combining fails when the incoming GraphDef has duplicate function names."""
    graph_def_a = GraphDef()
    text_format.Merge(
        '''
        library {
          function {
            signature {
              name: "foo"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
            node_def {
              name: "add"
              op: "Add"
              input: "x"
              input: "y"
            }
          }
        }
        ''',
        graph_def_a)

    graph_def_b = GraphDef()
    text_format.Merge(
        '''
        library {
          function {
            signature {
              name: "bar"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
          }
          function {
            signature {
              name: "bar"
              input_arg {
                name: "y"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
          }
        }
        ''',
        graph_def_b)

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        'A GraphDef contains non-unique function names: bar'
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def test_combine_graph_defs_function_collison(self):
    """Combining fails when a function name is reused with a different body."""
    graph_def_a = GraphDef()
    text_format.Merge(
        '''
        library {
          function {
            signature {
              name: "foo"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
            node_def {
              name: "add"
              op: "Add"
              input: "x"
              input: "y"
            }
          }
        }
        ''',
        graph_def_a)

    graph_def_b = GraphDef()
    text_format.Merge(
        '''
        library {
          function {
            signature {
              name: "foo"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
            node_def {
              name: "div"
              op: "Div"
              input: "x"
              input: "y"
            }
          }
          function {
            signature {
              name: "foo_1"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
            node_def {
              name: "add"
              op: "Add"
              input: "x"
              input: "y"
            }
          }
        }
        ''',
        graph_def_b)

    # Use the native Python 3 unittest method instead of the deprecated
    # `six.assertRaisesRegex(self, ...)` compatibility shim.
    with self.assertRaisesRegex(
        ValueError,
        ('Cannot combine GraphDefs because functions share a name but '
         'are different: foo')
    ):
        graph_util.combine_graph_defs(graph_def_a, graph_def_b)
def test_combine_graph_defs_function(self):
    """Functions from both GraphDefs are merged; identical ones appear once."""

    def parse(text):
        # Build a GraphDef from its text-proto representation.
        graph = GraphDef()
        text_format.Merge(text, graph)
        return graph

    foo_function = '''
          function {
            signature {
              name: "foo"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
            node_def {
              name: "add"
              op: "Add"
              input: "x"
              input: "y"
            }
          }
    '''
    foo_1_function = '''
          function {
            signature {
              name: "foo_1"
              input_arg {
                name: "x"
                type: DT_HALF
              }
              output_arg {
                name: "identity"
                type: DT_HALF
              }
            }
            node_def {
              name: "add"
              op: "Add"
              input: "x"
              input: "y"
            }
          }
    '''

    destination = parse('library {' + foo_function + '}')
    source = parse('library {' + foo_function + foo_1_function + '}')

    combined = graph_util.combine_graph_defs(destination, source)

    # The identical "foo" is deduplicated; "foo_1" is appended after it.
    self.assertProtoEquals(
        'library {' + foo_function + foo_1_function + '}',
        combined,
    )
def test_combine_graph_defs(self):
    """Nodes from two disjoint GraphDefs are concatenated into one graph."""

    def parse(text):
        # Build a GraphDef from its text-proto representation.
        graph = GraphDef()
        text_format.Merge(text, graph)
        return graph

    destination = parse('''
        node { name: "X" op: "Input" }
        node { name: "W" op: "Input" }
        node { name: "Y" op: "MatMul" input: "X" input: "W" }
        versions { producer: 21 }
    ''')
    source = parse('''
        node { name: "A" op: "Input" }
        node { name: "B" op: "Input" }
        node { name: "C" op: "MatMul" input: "A" input: "B" }
        versions { producer: 21 }
    ''')

    combined = graph_util.combine_graph_defs(destination, source)

    # All six nodes appear, destination's first, with versions intact.
    self.assertProtoEquals(
        '''
        node { name: "X" op: "Input" }
        node { name: "W" op: "Input" }
        node { name: "Y" op: "MatMul" input: "X" input: "W" }
        node { name: "A" op: "Input" }
        node { name: "B" op: "Input" }
        node { name: "C" op: "MatMul" input: "A" input: "B" }
        versions { producer: 21 }
        ''',
        combined,
    )
def test_combine_graph_defs_name_collided_but_same_content(self):
    """A node repeated with identical contents is merged without error."""

    def parse(text):
        # Build a GraphDef from its text-proto representation.
        graph = GraphDef()
        text_format.Merge(text, graph)
        return graph

    destination = parse("""
        node { name: "X" op: "Input" }
        node { name: "W" op: "Input" }
        node { name: "Y" op: "MatMul" input: "X" input: "W" }
        versions { producer: 21 }
    """)
    source = parse("""
        node { name: "X" op: "Input" }
        node { name: "A" op: "Input" }
        versions { producer: 21 }
    """)

    combined = graph_util.combine_graph_defs(destination, source)

    # "X" is identical in both graphs, so it is kept once; "A" is appended.
    self.assertProtoEquals(
        """
        node { name: "X" op: "Input" }
        node { name: "W" op: "Input" }
        node { name: "Y" op: "MatMul" input: "X" input: "W" }
        node { name: "A" op: "Input" }
        versions { producer: 21 }
        """,
        combined,
    )