示例#1
0
  def _testExportImportAcrossScopes(self, graph_fn):
    """Tests export and importing a graph across scopes.

    Builds a graph under "dropA/dropB/keepA", exports it with the
    "dropA/dropB" prefix stripped, re-imports it under "importA", and
    checks the result matches a graph built directly under
    "importA/keepA".

    Args:
      graph_fn: A closure that creates a graph on the current scope.
    """
    original_graph = ops.Graph()
    with original_graph.as_default():
      with variable_scope.variable_scope("dropA/dropB/keepA"):
        graph_fn()
    exported_meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
        graph=original_graph, export_scope="dropA/dropB")

    imported_graph = ops.Graph()
    with imported_graph.as_default():
      meta_graph.import_scoped_meta_graph(
          exported_meta_graph_def, import_scope="importA")

    expected_graph = ops.Graph()
    with expected_graph.as_default():
      with variable_scope.variable_scope("importA/keepA"):
        graph_fn()

    # The imported graph must be proto-identical to the directly-built one.
    result, _ = meta_graph.export_scoped_meta_graph(graph=imported_graph)
    expected, _ = meta_graph.export_scoped_meta_graph(graph=expected_graph)
    self.assertProtoEquals(expected, result)
示例#2
0
  def testSummaryWithFamilyMetaGraphExport(self):
    """Summary tags keep their original scope/family across export/import.

    The op names are rewritten under the new import scope, but the tags
    recorded inside the serialized summaries are unaffected.
    """
    with ops.name_scope('outer'):
      i = constant_op.constant(11)
      summ = summary_lib.scalar('inner', i)
      # assertEquals is a deprecated alias (removed in Python 3.12);
      # use assertEqual instead.
      self.assertEqual(summ.op.name, 'outer/inner')
      summ_f = summary_lib.scalar('inner', i, family='family')
      self.assertEqual(summ_f.op.name, 'outer/family/inner')

    metagraph_def, _ = meta_graph.export_scoped_meta_graph(export_scope='outer')

    with ops.Graph().as_default() as g:
      meta_graph.import_scoped_meta_graph(metagraph_def, graph=g,
                                          import_scope='new_outer')
      # The summaries should exist, but with outer scope renamed.
      new_summ = g.get_tensor_by_name('new_outer/inner:0')
      new_summ_f = g.get_tensor_by_name('new_outer/family/inner:0')

      # However, the tags are unaffected.
      with self.cached_session() as s:
        new_summ_str, new_summ_f_str = s.run([new_summ, new_summ_f])
        new_summ_pb = summary_pb2.Summary()
        new_summ_pb.ParseFromString(new_summ_str)
        self.assertEqual('outer/inner', new_summ_pb.value[0].tag)
        new_summ_f_pb = summary_pb2.Summary()
        new_summ_f_pb.ParseFromString(new_summ_f_str)
        self.assertEqual('family/outer/family/inner',
                         new_summ_f_pb.value[0].tag)
  def testPotentialCycle(self):
    """Importing a scope with unbound inputs fails unless mapped.

    The "hidden1" scope consumes `matmul` produced outside the scope, so
    the export records it as an unbound input; the import must supply it
    via `input_map`.
    """
    graph1 = ops.Graph()
    with graph1.as_default():
      a = constant_op.constant(1.0, shape=[2, 2])
      b = constant_op.constant(2.0, shape=[2, 2])
      matmul = math_ops.matmul(a, b)
      with ops.name_scope("hidden1"):
        c = nn_ops.relu(matmul)
        d = constant_op.constant(3.0, shape=[2, 2])
        matmul = math_ops.matmul(c, d)

    orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
        export_scope="hidden1", graph=graph1)

    graph2 = ops.Graph()
    with graph2.as_default():
      # assertRaisesRegexp is a deprecated alias (removed in Python 3.12).
      with self.assertRaisesRegex(ValueError, "Graph contains unbound inputs"):
        meta_graph.import_scoped_meta_graph(
            orig_meta_graph, import_scope="new_hidden1")

      # Supplying the unbound input through input_map makes the import work.
      meta_graph.import_scoped_meta_graph(
          orig_meta_graph,
          import_scope="new_hidden1",
          input_map={
              "$unbound_inputs_MatMul": constant_op.constant(
                  4.0, shape=[2, 2])
          })
示例#4
0
  def testClearDevices(self):
    """Importing with clear_devices=True wipes all device placements."""
    graph1 = tf.Graph()
    with graph1.as_default():
      with tf.device("/device:CPU:0"):
        a = tf.Variable(tf.constant(1.0, shape=[2, 2]), name="a")
      with tf.device("/job:ps/replica:0/task:0/gpu:0"):
        b = tf.Variable(tf.constant(2.0, shape=[2, 2]), name="b")
      with tf.device("/job:localhost/replica:0/task:0/cpu:0"):
        tf.matmul(a, b, name="matmul")

    # The original graph carries fully-specified (canonicalized) devices.
    expected_devices = {
        "a": "/device:CPU:0",
        "b": "/job:ps/replica:0/task:0/device:GPU:0",
        "matmul": "/job:localhost/replica:0/task:0/device:CPU:0",
    }
    for name, device in expected_devices.items():
      self.assertEqual(device, str(graph1.as_graph_element(name).device))

    orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(graph=graph1)

    graph2 = tf.Graph()
    with graph2.as_default():
      meta_graph.import_scoped_meta_graph(orig_meta_graph, clear_devices=True)

    # After import with clear_devices=True, every placement is empty.
    for name in ("a", "b", "matmul"):
      self.assertEqual("", str(graph2.as_graph_element(name).device))
示例#5
0
  def testMetricsCollection(self):
    """Export/import round-trips the LOCAL_VARIABLES collection of a metric."""

    def _enqueue_vector(sess, queue, values, shape=None):
      # Enqueues `values` as one constant tensor; defaults to a
      # row-vector shape of (1, len(values)).
      if not shape:
        shape = (1, len(values))
      dtype = queue.dtypes[0]
      sess.run(
          queue.enqueue(constant_op.constant(
              values, dtype=dtype, shape=shape)))

    meta_graph_filename = os.path.join(
        _TestDir("metrics_export"), "meta_graph.pb")

    # Build a graph with a metric (which creates local variables) and
    # export it to disk.
    graph = ops.Graph()
    with self.session(graph=graph) as sess:
      values_queue = data_flow_ops.FIFOQueue(
          4, dtypes.float32, shapes=(1, 2))
      _enqueue_vector(sess, values_queue, [0, 1])
      _enqueue_vector(sess, values_queue, [-4.2, 9.1])
      _enqueue_vector(sess, values_queue, [6.5, 0])
      _enqueue_vector(sess, values_queue, [-3.2, 4.0])
      values = values_queue.dequeue()

      _, update_op = metrics.mean(values)

      initializer = variables.local_variables_initializer()
      self.evaluate(initializer)
      self.evaluate(update_op)

    meta_graph.export_scoped_meta_graph(
        filename=meta_graph_filename, graph=graph)

    # Verifies that importing a meta_graph with LOCAL_VARIABLES collection
    # works correctly.
    graph = ops.Graph()
    with self.session(graph=graph) as sess:
      meta_graph.import_scoped_meta_graph(meta_graph_filename)
      initializer = variables.local_variables_initializer()
      self.evaluate(initializer)

    # Verifies that importing an old meta_graph where "local_variables"
    # collection is of node_list type works, but cannot build initializer
    # with the collection.
    graph = ops.Graph()
    with self.session(graph=graph) as sess:
      meta_graph.import_scoped_meta_graph(
          test.test_src_dir_path(
              "python/framework/testdata/metrics_export_meta_graph.pb"))
      self.assertEqual(len(ops.get_collection(ops.GraphKeys.LOCAL_VARIABLES)),
                       2)
      # assertRaisesRegexp is a deprecated alias (removed in Python 3.12).
      with self.assertRaisesRegex(
          AttributeError, "'Tensor' object has no attribute 'initializer'"):
        initializer = variables.local_variables_initializer()
示例#6
0
  def testNoVariables(self):
    """Export/import round-trip of a graph with zero variables.

    Builds a tiny placeholder-plus-constant graph, exports a MetaGraphDef
    with an explicit collection_list and no saver, re-imports it into a
    fresh graph, and checks the proto, the collections, and the computed
    result all survive the round trip.
    """
    test_dir = _TestDir("no_variables")
    filename = os.path.join(test_dir, "metafile")

    input_feed_value = -10  # Arbitrary input value for feed_dict.

    orig_graph = ops.Graph()
    with self.session(graph=orig_graph) as sess:
      # Create a minimal graph with zero variables.
      input_tensor = array_ops.placeholder(
          dtypes.float32, shape=[], name="input")
      offset = constant_op.constant(42, dtype=dtypes.float32, name="offset")
      output_tensor = math_ops.add(input_tensor, offset, name="add_offset")

      # Add input and output tensors to graph collections.
      ops.add_to_collection("input_tensor", input_tensor)
      ops.add_to_collection("output_tensor", output_tensor)

      # -10 + 42 == 32.
      output_value = sess.run(output_tensor, {input_tensor: input_feed_value})
      self.assertEqual(output_value, 32)

      # Generates MetaGraphDef.
      meta_graph_def, var_list = meta_graph.export_scoped_meta_graph(
          filename=filename,
          graph_def=ops.get_default_graph().as_graph_def(add_shapes=True),
          collection_list=["input_tensor", "output_tensor"],
          saver_def=None)
      self.assertTrue(meta_graph_def.HasField("meta_info_def"))
      self.assertNotEqual(meta_graph_def.meta_info_def.tensorflow_version, "")
      self.assertNotEqual(meta_graph_def.meta_info_def.tensorflow_git_version,
                          "")
      # No variables were created, so the returned var_list is empty.
      self.assertEqual({}, var_list)

    # Create a clean graph and import the MetaGraphDef nodes.
    new_graph = ops.Graph()
    with self.session(graph=new_graph) as sess:
      # Import the previously export meta graph.
      meta_graph.import_scoped_meta_graph(filename)

      # Re-exports the current graph state for comparison to the original.
      new_meta_graph_def, _ = meta_graph.export_scoped_meta_graph(filename +
                                                                  "_new")
      test_util.assert_meta_graph_protos_equal(self, meta_graph_def,
                                               new_meta_graph_def)

      # Ensures that we can still get a reference to our graph collections.
      new_input_tensor = ops.get_collection("input_tensor")[0]
      new_output_tensor = ops.get_collection("output_tensor")[0]
      # Verifies that the new graph computes the same result as the original.
      new_output_value = sess.run(new_output_tensor,
                                  {new_input_tensor: input_feed_value})
      self.assertEqual(new_output_value, output_value)
示例#7
0
  def testWhileLoopGradients(self):
    """Gradients through an imported while loop match the original graph."""
    # Create a simple while loop.
    with ops.Graph().as_default():
      with ops.name_scope("export"):
        var = variables.Variable(0.)
        var_name = var.name
        # Loop 5 iterations, accumulating cast(i) into x; x starts at var.
        _, output = control_flow_ops.while_loop(
            lambda i, x: i < 5,
            lambda i, x: (i + 1, x + math_ops.cast(i, dtypes.float32)),
            [0, var])
        output_name = output.name

      # Generate a MetaGraphDef containing the while loop with an export scope.
      meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
          export_scope="export")

      # Build and run the gradients of the while loop. We use this below to
      # verify that the gradients are correct with the imported MetaGraphDef.
      init_op = variables.global_variables_initializer()
      grad = gradients_impl.gradients([output], [var])
      with session.Session() as sess:
        self.evaluate(init_op)
        expected_grad_value = self.evaluate(grad)

    # Restore the MetaGraphDef into a new Graph with an import scope.
    with ops.Graph().as_default():
      meta_graph.import_scoped_meta_graph(meta_graph_def, import_scope="import")

      # Re-export and make sure we get the same MetaGraphDef.
      new_meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
          export_scope="import")
      test_util.assert_meta_graph_protos_equal(
          self, meta_graph_def, new_meta_graph_def)

      # Make sure we can still build gradients and get the same result.

      def new_name(tensor_name):
        # Maps a tensor name from the original "export/" scope into the
        # re-imported "import/" scope.
        base_tensor_name = tensor_name.replace("export/", "")
        return "import/" + base_tensor_name

      var = ops.get_default_graph().get_tensor_by_name(new_name(var_name))
      output = ops.get_default_graph().get_tensor_by_name(new_name(output_name))
      grad = gradients_impl.gradients([output], [var])

      init_op = variables.global_variables_initializer()

      with session.Session() as sess:
        self.evaluate(init_op)
        actual_grad_value = self.evaluate(grad)
        self.assertEqual(expected_grad_value, actual_grad_value)
  def _testScopedImportWithQueue(self, test_dir, exported_filename,
                                 new_exported_filename):
    """Imports a queue meta graph under a new scope, then re-exports it.

    Returns:
      The MetaGraphDef produced by re-exporting the "new_queue1" scope.
    """
    graph = tf.Graph()
    meta_graph.import_scoped_meta_graph(
        os.path.join(test_dir, exported_filename),
        graph=graph,
        import_scope="new_queue1")
    # Both queue ops must be reachable under the new scope; as_graph_element
    # raises if either is missing.
    for element_name in ("new_queue1/dequeue:0", "new_queue1/close"):
      graph.as_graph_element(element_name)
    with graph.as_default():
      new_meta_graph, _ = meta_graph.export_scoped_meta_graph(
          filename=os.path.join(test_dir, new_exported_filename),
          graph=graph,
          export_scope="new_queue1")

    return new_meta_graph
示例#9
0
  def _testExportImportAcrossScopes(self, graph_fn, use_resource):
    """Tests export and importing a graph across scopes.

    Exports with the "dropA/dropB" prefix stripped, re-imports under
    "importA", and compares protos against a graph built directly under
    "importA/keepA".

    Args:
      graph_fn: A closure that creates a graph on the current scope.
      use_resource: A bool indicating whether or not to use ResourceVariables.
    """
    with ops.Graph().as_default() as original_graph:
      with variable_scope.variable_scope("dropA/dropB/keepA"):
        graph_fn(use_resource=use_resource)
    exported_meta_graph_def = meta_graph.export_scoped_meta_graph(
        graph=original_graph,
        export_scope="dropA/dropB")[0]

    with ops.Graph().as_default() as imported_graph:
      meta_graph.import_scoped_meta_graph(
          exported_meta_graph_def,
          import_scope="importA")

    with ops.Graph().as_default() as expected_graph:
      with variable_scope.variable_scope("importA/keepA"):
        graph_fn(use_resource=use_resource)

      if use_resource:
        # Bringing in a collection that contains ResourceVariables adds ops
        # to the graph, so mimic the same behavior.
        # Sorted so the mimic ops are created in a deterministic order.
        for collection_key in sorted([
            ops.GraphKeys.GLOBAL_VARIABLES,
            ops.GraphKeys.TRAINABLE_VARIABLES,
        ]):
          for var in expected_graph.get_collection(collection_key):
            var._read_variable_op()

    result = meta_graph.export_scoped_meta_graph(graph=imported_graph)[0]
    expected = meta_graph.export_scoped_meta_graph(graph=expected_graph)[0]

    if use_resource:
      # Clear all shared_name attributes before comparing, since they are
      # supposed to be orthogonal to scopes.
      for meta_graph_def in [result, expected]:
        for node in meta_graph_def.graph_def.node:
          shared_name_attr = "shared_name"
          shared_name_value = node.attr.get(shared_name_attr, None)
          # Only blank out attrs that are present, string-valued, non-empty.
          if shared_name_value and shared_name_value.HasField("s"):
            if shared_name_value.s:
              node.attr[shared_name_attr].s = b""

    self.assertProtoEquals(expected, result)
示例#10
0
 def testImportsUsingSameScopeName(self):
   """Re-importing under an existing scope name uniquifies the scope."""
   with ops.Graph().as_default():
     variables.Variable(0, name="v")
     meta_graph_def, _ = meta_graph.export_scoped_meta_graph()
   with ops.Graph().as_default():
     # The second import into scope "s" must land in "s_1".
     for expected_suffix in ("", "_1"):
       imported_variables = meta_graph.import_scoped_meta_graph(
           meta_graph_def, import_scope="s")
       self.assertEqual(len(imported_variables), 1)
       self.assertEqual(list(imported_variables.keys())[0], "v:0")
       imported_name = list(imported_variables.values())[0].name
       self.assertEqual(imported_name, "s" + expected_suffix + "/v:0")
示例#11
0
  def testVariableObjectsAreSharedAmongCollections(self):
    """Import keeps one shared Variable object across collections."""
    with ops.Graph().as_default() as graph1:
      v = variables.Variable(3.0)
      # A single instance of Variable is registered in both collections.
      g_vars = graph1.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      t_vars = graph1.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
      self.assertEqual(len(g_vars), 1)
      self.assertEqual(len(t_vars), 1)
      self.assertIs(g_vars[0], t_vars[0])
      self.assertIs(v, g_vars[0])

    orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(graph=graph1)
    # Drop graph1 so code below cannot accidentally reference it.
    del graph1

    with ops.Graph().as_default() as graph2:
      meta_graph.import_scoped_meta_graph(orig_meta_graph)
      g_vars = graph2.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
      t_vars = graph2.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)
      self.assertEqual(len(g_vars), 1)
      self.assertEqual(len(t_vars), 1)
      # Sharing must survive the export/import round trip.
      self.assertIs(g_vars[0], t_vars[0])
示例#12
0
  def _testExportImportAcrossScopes(self, graph_fn, use_resource):
    """Tests export and importing a graph across scopes.

    Exports with the "dropA/dropB" prefix stripped, re-imports under
    "importA", and compares against a graph built directly under
    "importA/keepA".

    Args:
      graph_fn: A closure that creates a graph on the current scope.
      use_resource: A bool indicating whether or not to use ResourceVariables.
    """
    original_graph = ops.Graph()
    with original_graph.as_default():
      with variable_scope.variable_scope("dropA/dropB/keepA"):
        graph_fn(use_resource=use_resource)
    exported_meta_graph_def, _ = meta_graph.export_scoped_meta_graph(
        graph=original_graph, export_scope="dropA/dropB")

    imported_graph = ops.Graph()
    with imported_graph.as_default():
      meta_graph.import_scoped_meta_graph(
          exported_meta_graph_def, import_scope="importA")

    expected_graph = ops.Graph()
    with expected_graph.as_default():
      with variable_scope.variable_scope("importA/keepA"):
        graph_fn(use_resource=use_resource)

    result, _ = meta_graph.export_scoped_meta_graph(graph=imported_graph)
    expected, _ = meta_graph.export_scoped_meta_graph(graph=expected_graph)

    if use_resource:
      # shared_name attrs are orthogonal to scopes and are not updated on
      # export/import, so blank them out before comparing.
      for proto in (result, expected):
        for node in proto.graph_def.node:
          attr_value = node.attr.get("shared_name", None)
          if (attr_value and attr_value.HasField("s")
              and attr_value.s):
            node.attr["shared_name"].s = b""

    test_util.assert_meta_graph_protos_equal(self, expected, result)
示例#13
0
    def testScopedImportUnderNameScopeNoVarScope(self):
        """An enclosing name_scope alone prefixes imported variable names."""
        graph = ops.Graph()
        with graph.as_default():
            variables.Variable(initial_value=1.0, trainable=True, name="myvar")
        meta_graph_def, _ = meta_graph.export_scoped_meta_graph(graph=graph)

        graph = ops.Graph()
        with graph.as_default():
            with ops.name_scope("foo"):
                # No import_scope: only the surrounding name_scope applies.
                imported_variables = meta_graph.import_scoped_meta_graph(
                    meta_graph_def)
                self.assertEqual(len(imported_variables), 1)
                imported_var = list(imported_variables.values())[0]
                self.assertEqual(imported_var.name, "foo/myvar:0")
示例#14
0
    def testMetricVariablesCollectionLoadsBytesList(self):
        """METRIC_VARIABLES stored as a bytes_list deserializes to Variables."""
        with ops.Graph().as_default() as graph1:
            v1 = variables.Variable([1, 2, 3],
                                    shape=[3],
                                    dtype=dtypes.float64,
                                    name="v")

        orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(graph=graph1)

        # Copy bytes list from global variables collection to metric variables.
        global_variables_collection = orig_meta_graph.collection_def[
            "variables"]
        orig_meta_graph.collection_def[
            ops.GraphKeys.METRIC_VARIABLES].CopyFrom(
                global_variables_collection)

        with ops.Graph().as_default() as graph2:
            meta_graph.import_scoped_meta_graph(orig_meta_graph)
            metric_vars = graph2.get_collection(
                ops.GraphKeys.METRIC_VARIABLES)
            self.assertEqual(len(metric_vars), 1)
            v2 = metric_vars[0]
            self.assertIsInstance(v2, variables.Variable)
            # The round-tripped variable keeps its name, dtype and shape.
            self.assertEqual(v1.name, v2.name)
            self.assertEqual(v1.dtype, v2.dtype)
            self.assertEqual(v1.shape, v2.shape)
示例#15
0
  def testScopedImportUnderNameScope(self):
    """import_scope composes with an enclosing name_scope."""
    graph = ops.Graph()
    with graph.as_default():
      variables.Variable(initial_value=1.0, trainable=True, name="myvar")
    meta_graph_def, _ = meta_graph.export_scoped_meta_graph(graph=graph)

    graph = ops.Graph()
    with graph.as_default():
      with ops.name_scope("foo"):
        imported_variables = meta_graph.import_scoped_meta_graph(
            meta_graph_def, import_scope="bar")
        self.assertEqual(len(imported_variables), 1)
        imported_var = list(imported_variables.values())[0]
        # Both the surrounding name_scope and the import_scope apply.
        self.assertEqual(imported_var.name, "foo/bar/myvar:0")
示例#16
0
    def testExportNestedNames(self):
        """Exports a nested sub-scope and re-imports it under a new scope."""
        graph1 = tf.Graph()
        with graph1.as_default():
            with tf.name_scope("hidden1/hidden2/hidden3"):
                images = tf.constant(1.0,
                                     tf.float32,
                                     shape=[3, 2],
                                     name="images")
                weights1 = tf.Variable([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                                       name="weights")
                biases1 = tf.Variable([0.1] * 3, name="biases")
                tf.nn.relu(tf.matmul(images, weights1) + biases1, name="relu")

        # Exporting scope "hidden1/hidden2" strips the prefix from the keys
        # and node names, while the Variable objects keep their full names.
        orig_meta_graph, var_list = meta_graph.export_scoped_meta_graph(
            export_scope="hidden1/hidden2", graph=graph1)
        self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                         sorted(var_list.keys()))
        self.assertEqual([
            "hidden1/hidden2/hidden3/biases:0",
            "hidden1/hidden2/hidden3/weights:0"
        ], sorted(v.name for _, v in var_list.items()))
        for node in orig_meta_graph.graph_def.node:
            self.assertTrue(node.name.startswith("hidden3"))

        graph2 = tf.Graph()
        new_var_list = meta_graph.import_scoped_meta_graph(
            orig_meta_graph,
            import_scope="new_hidden1/new_hidden2",
            graph=graph2)
        self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                         sorted(new_var_list.keys()))
        self.assertEqual([
            "new_hidden1/new_hidden2/hidden3/biases:0",
            "new_hidden1/new_hidden2/hidden3/weights:0"
        ], sorted(v.name for _, v in new_var_list.items()))

        # Colocation (_class) attrs must be rewritten to the new scope too.
        for suffix in ("biases", "weights"):
            op_name = "new_hidden1/new_hidden2/hidden3/%s/Assign" % suffix
            expected_class = ("loc:@new_hidden1/new_hidden2/hidden3/%s"
                              % suffix).encode("utf-8")
            self.assertEqual(
                [expected_class],
                graph2.get_operation_by_name(op_name).get_attr("_class"))
示例#17
0
  def testPotentialCycle(self):
    """Importing a scope with unbound inputs fails unless mapped.

    "hidden1" consumes `matmul` produced outside the scope, so the export
    records it as an unbound input; the import must supply it via
    `input_map`.
    """
    graph1 = tf.Graph()
    with graph1.as_default():
      a = tf.constant(1.0, shape=[2, 2])
      b = tf.constant(2.0, shape=[2, 2])
      matmul = tf.matmul(a, b)
      with tf.name_scope("hidden1"):
        c = tf.nn.relu(matmul)
        d = tf.constant(3.0, shape=[2, 2])
        matmul = tf.matmul(c, d)

    orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
        export_scope="hidden1", graph=graph1)

    graph2 = tf.Graph()
    with graph2.as_default():
      # assertRaisesRegexp is a deprecated alias (removed in Python 3.12).
      with self.assertRaisesRegex(ValueError, "Graph contains unbound inputs"):
        meta_graph.import_scoped_meta_graph(
            orig_meta_graph, import_scope="new_hidden1")

      # Supplying the unbound input through input_map makes the import work.
      meta_graph.import_scoped_meta_graph(
          orig_meta_graph, import_scope="new_hidden1",
          input_map={"$unbound_inputs_MatMul": tf.constant(4.0, shape=[2, 2])})
示例#18
0
    def testClearDevices(self):
        """Device placements can be cleared on export or on import.

        Checks three equivalent ways of producing a device-free import:
        clearing on export from a Graph, clearing on export from a
        GraphDef, and clearing on import.
        """
        graph1 = ops.Graph()
        with graph1.as_default():
            with ops.device("/device:CPU:0"):
                a = variables.Variable(constant_op.constant(1.0, shape=[2, 2]),
                                       name="a")
            with ops.device("/job:ps/replica:0/task:0/device:GPU:0"):
                b = variables.Variable(constant_op.constant(2.0, shape=[2, 2]),
                                       name="b")
            with ops.device("/job:localhost/replica:0/task:0/cpu:0"):
                math_ops.matmul(a, b, name="matmul")

        # The original graph carries fully-specified (canonicalized) devices.
        self.assertEqual("/device:CPU:0",
                         str(graph1.as_graph_element("a").device))
        self.assertEqual("/job:ps/replica:0/task:0/device:GPU:0",
                         str(graph1.as_graph_element("b").device))
        self.assertEqual("/job:localhost/replica:0/task:0/device:CPU:0",
                         str(graph1.as_graph_element("matmul").device))

        def _assert_imported_devices_empty(meta_graph_def, clear_devices):
            # Imports into a fresh graph and checks all devices are empty.
            graph2 = ops.Graph()
            with graph2.as_default():
                meta_graph.import_scoped_meta_graph(
                    meta_graph_def, clear_devices=clear_devices)
            self.assertEqual("", str(graph2.as_graph_element("a").device))
            self.assertEqual("", str(graph2.as_graph_element("b").device))
            self.assertEqual("",
                             str(graph2.as_graph_element("matmul").device))

        # Verifies that devices are cleared on export.
        orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
            graph=graph1, clear_devices=True)
        _assert_imported_devices_empty(orig_meta_graph, clear_devices=False)

        # Verifies that devices are cleared on export when passing in
        # graph_def.
        orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
            graph_def=graph1.as_graph_def(), clear_devices=True)
        _assert_imported_devices_empty(orig_meta_graph, clear_devices=False)

        # Verifies that devices are cleared on import.
        orig_meta_graph, _ = meta_graph.export_scoped_meta_graph(
            graph=graph1, clear_devices=False)
        _assert_imported_devices_empty(orig_meta_graph, clear_devices=True)
示例#19
0
def import_and_run_meta_graph(
    meta_graph_def,
    result_pattern,
    feed_dict_fn,
    log_dir_fn,
    finish_session_fn=None):
  """Imports `meta_graph_def` into a fresh session and runs it.

  Args:
    meta_graph_def: A `MetaGraphDef` proto to import into a new session's
      default graph.
    result_pattern: Description of which results to fetch; forwarded to
      `run_session`.
    feed_dict_fn: Zero-arg callable returning the feed dict for the run.
    log_dir_fn: Callable producing a log directory; forwarded to
      `run_session`.
    finish_session_fn: Optional callable forwarded to `run_session`, invoked
      when the session is finished.

  Returns:
    Whatever `run_session` returns for the imported graph.
  """
  # TODO(adamb) Carefully find the asset map and replace any asset py funcs appropriately with input_map.
  with create_session() as sess:
    try:
      meta_graph.import_scoped_meta_graph(
        meta_graph_def,
        input_map=None,
      )
    except KeyError:
      # Dump the node names imported so far to make the failure debuggable,
      # then re-raise with the original traceback intact.
      nodes = sorted(n.name for n in tf.get_default_graph().as_graph_def().node)
      eprint('error, but got nodes', nodes)
      raise

    # NOTE(adamb) Could also store files to copy out in assets_collection
    js_py_func_data_tensor = None
    try:
      js_py_func_data_tensor = sess.graph.get_tensor_by_name("py_funcs_json:0")
    except KeyError:
      # The graph carries no serialized py_func payload; nothing to restore.
      pass

    if js_py_func_data_tensor is not None:
      # Restore Python functions that were serialized alongside the graph so
      # that imported py_func ops can find their callables.
      js_py_func_data = js_py_func_data_tensor.eval().decode('utf-8')
      py_func_data = json.loads(js_py_func_data)
      # eprint('loaded py_func_data', py_func_data)
      py_importer = graph_ffi.PythonImporter()
      py_importer.restore_py_funcs(script_ops._py_funcs, py_func_data)

    try:
      return run_session(sess, result_pattern, feed_dict_fn(), log_dir_fn,
                         finish_session_fn=finish_session_fn)
    finally:
      # Redundant if create_session() closes on exit, but kept for safety.
      sess.close()
示例#20
0
  def doTestExportNestedNames(self, use_resource=False):
    """Exports scope "hidden1/hidden2" and re-imports it under a new prefix.

    Args:
      use_resource: If True, "biases" is created as a ResourceVariable. The
        two branches also create the variables in opposite order on purpose.
    """
    graph1 = ops.Graph()
    with graph1.as_default():
      with ops.name_scope("hidden1/hidden2/hidden3"):
        images = constant_op.constant(
            1.0, dtypes.float32, shape=[3, 2], name="images")
        if use_resource:
          weights1 = variables.Variable(
              [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], name="weights")
          biases1 = resource_variable_ops.ResourceVariable(
              [0.1] * 3, name="biases")
        else:
          biases1 = variables.Variable([0.1] * 3, name="biases")
          weights1 = variables.Variable(
              [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], name="weights")
        nn_ops.relu(math_ops.matmul(images, weights1) + biases1, name="relu")

    # Exporting with export_scope="hidden1/hidden2" strips that prefix, so
    # exported names all start at "hidden3".
    exported, exported_vars = meta_graph.export_scoped_meta_graph(
        export_scope="hidden1/hidden2", graph=graph1)
    self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                     sorted(exported_vars.keys()))
    self.assertEqual(
        ["hidden1/hidden2/hidden3/biases:0",
         "hidden1/hidden2/hidden3/weights:0"],
        sorted(v.name for v in exported_vars.values()))
    for graph_node in exported.graph_def.node:
      self.assertTrue(graph_node.name.startswith("hidden3"))

    # Importing under "new_hidden1/new_hidden2" re-prefixes everything.
    graph2 = ops.Graph()
    imported_vars = meta_graph.import_scoped_meta_graph(
        exported, import_scope="new_hidden1/new_hidden2", graph=graph2)
    self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                     sorted(imported_vars.keys()))
    self.assertEqual(
        ["new_hidden1/new_hidden2/hidden3/biases:0",
         "new_hidden1/new_hidden2/hidden3/weights:0"],
        sorted(v.name for v in imported_vars.values()))

    # Colocation (_class) attributes must be re-prefixed along with names.
    for var in ("biases", "weights"):
      assign_op = graph2.get_operation_by_name(
          "new_hidden1/new_hidden2/hidden3/%s/Assign" % var)
      self.assertEqual(
          [b"loc:@new_hidden1/new_hidden2/hidden3/" + var.encode("ascii")],
          assign_op.get_attr("_class"))
示例#21
0
    def testVariableObjectsAreSharedAmongCollections(self):
        """Importing must keep one shared Variable object across collections."""
        with ops.Graph().as_default() as graph1:
            var = variables.Variable(3.0)
            # Creating the variable registers the *same* object in both the
            # global and the trainable collections.
            globals_in_1 = graph1.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
            trainables_in_1 = graph1.get_collection(
                ops.GraphKeys.TRAINABLE_VARIABLES)
            self.assertEqual(1, len(globals_in_1))
            self.assertEqual(1, len(trainables_in_1))
            self.assertIs(globals_in_1[0], trainables_in_1[0])
            self.assertIs(var, globals_in_1[0])

        exported_meta, _ = meta_graph.export_scoped_meta_graph(graph=graph1)
        del graph1  # Avoid accidentally touching graph1 while testing graph2.

        with ops.Graph().as_default() as graph2:
            meta_graph.import_scoped_meta_graph(exported_meta)
            globals_in_2 = graph2.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
            trainables_in_2 = graph2.get_collection(
                ops.GraphKeys.TRAINABLE_VARIABLES)
            self.assertEqual(1, len(globals_in_2))
            self.assertEqual(1, len(trainables_in_2))
            # The import must rebuild a single shared Variable instance too.
            self.assertIs(globals_in_2[0], trainables_in_2[0])
    def _test_import(include_collection_keys, omit_collection_keys):
      """Imports the saved meta graph restoring only selected collections."""
      assert set(include_collection_keys).isdisjoint(omit_collection_keys)
      target_graph = ops.Graph()
      scope = "some_scope_name"

      def _should_restore(key):
        # Keep a collection only if explicitly included and not omitted.
        return key in include_collection_keys and key not in omit_collection_keys

      meta_graph.import_scoped_meta_graph(
          meta_graph_filename,
          graph=target_graph,
          import_scope=scope,
          restore_collections_predicate=_should_restore)
      # Every included collection must have been restored (non-empty)...
      self.assertTrue(all(
          target_graph.get_collection(name=key, scope=scope)
          for key in include_collection_keys))
      # ...and none of the omitted ones may be present.
      self.assertFalse(any(
          target_graph.get_collection(name=key, scope=scope)
          for key in omit_collection_keys))
示例#23
0
    def _testExportImportAcrossScopes(self, graph_fn, use_resource):
        """Tests export and importing a graph across scopes.

        Builds `graph_fn` under scope "dropA/dropB/keepA", exports with
        export_scope="dropA/dropB" (stripping that prefix), imports the
        result under "importA", and asserts the outcome matches a graph
        built directly under "importA/keepA".

        Args:
          graph_fn: A closure that creates a graph on the current scope.
          use_resource: A bool indicating whether or not to use
            ResourceVariables.
        """
        with ops.Graph().as_default() as original_graph:
            with variable_scope.variable_scope("dropA/dropB/keepA"):
                graph_fn(use_resource=use_resource)
        # Export only the "dropA/dropB" subgraph; exported names lose that
        # prefix.
        exported_meta_graph_def = meta_graph.export_scoped_meta_graph(
            graph=original_graph, export_scope="dropA/dropB")[0]

        with ops.Graph().as_default() as imported_graph:
            meta_graph.import_scoped_meta_graph(exported_meta_graph_def,
                                                import_scope="importA")

        # Reference graph: what the import should be indistinguishable from.
        with ops.Graph().as_default() as expected_graph:
            with variable_scope.variable_scope("importA/keepA"):
                graph_fn(use_resource=use_resource)

        result = meta_graph.export_scoped_meta_graph(graph=imported_graph)[0]
        expected = meta_graph.export_scoped_meta_graph(graph=expected_graph)[0]

        if use_resource:
            # Clear all shared_name attributes before comparing, since they are
            # orthogonal to scopes and are not updated on export/import.
            for meta_graph_def in [result, expected]:
                for node in meta_graph_def.graph_def.node:
                    shared_name_attr = "shared_name"
                    shared_name_value = node.attr.get(shared_name_attr, None)
                    if shared_name_value and shared_name_value.HasField("s"):
                        if shared_name_value.s:
                            node.attr[shared_name_attr].s = b""

        test_util.assert_meta_graph_protos_equal(self, expected, result)
示例#24
0
    def _test_import(include_collection_keys, omit_collection_keys):
      """Checks that only the included collections survive the import."""
      assert set(include_collection_keys).isdisjoint(omit_collection_keys)
      newgraph = ops.Graph()
      import_scope = "some_scope_name"

      def _restore_collections_predicate(collection_key):
        # Restore iff the key was requested and not explicitly excluded.
        included = collection_key in include_collection_keys
        omitted = collection_key in omit_collection_keys
        return included and not omitted

      meta_graph.import_scoped_meta_graph(
          meta_graph_filename,
          graph=newgraph,
          import_scope=import_scope,
          restore_collections_predicate=_restore_collections_predicate)

      def _restored(keys):
        # Collection contents for each key, as restored under import_scope.
        return [newgraph.get_collection(name=k, scope=import_scope)
                for k in keys]

      # Included collections must all be non-empty; omitted ones all empty.
      self.assertTrue(all(_restored(include_collection_keys)))
      self.assertFalse(any(_restored(omit_collection_keys)))
示例#25
0
  def testClearDevices(self):
    """Device strings can be stripped either at export or at import time."""
    graph1 = ops.Graph()
    with graph1.as_default():
      with ops.device("/device:CPU:0"):
        a = variables.Variable(
            constant_op.constant(
                1.0, shape=[2, 2]), name="a")
      with ops.device("/job:ps/replica:0/task:0/gpu:0"):
        b = variables.Variable(
            constant_op.constant(
                2.0, shape=[2, 2]), name="b")
      with ops.device("/job:localhost/replica:0/task:0/cpu:0"):
        math_ops.matmul(a, b, name="matmul")

    # The source graph keeps its (canonicalized) device assignments.
    self.assertEqual("/device:CPU:0", str(graph1.as_graph_element("a").device))
    self.assertEqual("/job:ps/replica:0/task:0/device:GPU:0",
                     str(graph1.as_graph_element("b").device))
    self.assertEqual("/job:localhost/replica:0/task:0/device:CPU:0",
                     str(graph1.as_graph_element("matmul").device))

    def _assert_devices_cleared(graph):
      # All three ops must end up with empty device strings.
      for name in ("a", "b", "matmul"):
        self.assertEqual("", str(graph.as_graph_element(name).device))

    # Case 1: devices cleared on export (from a Graph).
    meta, _ = meta_graph.export_scoped_meta_graph(
        graph=graph1, clear_devices=True)
    graph2 = ops.Graph()
    with graph2.as_default():
      meta_graph.import_scoped_meta_graph(meta, clear_devices=False)
    _assert_devices_cleared(graph2)

    # Case 2: devices cleared on export when passing in a GraphDef.
    meta, _ = meta_graph.export_scoped_meta_graph(
        graph_def=graph1.as_graph_def(), clear_devices=True)
    graph2 = ops.Graph()
    with graph2.as_default():
      meta_graph.import_scoped_meta_graph(meta, clear_devices=False)
    _assert_devices_cleared(graph2)

    # Case 3: devices cleared on import instead of export.
    meta, _ = meta_graph.export_scoped_meta_graph(
        graph=graph1, clear_devices=False)
    graph2 = ops.Graph()
    with graph2.as_default():
      meta_graph.import_scoped_meta_graph(meta, clear_devices=True)
    _assert_devices_cleared(graph2)
示例#26
0
  def doTestExportNestedNames(self, use_resource=False):
    """Exports scope "hidden1/hidden2" and re-imports it under a new prefix.

    Args:
      use_resource: If True, "biases" is created as a ResourceVariable (the
        two branches also create the variables in opposite order).
    """
    graph1 = ops.Graph()
    with graph1.as_default():
      with ops.name_scope("hidden1/hidden2/hidden3"):
        images = constant_op.constant(
            1.0, dtypes.float32, shape=[3, 2], name="images")
        if use_resource:
          weights1 = variables.Variable(
              [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], name="weights")
          biases1 = resource_variable_ops.ResourceVariable(
              [0.1] * 3, name="biases")
        else:
          biases1 = variables.Variable([0.1] * 3, name="biases")
          weights1 = variables.Variable(
              [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], name="weights")
        nn_ops.relu(math_ops.matmul(images, weights1) + biases1, name="relu")

    # Exporting with export_scope="hidden1/hidden2" strips that prefix, so
    # var_list keys and exported node names start at "hidden3".
    orig_meta_graph, var_list = meta_graph.export_scoped_meta_graph(
        export_scope="hidden1/hidden2", graph=graph1)
    var_names = [v.name for _, v in var_list.items()]
    self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                     sorted(var_list.keys()))
    self.assertEqual([
        "hidden1/hidden2/hidden3/biases:0", "hidden1/hidden2/hidden3/weights:0"
    ], sorted(var_names))
    for node in orig_meta_graph.graph_def.node:
      self.assertTrue(node.name.startswith("hidden3"))

    # Importing under "new_hidden1/new_hidden2" re-prefixes the names.
    graph2 = ops.Graph()
    new_var_list = meta_graph.import_scoped_meta_graph(
        orig_meta_graph, import_scope="new_hidden1/new_hidden2", graph=graph2)
    self.assertEqual(["hidden3/biases:0", "hidden3/weights:0"],
                     sorted(new_var_list.keys()))
    new_var_names = [v.name for _, v in new_var_list.items()]
    self.assertEqual([
        "new_hidden1/new_hidden2/hidden3/biases:0",
        "new_hidden1/new_hidden2/hidden3/weights:0"
    ], sorted(new_var_names))

    # NOTE(review): nodes/expected are built but never compared below — the
    # colocation-attribute check that should consume them appears to have
    # been truncated from this copy of the test.
    nodes = [
        "new_hidden1/new_hidden2/hidden3/biases/Assign",
        "new_hidden1/new_hidden2/hidden3/weights/Assign"
    ]
    expected = [
        b"loc:@new_hidden1/new_hidden2/hidden3/biases",
        b"loc:@new_hidden1/new_hidden2/hidden3/weights"
    ]
示例#27
0
        def __build_pipeline(model_name_scope=None,
                             pipeline_params=None,
                             raw_imgs_placeholder=None):
            """Stitches an image-resize pipeline and a restored CNN together.

            Restores a trained CNN from
            `pipeline_params["checkpoint_file_path"]`, builds a second graph
            that resizes raw images to the CNN's input size, then imports
            both into the current default graph so that
            `raw_imgs_placeholder -> resize -> CNN -> logits`.

            Args:
              model_name_scope: Prefix used for the import scopes of both
                sub-graphs.
              pipeline_params: Dict with keys "checkpoint_file_path" (meta
                graph file) and "checkpoint_path" (checkpoint directory).
              raw_imgs_placeholder: Placeholder tensor for raw input images.

            Returns:
              The logits tensor of the imported CNN in the default graph.
            """
            # Graph 1: the trained CNN restored from its checkpoint.
            graph_model = tf.Graph()
            with graph_model.as_default():
                ##
                with tf.Session(graph=graph_model) as sess:
                    saver = tf.train.import_meta_graph(
                        pipeline_params["checkpoint_file_path"],
                        clear_devices=True,
                        import_scope='CNN_model')
                    saver.restore(
                        sess,
                        tf.train.latest_checkpoint(
                            pipeline_params["checkpoint_path"]))

                    # vars = [v.name.split(":")[0] for v in tf.trainable_variables()]
                    #
                    # tf.graph_util.convert_variables_to_constants(
                    #     sess,
                    #     tf.get_default_graph().as_graph_def(),
                    #     vars)

                X_image_tf = graph_model.get_tensor_by_name(
                    "CNN_model/X_image_tf:0")
                logits_tf = graph_model.get_tensor_by_name(
                    "CNN_model/logits_tf:0")
                # logits_tf_sg = tf.stop_gradient(logits_tf)
                ##

            # Graph 2: resizes raw images to the CNN's expected input size,
            # which is read from the imported graph's X_image_tf shape.
            graph_pipeline = tf.Graph()
            with graph_pipeline.as_default():
                ##
                X_raw = tf.placeholder(tf.float32,
                                       shape=[None, None, None, None],
                                       name="X_raw")
                meta_graph.import_scoped_meta_graph(
                    pipeline_params["checkpoint_file_path"],
                    clear_devices=True,
                    import_scope='img_size_info')

                X_image_tf = graph_pipeline.get_tensor_by_name(
                    "img_size_info/X_image_tf:0")

                resized_imgs = tf.identity(tf.image.resize_images(
                    X_raw, (X_image_tf.get_shape().as_list()[1],
                            X_image_tf.get_shape().as_list()[2])),
                                           name='resized_imgs')
                ##

            # Import both graphs into the default graph and chain them.
            graph = tf.get_default_graph()

            raw_imgs = raw_imgs_placeholder

            # Pipeline first: feed the raw images into X_raw.
            meta_graph_1 = tf.train.export_meta_graph(graph=graph_pipeline)
            meta_graph.import_scoped_meta_graph(meta_graph_1,
                                                input_map={"X_raw": raw_imgs},
                                                import_scope=model_name_scope +
                                                '_img_pipeline')

            out_1 = graph.get_tensor_by_name(model_name_scope +
                                             '_img_pipeline' +
                                             '/resized_imgs:0')

            # Then the CNN: feed the resized images into its input tensor.
            meta_graph_2 = tf.train.export_meta_graph(graph=graph_model)
            meta_graph.import_scoped_meta_graph(
                meta_graph_2,
                input_map={"CNN_model/X_image_tf": out_1},
                import_scope=model_name_scope + '_CNN')

            out_2 = graph.get_tensor_by_name(model_name_scope + '_CNN' +
                                             '/CNN_model/logits_tf:0')

            return out_2
示例#28
0
  def _testScopedImport(self, test_dir, exported_filename,
                        new_exported_filename, ckpt_filename):
    """Imports an exported "hidden1" scope and extends it with new layers.

    Verifies that importing with unbound inputs fails until the unbound
    image input is mapped, builds hidden2/softmax_linear on top of the
    imported subgraph, and re-exports the "new_hidden1" scope.

    Args:
      test_dir: Directory containing the exported meta graph file.
      exported_filename: Filename of the meta graph to import.
      new_exported_filename: Filename to export the re-scoped graph to.
      ckpt_filename: Checkpoint filename.
        NOTE(review): ckpt_filename is unused in this method — confirm it is
        needed by callers.

    Returns:
      The newly exported MetaGraphDef for scope "new_hidden1".
    """
    graph = tf.Graph()
    # Create all the missing inputs.
    with graph.as_default():
      new_image = tf.constant(1.2, tf.float32, shape=[100, 28],
                              name="images")

    # Importing without mapping the unbound "images" input must fail.
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          os.path.join(test_dir, exported_filename), graph=graph,
          import_scope="new_hidden1")

    # Mapping the wrong input name must fail the same way.
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          os.path.join(test_dir, exported_filename), graph=graph,
          input_map={"image:0": new_image},
          import_scope="new_hidden1")

    # Mapping the "$unbound_inputs_images" placeholder makes the import work.
    var_list = meta_graph.import_scoped_meta_graph(
        os.path.join(test_dir, exported_filename), graph=graph,
        input_map={"$unbound_inputs_images": new_image},
        import_scope="new_hidden1")
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))
    new_var_names = [v.name for _, v in var_list.items()]
    self.assertEqual(["new_hidden1/biases:0", "new_hidden1/weights:0"],
                     sorted(new_var_names))
    hidden1 = graph.as_graph_element("new_hidden1/Relu:0")

    with graph.as_default():
      # Hidden 2
      with tf.name_scope("hidden2"):
        weights = tf.Variable(
            tf.truncated_normal([128, 32],
                                stddev=1.0 / math.sqrt(float(128))),
            name="weights")
        # The use of control_flow_ops.while_loop here is purely for adding test
        # coverage the save and restore of control flow context (which doesn't
        # make any sense here from a machine learning perspective).  The typical
        # biases is a simple Variable without the conditions.
        def loop_cond(it, _):
          return it < 2
        def loop_body(it, biases):
          biases += tf.constant(0.1, shape=[32])
          return it + 1, biases
        _, biases = control_flow_ops.while_loop(
            loop_cond, loop_body,
            [tf.constant(0), tf.Variable(tf.zeros([32]))])
        hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
      # Linear
      with tf.name_scope("softmax_linear"):
        weights = tf.Variable(
            tf.truncated_normal([32, 10],
                                stddev=1.0 / math.sqrt(float(32))),
            name="weights")
        biases = tf.Variable(tf.zeros([10]), name="biases")
        logits = tf.matmul(hidden2, weights) + biases
        tf.add_to_collection("logits", logits)

      # Re-export only the imported-and-renamed "new_hidden1" scope.
      new_meta_graph, var_list = meta_graph.export_scoped_meta_graph(
          filename=os.path.join(test_dir, new_exported_filename),
          graph=graph, export_scope="new_hidden1")
      self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))

    return new_meta_graph
示例#29
0
def model_fn(features, labels, mode, params):
    """Estimator model_fn built around a restored ensemble meta graph.

    Restores a pre-trained ensemble graph from
    `params["ensemble_architecture_path"]`, imports it into the default graph
    with `features['img']` mapped onto its "raw_imgs" input, and builds
    prediction/eval/train heads on its "logits_tf" output.

    Args:
      features: Dict of input tensors; must contain key 'img'.
      labels: One-hot label tensor (reduced with argmax for metrics).
      mode: A tf.estimator.ModeKeys value.
      params: Dict with keys "ensemble_architecture_path", "category_map",
        "n_output", "retrain_primary_models", "learning_rate".

    Returns:
      A tf.estimator.EstimatorSpec for the given mode.
    """
    # Restore the ensemble into its own graph so its variables get their
    # checkpointed values before being re-imported below.
    graph_ensemble = tf.Graph()
    with tf.Session(graph=graph_ensemble) as sess:
        meta_graph_path = tf.gfile.Glob(
            os.path.join(params["ensemble_architecture_path"], '*.meta'))[0]
        loader = tf.train.import_meta_graph(meta_graph_path,
                                            clear_devices=True)
        loader.restore(
            sess,
            tf.train.latest_checkpoint(params["ensemble_architecture_path"]))

    graph = tf.get_default_graph()

    # Splice the ensemble into the Estimator's graph, feeding the input images.
    meta_graph_1 = tf.train.export_meta_graph(graph=graph_ensemble)
    meta_graph.import_scoped_meta_graph(
        meta_graph_1,
        input_map={"raw_imgs": features['img']},
        import_scope='main_graph')

    logits = graph.get_tensor_by_name('main_graph/logits_tf:0')

    predicted_classes = tf.argmax(logits, 1)

    category_map = tf.convert_to_tensor(params["category_map"])

    ##
    # NOTE(review): gather_nd with a rank-1 indices tensor treats it as a
    # single index vector rather than a batch of scalar indices — verify this
    # is intended (tf.gather may be what's wanted here).
    class_label = tf.gather_nd(category_map, predicted_classes)
    class_label = tf.convert_to_tensor([class_label], dtype=tf.string)

    ##
    if mode == tf.estimator.ModeKeys.PREDICT:
        predictions = {
            'class_ids': predicted_classes[:, tf.newaxis],
            'probabilities': tf.nn.softmax(logits),
            'logits': logits,
            'class_label': class_label[:, tf.newaxis],
            # 'category_map': tf.convert_to_tensor([str(params["category_map"])])[:, tf.newaxis],
        }
        return tf.estimator.EstimatorSpec(mode, predictions=predictions)

    cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
        labels=labels, logits=logits, name='cross_entropy')
    loss = tf.reduce_mean(cross_entropy, name='cost_fc')

    accuracy = tf.metrics.accuracy(labels=tf.argmax(labels, 1),
                                   predictions=predicted_classes,
                                   name='acc_op')
    """confusion matrix"""
    batch_confusion = tf.confusion_matrix(tf.argmax(labels, 1),
                                          predicted_classes,
                                          num_classes=params['n_output'],
                                          name='batch_confusion')

    # Render the confusion matrix to a PNG via a py_func for TensorBoard.
    plot_buf = tf.py_func(gen_plot, [batch_confusion, category_map], tf.string)

    # Convert PNG buffer to TF image
    cm_image = tf.image.decode_png(plot_buf, channels=4)

    # Add the batch dimension
    cm_image = tf.expand_dims(cm_image, 0)
    """"""

    tf.summary.scalar('accuracy', accuracy[1])
    tf.summary.image('confusion_matrix', cm_image)

    metrics = {'accuracy': accuracy}

    if mode == tf.estimator.ModeKeys.EVAL:
        return tf.estimator.EstimatorSpec(mode,
                                          loss=loss,
                                          eval_metric_ops=metrics)

    assert mode == tf.estimator.ModeKeys.TRAIN

    # Optionally freeze the restored primary models, training only the
    # ensemble-specific variables.
    if params['retrain_primary_models'] != True:
        trainable_variables = [
            v for v in tf.trainable_variables() if 'ensemble' in v.name
        ]
    else:
        trainable_variables = [v for v in tf.trainable_variables()]

    optimizer = tf.train.AdamOptimizer(learning_rate=params['learning_rate'],
                                       name='adam_fc')
    train_op = optimizer.minimize(
        loss,
        var_list=trainable_variables,
        global_step=tf.train.get_or_create_global_step())
    return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
示例#30
0
  def _testScopedImport(self, test_dir, exported_filenames):
    """Imports three exported scopes, chains them, and re-exports them.

    Imports "hidden1", "hidden2" and "softmax_linear" (each from its own
    exported file) under new scope names, wiring each layer's output into
    the next via input_map, then exports the three new scopes again.

    Args:
      test_dir: Directory containing the exported meta graph files.
      exported_filenames: Sequence of three filenames, one per layer scope.

    Returns:
      List of three newly exported MetaGraphDefs.
    """
    graph = ops.Graph()
    # Create all the missing inputs.
    with graph.as_default():
      new_image = constant_op.constant(
          1.2, dtypes.float32, shape=[100, 28], name="images")

    # Importing without mapping the unbound image input must fail...
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          os.path.join(test_dir, exported_filenames[0]),
          graph=graph,
          import_scope="new_hidden1")

    # ...and so must mapping the wrong input name.
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          os.path.join(test_dir, exported_filenames[0]),
          graph=graph,
          input_map={"image:0": new_image},
          import_scope="new_hidden1")

    # Verifies we can import the original "hidden1" into "new_hidden1".
    var_list = meta_graph.import_scoped_meta_graph(
        os.path.join(test_dir, exported_filenames[0]),
        graph=graph,
        input_map={"$unbound_inputs_images": new_image},
        import_scope="new_hidden1")

    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))
    new_var_names = [v.name for _, v in var_list.items()]
    self.assertEqual(["new_hidden1/biases:0", "new_hidden1/weights:0"],
                     sorted(new_var_names))

    # Verifies we can import the original "hidden2" into "new_hidden2".
    # The identity op recreates the tensor name the unbound input expects.
    hidden1 = array_ops.identity(
        graph.as_graph_element("new_hidden1/Relu:0"), name="hidden1/Relu")
    var_list = meta_graph.import_scoped_meta_graph(
        os.path.join(test_dir, exported_filenames[1]),
        graph=graph,
        input_map={"$unbound_inputs_hidden1/Relu": hidden1},
        import_scope="new_hidden2",
        unbound_inputs_col_name=None)

    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))
    new_var_names = [v.name for _, v in var_list.items()]
    self.assertEqual(["new_hidden2/biases:0", "new_hidden2/weights:0"],
                     sorted(new_var_names))

    # Verifies we can import the original "softmax_linear" into
    # "new_softmax_linear".
    hidden2 = array_ops.identity(
        graph.as_graph_element("new_hidden2/Relu:0"), name="hidden2/Relu")
    var_list = meta_graph.import_scoped_meta_graph(
        os.path.join(test_dir, exported_filenames[2]),
        graph=graph,
        input_map={"$unbound_inputs_hidden2/Relu": hidden2},
        import_scope="new_softmax_linear",
        unbound_inputs_col_name=None)
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))
    new_var_names = [v.name for _, v in var_list.items()]
    self.assertEqual(
        ["new_softmax_linear/biases:0", "new_softmax_linear/weights:0"],
        sorted(new_var_names))

    # Exports the scoped meta graphs again.
    new_meta_graph1, var_list = meta_graph.export_scoped_meta_graph(
        graph=graph, export_scope="new_hidden1")
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))

    new_meta_graph2, var_list = meta_graph.export_scoped_meta_graph(
        graph=graph, export_scope="new_hidden2", unbound_inputs_col_name=None)
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))

    new_meta_graph3, var_list = meta_graph.export_scoped_meta_graph(
        graph=graph,
        export_scope="new_softmax_linear",
        unbound_inputs_col_name=None)
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))

    return [new_meta_graph1, new_meta_graph2, new_meta_graph3]
示例#31
0
  def _testScopedImport(self, test_dir, exported_filename,
                        new_exported_filename, ckpt_filename):
    """Imports an exported "hidden1" scope and extends it with new layers.

    Verifies the unbound-input failure modes, imports the subgraph under
    "new_hidden1" with its image input remapped, stacks hidden2 and
    softmax_linear on top, and re-exports the "new_hidden1" scope.

    Args:
      test_dir: Directory containing the exported meta graph file.
      exported_filename: Filename of the meta graph to import.
      new_exported_filename: Filename to export the re-scoped graph to.
      ckpt_filename: Checkpoint filename.
        NOTE(review): ckpt_filename is unused in this method — confirm it is
        needed by callers.

    Returns:
      The newly exported MetaGraphDef for scope "new_hidden1".
    """
    graph = tf.Graph()
    # Create all the missing inputs.
    with graph.as_default():
      new_image = tf.constant(1.2, tf.float32, shape=[100, 28],
                              name="images")

    # Importing without mapping the unbound "images" input must fail.
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          os.path.join(test_dir, exported_filename), graph=graph,
          import_scope="new_hidden1")

    # Mapping the wrong input name must fail the same way.
    with self.assertRaisesRegexp(ValueError, "Graph contains unbound inputs"):
      meta_graph.import_scoped_meta_graph(
          os.path.join(test_dir, exported_filename), graph=graph,
          input_map={"image:0": new_image},
          import_scope="new_hidden1")

    # Mapping the "$unbound_inputs_images" placeholder makes the import work.
    var_list = meta_graph.import_scoped_meta_graph(
        os.path.join(test_dir, exported_filename), graph=graph,
        input_map={"$unbound_inputs_images": new_image},
        import_scope="new_hidden1")
    self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))
    new_var_names = [v.name for _, v in var_list.items()]
    self.assertEqual(["new_hidden1/biases:0", "new_hidden1/weights:0"],
                     sorted(new_var_names))
    hidden1 = graph.as_graph_element("new_hidden1/Relu:0")

    with graph.as_default():
      # Hidden 2
      with tf.name_scope("hidden2"):
        weights = tf.Variable(
            tf.truncated_normal([128, 32],
                                stddev=1.0 / math.sqrt(float(128))),
            name="weights")
        # The use of control_flow_ops.while_loop here is purely for adding test
        # coverage the save and restore of control flow context (which doesn't
        # make any sense here from a machine learning perspective).  The typical
        # biases is a simple Variable without the conditions.
        def loop_cond(it, _):
          return it < 2
        def loop_body(it, biases):
          biases += tf.constant(0.1, shape=[32])
          return it + 1, biases
        _, biases = control_flow_ops.while_loop(
            loop_cond, loop_body,
            [tf.constant(0), tf.Variable(tf.zeros([32]))])
        hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
      # Linear
      with tf.name_scope("softmax_linear"):
        weights = tf.Variable(
            tf.truncated_normal([32, 10],
                                stddev=1.0 / math.sqrt(float(32))),
            name="weights")
        biases = tf.Variable(tf.zeros([10]), name="biases")
        logits = tf.matmul(hidden2, weights) + biases
        tf.add_to_collection("logits", logits)

      # Re-export only the imported-and-renamed "new_hidden1" scope.
      new_meta_graph, var_list = meta_graph.export_scoped_meta_graph(
          filename=os.path.join(test_dir, new_exported_filename),
          graph=graph, export_scope="new_hidden1")
      self.assertEqual(["biases:0", "weights:0"], sorted(var_list.keys()))

    return new_meta_graph
示例#32
0
    v1 = tf.Variable(1, name='v1')
    output1 = tf.identity(inputs1, name='output1')
    print("G1:", tf.global_variables())

# Build a second standalone graph with its own placeholder and variable.
with tf.Graph().as_default() as graph2:
    inputs2 = tf.placeholder(tf.float32, (None, 2), name='input2')
    v2 = tf.Variable(1, name='v2')
    output2 = tf.identity(inputs2, name='output2')
    print("G2:", tf.global_variables())

# Everything below runs in the implicit default graph, into which graph1 and
# graph2 are imported and chained: x -> graph1 -> graph2.
graph = tf.get_default_graph()
x = tf.placeholder(tf.float32, (None, 2), name='input')

# Import graph1, remapping its 'input1' placeholder onto x.
meta_graph1 = tf.train.export_meta_graph(graph=graph1)
meta_graph.import_scoped_meta_graph(meta_graph1,
                                    input_map={'input1': x},
                                    import_scope='graph1')
out1 = graph.get_tensor_by_name('graph1/output1:0')

# Import graph2, feeding graph1's output into its 'input2' placeholder.
meta_graph2 = tf.train.export_meta_graph(graph=graph2)
meta_graph.import_scoped_meta_graph(meta_graph2,
                                    input_map={'input2': out1},
                                    import_scope='graph2')
out2 = graph.get_tensor_by_name('graph2/output2:0')

# Both imported graphs' variables now live in the merged default graph.
print(tf.global_variables())

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())

    # Write the merged graph for inspection in TensorBoard.
    tf.summary.FileWriter("tensorboard", sess.graph)
示例#33
0
        def build_finalGraph_and_return_final_tensors(
                cls,
                hidden_units=None,
                n_output=None,
                primary_models_directory=None,
                images_shape=None):
            """Builds the ensemble head and splices it onto the primary models.

            Combines the primary models' feature outputs (via
            `cls._combine_all_channel`), builds a fully-connected head in a
            separate graph, and imports that head into the default graph with
            the concatenated features as its input.

            Args:
              hidden_units: Sequence of hidden-layer sizes for the FC head.
              n_output: Number of output classes.
              primary_models_directory: Directory of the primary models.
              images_shape: Shape of the raw input images.

            Returns:
              Tuple (raw_imgs placeholder, logits tensor of the FC head).
            """
            def _new_weights(shape):
                # Truncated-normal weight variable under a shared name scope.
                with tf.name_scope('weights_ensemble'):
                    weights = tf.Variable(
                        tf.truncated_normal(shape, stddev=0.05))
                return weights

            def _new_biases(length):
                # Constant-initialized bias variable under a shared name scope.
                with tf.name_scope('biases_ensemble'):
                    biases = tf.Variable(tf.constant(0.05, shape=[length]))
                return biases

            def new_fc_layer(inp,
                             num_inputs,
                             num_outputs,
                             use_relu=True,
                             use_drop_out=True,
                             name_scope=''):
                # One fully-connected layer: matmul + bias, optional dropout
                # and ReLU.
                with tf.name_scope(name_scope):
                    weights = _new_weights(shape=[num_inputs, num_outputs])
                    biases = _new_biases(length=num_outputs)

                    layer = tf.matmul(inp, weights) + biases

                    if use_drop_out:
                        layer = tf.layers.dropout(layer,
                                                  rate=0.05,
                                                  training=use_drop_out)

                    if use_relu:
                        layer = tf.nn.relu(layer)

                return layer

            ##
            # Concatenated feature outputs of all primary models.
            raw_imgs, concatenated_features = cls._combine_all_channel(
                models_directory=primary_models_directory,
                images_shape=images_shape)
            ##
            params_fc = {'hidden_units': hidden_units, 'n_output': n_output}
            ##
            # Build the fully-connected head in its own graph first.
            graph_fc = tf.Graph()
            with graph_fc.as_default():
                X_tf = tf.placeholder(
                    tf.float32,
                    shape=[
                        None,
                        concatenated_features.get_shape().as_list()[1]
                    ],
                    name='X_tf')

                ##
                # Stack the hidden layers; the first consumes X_tf directly.
                layer = None
                for n_layer, n_nodes in enumerate(params_fc['hidden_units']):
                    if n_layer == 0:
                        layer = new_fc_layer(
                            X_tf,
                            num_inputs=X_tf.get_shape().as_list()[1],
                            num_outputs=n_nodes,
                            name_scope='layer_' + str(n_layer + 1))
                    else:
                        layer = new_fc_layer(
                            layer,
                            num_inputs=layer.get_shape().as_list()[1],
                            num_outputs=n_nodes,
                            name_scope='layer_' + str(n_layer + 1))

                # Output layer: no ReLU, no dropout.
                logits = new_fc_layer(
                    layer,
                    num_inputs=layer.get_shape().as_list()[1],
                    num_outputs=params_fc['n_output'],
                    use_relu=False,
                    use_drop_out=False,
                    name_scope='output_layer')

                logits_fc = tf.identity(logits, name='logits_tf')

            ##
            # Import the FC head into the default graph, feeding it the
            # concatenated primary-model features.
            graph = tf.get_default_graph()

            meta_graph_3 = tf.train.export_meta_graph(graph=graph_fc)
            meta_graph.import_scoped_meta_graph(
                meta_graph_3,
                input_map={"X_tf": concatenated_features},
                import_scope='')

            logits_fc = graph.get_tensor_by_name('logits_tf:0')
            return raw_imgs, logits_fc
示例#34
0
 def body(i, _):
     """while_loop body: re-import the meta graph, bump the counter."""
     meta_graph.import_scoped_meta_graph(meta_graph_def)
     current_graph = ops.get_default_graph()
     output = current_graph.get_tensor_by_name(output_name)
     return i + 1, output
示例#35
0
    def _testScopedImport(self, test_dir, exported_filenames):
        """Re-imports each exported scoped meta graph under a fresh scope.

        Verifies that importing fails while inputs are unbound, succeeds
        once the proper input_map is supplied, and that the re-imported
        scopes can themselves be exported again.
        """
        graph = ops.Graph()
        # Provide the image input that the exported subgraphs left unbound.
        with graph.as_default():
            new_image = constant_op.constant(1.2,
                                             dtypes.float32,
                                             shape=[100, 28],
                                             name="images")

        def _check_imported_vars(var_list, scope):
            # Imported variable map: original names -> scoped variables.
            self.assertEqual(["biases:0", "weights:0"],
                             sorted(var_list.keys()))
            imported_names = [v.name for _, v in var_list.items()]
            self.assertEqual([scope + "/biases:0", scope + "/weights:0"],
                             sorted(imported_names))

        hidden1_path = os.path.join(test_dir, exported_filenames[0])

        # Importing without feeding the unbound input must be rejected.
        with self.assertRaisesRegexp(ValueError,
                                     "Graph contains unbound inputs"):
            meta_graph.import_scoped_meta_graph(hidden1_path,
                                                graph=graph,
                                                import_scope="new_hidden1")

        # Mapping the wrong tensor name is rejected the same way.
        with self.assertRaisesRegexp(ValueError,
                                     "Graph contains unbound inputs"):
            meta_graph.import_scoped_meta_graph(
                hidden1_path,
                graph=graph,
                input_map={"image:0": new_image},
                import_scope="new_hidden1")

        # The original "hidden1" imports cleanly into "new_hidden1".
        var_list = meta_graph.import_scoped_meta_graph(
            hidden1_path,
            graph=graph,
            input_map={"$unbound_inputs_images": new_image},
            import_scope="new_hidden1")
        _check_imported_vars(var_list, "new_hidden1")

        # Chain "new_hidden2" off the re-imported hidden1 activation.
        hidden1 = array_ops.identity(
            graph.as_graph_element("new_hidden1/Relu:0"), name="hidden1/Relu")
        var_list = meta_graph.import_scoped_meta_graph(
            os.path.join(test_dir, exported_filenames[1]),
            graph=graph,
            input_map={"$unbound_inputs_hidden1/Relu": hidden1},
            import_scope="new_hidden2",
            unbound_inputs_col_name=None)
        _check_imported_vars(var_list, "new_hidden2")

        # And "new_softmax_linear" off the re-imported hidden2 activation.
        hidden2 = array_ops.identity(
            graph.as_graph_element("new_hidden2/Relu:0"), name="hidden2/Relu")
        var_list = meta_graph.import_scoped_meta_graph(
            os.path.join(test_dir, exported_filenames[2]),
            graph=graph,
            input_map={"$unbound_inputs_hidden2/Relu": hidden2},
            import_scope="new_softmax_linear",
            unbound_inputs_col_name=None)
        _check_imported_vars(var_list, "new_softmax_linear")

        # Export the freshly imported scopes again; the last two skip the
        # unbound-inputs collection, matching how they were imported.
        new_meta_graphs = []
        for scope, extra_kwargs in (
                ("new_hidden1", {}),
                ("new_hidden2", {"unbound_inputs_col_name": None}),
                ("new_softmax_linear", {"unbound_inputs_col_name": None})):
            exported, var_list = meta_graph.export_scoped_meta_graph(
                graph=graph, export_scope=scope, **extra_kwargs)
            self.assertEqual(["biases:0", "weights:0"],
                             sorted(var_list.keys()))
            new_meta_graphs.append(exported)

        return new_meta_graphs
示例#36
0
 def body(i, _):
     """Loop body: imports the meta graph again and fetches the output."""
     meta_graph.import_scoped_meta_graph(meta_graph_def)
     return (i + 1,
             ops.get_default_graph().get_tensor_by_name(output_name))