Example #1
    def testBasic(self):
        """Make sure arguments can be passed correctly."""
        a = constant_op.constant(10, name="a")
        b = constant_op.constant(20, name="b")
        c = math_ops.add_n([a, b], name="c")
        d = math_ops.add_n([b, c], name="d")
        train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
        train_op.append(d)
        mg = meta_graph.create_meta_graph_def(graph=ops.get_default_graph())

        report = cost_analyzer.GenerateCostReport(mg)

        # Check the report headers
        self.assertTrue(b"Total time measured in ns (serialized):" in report)
        self.assertTrue(b"Total time measured in ns (actual):" in report)
        self.assertTrue(
            b"Total time analytical in ns (upper bound):" in report)
        self.assertTrue(
            b"Total time analytical in ns (lower bound):" in report)
        self.assertTrue(
            b"Overall efficiency (analytical upper/actual):" in report)
        self.assertTrue(
            b"Overall efficiency (analytical lower/actual):" in report)

        # Also print the report to make it easier to debug
        print("{}".format(report))
Example #2
    def testSmallNetwork(self):
        image = array_ops.placeholder(dtypes.float32, shape=[1, 28, 28, 1])
        label = array_ops.placeholder(dtypes.float32, shape=[1, 10])
        w = variables.Variable(
            random_ops.truncated_normal([5, 5, 1, 32], stddev=0.1))
        b = variables.Variable(random_ops.truncated_normal([32], stddev=0.1))
        conv = nn_ops.conv2d(image, w, strides=[1, 1, 1, 1], padding="SAME")
        h_conv = nn_ops.relu(conv + b)
        h_conv_flat = array_ops.reshape(h_conv, [1, -1])

        w_fc = variables.Variable(
            random_ops.truncated_normal([25088, 10], stddev=0.1))
        b_fc = variables.Variable(random_ops.truncated_normal([10],
                                                              stddev=0.1))
        y_conv = nn_ops.softmax(math_ops.matmul(h_conv_flat, w_fc) + b_fc)

        cross_entropy = math_ops.reduce_mean(-math_ops.reduce_sum(
            label * math_ops.log(y_conv), reduction_indices=[1]))
        _ = adam.AdamOptimizer(1e-4).minimize(cross_entropy)

        mg = meta_graph.create_meta_graph_def(graph=ops.get_default_graph())
        report = cost_analyzer.GenerateCostReport(mg)

        self.assertTrue(b"MatMul" in report)
        self.assertTrue(b"ApplyAdam" in report)
        self.assertTrue(b"Conv2D" in report)
        self.assertTrue(b"Conv2DBackpropInput" in report)
        self.assertTrue(b"Conv2DBackpropFilter" in report)
        self.assertTrue(b"Softmax" in report)

        # Also print the report to make it easier to debug
        print("{}".format(report))
Example #3
def main(_):
    if FLAGS.metagraphdef:
        with gfile.GFile(FLAGS.metagraphdef) as meta_file:
            metagraph = meta_graph_pb2.MetaGraphDef()
            metagraph.ParseFromString(meta_file.read())
    else:
        with gfile.GFile(FLAGS.graphdef) as graph_file:
            graph_def = graph_pb2.GraphDef()
            if FLAGS.graphdef.endswith(".pbtxt"):
                text_format.Merge(graph_file.read(), graph_def)
            else:
                graph_def.ParseFromString(graph_file.read())
            importer.import_graph_def(graph_def, name="")
            graph = ops.get_default_graph()
            fetch = graph.get_operation_by_name(FLAGS.fetch)
            graph.add_to_collection("train_op", fetch)
            metagraph = saver.export_meta_graph(graph_def=graph.as_graph_def(),
                                                graph=graph)

    if FLAGS.rewriter_config is not None:
        rewriter_config = rewriter_config_pb2.RewriterConfig()
        text_format.Merge(FLAGS.rewriter_config, rewriter_config)
        optimized_graph = tf_optimizer.OptimizeGraph(rewriter_config,
                                                     metagraph)
        metagraph.graph_def.CopyFrom(optimized_graph)

    report = cost_analyzer.GenerateCostReport(metagraph, FLAGS.per_node_report)
    print(report)
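
The main() above reads several command-line flags. A plausible wiring for them (flag names are taken from the FLAGS references above; the argparse setup itself is an assumption, not the tool's actual definition) might look like:

import argparse
import sys

from tensorflow.python.platform import app

parser = argparse.ArgumentParser()
parser.add_argument("--metagraphdef", type=str, default=None,
                    help="Path to a serialized MetaGraphDef.")
parser.add_argument("--graphdef", type=str, default=None,
                    help="Path to a GraphDef (.pb or .pbtxt).")
parser.add_argument("--fetch", type=str, default=None,
                    help="Name of the op to add to the train_op collection.")
parser.add_argument("--rewriter_config", type=str, default=None,
                    help="RewriterConfig as a text-format proto.")
parser.add_argument("--per_node_report", action="store_true",
                    help="Include the per-node breakdown in the report.")
FLAGS, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)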
Example #4
    def testSmallNetworkCost(self):
        image = array_ops.placeholder(dtypes.float32, shape=[1, 28, 28, 1])
        label = array_ops.placeholder(dtypes.float32, shape=[1, 10])
        w = variables.Variable(
            random_ops.truncated_normal([5, 5, 1, 32], stddev=0.1))
        b = variables.Variable(random_ops.truncated_normal([32], stddev=0.1))
        conv = nn_ops.conv2d(image, w, strides=[1, 1, 1, 1], padding="SAME")
        h_conv = nn_ops.relu(conv + b)
        h_conv_flat = array_ops.reshape(h_conv, [1, -1])

        w_fc = variables.Variable(
            random_ops.truncated_normal([25088, 10], stddev=0.1))
        b_fc = variables.Variable(random_ops.truncated_normal([10],
                                                              stddev=0.1))
        y_conv = nn_ops.softmax(math_ops.matmul(h_conv_flat, w_fc) + b_fc)

        cross_entropy = math_ops.reduce_mean(
            -math_ops.reduce_sum(label * math_ops.log(y_conv), axis=[1]))
        _ = adam.AdamOptimizer(1e-4).minimize(cross_entropy)

        mg = meta_graph.create_meta_graph_def(graph=ops.get_default_graph())
        report = cost_analyzer.GenerateCostReport(mg)

        # Print the report to make it easier to debug
        print("{}".format(report))

        self.assertTrue(b"MatMul" in report)
        self.assertTrue(b"ApplyAdam" in report)
        self.assertTrue(b"Conv2DBackpropFilter" in report)
        self.assertTrue(b"Softmax" in report)

        # When mkl is enabled, Conv2D and MatMul op followed by
        # 1-dimension Add in this graph will be fused, but not
        # in the mkl disabled case.
        expected_matmul_count = 2
        op_types = [b"MatMul", b"Conv2DBackpropFilter"]

        if not test_util.IsMklEnabled():
            self.assertTrue(b"Conv2D" in report)
            expected_matmul_count = 3
            op_types.append(b"Conv2D")

        for op_type in op_types:
            matcher = re.compile(
                br"\s+" + op_type +
                br",\s*(\d+),\s*(\d+),\s*([\d\.eE+-]+)%,\s*" +
                br"([\d\.eE+-]+)%,\s*(-?\d+),\s*(\d+),", re.MULTILINE)
            m = matcher.search(report)

            op_count = int(m.group(1))
            # upper = int(m.group(5))
            lower = int(m.group(6))
            if op_type == b"MatMul":
                self.assertEqual(expected_matmul_count, op_count)
            else:
                self.assertEqual(1, op_count)
            self.assertTrue(0 <= lower)
Example #5
def main(_):
  with gfile.GFile(FLAGS.input) as input_file:
    metagraph = meta_graph_pb2.MetaGraphDef()
    metagraph.ParseFromString(input_file.read())

  if FLAGS.rewriter_config is not None:
    rewriter_config = rewriter_config_pb2.RewriterConfig()
    text_format.Merge(FLAGS.rewriter_config, rewriter_config)
    optimized_graph = tf_optimizer.OptimizeGraph(rewriter_config, metagraph)
    metagraph.graph_def.CopyFrom(optimized_graph)

  report = cost_analyzer.GenerateCostReport(metagraph, FLAGS.per_node_report)
  print(report)
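
In this variant, --input must point at a serialized MetaGraphDef. One way to produce such a file for experimentation (the file path and toy graph are purely illustrative) is a sketch like:

import tensorflow as tf

with tf.Graph().as_default() as g:
    a = tf.constant(10, name="a")
    b = tf.constant(20, name="b")
    tf.add_to_collection("train_op", tf.add_n([a, b], name="c"))
    meta_graph_def = tf.train.export_meta_graph(graph=g)

with open("/tmp/example.metagraph", "wb") as f:
    f.write(meta_graph_def.SerializeToString())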
Example #6
    def testSmallNetwork(self):
        image = array_ops.placeholder(dtypes.float32, shape=[1, 28, 28, 1])
        label = array_ops.placeholder(dtypes.float32, shape=[1, 10])
        w = variables.Variable(
            random_ops.truncated_normal([5, 5, 1, 32], stddev=0.1))
        b = variables.Variable(random_ops.truncated_normal([32], stddev=0.1))
        conv = nn_ops.conv2d(image, w, strides=[1, 1, 1, 1], padding="SAME")
        h_conv = nn_ops.relu(conv + b)
        h_conv_flat = array_ops.reshape(h_conv, [1, -1])

        w_fc = variables.Variable(
            random_ops.truncated_normal([25088, 10], stddev=0.1))
        b_fc = variables.Variable(random_ops.truncated_normal([10],
                                                              stddev=0.1))
        y_conv = nn_ops.softmax(math_ops.matmul(h_conv_flat, w_fc) + b_fc)

        cross_entropy = math_ops.reduce_mean(-math_ops.reduce_sum(
            label * math_ops.log(y_conv), reduction_indices=[1]))
        _ = adam.AdamOptimizer(1e-4).minimize(cross_entropy)

        mg = meta_graph.create_meta_graph_def(graph=ops.get_default_graph())
        report = cost_analyzer.GenerateCostReport(mg)

        # Print the report to make it easier to debug
        print("{}".format(report))

        self.assertTrue(b"MatMul" in report)
        self.assertTrue(b"ApplyAdam" in report)
        self.assertTrue(b"Conv2D" in report)
        self.assertTrue(b"Conv2DBackpropInput" in report)
        self.assertTrue(b"Conv2DBackpropFilter" in report)
        self.assertTrue(b"Softmax" in report)

        for op_type in [
                b"MatMul", b"Conv2D", b"Conv2DBackpropInput",
                b"Conv2DBackpropFilter"
        ]:
            matcher = re.compile(
                br"\s+" + op_type +
                br",\s*(\d+),\s*(\d+),\s*([\d\.eE+-]+)%,\s*" +
                br"([\d\.eE+-]+)%,\s*(-?\d+),\s*(\d+),", re.MULTILINE)
            m = matcher.search(report)

            op_count = int(m.group(1))
            # upper = int(m.group(5))
            lower = int(m.group(6))
            if op_type is b"MatMul":
                self.assertEqual(3, op_count)
            else:
                self.assertEqual(1, op_count)
            self.assertTrue(0 <= lower)
Example #7
def main(_):
    metagraph = get_metagraph()
    rewriter_config = rewriter_config_pb2.RewriterConfig()
    if FLAGS.rewriter_config is not None:
        text_format.Merge(FLAGS.rewriter_config, rewriter_config)
    optimized_graph = tf_optimizer.OptimizeGraph(rewriter_config, metagraph)
    metagraph.graph_def.CopyFrom(optimized_graph)

    report = cost_analyzer.GenerateCostReport(metagraph, FLAGS.per_node_report,
                                              FLAGS.verbose)
    print(report)
    if FLAGS.memory_report:
        report = cost_analyzer.GenerateMemoryReport(metagraph)
        print(report)
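
The get_metagraph() helper is defined elsewhere in the tool and not shown here. A minimal sketch consistent with the loading logic from Example #3 (the .pbtxt branch is omitted for brevity) could be:

def get_metagraph():
    """Load a MetaGraphDef, either directly or by wrapping a plain GraphDef."""
    if FLAGS.metagraphdef:
        with gfile.GFile(FLAGS.metagraphdef) as meta_file:
            metagraph = meta_graph_pb2.MetaGraphDef()
            metagraph.ParseFromString(meta_file.read())
    else:
        with gfile.GFile(FLAGS.graphdef) as graph_file:
            graph_def = graph_pb2.GraphDef()
            graph_def.ParseFromString(graph_file.read())
        importer.import_graph_def(graph_def, name="")
        graph = ops.get_default_graph()
        # Mark the user-requested fetch so Grappler knows what must be kept.
        graph.add_to_collection("train_op",
                                graph.get_operation_by_name(FLAGS.fetch))
        metagraph = saver.export_meta_graph(graph_def=graph.as_graph_def(),
                                            graph=graph)
    return metagraph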
Example #8
  def testVerbose(self):
    """Make sure the full report is generated with verbose=True."""
    a = constant_op.constant(10, name="a")
    b = constant_op.constant(20, name="b")
    c = math_ops.add_n([a, b], name="c")
    d = math_ops.add_n([b, c], name="d")
    train_op = ops.get_collection_ref(ops.GraphKeys.TRAIN_OP)
    train_op.append(d)
    mg = meta_graph.create_meta_graph_def(graph=ops.get_default_graph())

    report = cost_analyzer.GenerateCostReport(
        mg, per_node_report=True, verbose=True)

    # Check the report headers
    self.assertTrue(b"Below is the full per-node report:" in report)

    # Also print the report to make it easier to debug
    print("{}".format(report))
    mnist = input_data.read_data_sets("MNIST_data", one_hot=True)

    x = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNEL],
                       name='x-input')
    y_ = tf.placeholder(tf.float32, shape=[None, OUTPUT_NODE], name='y-input')
    regularizer = tf.contrib.layers.l2_regularizer(REGULARIZATION_RATE)
    y = inference(x, True, regularizer)
    global_step = tf.Variable(0, trainable=False)

    variable_average = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY, global_step)
    variable_average_ops = variable_average.apply(tf.trainable_variables())

    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=y, labels=tf.argmax(y_, 1))
    cross_entropy_mean = tf.reduce_mean(cross_entropy)

    loss = cross_entropy_mean + tf.add_n(tf.get_collection('loss'))

    learning_rate = tf.train.exponential_decay(LEARNING_RATE_BASE, global_step,
                                               mnist.train.num_examples / BATCH_SIZE, LEARNING_RATE_DECAY)
    train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss, global_step=global_step)
    train_op = tf.group(train_step, variable_average_ops)
    saver = tf.train.Saver()

    run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
    run_metadata = tf.RunMetadata()

    mg = meta_graph.create_meta_graph_def(graph=tf.get_default_graph())
    cluster = build_cluster()
    report = cost_analyzer.GenerateCostReport(mg, per_node_report=True, cluster=cluster)
    with open('lenet5_report.json', "w") as f:
        f.write(str(report, encoding="utf-8"))