def _custom_step(features, labels):
    """Run the model on `features` and log the summed logits as a scalar.

    `labels` is accepted for signature compatibility but unused. The summary
    is recorded unconditionally under the module-level `writer`, keyed to the
    optimizer's iteration counter.
    """
    del labels  # unused by this step
    predictions = model(features)
    summed = math_ops.reduce_sum(predictions)
    with summary_ops_v2.record_if(True), writer.as_default():
        scalar_summary_v2.scalar('logits',
                                 summed,
                                 step=model.optimizer.iterations)
    return predictions
Esempio n. 2
0
 def _custom_step(features, labels):
     """Run the model and record the logit sum under the default writer.

     `labels` is ignored; recording is forced on via tf.summary.record_if.
     """
     del labels  # unused here
     outputs = model(features)
     with tf.summary.record_if(True), writer.as_default():
         scalar_summary_v2.scalar(
             "logits", tf.reduce_sum(outputs), step=model.optimizer.iterations
         )
     return outputs
  def setUp(self):
    """Populate a temp logdir with runs "polynomials", "waves", "pictures"."""
    super(MultiplexerDataProviderTest, self).setUp()
    self.logdir = self.get_temp_dir()

    poly_dir = os.path.join(self.logdir, "polynomials")
    with tf.summary.create_file_writer(poly_dir).as_default():
      for n in xrange(10):
        scalar_summary.scalar("square", n ** 2, step=2 * n, description="boxen")
        scalar_summary.scalar("cube", n ** 3, step=3 * n)

    waves_dir = os.path.join(self.logdir, "waves")
    with tf.summary.create_file_writer(waves_dir).as_default():
      for n in xrange(10):
        scalar_summary.scalar("sine", tf.sin(float(n)), step=n)
        scalar_summary.scalar("square", tf.sign(tf.sin(float(n))), step=n)
        # Rank-0 data that belongs to a plugin other than scalars.
        metadata = summary_pb2.SummaryMetadata()
        metadata.plugin_data.plugin_name = "marigraphs"
        tf.summary.write("high_tide", tensor=n, step=n, metadata=metadata)

    pictures_dir = os.path.join(self.logdir, "pictures")
    with tf.summary.create_file_writer(pictures_dir).as_default():
      purple = tf.constant([[[255, 0, 255]]], dtype=tf.uint8)
      for n in xrange(1, 11):
        image_summary.image("purple", [tf.tile(purple, [n, n, 1])], step=n)
Esempio n. 4
0
    def generate_testdata(self, include_text=True, logdir=None):
        """Write text and scalar summaries for runs "fry" and "leela".

        For each run, writes one text summary per entry in `GEMS` (steps
        0, 1, ...), a rank-1 text summary at step 0, and a scalar summary
        at step 0.

        Args:
          include_text: If False, skip all text summaries and emit only
            the scalar summary for each run.
          logdir: Optional directory under which the run subdirectories
            are created; defaults to ``self.logdir``.
        """
        # Honor the `logdir` argument instead of silently ignoring it
        # (previously the parameter was accepted but never used).
        if logdir is None:
            logdir = self.logdir
        run_names = ["fry", "leela"]
        for run_name in run_names:
            subdir = os.path.join(logdir, run_name)
            writer = tf.compat.v2.summary.create_file_writer(subdir)

            with writer.as_default():
                step = 0
                for gem in GEMS:
                    message = run_name + " *loves* " + gem
                    if include_text:
                        text("message", message, step)
                    step += 1

                vector_message = ["one", "two", "three", "four"]
                if include_text:
                    text("vector", vector_message, 0)

                scalar("twelve", tf.constant(12), 0)

            writer.close()
Esempio n. 5
0
    def setUp(self):
        """Seed a temp logdir with scalar, raw-tensor, and histogram data."""
        super(MultiplexerDataProviderTest, self).setUp()
        self.logdir = self.get_temp_dir()

        poly_dir = os.path.join(self.logdir, "polynomials")
        with tf.summary.create_file_writer(poly_dir).as_default():
            for n in xrange(10):
                scalar_summary.scalar(
                    "square", n**2, step=2 * n, description="boxen")
                scalar_summary.scalar("cube", n**3, step=3 * n)

        waves_dir = os.path.join(self.logdir, "waves")
        with tf.summary.create_file_writer(waves_dir).as_default():
            for n in xrange(10):
                scalar_summary.scalar("sine", tf.sin(float(n)), step=n)
                scalar_summary.scalar(
                    "square", tf.sign(tf.sin(float(n))), step=n)
                # Rank-0 data owned by a plugin other than scalars.
                metadata = summary_pb2.SummaryMetadata()
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                metadata.plugin_data.plugin_name = "marigraphs"
                tf.summary.write(
                    "high_tide", tensor=n, step=n, metadata=metadata)
                # Rank-1 data mislabeled with the scalar data class (bad!).
                metadata = summary_pb2.SummaryMetadata()
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                metadata.plugin_data.plugin_name = "greetings"
                tf.summary.write(
                    "bad", tensor=[n, n], step=n, metadata=metadata)

        lebesgue_dir = os.path.join(self.logdir, "lebesgue")
        with tf.summary.create_file_writer(lebesgue_dir).as_default():
            data = [
                ("very smooth", (0.0, 0.25, 0.5, 0.75, 1.0), "uniform"),
                ("very smoothn't", (0.0, 0.01, 0.99, 1.0), "bimodal"),
            ]
            for description, distribution, name in data:
                base = tf.constant([distribution], dtype=tf.float64)
                for n in xrange(1, 11):
                    histogram_summary.histogram(
                        name, base * n, step=n, description=description)
 def call(self, x):
     """Identity layer that logs the sum of `x` via the compat-v2 scalar op."""
     total = math_ops.reduce_sum(x)
     scalar_summary_v2.scalar('custom_scalar_summary_v2', total)
     return x
Esempio n. 7
0
    def setUp(self):
        """Seed runs with scalar, raw, histogram, and image summary data."""
        super(MultiplexerDataProviderTest, self).setUp()
        self.logdir = self.get_temp_dir()
        self.ctx = context.RequestContext()

        poly_dir = os.path.join(self.logdir, "polynomials")
        with tf.summary.create_file_writer(poly_dir).as_default():
            for n in range(10):
                scalar_summary.scalar(
                    "square", n ** 2, step=2 * n, description="boxen")
                scalar_summary.scalar("cube", n ** 3, step=3 * n)

        waves_dir = os.path.join(self.logdir, "waves")
        with tf.summary.create_file_writer(waves_dir).as_default():
            for n in range(10):
                scalar_summary.scalar("sine", tf.sin(float(n)), step=n)
                scalar_summary.scalar(
                    "square", tf.sign(tf.sin(float(n))), step=n)
                # Rank-0 data owned by a plugin other than scalars.
                metadata = summary_pb2.SummaryMetadata()
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                metadata.plugin_data.plugin_name = "marigraphs"
                tf.summary.write(
                    "high_tide", tensor=n, step=n, metadata=metadata)
                # Rank-1 data mislabeled with the scalar data class (bad!).
                metadata = summary_pb2.SummaryMetadata()
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                metadata.plugin_data.plugin_name = "greetings"
                tf.summary.write(
                    "bad", tensor=[n, n], step=n, metadata=metadata)

        lebesgue_dir = os.path.join(self.logdir, "lebesgue")
        with tf.summary.create_file_writer(lebesgue_dir).as_default():
            histograms = [
                ("very smooth", (0.0, 0.25, 0.5, 0.75, 1.0), "uniform"),
                ("very smoothn't", (0.0, 0.01, 0.99, 1.0), "bimodal"),
            ]
            for description, distribution, name in histograms:
                base = tf.constant([distribution], dtype=tf.float64)
                for n in range(1, 11):
                    histogram_summary.histogram(
                        name, base * n, step=n, description=description)

        mondrian_dir = os.path.join(self.logdir, "mondrian")
        with tf.summary.create_file_writer(mondrian_dir).as_default():
            swatches = [
                ("red", (221, 28, 38), "top-right"),
                ("blue", (1, 91, 158), "bottom-left"),
                ("yellow", (239, 220, 111), "bottom-right"),
            ]
            for name, color, description in swatches:
                pixel = tf.constant([[[color]]], dtype=tf.uint8)
                for n in range(1, 11):
                    # Non-monotonic sample counts (1, .., 6, .., 2) exercise
                    # the downstream `max_length` calculation.
                    samples = 6 - abs(6 - n)
                    # A `samples`-image batch of `n`-by-`n` images.
                    batch = tf.tile(pixel, [samples, n, n, 1])
                    image_summary.image(
                        name,
                        batch,
                        step=n,
                        description=description,
                        max_outputs=99,
                    )
Esempio n. 8
0
 def call(self, x):
     """Pass `x` through unchanged while logging its sum as a v2 scalar."""
     summed = tf.reduce_sum(x)
     scalar_summary_v2.scalar("custom_scalar_summary_v2", summed)
     return x
Esempio n. 9
0
 def computation(x):
     """Add 1 to `x`; if the result is below 5, log it and double it; add 1.

     The scalar summary is emitted only on the `< 5` branch, at step 0.
     """
     y = x + 1.0
     if y < 5:
         scalar_summary_v2.scalar("x", y, step=0)
         y = y * 2.0
     return y + 1.0
Esempio n. 10
0
 def host_computation(x):
     """Log `x` as a step-0 scalar summary and return it doubled."""
     scalar_summary_v2.scalar("x", x, step=0)
     doubled = x * 2.0
     return doubled