Example no. 1
0
  def setUp(self):
    """Populate a fresh temp logdir with scalar and image summary data.

    Three runs are written under `self.logdir`: "polynomials" (two scalar
    series), "waves" (scalars plus a raw summary owned by another plugin),
    and "pictures" (image summaries of growing size).
    """
    super(MultiplexerDataProviderTest, self).setUp()
    self.logdir = self.get_temp_dir()

    # "polynomials" run: two scalar series with distinct step spacings.
    logdir = os.path.join(self.logdir, "polynomials")
    with tf.summary.create_file_writer(logdir).as_default():
      # `range` instead of six's `xrange`: matches the Python 3 idiom used
      # by the sibling setUp in this file.
      for i in range(10):
        scalar_summary.scalar("square", i ** 2, step=2 * i, description="boxen")
        scalar_summary.scalar("cube", i ** 3, step=3 * i)

    # "waves" run: scalar series plus a rank-0 record for another plugin.
    logdir = os.path.join(self.logdir, "waves")
    with tf.summary.create_file_writer(logdir).as_default():
      for i in range(10):
        scalar_summary.scalar("sine", tf.sin(float(i)), step=i)
        scalar_summary.scalar("square", tf.sign(tf.sin(float(i))), step=i)
        # Summary with rank-0 data but not owned by the scalars plugin.
        metadata = summary_pb2.SummaryMetadata()
        metadata.plugin_data.plugin_name = "marigraphs"
        tf.summary.write("high_tide", tensor=i, step=i, metadata=metadata)

    # "pictures" run: solid-purple images of size i-by-i, steps 1..10.
    logdir = os.path.join(self.logdir, "pictures")
    with tf.summary.create_file_writer(logdir).as_default():
      purple = tf.constant([[[255, 0, 255]]], dtype=tf.uint8)
      for i in range(1, 11):
        image_summary.image("purple", [tf.tile(purple, [i, i, 1])], step=i)
Example no. 2
0
                def f():
                    """Sample random tokens in a while-loop and log them as an image."""

                    def keep_going(idx, acc):
                        del acc  # the loop predicate only inspects the counter
                        return math_ops.less(idx, max_length)

                    def append_token(idx, acc):
                        sampled = random_ops.random_uniform([bsz])
                        return (idx + 1, acc.write(idx, sampled))

                    accumulator = tensor_array_ops.TensorArray(
                        dtype=dtypes.float32,
                        size=max_length,
                        dynamic_size=False,
                        clear_after_read=False,
                        element_shape=(bsz, ),
                        name="tokens_accumulator",
                    )

                    counter = constant_op.constant(0)
                    counter, accumulator = control_flow_ops.while_loop(
                        keep_going, append_token, [counter, accumulator])

                    # Transpose (max_length, bsz) -> (bsz, max_length), then
                    # reshape into [bsz, 32, 32, 1] (assumes max_length fills
                    # a 32x32 grid — TODO confirm) and tile to 3 channels.
                    flat = array_ops.transpose(accumulator.stack(),
                                               [1, 0])
                    image = array_ops.tile(
                        array_ops.reshape(flat, [bsz, 32, 32, 1]),
                        [1, 1, 1, 3])
                    image_summary_v2.image(
                        "image_sample", image,
                        constant_op.constant(5, dtype=dtypes.int64))
    def call(self, x):
        """Identity layer that also records `x` via the compat-v2 image summary."""
        image_summary_v2.image('custom_image_summary_v2', x)
        return x
Example no. 4
0
    def setUp(self):
        """Populate `self.logdir` with scalar, histogram, and image summaries.

        Four runs are written: "polynomials" (scalars), "waves" (scalars
        plus raw `tf.summary.write` records, one deliberately malformed),
        "lebesgue" (histograms), and "mondrian" (image summaries with a
        varying number of samples per step).
        """
        super(MultiplexerDataProviderTest, self).setUp()
        self.logdir = self.get_temp_dir()
        self.ctx = context.RequestContext()

        with tf.summary.create_file_writer(
            os.path.join(self.logdir, "polynomials")
        ).as_default():
            for step in range(10):
                scalar_summary.scalar(
                    "square", step ** 2, step=2 * step, description="boxen"
                )
                scalar_summary.scalar("cube", step ** 3, step=3 * step)

        with tf.summary.create_file_writer(
            os.path.join(self.logdir, "waves")
        ).as_default():
            for step in range(10):
                scalar_summary.scalar("sine", tf.sin(float(step)), step=step)
                scalar_summary.scalar(
                    "square", tf.sign(tf.sin(float(step))), step=step
                )
                # Rank-0 tensor declared as scalar data class, but owned by
                # a plugin other than the scalars plugin.
                metadata = summary_pb2.SummaryMetadata()
                metadata.plugin_data.plugin_name = "marigraphs"
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                tf.summary.write(
                    "high_tide", tensor=step, step=step, metadata=metadata
                )
                # Rank-1 tensor wrongly declared as scalar data class (bad!).
                metadata = summary_pb2.SummaryMetadata()
                metadata.plugin_data.plugin_name = "greetings"
                metadata.data_class = summary_pb2.DATA_CLASS_SCALAR
                tf.summary.write(
                    "bad", tensor=[step, step], step=step, metadata=metadata
                )

        with tf.summary.create_file_writer(
            os.path.join(self.logdir, "lebesgue")
        ).as_default():
            histogram_specs = [
                ("very smooth", (0.0, 0.25, 0.5, 0.75, 1.0), "uniform"),
                ("very smoothn't", (0.0, 0.01, 0.99, 1.0), "bimodal"),
            ]
            for (description, distribution, name) in histogram_specs:
                base = tf.constant([distribution], dtype=tf.float64)
                for step in range(1, 11):
                    histogram_summary.histogram(
                        name, base * step, step=step, description=description
                    )

        with tf.summary.create_file_writer(
            os.path.join(self.logdir, "mondrian")
        ).as_default():
            image_specs = [
                ("red", (221, 28, 38), "top-right"),
                ("blue", (1, 91, 158), "bottom-left"),
                ("yellow", (239, 220, 111), "bottom-right"),
            ]
            for (name, color, description) in image_specs:
                pixel = tf.constant([[[color]]], dtype=tf.uint8)
                for step in range(1, 11):
                    # Non-monotonic sample counts (1, .., 6, .., 2) exercise
                    # the `max_length` calculation downstream.
                    samples = 6 - abs(6 - step)
                    # A `samples`-sample summary of step-by-step-sized images.
                    image_summary.image(
                        name,
                        tf.tile(pixel, [samples, step, step, 1]),
                        step=step,
                        description=description,
                        max_outputs=99,
                    )