예제 #1
0
    def generate_run(self, run_name):
        """Write one run of summary data for `run_name` under `self.logdir`.

        Depending on `run_name`, emits either a legacy histogram summary,
        a new-style distribution summary, or a scalar summary, then runs
        the graph for `self._STEPS` steps and records each merged summary.
        """
        tf.compat.v1.reset_default_graph()
        session = tf.compat.v1.Session()
        values = tf.compat.v1.placeholder(tf.float32, shape=[3])

        if run_name == self._RUN_WITH_LEGACY_DISTRIBUTION:
            tf.compat.v1.summary.histogram(
                self._LEGACY_DISTRIBUTION_TAG, values)
        elif run_name == self._RUN_WITH_DISTRIBUTION:
            summary.op(
                self._DISTRIBUTION_TAG,
                values,
                display_name=self._DISPLAY_NAME,
                description=self._DESCRIPTION,
            )
        elif run_name == self._RUN_WITH_SCALARS:
            mean = tf.reduce_mean(input_tensor=values)
            tf.compat.v1.summary.scalar(self._SCALAR_TAG, mean)
        else:
            assert False, "Invalid run name: %r" % run_name
        merged = tf.compat.v1.summary.merge_all()

        run_dir = os.path.join(self.logdir, run_name)
        with test_util.FileWriterCache.get(run_dir) as writer:
            writer.add_graph(session.graph)
            for step in xrange(self._STEPS):
                feed = {values: [1 + step, 2 + step, 3 + step]}
                writer.add_summary(
                    session.run(merged, feed_dict=feed), global_step=step)
예제 #2
0
    def generate_run(self, run_name):
        """Write one run of summary data for `run_name` under `self.logdir`.

        Emits a legacy histogram, a new-style histogram, or a scalar
        summary depending on `run_name`, then records `self._STEPS`
        merged summaries into a per-run subdirectory.
        """
        tf.reset_default_graph()
        session = tf.Session()
        data = tf.placeholder(tf.float32, shape=[3])

        if run_name == self._RUN_WITH_LEGACY_HISTOGRAM:
            tf.summary.histogram(self._LEGACY_HISTOGRAM_TAG, data)
        elif run_name == self._RUN_WITH_HISTOGRAM:
            summary.op(
                self._HISTOGRAM_TAG,
                data,
                display_name=self._DISPLAY_NAME,
                description=self._DESCRIPTION,
            )
        elif run_name == self._RUN_WITH_SCALARS:
            tf.summary.scalar(self._SCALAR_TAG, tf.reduce_mean(data))
        else:
            assert False, 'Invalid run name: %r' % run_name
        merged = tf.summary.merge_all()

        writer = tf.summary.FileWriter(os.path.join(self.logdir, run_name))
        writer.add_graph(session.graph)
        for step in xrange(self._STEPS):
            feed = {data: [1 + step, 2 + step, 3 + step]}
            writer.add_summary(session.run(merged, feed_dict=feed),
                               global_step=step)
        writer.close()
예제 #3
0
  def generate_run(self, run_name):
    """Generate a single run of test summary data named `run_name`.

    The run contains either a legacy histogram, a new-style histogram,
    or a scalar summary, evaluated for `self._STEPS` steps.
    """
    tf.reset_default_graph()
    session = tf.Session()
    inputs = tf.placeholder(tf.float32, shape=[3])

    if run_name == self._RUN_WITH_LEGACY_HISTOGRAM:
      tf.summary.histogram(self._LEGACY_HISTOGRAM_TAG, inputs)
    elif run_name == self._RUN_WITH_HISTOGRAM:
      summary.op(
          self._HISTOGRAM_TAG,
          inputs,
          display_name=self._DISPLAY_NAME,
          description=self._DESCRIPTION)
    elif run_name == self._RUN_WITH_SCALARS:
      tf.summary.scalar(self._SCALAR_TAG, tf.reduce_mean(inputs))
    else:
      assert False, 'Invalid run name: %r' % run_name
    merged = tf.summary.merge_all()

    writer = tf.summary.FileWriter(os.path.join(self.logdir, run_name))
    writer.add_graph(session.graph)
    for step in xrange(self._STEPS):
      summ = session.run(
          merged, feed_dict={inputs: [1 + step, 2 + step, 3 + step]})
      writer.add_summary(summ, global_step=step)
    writer.close()
예제 #4
0
    def compute_and_check_summary_pb(self,
                                     name='nemo',
                                     data=None,
                                     bucket_count=None,
                                     display_name=None,
                                     description=None,
                                     data_tensor=None,
                                     bucket_count_tensor=None,
                                     feed_dict=None):
        """Use both `op` and `pb` to get a summary, asserting equality.

        Builds the same histogram summary twice -- once through the graph
        op (`summary.op`) and once directly as a protobuf (`summary.pb`)
        -- and asserts that both routes yield identical protos.

        Returns:
          a `Summary` protocol buffer
        """
        data = self.gaussian if data is None else data
        data_tensor = (
            tf.constant(data) if data_tensor is None else data_tensor)
        if bucket_count_tensor is None:
            bucket_count_tensor = bucket_count
        metadata = dict(display_name=display_name, description=description)
        op = summary.op(
            name, data_tensor, bucket_count=bucket_count_tensor, **metadata)
        pb = summary.pb(name, data, bucket_count=bucket_count, **metadata)
        self.assertProtoEquals(pb, self.pb_via_op(op, feed_dict=feed_dict))
        return pb
 def test_new_style_histogram(self):
     """A new-style histogram summary should pass through untouched."""
     histogram_op = histogram_summary.op(
         'important_data',
         tf.random_normal(shape=[10, 10]),
         bucket_count=100,
         display_name='Important data',
         description='secrets of the universe')
     value = self._value_from_op(histogram_op)
     assert value.HasField('tensor'), value
     self._assert_noop(value)
예제 #6
0
 def test_new_style_histogram(self):
   """A new-style histogram summary should pass through unmodified."""
   histogram_op = histogram_summary.op(
       'important_data',
       tf.random_normal(shape=[10, 10]),
       bucket_count=100,
       display_name='Important data',
       description='secrets of the universe')
   value = self._value_from_op(histogram_op)
   assert value.HasField('tensor'), value
   self._assert_noop(value)
예제 #7
0
 def test_new_style_histogram(self):
     """A new-style histogram summary should pass through untouched."""
     with tf.compat.v1.Graph().as_default():
         histogram_op = histogram_summary.op(
             "important_data",
             tf.random.normal(shape=[10, 10]),
             bucket_count=100,
             display_name="Important data",
             description="secrets of the universe",
         )
         value = self._value_from_op(histogram_op)
     assert value.HasField("tensor"), value
     self._assert_noop(value)
예제 #8
0
 def histogram(self, *args, **kwargs):
     """Build a histogram summary and return it as a parsed Summary proto.

     Translates the new-style `buckets` keyword to the legacy
     `bucket_count` name before delegating to `summary.op`.
     """
     if "buckets" in kwargs:
         kwargs["bucket_count"] = kwargs.pop("buckets")
     serialized = summary.op(*args, **kwargs).numpy()
     return summary_pb2.Summary.FromString(serialized)
예제 #9
0
 def histogram(self, *args, **kwargs):
     """Build a histogram summary and return it as a parsed `tf.Summary`.

     Translates the new-style `buckets` keyword to the legacy
     `bucket_count` name before delegating to `summary.op`.
     """
     if 'buckets' in kwargs:
         kwargs['bucket_count'] = kwargs.pop('buckets')
     serialized = summary.op(*args, **kwargs).numpy()
     return tf.Summary.FromString(serialized)
예제 #10
0
def run_all(logdir, verbose=False):
  """Generate a bunch of histogram data, and write it to logdir.

  Builds a graph of several evolving distributions (normal, gamma,
  Poisson, uniform, and combinations of them), all driven by a
  placeholder `k` that sweeps from 0 to 1 over 400 steps, and records
  each distribution with a histogram summary.

  Args:
    logdir: Directory into which the event files are written.
    verbose: Unused; accepted for interface compatibility.
  """
  del verbose

  tf.compat.v1.set_random_seed(0)

  # `k` sweeps from 0 to 1 over the run and parameterizes every
  # distribution below.
  k = tf.compat.v1.placeholder(tf.float32)

  # Make a normal distribution, with a shifting mean
  mean_moving_normal = tf.random.normal(shape=[1000], mean=(5*k), stddev=1)
  # Record that distribution into a histogram summary
  histogram_summary.op("normal/moving_mean",
                       mean_moving_normal,
                       description="A normal distribution whose mean changes "
                                   "over time.")

  # Make a normal distribution with shrinking variance
  shrinking_normal = tf.random.normal(shape=[1000], mean=0, stddev=1-(k))
  # Record that distribution too
  histogram_summary.op("normal/shrinking_variance", shrinking_normal,
                       description="A normal distribution whose variance "
                                   "shrinks over time.")

  # Let's combine both of those distributions into one dataset
  normal_combined = tf.concat([mean_moving_normal, shrinking_normal], 0)
  # We add another histogram summary to record the combined distribution
  histogram_summary.op("normal/bimodal", normal_combined,
                       description="A combination of two normal distributions, "
                                   "one with a moving mean and one with  "
                                   "shrinking variance. The result is a "
                                   "distribution that starts as unimodal and "
                                   "becomes more and more bimodal over time.")

  # Add a gamma distribution
  gamma = tf.random.gamma(shape=[1000], alpha=k)
  histogram_summary.op("gamma", gamma,
                       description="A gamma distribution whose shape "
                                   "parameter, α, changes over time.")

  # And a poisson distribution
  poisson = tf.compat.v1.random_poisson(shape=[1000], lam=k)
  histogram_summary.op("poisson", poisson,
                       description="A Poisson distribution, which only "
                                   "takes on integer values.")

  # And a uniform distribution
  uniform = tf.random.uniform(shape=[1000], maxval=k*10)
  histogram_summary.op("uniform", uniform,
                       description="A simple uniform distribution.")

  # Finally, combine everything together!
  all_distributions = [mean_moving_normal, shrinking_normal,
                       gamma, poisson, uniform]
  all_combined = tf.concat(all_distributions, 0)
  histogram_summary.op("all_combined", all_combined,
                       description="An amalgamation of five distributions: a "
                                   "uniform distribution, a gamma "
                                   "distribution, a Poisson distribution, and "
                                   "two normal distributions.")

  summaries = tf.compat.v1.summary.merge_all()

  # Setup a session and summary writer
  sess = tf.compat.v1.Session()
  # Fix: use the compat.v1 FileWriter, consistent with the rest of this
  # function; `tf.summary.FileWriter` does not exist in the TF2 namespace.
  writer = tf.compat.v1.summary.FileWriter(logdir)

  # Setup a loop and write the summaries to disk
  N = 400
  for step in xrange(N):
    k_val = step/float(N)
    summ = sess.run(summaries, feed_dict={k: k_val})
    writer.add_summary(summ, global_step=step)
  # Close the writer to flush any buffered events to disk.
  writer.close()
예제 #11
0
def run_all(logdir, verbose=False):
  """Generate a bunch of histogram data, and write it to logdir.

  Builds a graph of several evolving distributions (normal, gamma,
  Poisson, uniform, and combinations of them), all driven by a
  placeholder `k` that sweeps from 0 to 1 over 400 steps, and records
  each distribution with a histogram summary.

  Args:
    logdir: Directory into which the event files are written.
    verbose: Unused; accepted for interface compatibility.
  """
  del verbose

  tf.set_random_seed(0)

  # `k` sweeps from 0 to 1 over the run and parameterizes every
  # distribution below.
  k = tf.placeholder(tf.float32)

  # Make a normal distribution, with a shifting mean
  mean_moving_normal = tf.random_normal(shape=[1000], mean=(5*k), stddev=1)
  # Record that distribution into a histogram summary
  histogram_summary.op("normal/moving_mean",
                       mean_moving_normal,
                       description="A normal distribution whose mean changes "
                                   "over time.")

  # Make a normal distribution with shrinking variance
  shrinking_normal = tf.random_normal(shape=[1000], mean=0, stddev=1-(k))
  # Record that distribution too
  histogram_summary.op("normal/shrinking_variance", shrinking_normal,
                       description="A normal distribution whose variance "
                                   "shrinks over time.")

  # Let's combine both of those distributions into one dataset
  normal_combined = tf.concat([mean_moving_normal, shrinking_normal], 0)
  # We add another histogram summary to record the combined distribution
  histogram_summary.op("normal/bimodal", normal_combined,
                       description="A combination of two normal distributions, "
                                   "one with a moving mean and one with  "
                                   "shrinking variance. The result is a "
                                   "distribution that starts as unimodal and "
                                   "becomes more and more bimodal over time.")

  # Add a gamma distribution
  gamma = tf.random_gamma(shape=[1000], alpha=k)
  histogram_summary.op("gamma", gamma,
                       description="A gamma distribution whose shape "
                                   "parameter, α, changes over time.")

  # And a poisson distribution
  poisson = tf.random_poisson(shape=[1000], lam=k)
  histogram_summary.op("poisson", poisson,
                       description="A Poisson distribution, which only "
                                   "takes on integer values.")

  # And a uniform distribution
  uniform = tf.random_uniform(shape=[1000], maxval=k*10)
  histogram_summary.op("uniform", uniform,
                       description="A simple uniform distribution.")

  # Finally, combine everything together!
  all_distributions = [mean_moving_normal, shrinking_normal,
                       gamma, poisson, uniform]
  all_combined = tf.concat(all_distributions, 0)
  histogram_summary.op("all_combined", all_combined,
                       description="An amalgamation of five distributions: a "
                                   "uniform distribution, a gamma "
                                   "distribution, a Poisson distribution, and "
                                   "two normal distributions.")

  summaries = tf.summary.merge_all()

  # Setup a session and summary writer
  sess = tf.Session()
  writer = tf.summary.FileWriter(logdir)

  # Setup a loop and write the summaries to disk
  N = 400
  for step in xrange(N):
    k_val = step/float(N)
    summ = sess.run(summaries, feed_dict={k: k_val})
    writer.add_summary(summ, global_step=step)
  # Fix: close the writer so buffered events are flushed to disk.
  writer.close()
예제 #12
0
def run_all(logdir, verbose=False):
    """Generate a bunch of histogram data, and write it to logdir.

    Builds a graph of several evolving distributions (normal, gamma,
    Poisson, uniform, and combinations of them), all driven by a
    placeholder `k` that sweeps from 0 to 1 over 400 steps, and records
    each distribution with a histogram summary.

    Args:
      logdir: Directory into which the event files are written.
      verbose: Unused; accepted for interface compatibility.
    """
    del verbose

    tf.set_random_seed(0)

    # `k` sweeps from 0 to 1 over the run and parameterizes every
    # distribution below.
    k = tf.placeholder(tf.float32)

    # Make a normal distribution, with a shifting mean
    mean_moving_normal = tf.random_normal(shape=[1000], mean=(5 * k), stddev=1)
    # Record that distribution into a histogram summary
    histogram_summary.op("normal/moving_mean", mean_moving_normal)

    # Make a normal distribution with shrinking variance
    shrinking_normal = tf.random_normal(shape=[1000], mean=0, stddev=1 - (k))
    # Record that distribution too
    histogram_summary.op("normal/shrinking_variance", shrinking_normal)

    # Let's combine both of those distributions into one dataset
    normal_combined = tf.concat([mean_moving_normal, shrinking_normal], 0)
    # We add another histogram summary to record the combined distribution
    histogram_summary.op("normal/bimodal", normal_combined)

    # Add a gamma distribution
    gamma = tf.random_gamma(shape=[1000], alpha=k)
    histogram_summary.op("gamma", gamma)

    # And a poisson distribution
    poisson = tf.random_poisson(shape=[1000], lam=k)
    histogram_summary.op("poisson", poisson)

    # And a uniform distribution
    uniform = tf.random_uniform(shape=[1000], maxval=k * 10)
    histogram_summary.op("uniform", uniform)

    # Finally, combine everything together!
    all_distributions = [
        mean_moving_normal, shrinking_normal, gamma, poisson, uniform
    ]
    all_combined = tf.concat(all_distributions, 0)
    histogram_summary.op("all_combined", all_combined)

    summaries = tf.summary.merge_all()

    # Setup a session and summary writer
    sess = tf.Session()
    writer = tf.summary.FileWriter(logdir)

    # Setup a loop and write the summaries to disk
    N = 400
    for step in xrange(N):
        k_val = step / float(N)
        summ = sess.run(summaries, feed_dict={k: k_val})
        writer.add_summary(summ, global_step=step)
    # Fix: close the writer so buffered events are flushed to disk.
    writer.close()