Example 1
def op(name, data, display_name=None, description=None, collections=None):
    """Create a legacy scalar summary op.

    Arguments:
      name: A unique name for the generated summary node.
      data: A real numeric rank-0 `Tensor`. Must have `dtype` castable
        to `float32`.
      display_name: Optional name for this summary in TensorBoard, as a
        constant `str`. Defaults to `name`.
      description: Optional long-form description for this summary, as a
        constant `str`. Markdown is supported. Defaults to empty.
      collections: Optional list of graph collections keys. The new
        summary op is added to these collections. Defaults to
        `[GraphKeys.SUMMARIES]`.

    Returns:
      A TensorFlow summary op.
    """
    # TODO(nickfelt): remove on-demand imports once dep situation is fixed.
    import tensorflow.compat.v1 as tf
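    # Assumes `metadata` resolves to `tensorboard.plugins.scalar.metadata`,
    # which is imported at module level in the original plugin module.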

    if display_name is None:
        display_name = name
    summary_metadata = metadata.create_summary_metadata(
        display_name=display_name, description=description)
    with tf.name_scope(name):
        with tf.control_dependencies([tf.assert_scalar(data)]):
            return tf.summary.tensor_summary(
                name="scalar_summary",
                tensor=tf.cast(data, tf.float32),
                collections=collections,
                summary_metadata=summary_metadata,
            )
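
A minimal usage sketch for the op above, assuming TF 1.x graph mode (or `tf.compat.v1.disable_eager_execution()` under TF 2) and that `tensorboard.plugins.scalar.metadata` is importable; the log directory path is illustrative only.

import tensorflow.compat.v1 as tf

# `op` is the function defined above; it needs the tensorboard scalar
# `metadata` module to be available when it builds the summary metadata.
loss = tf.placeholder(tf.float32, shape=[])
loss_summary = op("loss", loss, description="Training loss per step.")
with tf.Session() as sess:
    writer = tf.summary.FileWriter("/tmp/logdir", sess.graph)
    writer.add_summary(sess.run(loss_summary, feed_dict={loss: 0.25}),
                       global_step=0)
    writer.close()
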
Example 2
def _buckets(data, bucket_count=None):
  """Create a TensorFlow op to group data into histogram buckets.

  Arguments:
    data: A `Tensor` of any shape. Must be castable to `float64`.
    bucket_count: Optional positive `int` or scalar `int32` `Tensor`.
  Returns:
    A `Tensor` of shape `[k, 3]` and type `float64`. The `i`th row is
    a triple `[left_edge, right_edge, count]` for a single bucket.
    The value of `k` is either `bucket_count` or `1` or `0`.
  """
  # TODO(nickfelt): remove on-demand imports once dep situation is fixed.
  import tensorflow.compat.v1 as tf
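  # `summary_v2` refers to `tensorboard.plugins.histogram.summary_v2`, which
  # is imported at module level in the original plugin module.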
  if bucket_count is None:
    bucket_count = summary_v2.DEFAULT_BUCKET_COUNT
  with tf.name_scope('buckets', values=[data, bucket_count]), \
       tf.control_dependencies([tf.assert_scalar(bucket_count),
                                tf.assert_type(bucket_count, tf.int32)]):
    data = tf.reshape(data, shape=[-1])  # flatten
    data = tf.cast(data, tf.float64)
    is_empty = tf.equal(tf.size(input=data), 0)

    def when_empty():
      return tf.constant([], shape=(0, 3), dtype=tf.float64)

    def when_nonempty():
      min_ = tf.reduce_min(input_tensor=data)
      max_ = tf.reduce_max(input_tensor=data)
      range_ = max_ - min_
      is_singular = tf.equal(range_, 0)

      def when_nonsingular():
        bucket_width = range_ / tf.cast(bucket_count, tf.float64)
        offsets = data - min_
        bucket_indices = tf.cast(tf.floor(offsets / bucket_width),
                                 dtype=tf.int32)
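        # Values equal to max_ would land in bucket `bucket_count`, so clamp
        # them into the last bucket.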
        clamped_indices = tf.minimum(bucket_indices, bucket_count - 1)
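        # Count entries per bucket by one-hot encoding each index and summing
        # over the flattened data axis.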
        one_hots = tf.one_hot(clamped_indices, depth=bucket_count)
        bucket_counts = tf.cast(tf.reduce_sum(input_tensor=one_hots, axis=0),
                                dtype=tf.float64)
        edges = tf.linspace(min_, max_, bucket_count + 1)
        left_edges = edges[:-1]
        right_edges = edges[1:]
        return tf.transpose(a=tf.stack(
            [left_edges, right_edges, bucket_counts]))

      def when_singular():
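        # All values are identical, so emit a single unit-width bucket
        # centered on that value.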
        center = min_
        bucket_starts = tf.stack([center - 0.5])
        bucket_ends = tf.stack([center + 0.5])
        bucket_counts = tf.stack([tf.cast(tf.size(input=data), tf.float64)])
        return tf.transpose(
            a=tf.stack([bucket_starts, bucket_ends, bucket_counts]))

      return tf.cond(is_singular, when_singular, when_nonsingular)

    return tf.cond(is_empty, when_empty, when_nonempty)
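
A short evaluation sketch for `_buckets`, assuming TF 1.x graph mode; passing `bucket_count` explicitly avoids relying on `summary_v2.DEFAULT_BUCKET_COUNT`.

import tensorflow.compat.v1 as tf

values = tf.constant([0.5, 0.25, 0.25, 1.0, 2.5])
buckets = _buckets(values, bucket_count=3)
with tf.Session() as sess:
    # Each row is [left_edge, right_edge, count]; the counts sum to the
    # number of elements in `values`.
    print(sess.run(buckets))
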
Example 3
def _buckets(data, bucket_count=None):
    """Create a TensorFlow op to group data into histogram buckets.

    Arguments:
      data: A `Tensor` of any shape. Must be castable to `float64`.
      bucket_count: Optional positive `int` or scalar `int32` `Tensor`.
    Returns:
      A `Tensor` of shape `[k, 3]` and type `float64`. The `i`th row is
      a triple `[left_edge, right_edge, count]` for a single bucket.
      The value of `k` is either `bucket_count` or `1` or `0`.
    """
    # TODO(nickfelt): remove on-demand imports once dep situation is fixed.
    import tensorflow.compat.v1 as tf
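    # `summary_v2` refers to `tensorboard.plugins.histogram.summary_v2`, which
    # is imported at module level in the original plugin module.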
    if bucket_count is None:
        bucket_count = summary_v2.DEFAULT_BUCKET_COUNT
    with tf.name_scope('buckets', values=[data, bucket_count]), \
         tf.control_dependencies([tf.assert_scalar(bucket_count),
                                  tf.assert_type(bucket_count, tf.int32)]):
        data = tf.reshape(data, shape=[-1])  # flatten
        data = tf.cast(data, tf.float64)
        is_empty = tf.equal(tf.size(input=data), 0)

        def when_empty():
            return tf.constant([], shape=(0, 3), dtype=tf.float64)

        def when_nonempty():
            min_ = tf.reduce_min(input_tensor=data)
            max_ = tf.reduce_max(input_tensor=data)
            range_ = max_ - min_
            is_singular = tf.equal(range_, 0)

            def when_nonsingular():
                # Packs the input values into buckets; the original author
                # notes this function still needs to be reworked.
                bucket_num = tf.minimum(bucket_count, tf.size(input=data))

                # linspace needs floating-point endpoints, and the edges must
                # be float64 to stack with `data` below.
                left_edges = tf.linspace(tf.constant(0.0, tf.float64),
                                         tf.cast(bucket_num - 1, tf.float64),
                                         bucket_num)
                right_edges = tf.linspace(tf.constant(1.0, tf.float64),
                                          tf.cast(bucket_num, tf.float64),
                                          bucket_num)
                # tf.stack assembles the three row vectors in order; the
                # transpose yields shape [bucket_num, 3].
                return tf.transpose(a=tf.stack(
                    [left_edges, right_edges, data[0:bucket_num]]))

            def when_singular():
                bucket_num = tf.minimum(bucket_count, tf.size(input=data))

                left_edges = tf.linspace(tf.constant(0.0, tf.float64),
                                         tf.cast(bucket_num - 1, tf.float64),
                                         bucket_num)
                right_edges = tf.linspace(tf.constant(1.0, tf.float64),
                                          tf.cast(bucket_num, tf.float64),
                                          bucket_num)
                return tf.transpose(
                    a=tf.stack([left_edges, right_edges, data[0:bucket_num]]))

            return tf.cond(is_singular, when_singular, when_nonsingular)

        return tf.cond(is_empty, when_empty, when_nonempty)
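
For comparison with Example 2, a quick evaluation sketch of this variant under the same assumptions (TF 1.x graph mode): it pairs integer index edges [i, i + 1] with the first `bucket_num` values of the flattened input rather than with per-bucket counts.

import tensorflow.compat.v1 as tf

values = tf.constant([0.5, 0.25, 0.25, 1.0, 2.5])
rows = _buckets(values, bucket_count=3)
with tf.Session() as sess:
    # Expected rows: [[0., 1., 0.5], [1., 2., 0.25], [2., 3., 0.25]]
    print(sess.run(rows))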