Example #1
def advise(graph, run_meta=None, tfprof_options=ALL_ADVICE):  # pylint: disable=dangerous-default-value
  """Auto profile and advise.

    Builds profiles and automatically checks anomalies of various
    aspects. See go/tfprof or the README for examples and tutorials.

  Args:
    graph: tf.Graph.
    run_meta: tensorflow::RunMetadata proto. Allows auto-profiling of
              time and memory.
    tfprof_options: see ALL_ADVICE example above.
  Returns:
    An AdviceProto proto.
  """
  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, None, run_meta, add_trace=True)
  # pylint: enable=protected-access

  run_meta_str = run_meta.SerializeToString() if run_meta else b''

  opts = _build_advisor_options(tfprof_options)
  ret = tfprof_output_pb2.AdviceProto()
  ret.ParseFromString(
      print_mdl.PrintModelAnalysis(
          graph.as_graph_def(add_shapes=True).SerializeToString(),
          run_meta_str,
          op_log.SerializeToString(),
          'advise'.encode('utf-8'),
          opts.SerializeToString()))
  return ret
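
A hypothetical usage sketch for advise() (not part of the original listing): it assumes a TF 1.x graph-mode session and that the function is importable from tensorflow.python.profiler.model_analyzer; all variable names are illustrative.

# Hypothetical sketch (TF 1.x): build a small graph, trace one run to get a
# RunMetadata, then let advise() check for anomalies.
import tensorflow as tf
from tensorflow.python.profiler import model_analyzer

with tf.Session() as sess:
  x = tf.random_normal([32, 256])
  w = tf.get_variable('w', [256, 10])
  y = tf.matmul(x, w)
  sess.run(tf.global_variables_initializer())

  run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
  run_meta = tf.RunMetadata()
  sess.run(y, options=run_options, run_metadata=run_meta)

  advice = model_analyzer.advise(sess.graph, run_meta=run_meta)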
Example #2
def print_model_analysis(graph,
                         run_meta=None,
                         op_log=None,
                         tfprof_cmd='scope',
                         tfprof_options=TRAINABLE_VARS_PARAMS_STAT_OPTIONS):
    """Print model statistics.

    See go/tfprof or README for examples and tutorials.
    Run tfprof tool for help:
    'bazel run third_party/tensorflow/tools/tfprof help'

  Args:
    graph: tf.Graph.
    run_meta: tensorflow::RunMetadata proto. When provided, also shows valid
              timing and memory information when 'select' option contains
              'micros' and 'bytes'.
    op_log: tensorflow::tfprof::OpLog proto. Users can use this proto to
            group ops together and use an op_type to select the group.
    tfprof_cmd: string. Either 'op', 'scope', 'graph', 'code'.
                'op' view organizes outputs by operation type (e.g. MatMul).
                'scope' view organizes outputs by graph node name scope.
                'graph' view organizes outputs by graph node inputs/outputs.
                'code' view organizes outputs by Python call stack.
    tfprof_options: See 'tfprof help' for details.
  Returns:
    If tfprof_cmd is 'scope' or 'graph', returns TFGraphNodeProto proto.
    If tfprof_cmd is 'op' or 'code', returns TFMultiGraphNodeProto proto.
    Side effect: stdout/file/timeline.json depending on tfprof_options['output']
  """
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        graph, op_log, run_meta, add_trace=tfprof_cmd == 'code')
    # pylint: enable=protected-access

    opts = _build_options(tfprof_options)

    run_meta_str = run_meta.SerializeToString() if run_meta else b''

    if tfprof_cmd == 'code' or tfprof_cmd == 'op':
        tfprof_node = tfprof_output_pb2.TFMultiGraphNodeProto()
        tfprof_node.ParseFromString(
            print_mdl.PrintModelAnalysis(
                graph.as_graph_def(add_shapes=True).SerializeToString(),
                run_meta_str, op_log.SerializeToString(),
                tfprof_cmd.encode('utf-8'), opts.SerializeToString()))
    elif tfprof_cmd == 'graph' or tfprof_cmd == 'scope':
        tfprof_node = tfprof_output_pb2.TFGraphNodeProto()
        tfprof_node.ParseFromString(
            print_mdl.PrintModelAnalysis(
                graph.as_graph_def(add_shapes=True).SerializeToString(),
                run_meta_str, op_log.SerializeToString(),
                tfprof_cmd.encode('utf-8'), opts.SerializeToString()))
    else:
        raise errors.InvalidArgumentError(
            None, None, 'unknown tfprof_cmd: %s\n' % tfprof_cmd)

    return tfprof_node
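
A hypothetical call sketch for the function above; it assumes a TF 1.x session named sess and that the function and its TRAINABLE_VARS_PARAMS_STAT_OPTIONS default are exposed by the same module (in the TF 1.x releases that shipped it, that was tf.contrib.tfprof.model_analyzer).

# Hypothetical usage (TF 1.x): default 'scope' view over trainable variables;
# per the docstring above, the return value is a TFGraphNodeProto.
from tensorflow.contrib.tfprof import model_analyzer

tfprof_node = model_analyzer.print_model_analysis(
    sess.graph,
    tfprof_cmd='scope',
    tfprof_options=model_analyzer.TRAINABLE_VARS_PARAMS_STAT_OPTIONS)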
Example #3
  def add_step(self, step, run_meta):
    """Add statistics of a step.

    Args:
      step: A step uint64 used to identify the RunMetadata. Must be different
         across different AddStep() calls.
      run_meta: RunMetadata proto that contains statistics of a session run.
    """
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, run_meta=run_meta, add_trace=False,
        add_trainable_var=False)
    # pylint: enable=protected-access
    print_mdl.AddStep(
        step, run_meta.SerializeToString(), op_log.SerializeToString())
Example #4
  def add_step(self, step, run_meta):
    """Add statistics of a step.

    Args:
      step: A step uint64 used to identify the RunMetadata. Must be different
         across different AddStep() calls.
      run_meta: RunMetadata proto that contains statistics of a session run.
    """
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, run_meta=run_meta, add_trace=False,
        add_trainable_var=False)
    # pylint: enable=protected-access
    print_mdl.AddStep(
        step, run_meta.SerializeToString(), op_log.SerializeToString())
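
add_step() expects a RunMetadata collected from a traced session run, and the step id must differ between calls. A hypothetical collection-loop sketch: sess, train_op and profiler are assumed to exist (the constructor for profiler appears in Example #5 below).

# Hypothetical sketch (TF 1.x): trace each run, then feed the statistics to
# the profiler with a distinct step id.
import tensorflow as tf

run_options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
for step in range(10):
  run_meta = tf.RunMetadata()
  sess.run(train_op, options=run_options, run_metadata=run_meta)
  profiler.add_step(step, run_meta)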
Example #5
    def __init__(self, graph, op_log=None):
        """Constructor.

    Args:
      graph: tf.Graph.
      op_log: optional. tensorflow::tfprof::OpLog proto. Used to define
          extra op types.
    """
        self._graph = graph
        # pylint: disable=protected-access
        op_log = tfprof_logger._merge_default_with_oplog(self._graph,
                                                         op_log=op_log)
        # pylint: enable=protected-access

        print_mdl.NewProfiler(self._graph.as_graph_def().SerializeToString(),
                              op_log.SerializeToString())
  def __init__(self, graph, op_log=None):
    """Constructor.

    Args:
      graph: tf.Graph.
      op_log: optional. tensorflow::tfprof::OpLog proto. Used to define
          extra op types.
    """
    self._graph = graph
    # pylint: disable=protected-access
    op_log = tfprof_logger._merge_default_with_oplog(
        self._graph, op_log=op_log)
    # pylint: enable=protected-access

    print_mdl.NewProfiler(
        self._graph.as_graph_def(add_shapes=True).SerializeToString(),
        op_log.SerializeToString())
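
The constructor only needs the graph (plus an optional OpLog for extra op types); queries are issued after statistics have been added. A hypothetical end-to-end sketch, assuming an existing session sess and that the class is exposed as tf.profiler.Profiler with ProfileOptionBuilder available, as in later TF 1.x releases:

# Hypothetical sketch (TF 1.x): wrap the graph once, absorb per-step
# RunMetadata via add_step() (see the loop sketched above), then query views.
import tensorflow as tf

profiler = tf.profiler.Profiler(sess.graph)   # op_log is optional
# ... sess.run(..., run_metadata=run_meta); profiler.add_step(step, run_meta) ...

opts = tf.profiler.ProfileOptionBuilder.time_and_memory()
profiler.profile_operations(options=opts)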
def print_model_analysis(graph,
                         run_meta=None,
                         op_log=None,
                         tfprof_cmd='scope',
                         tfprof_options=TRAINABLE_VARS_PARAMS_STAT_OPTIONS):
  """Print model statistics.

    See go/tfprof or README for examples and tutorials.
    Run tfprof tool for help:
    'bazel run third_party/tensorflow/tools/tfprof help'

  Args:
    graph: tf.Graph.
    run_meta: tensorflow::RunMetadata proto. When provided, also shows valid
              timing and memory information when 'select' option contains
              'micros' and 'bytes'.
    op_log: tensorflow::tfprof::OpLog proto. Users can use this proto to
            group ops together and use an op_type to select the group.
    tfprof_cmd: string. Either 'op', 'scope', 'graph', 'code'.
                'op' view organizes outputs by operation type (e.g. MatMul).
                'scope' view organizes outputs by graph node name scope.
                'graph' view organizes outputs by graph node inputs/outputs.
                'code' view organizes outputs by Python call stack.
    tfprof_options: See 'tfprof help' for details.
  Returns:
    If tfprof_cmd is 'scope' or 'graph', returns TFGraphNodeProto proto.
    If tfprof_cmd is 'op' or 'code', returns TFMultiGraphNodeProto proto.
    Side effect: stdout/file/timeline.json depending on tfprof_options['output']
  """
  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, op_log, run_meta, add_trace=tfprof_cmd == 'code')
  # pylint: enable=protected-access

  opts = _build_options(tfprof_options)

  run_meta_str = run_meta.SerializeToString() if run_meta else b''

  if tfprof_cmd == 'code' or tfprof_cmd == 'op':
    tfprof_node = tfprof_output_pb2.TFMultiGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.PrintModelAnalysis(
            graph.as_graph_def(add_shapes=True).SerializeToString(),
            run_meta_str,
            op_log.SerializeToString(),
            tfprof_cmd.encode('utf-8'),
            opts.SerializeToString()))
  elif tfprof_cmd == 'graph' or tfprof_cmd == 'scope':
    tfprof_node = tfprof_output_pb2.TFGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.PrintModelAnalysis(
            graph.as_graph_def(add_shapes=True).SerializeToString(),
            run_meta_str,
            op_log.SerializeToString(),
            tfprof_cmd.encode('utf-8'),
            opts.SerializeToString()))
  else:
    raise errors.InvalidArgumentError(
        None, None, 'unknown tfprof_cmd: %s\n' % tfprof_cmd)

  return tfprof_node
def print_model_analysis(graph,
                         run_meta=None,
                         op_log=None,
                         tfprof_cmd='scope',
                         tfprof_options=TRAINABLE_VARS_PARAMS_STAT_OPTIONS):
  """Print model statistics.

    Prints the model statistics to stdout. Also returns the results
    in a TFGraphNodeProto proto. See go/tfprof or run tfprof tool:
    'bazel run third_party/tensorflow/tools/tfprof help'

    Examples:
      Show the parameter/shape statistics of tf.trainable_variables().
        print_model_analysis(sess.graph).

      Show number of float ops. Only ops with RegisterStatistics defined
      are counted.
        show_float_op_opts = model_analyzer.FLOAT_OPS_OPTIONS
        print_model_analysis(sess.graph, tfprof_options=show_float_op_opts)

  Args:
    graph: tf.Graph.
    run_meta: tensorflow::RunMetadata proto. When provided, also shows valid
              timing and memory information when 'select' option contains
              'micros' and 'bytes'.
    op_log: tensorflow::tfprof::OpLog proto. Users can use this proto to
            group ops together and use an op_type to select the group.
    tfprof_cmd: string. Either 'scope', 'graph', 'code'.
                'scope' view organizes outputs by ops' name scope.
                'graph' view organizes outputs by an op's inputs/outputs.
                'code' view organizes outputs by Python call stack.
    tfprof_options: See 'tfprof help' for details.
  Returns:
    If tfprof_cmd is 'scope' or 'graph', returns TFGraphNodeProto proto.
    If tfprof_cmd is 'code', returns TFCodeNodeProto proto.
    Side effect: a formatted output to stdout.
  """
  # pylint: disable=protected-access
  op_log = tfprof_logger._merge_default_with_oplog(
      graph, op_log, run_meta, add_trace=tfprof_cmd == 'code')
  # pylint: enable=protected-access
  opts = tfprof_options_pb2.OptionsProto()
  opts.max_depth = tfprof_options['max_depth']
  opts.min_bytes = tfprof_options['min_bytes']
  opts.min_micros = tfprof_options['min_micros']
  opts.min_params = tfprof_options['min_params']
  opts.min_float_ops = tfprof_options['min_float_ops']
  for p in tfprof_options['device_regexes']:
    opts.device_regexes.append(p)
  opts.order_by = tfprof_options['order_by']
  for p in tfprof_options['account_type_regexes']:
    opts.account_type_regexes.append(p)
  for p in tfprof_options['start_name_regexes']:
    opts.start_name_regexes.append(p)
  for p in tfprof_options['trim_name_regexes']:
    opts.trim_name_regexes.append(p)
  for p in tfprof_options['show_name_regexes']:
    opts.show_name_regexes.append(p)
  for p in tfprof_options['hide_name_regexes']:
    opts.hide_name_regexes.append(p)
  opts.account_displayed_op_only = tfprof_options['account_displayed_op_only']
  for p in tfprof_options['select']:
    opts.select.append(p)
  opts.output = tfprof_options['output']
  opts.dump_to_file = tfprof_options['dump_to_file']

  run_meta_str = run_meta.SerializeToString() if run_meta else b''

  if tfprof_cmd == 'code':
    tfprof_node = tfprof_output_pb2.TFCodeNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.PrintModelAnalysis(
            graph.as_graph_def().SerializeToString(),
            run_meta_str,
            op_log.SerializeToString(),
            tfprof_cmd.encode('utf-8'),
            opts.SerializeToString()))
  else:
    tfprof_node = tfprof_output_pb2.TFGraphNodeProto()
    tfprof_node.ParseFromString(
        print_mdl.PrintModelAnalysis(
            graph.as_graph_def().SerializeToString(),
            run_meta_str,
            op_log.SerializeToString(),
            tfprof_cmd.encode('utf-8'),
            opts.SerializeToString()))

  return tfprof_node
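
This older variant builds the OptionsProto field by field from a tfprof_options dict, so the dict must provide every key read above. A hypothetical dict with exactly those keys (the values are illustrative, not the module's shipped defaults):

# Hypothetical options dict: the keys mirror what the function above reads;
# the values are illustrative, not the library's defaults.
tfprof_options = {
    'max_depth': 10000,
    'min_bytes': 0,
    'min_micros': 0,
    'min_params': 0,
    'min_float_ops': 0,
    'device_regexes': ['.*'],
    'order_by': 'name',
    'account_type_regexes': ['.*'],
    'start_name_regexes': ['.*'],
    'trim_name_regexes': [],
    'show_name_regexes': ['.*'],
    'hide_name_regexes': [],
    'account_displayed_op_only': True,
    'select': ['params'],
    'output': 'stdout',
    'dump_to_file': '',
}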