Example #1
0
def get_path_to_datafile(path):
    """Get the path to the specified file in the data dependencies.

    The path is relative to tensorflow/

    Args:
      path: a string resource path relative to tensorflow/

    Returns:
      The path to the specified file present in the data attribute of py_test
      or py_binary.

    Raises:
      IOError: If the path is not found, or the resource can't be opened.
    """
    # First, try finding in the new path.
    if runfiles:
        r = runfiles.Create()
        new_fpath = r.Rlocation(
            _os.path.abspath(_os.path.join('tensorflow', path)))
        if new_fpath is not None and _os.path.exists(new_fpath):
            return new_fpath

    # Then, the old style path, as people became dependent on this buggy call.
    old_filepath = _os.path.join(
        _os.path.dirname(_inspect.getfile(_sys._getframe(1))), path)
    return old_filepath
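A minimal usage sketch for the function above, assuming it is exposed as tensorflow.python.platform.resource_loader.get_path_to_datafile; the data file path below is purely illustrative and would have to be listed in the py_test/py_binary data attribute:

from tensorflow.python.platform import resource_loader

# Hypothetical data file, declared in the test target's `data` attribute.
data_path = resource_loader.get_path_to_datafile(
    "python/framework/testdata/example.txt")
with open(data_path) as f:
  print(f.read())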
Example #2
0
def GetTempDir():
  """Return a temporary directory for tests to use."""
  global _googletest_temp_dir
  if not _googletest_temp_dir:
    if os.environ.get('TEST_TMPDIR'):
      temp_dir = tempfile.mkdtemp(prefix=os.environ['TEST_TMPDIR'])
    else:
      first_frame = tf_inspect.stack()[-1][0]
      temp_dir = os.path.join(tempfile.gettempdir(),
                              os.path.basename(tf_inspect.getfile(first_frame)))
      temp_dir = tempfile.mkdtemp(prefix=temp_dir.rstrip('.py'))

    # Make sure we have the correct path separators.
    temp_dir = temp_dir.replace('/', os.sep)

    def delete_temp_dir(dirname=temp_dir):
      try:
        file_io.delete_recursively(dirname)
      except errors.OpError as e:
        logging.error('Error removing %s: %s', dirname, e)

    atexit.register(delete_temp_dir)

    _googletest_temp_dir = temp_dir

  return _googletest_temp_dir
Example #3
0
def GetTempDir():
    """Return a temporary directory for tests to use."""
    global _googletest_temp_dir
    if not _googletest_temp_dir:
        if os.environ.get('TEST_TMPDIR'):
            temp_dir = tempfile.mkdtemp(prefix=os.environ['TEST_TMPDIR'])
        else:
            first_frame = tf_inspect.stack()[-1][0]
            temp_dir = os.path.join(
                tempfile.gettempdir(),
                os.path.basename(tf_inspect.getfile(first_frame)))
            temp_dir = tempfile.mkdtemp(prefix=temp_dir.rstrip('.py'))

        # Make sure we have the correct path separators.
        temp_dir = temp_dir.replace('/', os.sep)

        def delete_temp_dir(dirname=temp_dir):
            try:
                file_io.delete_recursively(dirname)
            except errors.OpError as e:
                logging.error('Error removing %s: %s', dirname, e)

        atexit.register(delete_temp_dir)

        _googletest_temp_dir = temp_dir

    return _googletest_temp_dir
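A short usage sketch, assuming GetTempDir is the module-level helper in tensorflow.python.platform.googletest (as in TF 1.x). The directory is shared for the test run and removed by the atexit hook registered above; inside a tf.test.TestCase, self.get_temp_dir() is the more common entry point.

import os
from tensorflow.python.platform import googletest

scratch_dir = googletest.GetTempDir()
# Anything written here is cleaned up at interpreter exit by delete_temp_dir.
with open(os.path.join(scratch_dir, "scratch.txt"), "w") as f:
  f.write("temporary test output")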
Example #4
0
def function_to_graph(f, conversion_map, arg_values, arg_types,
                      owner_type=None):
  """Specialization of `entity_to_graph` for callable functions."""
  node = parser.parse_object(f).body[0]
  namespace = six.get_function_globals(f)

  # This is needed for non-global functions.
  closure = six.get_function_closure(f)
  if closure:
    for e in closure:
      if callable(e.cell_contents):
        fn = e.cell_contents
        namespace[fn.__name__] = fn

  namer = conversion_map.new_namer(namespace)
  ctx = context.EntityContext(
      namer=namer,
      source_code=tf_inspect.getsource(f),
      source_file=tf_inspect.getfile(f),
      namespace=namespace,
      arg_values=arg_values,
      arg_types=arg_types)
  node = node_to_graph(node, ctx, conversion_map.nocompile_decorators)

  # Simulate a rename to ensure the top level is in the name map. This is needed
  # for top level functions, and it also helps the consistency verification made
  # by update_name_map.
  if owner_type is not None:
    new_name = namer.compiled_function_name(f.__name__, f, owner_type)
  else:
    new_name = namer.compiled_function_name(f.__name__, f)
  node.name = new_name
  conversion_map.update_name_map(namer)
  return node, conversion_map.name_map[f]
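The conversion above starts by retrieving the function's source and parsing it into an AST node. A standalone sketch of just that first step, using the standard ast module in place of the internal parser/context/namer machinery (which is assumed here and not recreated):

import ast
import textwrap

from tensorflow.python.util import tf_inspect

def sample(x):
  return x + 1

# Analogous to parser.parse_object(f).body[0]: grab the FunctionDef node.
source = textwrap.dedent(tf_inspect.getsource(sample))
node = ast.parse(source).body[0]
print(type(node).__name__, node.name)  # FunctionDef sample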
Example #5
0
  def createAndRunGraphWithWhileLoop(self):
    """Create and run a TensorFlow Graph with a while loop to generate dumps."""

    self.dump_root = self.get_temp_dir()
    self.curr_file_path = os.path.abspath(
        tf_inspect.getfile(tf_inspect.currentframe()))

    # Run a simple TF graph to generate some debug dumps that can be used in
    # source annotation.
    with session.Session() as sess:
      loop_body = lambda i: math_ops.add(i, 2)
      self.traceback_first_line = line_number_above()

      loop_cond = lambda i: math_ops.less(i, 16)

      i = constant_op.constant(10, name="i")
      loop = control_flow_ops.while_loop(loop_cond, loop_body, [i])

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options, sess.graph, debug_urls=["file://%s" % self.dump_root])
      run_metadata = config_pb2.RunMetadata()
      sess.run(loop, options=run_options, run_metadata=run_metadata)

      self.dump = debug_data.DebugDumpDir(
          self.dump_root, partition_graphs=run_metadata.partition_graphs)
      self.dump.set_python_graph(sess.graph)
Example #6
0
    def createAndRunGraphWithWhileLoop(self):
        """Create and run a TensorFlow Graph with a while loop to generate dumps."""

        self.dump_root = self.get_temp_dir()
        self.curr_file_path = os.path.abspath(
            tf_inspect.getfile(tf_inspect.currentframe()))

        # Run a simple TF graph to generate some debug dumps that can be used in
        # source annotation.
        with session.Session() as sess:
            loop_body = lambda i: math_ops.add(i, 2)
            self.traceback_first_line = line_number_above()

            loop_cond = lambda i: math_ops.less(i, 16)

            i = constant_op.constant(10, name="i")
            loop = control_flow_ops.while_loop(loop_cond, loop_body, [i])

            run_options = config_pb2.RunOptions(output_partition_graphs=True)
            debug_utils.watch_graph(run_options,
                                    sess.graph,
                                    debug_urls=["file://%s" % self.dump_root])
            run_metadata = config_pb2.RunMetadata()
            sess.run(loop, options=run_options, run_metadata=run_metadata)

            self.dump = debug_data.DebugDumpDir(
                self.dump_root, partition_graphs=run_metadata.partition_graphs)
            self.dump.set_python_graph(sess.graph)
Example #7
0
def get_data_files_path():
  """Get a direct path to the data files colocated with the script.

  Returns:
    The directory where files specified in data attribute of py_test
    and py_binary are stored.
  """
  return _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
Example #8
0
def get_data_files_path():
    """Get a direct path to the data files colocated with the script.

    Returns:
      The directory where files specified in data attribute of py_test
      and py_binary are stored.
    """
    return _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
Example #9
0
    def createAndRunGraphHelper(self):
        """Create and run a TensorFlow Graph to generate debug dumps.

        This is intentionally done in a separate method, to make it easier to
        test the stack-top mode of source annotation.
        """

        self.dump_root = self.get_temp_dir()
        self.curr_file_path = os.path.abspath(
            tf_inspect.getfile(tf_inspect.currentframe()))

        # Run a simple TF graph to generate some debug dumps that can be used in
        # source annotation.
        with session.Session() as sess:
            self.u_init = constant_op.constant(np.array([[5.0, 3.0],
                                                         [-1.0, 0.0]]),
                                               shape=[2, 2],
                                               name="u_init")
            self.u_init_line_number = line_number_above()

            self.u = variables.Variable(self.u_init, name="u")
            self.u_line_number = line_number_above()

            self.v_init = constant_op.constant(np.array([[2.0], [-1.0]]),
                                               shape=[2, 1],
                                               name="v_init")
            self.v_init_line_number = line_number_above()

            self.v = variables.Variable(self.v_init, name="v")
            self.v_line_number = line_number_above()

            self.w = math_ops.matmul(self.u, self.v, name="w")
            self.w_line_number = line_number_above()

            self.evaluate(self.u.initializer)
            self.evaluate(self.v.initializer)

            run_options = config_pb2.RunOptions(output_partition_graphs=True)
            debug_utils.watch_graph(run_options,
                                    sess.graph,
                                    debug_urls=["file://%s" % self.dump_root])
            run_metadata = config_pb2.RunMetadata()
            sess.run(self.w, options=run_options, run_metadata=run_metadata)

            self.dump = debug_data.DebugDumpDir(
                self.dump_root, partition_graphs=run_metadata.partition_graphs)
            self.dump.set_python_graph(sess.graph)
Example #10
0
def get_path_to_datafile(path):
  """Get the path to the specified file in the data dependencies.

  The path is relative to tensorflow/

  Args:
    path: a string resource path relative to tensorflow/

  Returns:
    The path to the specified file present in the data attribute of py_test
    or py_binary.

  Raises:
    IOError: If the path is not found, or the resource can't be opened.
  """
  data_files_path = _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
  return _os.path.join(data_files_path, path)
Example #11
0
def get_path_to_datafile(path):
    """Get the path to the specified file in the data dependencies.

    The path is relative to tensorflow/

    Args:
      path: a string resource path relative to tensorflow/

    Returns:
      The path to the specified file present in the data attribute of py_test
      or py_binary.

    Raises:
      IOError: If the path is not found, or the resource can't be opened.
    """
    data_files_path = _os.path.dirname(_inspect.getfile(_sys._getframe(1)))
    return _os.path.join(data_files_path, path)
Example #12
0
  def createAndRunGraphHelper(self):
    """Create and run a TensorFlow Graph to generate debug dumps.

    This is intentionally done in a separate method, to make it easier to test
    the stack-top mode of source annotation.
    """

    self.dump_root = self.get_temp_dir()
    self.curr_file_path = os.path.abspath(
        tf_inspect.getfile(tf_inspect.currentframe()))

    # Run a simple TF graph to generate some debug dumps that can be used in
    # source annotation.
    with session.Session() as sess:
      self.u_init = constant_op.constant(
          np.array([[5.0, 3.0], [-1.0, 0.0]]), shape=[2, 2], name="u_init")
      self.u_init_line_number = line_number_above()

      self.u = variables.Variable(self.u_init, name="u")
      self.u_line_number = line_number_above()

      self.v_init = constant_op.constant(
          np.array([[2.0], [-1.0]]), shape=[2, 1], name="v_init")
      self.v_init_line_number = line_number_above()

      self.v = variables.Variable(self.v_init, name="v")
      self.v_line_number = line_number_above()

      self.w = math_ops.matmul(self.u, self.v, name="w")
      self.w_line_number = line_number_above()

      sess.run(self.u.initializer)
      sess.run(self.v.initializer)

      run_options = config_pb2.RunOptions(output_partition_graphs=True)
      debug_utils.watch_graph(
          run_options, sess.graph, debug_urls=["file://%s" % self.dump_root])
      run_metadata = config_pb2.RunMetadata()
      sess.run(self.w, options=run_options, run_metadata=run_metadata)

      self.dump = debug_data.DebugDumpDir(
          self.dump_root, partition_graphs=run_metadata.partition_graphs)
      self.dump.set_python_graph(sess.graph)
Example #13
0
def GetTempDir():
  """Return a temporary directory for tests to use."""
  global _googletest_temp_dir
  if not _googletest_temp_dir:
    first_frame = tf_inspect.stack()[-1][0]
    temp_dir = os.path.join(tempfile.gettempdir(),
                            os.path.basename(tf_inspect.getfile(first_frame)))
    temp_dir = tempfile.mkdtemp(prefix=temp_dir.rstrip('.py'))

    def delete_temp_dir(dirname=temp_dir):
      try:
        file_io.delete_recursively(dirname)
      except errors.OpError as e:
        logging.error('Error removing %s: %s', dirname, e)

    atexit.register(delete_temp_dir)
    _googletest_temp_dir = temp_dir

  return _googletest_temp_dir
Example #14
0
def GetTempDir():
  """Return a temporary directory for tests to use."""
  global _googletest_temp_dir
  if not _googletest_temp_dir:
    first_frame = tf_inspect.stack()[-1][0]
    temp_dir = os.path.join(tempfile.gettempdir(),
                            os.path.basename(tf_inspect.getfile(first_frame)))
    temp_dir = tempfile.mkdtemp(prefix=temp_dir.rstrip('.py'))

    def delete_temp_dir(dirname=temp_dir):
      try:
        file_io.delete_recursively(dirname)
      except errors.OpError as e:
        logging.error('Error removing %s: %s', dirname, e)

    atexit.register(delete_temp_dir)
    _googletest_temp_dir = temp_dir

  return _googletest_temp_dir
Example #15
0
def _get_defined_in(py_object, parser_config):
    """Returns a description of where the passed in python object was defined.

    Arguments:
      py_object: The Python object.
      parser_config: A ParserConfig object.

    Returns:
      Either a `_PythonBuiltin`, `_PythonFile`, `_ProtoFile`, or a
      `_GeneratedFile`; None if the object is defined outside this code base.
    """
    # Every page gets a note about where this object is defined
    # TODO(wicke): If py_object is decorated, get the decorated object instead.
    # TODO(wicke): Only use decorators that support this in TF.

    try:
        path = os.path.relpath(path=tf_inspect.getfile(py_object),
                               start=parser_config.base_dir)
    except TypeError:  # getfile throws TypeError if py_object is a builtin.
        return _PythonBuiltin()

    # TODO(wicke): If this is a generated file, link to the source instead.
    # TODO(wicke): Move all generated files to a generated/ directory.
    # TODO(wicke): And make their source file predictable from the file name.

    # In case this is compiled, point to the original
    if path.endswith('.pyc'):
        path = path[:-1]

    # Never include links outside this code base.
    if path.startswith('..'):
        return None

    if re.match(r'.*/gen_[^/]*\.py$', path):
        return _GeneratedFile(path, parser_config)
    elif re.match(r'.*_pb2\.py$', path):
        # The _pb2.py files all appear right next to their defining .proto file.
        return _ProtoFile(path[:-7] + '.proto', parser_config)
    else:
        return _PythonFile(path, parser_config)
Example #16
0
def _get_defined_in(py_object, parser_config):
  """Returns a description of where the passed in python object was defined.

  Arguments:
    py_object: The Python object.
    parser_config: A ParserConfig object.

  Returns:
    Either a `_PythonBuiltin`, `_PythonFile`, `_ProtoFile`, or a
    `_GeneratedFile`; None if the object is defined outside this code base.
  """
  # Every page gets a note about where this object is defined
  # TODO(wicke): If py_object is decorated, get the decorated object instead.
  # TODO(wicke): Only use decorators that support this in TF.

  try:
    path = os.path.relpath(path=tf_inspect.getfile(py_object),
                           start=parser_config.base_dir)
  except TypeError:  # getfile throws TypeError if py_object is a builtin.
    return _PythonBuiltin()

  # TODO(wicke): If this is a generated file, link to the source instead.
  # TODO(wicke): Move all generated files to a generated/ directory.
  # TODO(wicke): And make their source file predictable from the file name.

  # In case this is compiled, point to the original
  if path.endswith('.pyc'):
    path = path[:-1]

  # Never include links outside this code base.
  if path.startswith('..'):
    return None

  if re.match(r'.*/gen_[^/]*\.py$', path):
    return _GeneratedFile(path, parser_config)
  elif re.match(r'.*_pb2\.py$', path):
    # The _pb2.py files all appear right next to their defining .proto file.
    return _ProtoFile(path[:-7] + '.proto', parser_config)
  else:
    return _PythonFile(path, parser_config)
Example #17
0
    def testGetFile(self):
        self.assertTrue('tf_inspect_test.py' in tf_inspect.getfile(
            test_decorated_function_with_defaults))
        self.assertTrue('tf_decorator.py' in tf_inspect.getfile(
            test_decorator('decorator')(tf_decorator.unwrap)))
Example #18
0
  def testGetFile(self):
    self.assertTrue('tf_inspect_test.py' in tf_inspect.getfile(
        test_decorated_function_with_defaults))
    self.assertTrue('tf_decorator.py' in tf_inspect.getfile(
        test_decorator('decorator')(tf_decorator.unwrap)))
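The two tests above exercise the key property of tf_inspect.getfile: unlike plain inspect.getfile, it unwraps TFDecorator layers first, so the reported file is the one where the undecorated target was defined. A minimal sketch of the undecorated case, assuming tensorflow.python.util.tf_inspect is importable:

from tensorflow.python.util import tf_inspect

def plain_function():
  return 42

# For an ordinary function this matches inspect.getfile: the defining file.
print(tf_inspect.getfile(plain_function))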
Example #19
0
from tensorflow.python import debug as tf_debug
from tensorflow.python.util import tf_inspect
from tensorflow.tools.docs import generate_lib

if __name__ == '__main__':
    doc_generator = generate_lib.DocGenerator()
    doc_generator.add_output_dir_argument()
    doc_generator.add_src_dir_argument()

    # This doc generator works on the TensorFlow codebase. Since this script lives
    # at tensorflow/tools/docs, and all code is defined somewhere inside
    # tensorflow/, we can compute the base directory (two levels up), which is
    # valid unless we're trying to apply this to a different code base, or are
    # moving the script around.
    script_dir = os.path.dirname(tf_inspect.getfile(tf_inspect.currentframe()))
    default_base_dir = os.path.join(script_dir, '..', '..')
    doc_generator.add_base_dir_argument(default_base_dir)

    flags = doc_generator.parse_known_args()

    # tf_debug is not imported with tf; it's a separate module altogether
    doc_generator.set_py_modules([('tf', tf), ('tfdbg', tf_debug)])

    doc_generator.set_do_not_descend_map({
        'tf': ['cli', 'lib', 'wrappers'],
        'tf.contrib': [
            'compiler',
            'factorization',
            'grid_rnn',
            'labeled_tensor',
Example #20
0
import tensorflow as tf

from tensorflow.python import debug as tf_debug
from tensorflow.python.util import tf_inspect
from tensorflow.tools.docs import generate_lib

if __name__ == '__main__':
  doc_generator = generate_lib.DocGenerator()
  doc_generator.add_output_dir_argument()
  doc_generator.add_src_dir_argument()

  # This doc generator works on the TensorFlow codebase. Since this script lives
  # at tensorflow/tools/docs, and all code is defined somewhere inside
  # tensorflow/, we can compute the base directory (two levels up), which is
  # valid unless we're trying to apply this to a different code base, or are
  # moving the script around.
  script_dir = os.path.dirname(tf_inspect.getfile(tf_inspect.currentframe()))
  default_base_dir = os.path.join(script_dir, '..', '..')
  doc_generator.add_base_dir_argument(default_base_dir)

  flags = doc_generator.parse_known_args()

  # Suppress documentation of some symbols that users should never use.
  del tf.layers.Layer.inbound_nodes
  del tf.layers.Layer.outbound_nodes

  # tf_debug is not imported with tf; it's a separate module altogether
  doc_generator.set_py_modules([('tf', tf), ('tfdbg', tf_debug)])

  sys.exit(doc_generator.build(flags))