def test_continue_deeply_nested(self):
    """A `continue` buried two conditionals deep is canonicalized correctly."""

    def test_fn(x):
        v = []
        u = []
        w = []
        while x > 0:
            x -= 1
            if x % 2 == 0:
                if x % 3 != 0:
                    u.append(x)
                else:
                    w.append(x)
                    continue
            v.append(x)
        return v, u, w

    node = self.parse_and_analyze(test_fn, {})
    node = continue_statements.transform(node, self.ctx)

    # The transformed code must agree with the original for every input.
    with self.compiled(node) as result:
        for arg in range(5):
            self.assertEqual(test_fn(arg), result.test_fn(arg))
  def test_continue_deeply_nested(self):
    """Canonicalizes a `continue` nested inside two levels of conditionals."""

    def test_fn(x):
      v = []
      u = []
      w = []
      while x > 0:
        x -= 1
        if x % 2 == 0:
          if x % 3 != 0:
            u.append(x)
          else:
            w.append(x)
            continue
        v.append(x)
      return v, u, w

    node = self.parse_and_analyze(test_fn, {})
    node = continue_statements.transform(node, self.ctx)

    # Compiled output must match the original function on each input.
    with self.compiled(node) as result:
      for arg in (0, 1, 2, 3, 4):
        self.assertEqual(test_fn(arg), result.test_fn(arg))
    def test_basic_continue_for_loop(self):
        """A `continue` inside a for loop is canonicalized correctly."""

        def test_fn(a):
            v = []
            for x in a:
                x -= 1
                if x % 2 == 0:
                    continue
                v.append(x)
            return v

        node = self.parse_and_analyze(test_fn, {})
        node = continue_statements.transform(node, self.ctx)

        # Transformed and original code must produce identical results.
        with self.compiled(node) as result:
            for arg in ([], [1], [2], [1, 2, 3]):
                self.assertEqual(test_fn(arg), result.test_fn(arg))
    def test_basic_continue(self):
        """A simple `continue` in a while loop is canonicalized correctly."""

        def test_fn(x):
            v = []
            while x > 0:
                x -= 1
                if x % 2 == 0:
                    continue
                v.append(x)
            return v

        node = self.parse_and_analyze(test_fn, {})
        node = continue_statements.transform(node, self.ctx)

        # Transformed and original code must produce identical results.
        with self.compiled(node) as result:
            for arg in range(5):
                self.assertEqual(test_fn(arg), result.test_fn(arg))
  def test_basic_continue_for_loop(self):
    """Checks `continue` canonicalization inside a for loop."""

    def test_fn(a):
      v = []
      for x in a:
        x -= 1
        if x % 2 == 0:
          continue
        v.append(x)
      return v

    node = self.parse_and_analyze(test_fn, {})
    node = continue_statements.transform(node, self.ctx)

    # Compiled output must match the original function on each input.
    with self.compiled(node) as result:
      for arg in ([], [1], [2], [1, 2, 3]):
        self.assertEqual(test_fn(arg), result.test_fn(arg))
  def test_basic_continue(self):
    """Checks `continue` canonicalization in a simple while loop."""

    def test_fn(x):
      v = []
      while x > 0:
        x -= 1
        if x % 2 == 0:
          continue
        v.append(x)
      return v

    node = self.parse_and_analyze(test_fn, {})
    node = continue_statements.transform(node, self.ctx)

    # Compiled output must match the original function on each input.
    with self.compiled(node) as result:
      for arg in (0, 1, 2, 3, 4):
        self.assertEqual(test_fn(arg), result.test_fn(arg))
def node_to_graph(node, ctx, nocompile_decorators):
    """Convert Python code to equivalent TF graph mode code.

    Runs a fixed pipeline of AST transformations interleaved with static
    analysis passes. The ordering is deliberate: several transforms insert
    new symbols or restructure scopes, so the analysis must be re-run before
    passes that depend on it.

    Args:
      node: A Python AST node representing the code to convert.
      ctx: An EntityContext object.
      nocompile_decorators: A tuple containing decorators to be stripped from
          functions during conversion.

    Returns:
      A tuple (node, deps):
          * node: A Python ast node, representing the converted code.
          * deps: A set of strings, the fully qualified names of entity
              dependencies that this node has.
    """
    # TODO(mdan): Verify arguments for correctness.

    # TODO(mdan): Factor out common elements.
    # These include:
    #   * code move between blocks
    #   * visiting blocks in transformers

    # Certain steps, especially canonicalization, insert new symbols into the
    # tree, which must be accounted. Although less efficient, it is most robust
    # to re-run the analysis.

    node = _static_analysis_pass(node, ctx)

    # TODO(mdan): Clean this up.
    # Some intermediate analyses are not required, and some comments got orphaned.

    # Past this point, line numbers are no longer accurate so we ignore the
    # source.
    # TODO(mdan): Is it feasible to reconstruct intermediate source code?
    ctx.source_code = None
    node = ifexp.transform(node, ctx)
    # Note: this is the only pass that produces `deps`, the value returned
    # alongside the transformed node.
    node, deps = decorators.transform(node, nocompile_decorators)
    node = break_statements.transform(node, ctx)
    node = asserts.transform(node, ctx)

    # Note: sequencing continue canonicalization before for loop one avoids
    # dealing with the extra loop increment operation that the for
    # canonicalization creates.
    node = continue_statements.transform(node, ctx)
    # NOTE(review): presumably `len` is needed by a later pass (e.g. for-loop
    # canonicalization or builtin_functions) — confirm against those transforms.
    ctx.namespace['len'] = len

    node = _static_analysis_pass(node, ctx)
    node = single_return.transform(node, ctx)

    node = _static_analysis_pass(node, ctx)
    node = lists.transform(node, ctx)
    node = builtin_functions.transform(node, ctx)

    node = _static_analysis_pass(node, ctx)
    node = call_trees.transform(node, ctx, config.DEFAULT_UNCOMPILED_MODULES,
                                nocompile_decorators)
    node = control_flow.transform(node, ctx)

    # control_flow may create new symbols and change scopes.
    node = _static_analysis_pass(node, ctx)
    node = logical_expressions.transform(node, ctx)
    node = side_effect_guards.transform(node, ctx)
    node = name_scopes.transform(node, ctx)

    return node, deps
# Esempio n. 8 ("Example no. 8") / "0": leftover separator text from the site
# this file was scraped from — kept here as a comment, not executable code.
def node_to_graph(node, ctx, nocompile_decorators):
  """Convert Python code to equivalent TF graph mode code.

  Runs a fixed pipeline of AST transformations interleaved with static
  analysis passes; the ordering is deliberate and several passes depend on
  the analysis results of their predecessors.

  Args:
    node: A Python AST node representing the code to convert.
    ctx: An EntityContext object.
    nocompile_decorators: A tuple containing decorators to be stripped from
        functions during conversion.

  Returns:
    A tuple (node, deps):
        * node: A Python ast node, representing the converted code.
        * deps: A set of strings, the fully qualified names of entity
            dependencies that this node has.
  """
  # TODO(mdan): Verify arguments for correctness.

  # TODO(mdan): Factor out common elements.
  # These include:
  #   * code move between blocks
  #   * visiting blocks in transformers

  # Certain steps, especially canonicalization, insert new symbols into the
  # tree, which must be accounted. Although less efficient, it is most robust
  # to re-run the analysis.

  node = _static_analysis_pass(node, ctx)

  # TODO(mdan): Clean this up.
  # Some intermediate analyses are not required, and some comments got orphaned.

  # Past this point, line numbers are no longer accurate so we ignore the
  # source.
  # TODO(mdan): Is it feasible to reconstruct intermediate source code?
  ctx.source_code = None
  node = ifexp.transform(node, ctx)
  # Note: this is the only pass that produces `deps`, the value returned
  # alongside the transformed node.
  node, deps = decorators.transform(node, nocompile_decorators)
  node = break_statements.transform(node, ctx)
  node = asserts.transform(node, ctx)

  # Note: sequencing continue canonicalization before for loop one avoids
  # dealing with the extra loop increment operation that the for
  # canonicalization creates.
  node = continue_statements.transform(node, ctx)
  # NOTE(review): presumably `len` is needed by a later pass (e.g. for-loop
  # canonicalization or builtin_functions) — confirm against those transforms.
  ctx.namespace['len'] = len

  node = _static_analysis_pass(node, ctx)
  node = single_return.transform(node, ctx)

  node = _static_analysis_pass(node, ctx)
  node = lists.transform(node, ctx)
  node = builtin_functions.transform(node, ctx)

  node = _static_analysis_pass(node, ctx)
  node = call_trees.transform(node, ctx, config.DEFAULT_UNCOMPILED_MODULES,
                              nocompile_decorators)
  node = control_flow.transform(node, ctx)

  # control_flow may create new symbols and change scopes.
  node = _static_analysis_pass(node, ctx)
  node = logical_expressions.transform(node, ctx)
  node = side_effect_guards.transform(node, ctx)
  node = name_scopes.transform(node, ctx)

  return node, deps