def assert_body_anfs_as_expected(self, expected_fn, test_fn, config=None):
  """Checks that ANF-transforming `test_fn` yields `expected_fn`'s AST.

  Only the code bodies are compared.  Wrapping the statements in functions
  keeps the syntax highlighting nice without Python trying to execute them.

  Args:
    expected_fn: Function whose body is the expected ANF form.
    test_fn: Function whose body is transformed and compared.
    config: Optional ANF configuration forwarded to `anf.transform`.
  """
  expected_ast, _ = parser.parse_entity(expected_fn, future_features=())
  actual_ast, _ = parser.parse_entity(test_fn, future_features=())
  actual_ast = anf.transform(actual_ast, self._simple_context(), config=config)
  # The two source functions must coexist in the same scope, so their names
  # necessarily differ; copy the expected name over before comparing ASTs.
  actual_ast.name = expected_ast.name
  self.assert_same_ast(expected_ast, actual_ast)
  # ANF should be idempotent: transforming again must be a no-op.
  # NOTE(review): this second pass uses the default config rather than
  # `config` — presumably intentional, but worth confirming.
  retransformed = anf.transform(actual_ast, self._simple_context())
  self.assert_same_ast(retransformed, actual_ast)
def test_basic(self):
  """End-to-end check that the ANF transform preserves runtime behavior."""

  def test_function():
    a = 0
    return a

  # Parse, transform to ANF, compile back to a module, then confirm the
  # compiled transformed function returns the same value as the original.
  ast_node, _, _ = parser.parse_entity(test_function, future_imports=())
  transformed = anf.transform(ast_node, self._simple_context())
  module, _ = compiler.ast_to_object(transformed)
  self.assertEqual(test_function(), module.test_function())
def test_basic(self):
  """End-to-end check that the ANF transform preserves runtime behavior."""

  def test_function():
    a = 0
    return a

  module_node, _ = parser.parse_entity(test_function)
  # parse_entity returns a module here; the function def is its first child.
  transformed = anf.transform(module_node.body[0], self._simple_source_info())
  compiled, _ = compiler.ast_to_object(transformed)
  self.assertEqual(test_function(), compiled.test_function())
def test_basic(self):
  """End-to-end check that the ANF transform preserves runtime behavior."""

  def test_function():
    a = 0
    return a

  fn_node, _ = parser.parse_entity(test_function, future_features=())
  transformed = anf.transform(fn_node, self._simple_context())
  # Load the transformed AST as a live module and compare results.
  module, _, _ = loader.load_ast(transformed)
  self.assertEqual(test_function(), module.test_function())
def assert_body_anfs_as_expected(self, expected_fn, test_fn):
  """Checks that ANF-transforming `test_fn` yields `expected_fn`'s AST.

  Only the code bodies are compared.  Wrapping the statements in functions
  keeps the syntax highlighting nice without Python trying to execute them.

  Args:
    expected_fn: Function whose body is the expected ANF form.
    test_fn: Function whose body is transformed and compared.
  """
  expected_ast, _ = parser.parse_entity(expected_fn)
  actual_ast, _ = parser.parse_entity(test_fn)
  actual_ast = anf.transform(
      actual_ast, self._simple_source_info(), gensym_source=DummyGensym)
  # The two source functions must coexist in the same scope, so their names
  # necessarily differ; copy the expected name over before comparing ASTs.
  actual_ast.body[0].name = expected_ast.body[0].name
  self.assert_same_ast(expected_ast, actual_ast)
  # ANF should be idempotent: transforming again must be a no-op.
  retransformed = anf.transform(
      actual_ast, self._simple_source_info(), gensym_source=DummyGensym)
  self.assert_same_ast(retransformed, actual_ast)
def assert_body_anfs_as_expected(self, expected_fn, test_fn):
  """Checks that an all-LEAVE ANF config leaves `test_fn` unchanged.

  The function bodies are compared as source text; wrapping them in
  functions keeps the syntax highlighting nice without Python trying to
  execute the statements.

  NOTE(review): `expected_fn` is accepted but never consulted here — the
  check is purely that the LEAVE config is an identity transform.  Confirm
  that is intentional.
  """
  node, _ = parser.parse_entity(test_fn, future_features=())
  before = textwrap.dedent(parser.unparse(node, indentation=' ')).strip()
  # Configuration that transforms nothing: every AST edge is left alone.
  leave_everything = [(anf.ANY, anf.LEAVE)]
  node = anf.transform(node, self._simple_context(), config=leave_everything)
  after = textwrap.dedent(parser.unparse(node, indentation=' ')).strip()
  self.assertEqual(before, after)
def _parse_and_analyze(f):
  """Performs preliminary analyses and transformations.

  The goal is to massage the source program into a form on which
  the `_AutoBatchingTransformer` below will be successful.

  Args:
    f: Function to analyze.

  Returns:
    node: A Python AST node representing the function, suitable for
      passing to `_AutoBatchingTransformer.visit`.
    ctx: An AutoGraph `EntityContext` carrying information about `f`,
      required for initializing `_AutoBatchingTransformer`.
  """
  namespace = {}

  # Parse `f` into an AST, carrying along any __future__ imports it uses.
  future_features = inspect_utils.getfutureimports(f)
  node, _ = parser.parse_entity(f, future_features=future_features)

  # Standard AutoGraph boilerplate: build the entity/program contexts that
  # the converter passes below expect.
  entity_info = transformer.EntityInfo(
      source_code='',
      source_file=None,
      future_features=future_features,
      namespace=namespace)
  program_ctx = converter.ProgramContext(
      options=converter.ConversionOptions(recursive=True),
      autograph_module=None)
  ctx = converter.EntityContext(
      namer=naming.Namer(namespace),
      entity_info=entity_info,
      program_ctx=program_ctx)

  # Canonicalize away break statements.
  node = converter.standard_analysis(node, ctx, is_initial=True)
  node = break_statements.transform(node, ctx)

  # Canonicalize away continue statements.
  node = converter.standard_analysis(node, ctx, is_initial=False)
  node = continue_statements.transform(node, ctx)

  # Force single returns.
  node = converter.standard_analysis(node, ctx, is_initial=False)
  node = return_statements.transform(node, ctx, default_to_null_return=False)

  # Transform into A-normal form.
  node = anf.transform(node, ctx)
  node = converter.standard_analysis(node, ctx, is_initial=False)

  return node, ctx
def _parse_and_analyze(f, autobatch_functions):
  """Performs preliminary analyses and transformations.

  The goal is to massage the source program into a form on which
  the `_AutoBatchingTransformer` below will be successful.

  Args:
    f: Function to analyze.
    autobatch_functions: List of Python `str` names of autobatched
      functions.  Arguments to these functions will be canonicalized to
      variable references, but others will not.

  Returns:
    node: A Python AST node representing the function, suitable for
      passing to `_AutoBatchingTransformer.visit`.
    ctx: An AutoGraph `EntityContext` carrying information about `f`,
      required for initializing `_AutoBatchingTransformer`.
  """
  namespace = {}

  # Parse `f` into an AST, carrying along any __future__ imports it uses.
  future_features = inspect_utils.getfutureimports(f)
  node, _ = parser.parse_entity(f, future_features=future_features)

  # Standard AutoGraph boilerplate: build the entity/program contexts that
  # the converter passes below expect.
  entity_info = transformer.EntityInfo(
      source_code='',
      source_file=None,
      future_features=future_features,
      namespace=namespace)
  program_ctx = converter.ProgramContext(
      options=converter.ConversionOptions(recursive=True),
      autograph_module=None)
  ctx = converter.EntityContext(
      namer=naming.Namer(namespace),
      entity_info=entity_info,
      program_ctx=program_ctx)

  # Canonicalize away break statements.
  node = converter.standard_analysis(node, ctx, is_initial=True)
  node = break_statements.transform(node, ctx)

  # Canonicalize away continue statements.
  node = converter.standard_analysis(node, ctx, is_initial=False)
  node = continue_statements.transform(node, ctx)

  # Force single returns.
  node = converter.standard_analysis(node, ctx, is_initial=False)
  node = return_statements.transform(node, ctx, default_to_null_return=False)

  # Transform into A-normal form.
  # Replacing if tests and autobatched function call arguments because
  # that's where divergence can happen.
  # Replacing all function calls because the downstream transformation
  # expects calls to lead directly to assignments.
  def maybe_replace_function_argument(parent, field_name, child):
    # Replace an argument only when it is being passed to one of the
    # named autobatched functions.
    del field_name, child
    return (anno.hasanno(parent.func, anno.Basic.QN)
            and str(anno.getanno(parent.func, anno.Basic.QN))
            in autobatch_functions)

  anf_config = [
      (anf.ASTEdgePattern(gast.If, 'test', anf.ANY), anf.REPLACE),
      (anf.ASTEdgePattern(anf.ANY, anf.ANY, gast.Call), anf.REPLACE),
      (anf.ASTEdgePattern(gast.Call, 'args', anf.ANY),
       maybe_replace_function_argument),
  ]
  node = anf.transform(node, ctx, config=anf_config)
  node = converter.standard_analysis(node, ctx, is_initial=False)

  return node, ctx