def test_dead_code_elimination2():
    """Dead-code elimination keeps every live node in both components."""
    source = """(import timeseries) { mul (input x y) (:= z (* x y)) (output z) } { dist (input a b) (:= c (mul (mul a b) (mul b a))) (output c) }"""
    tree = parser.parse(source, lexer=lexer)
    symbols = tree.walk(SymbolTableVisitor())
    ir = tree.mod_walk(LoweringVisitor(symbols))
    ir.flowgraph_pass(DeadCodeElimination())

    # 'mul' component: five nodes survive, exactly one output.
    mul_graph = ir.graphs['mul']
    expected_mul = ['@N0', '@N1', '@N2', '@N3', '@N4']
    assert len(mul_graph.nodes) == len(expected_mul)
    assert all(node_id in mul_graph.nodes for node_id in expected_mul)
    # Every variable must still map to a surviving node.
    assert all(target in expected_mul for target in mul_graph.variables.values())
    assert len(mul_graph.outputs) == 1

    # 'dist' component: seven nodes survive, exactly one output.
    dist_graph = ir.graphs['dist']
    expected_dist = ['@N0', '@N1', '@N2', '@N3', '@N4', '@N5', '@N6']
    assert len(dist_graph.nodes) == len(expected_dist)
    assert all(node_id in dist_graph.nodes for node_id in expected_dist)
    assert all(target in expected_dist for target in dist_graph.variables.values())
    assert len(dist_graph.outputs) == 1
def test_dead_code_elimination2():
    """After DCE, both component graphs retain only their live nodes.

    NOTE(review): this function name duplicates an earlier definition in the
    file; pytest will only collect the last one — confirm which is intended.
    """
    deadcodeinput = """(import timeseries) { mul (input x y) (:= z (* x y)) (output z) } { dist (input a b) (:= c (mul (mul a b) (mul b a))) (output c) }"""
    parsed = parser.parse(deadcodeinput, lexer=lexer)
    table = parsed.walk(SymbolTableVisitor())
    lowered = parsed.mod_walk(LoweringVisitor(table))
    lowered.flowgraph_pass(DeadCodeElimination())

    # Expected surviving node ids per component graph.
    expectations = {
        'mul': ['@N0', '@N1', '@N2', '@N3', '@N4'],
        'dist': ['@N0', '@N1', '@N2', '@N3', '@N4', '@N5', '@N6'],
    }
    for component, expected_nodes in expectations.items():
        graph = lowered.graphs[component]
        assert len(graph.nodes) == len(expected_nodes)
        for node_id in expected_nodes:
            assert node_id in graph.nodes
        # Variables may only refer to nodes that survived the pass.
        for var_name in graph.variables.keys():
            assert graph.variables[var_name] in expected_nodes
        assert len(graph.outputs) == 1
def test_single_component_assignment():
    """Declaring the same component name twice raises PypeSyntaxError.

    Fixes the original pattern where `e1` was only bound inside `except`:
    if no exception fired, the asserts crashed with a confusing NameError
    instead of reporting a clear test failure.
    """
    # `input` renamed: it shadowed the builtin of the same name.
    source = """{ comp1 (input) (output) (:= a 1) } { comp1 (input) (output) (:= a 2) }"""
    tree = parser.parse(source, lexer=lexer)
    # The second 'comp1' declaration must be rejected.
    try:
        tree.walk(semantic_analysis.CheckSingleAssignment())
    except Exception as err:
        assert str(err) == 'Component name: comp1 has already been taken'
        assert type(err).__name__ == 'PypeSyntaxError'
    else:
        raise AssertionError(
            'expected PypeSyntaxError for duplicate component name')
def test_inline():
    """InlineComponents removes all component-call nodes from every graph.

    NOTE(review): the original also read tests/pype/test_cases/example_opt.ppl
    into a variable (`opt_ppl`) that was never used; that dead file read has
    been removed so the test no longer depends on an unrelated fixture file.
    """
    program_input = ''' (import timeseries) { mul (input x y) (:= z (* x y)) (output z) } { dist (input a b) (:= c (mul (mul a b) (mul b a))) (output c) } '''
    tree = parser.parse(program_input, lexer=lexer)
    tree.walk(CheckSingleAssignment())
    tree.walk(CheckSingleIOExpression())
    symbols = tree.walk(SymbolTableVisitor())
    tree.walk(CheckUndefinedVariables(symbols))
    ir = tree.mod_walk(LoweringVisitor(symbols))
    ir.flowgraph_pass(AssignmentEllision())
    ir.flowgraph_pass(DeadCodeElimination())
    ir.topological_flowgraph_pass(InlineComponents())

    def _check_graph(graph, expected_inputs, expected_outputs):
        # One-line purpose: assert a graph is fully inlined with the
        # expected input/output node ids.
        for node in graph.nodes.values():
            assert node.type is not FGNodeType.component
        assert len(graph.inputs) == len(expected_inputs)
        for node_id in expected_inputs:
            assert node_id in graph.inputs
        assert len(graph.outputs) == len(expected_outputs)
        for node_id in expected_outputs:
            assert node_id in graph.outputs

    _check_graph(ir.graphs['mul'], ['@N0', '@N1'], ['@N4'])
    _check_graph(ir.graphs['dist'], ['@N0', '@N1'], ['@N6'])
def test_inline():
    """Component inlining leaves no FGNodeType.component nodes behind.

    NOTE(review): this function name duplicates an earlier `test_inline` in
    the file; pytest will only collect the last one — confirm intent.
    """
    # Read kept for parity with the original, though `opt_ppl` is unused.
    with open('tests/pype/test_cases/example_opt.ppl') as f:
        opt_ppl = f.read()
    program_input = ''' (import timeseries) { mul (input x y) (:= z (* x y)) (output z) } { dist (input a b) (:= c (mul (mul a b) (mul b a))) (output c) } '''
    parsed = parser.parse(program_input, lexer=lexer)
    parsed.walk(CheckSingleAssignment())
    parsed.walk(CheckSingleIOExpression())
    table = parsed.walk(SymbolTableVisitor())
    parsed.walk(CheckUndefinedVariables(table))
    lowered = parsed.mod_walk(LoweringVisitor(table))
    lowered.flowgraph_pass(AssignmentEllision())
    lowered.flowgraph_pass(DeadCodeElimination())
    lowered.topological_flowgraph_pass(InlineComponents())

    # (graph name, expected input ids, expected output ids)
    cases = [
        ('mul', ['@N0', '@N1'], ['@N4']),
        ('dist', ['@N0', '@N1'], ['@N6']),
    ]
    for name, want_inputs, want_outputs in cases:
        graph = lowered.graphs[name]
        # No component-call nodes may remain after inlining.
        assert all(n.type is not FGNodeType.component
                   for n in graph.nodes.values())
        assert len(graph.inputs) == len(want_inputs)
        assert all(node_id in graph.inputs for node_id in want_inputs)
        assert len(graph.outputs) == len(want_outputs)
        assert all(node_id in graph.outputs for node_id in want_outputs)
def test_multiple_outputs():
    """Two (output ...) expressions in one component raise PypeSyntaxError.

    Fixes the original pattern where `e1` was only bound inside `except`:
    if the check ever passed silently, the asserts died with a NameError
    instead of a clear failure message.
    """
    # `input` renamed: it shadowed the builtin of the same name.
    source = """(import timeseries) { standardize (input (TimeSeries t1)) (output t1) (output t1) }"""
    tree = parser.parse(source, lexer=lexer)
    try:
        tree.walk(semantic_analysis.CheckSingleIOExpression())
    except Exception as err:
        assert str(err) == 'Component standardize has multiple output expressions'
        assert type(err).__name__ == 'PypeSyntaxError'
    else:
        raise AssertionError(
            'expected PypeSyntaxError for multiple output expressions')
def test_single_node_input_assignment():
    """Reusing a node name across input expressions raises PypeSyntaxError.

    Fixes the original pattern where `e1` was only bound inside `except`:
    a missing exception produced a confusing NameError rather than a clean
    assertion failure.
    """
    # `input` renamed: it shadowed the builtin of the same name.
    source = """(import timeseries) { standardize (input (TimeSeries t1)) (input (TimeSeries t1)) (output) }"""
    tree = parser.parse(source, lexer=lexer)
    # The second declaration of node t1 must be rejected.
    try:
        tree.walk(semantic_analysis.CheckSingleAssignment())
    except Exception as err:
        assert str(err) == 'Node name: t1 has already been taken'
        assert type(err).__name__ == 'PypeSyntaxError'
    else:
        raise AssertionError(
            'expected PypeSyntaxError for duplicate node name')
def test_check_undefined_vars():
    """Referencing an undefined variable raises PypeSyntaxError.

    Fixes the original pattern where `e1` was only bound inside `except`
    (NameError instead of a clean failure when nothing raised), and drops
    leftover debug `print` calls.
    """
    # `input` renamed: it shadowed the builtin of the same name.
    source = """(import timeseries) { standardize (input) (:= t (std1 t)) (output) }"""
    tree = parser.parse(source, lexer=lexer)
    # `std1` is never defined, so the undefined-variable check must fire.
    try:
        syms = tree.walk(SymbolTableVisitor())
        tree.walk(semantic_analysis.CheckUndefinedVariables(syms))
    except Exception as err:
        assert str(err) == 'Undefined variable: std1'
        assert type(err).__name__ == 'PypeSyntaxError'
    else:
        raise AssertionError(
            'expected PypeSyntaxError for undefined variable')
def test_dead_code_elimination1():
    """An assignment whose result is never used is removed by DCE."""
    source = """{ component (input x) (:= useless 1) (output x) }"""
    tree = parser.parse(source, lexer=lexer)
    symbols = tree.walk(SymbolTableVisitor())
    ir = tree.mod_walk(LoweringVisitor(symbols))
    ir.flowgraph_pass(DeadCodeElimination())

    graph = ir.graphs['component']
    # Only the input and output nodes survive; the 'useless' chain is gone.
    survivors = ['@N0', '@N3']
    assert len(graph.nodes) == len(survivors)
    assert all(node_id in graph.nodes for node_id in survivors)
    assert all(target in survivors for target in graph.variables.values())
def test_dead_code_elimination1():
    """DCE prunes the unused ':= useless 1' assignment from the graph.

    NOTE(review): this function name duplicates an earlier definition in the
    file; pytest will only collect the last one — confirm which is intended.
    """
    deadcodeinput = """{ component (input x) (:= useless 1) (output x) }"""
    parsed = parser.parse(deadcodeinput, lexer=lexer)
    table = parsed.walk(SymbolTableVisitor())
    lowered = parsed.mod_walk(LoweringVisitor(table))
    lowered.flowgraph_pass(DeadCodeElimination())

    graph = lowered.graphs['component']
    expected = ['@N0', '@N3']
    assert len(graph.nodes) == len(expected)
    for node_id in expected:
        assert node_id in graph.nodes
    # Variable bindings must all point at surviving nodes.
    for var_name in graph.variables.keys():
        assert graph.variables[var_name] in expected
def test_literal_ast():
    """A literal body walks as Program -> Component -> ID -> Literal."""
    # Local renamed from `input`, which shadowed the builtin.
    source = "{t 939}"
    tree = parser.parse(source, lexer=lexer)
    printer = PrettyString()
    tree.walk(printer)
    assert printer.text == 'ASTProgramASTComponentASTIDASTLiteral'
def test_input_ast():
    """An input expression walks as Program -> Component -> ID -> ID."""
    # Local renamed from `input`, which shadowed the builtin.
    source = "{(INPUT xs)}"
    tree = parser.parse(source, lexer=lexer)
    printer = PrettyString()
    tree.walk(printer)
    assert printer.text == 'ASTProgramASTComponentASTIDASTID'
def test_basic_ast():
    """The symbol table records 'sum' as a component in the global scope."""
    # Local renamed from `input`, which shadowed the builtin.
    source = "{sum (a + b)}"
    tree = parser.parse(source, lexer=lexer)
    symbols = tree.walk(SymbolTableVisitor())
    expected = {'sum': Symbol(name='sum', type=SymbolType.component, ref=None)}
    assert symbols['global'] == expected
from pype import lexer
from pype import parser
from pype import ast
from pype.semantic_analysis import CheckSingleAssignment, CheckSingleIOExpression, PrettyPrint, CheckUndefinedVariables
from pype.optimize import *

# Shared fixture: a 'standardize' component lowered to flowgraph IR.
input1 = """(import timeseries) { standardize (input (TimeSeries t)) (:= mu (mean t)) (:= sig (std t)) (:= new_t (/ (- t mu) sig)) (output new_t) }"""
# Renamed from `ast`: the original binding clobbered the `pype.ast` module
# imported above.
program_ast = parser.parse(input1, lexer=lexer)
syms = program_ast.walk(SymbolTableVisitor())
ir = program_ast.mod_walk(LoweringVisitor(syms))


# Test that unnecessary assignment nodes are being removed
def test_assignment_ellision():
    """AssignmentEllision elides pure-assignment nodes from the graph."""
    ir.flowgraph_pass(AssignmentEllision())
    standardize_graph = ir.graphs['standardize']
    # Only these nodes should remain once the assignment nodes are elided.
    nodes_to_have = ['@N0', '@N1', '@N3', '@N5', '@N6', '@N8']
    assert len(standardize_graph.nodes) == len(nodes_to_have)
    for node in nodes_to_have:
        assert node in standardize_graph.nodes
# Added: `lexer` is used by parser.parse below but was never imported here
# (it only worked when an earlier chunk of the file imported it first).
from pype import lexer
from pype import parser
from pype import ast
from pype.semantic_analysis import CheckSingleAssignment, CheckSingleIOExpression, PrettyPrint, CheckUndefinedVariables
from timeseries import TimeSeries

# NOTE(review): SymbolTableVisitor / LoweringVisitor are not imported in this
# chunk; they presumably arrive via an earlier `from pype.optimize import *`
# in the same file — verify.
# Shared fixture: a 'standardize' component lowered to flowgraph IR.
# Renamed from `input` (shadowed the builtin) and `ast` (clobbered the
# `pype.ast` module imported above).
standardize_source = """(import timeseries) { standardize (input (TimeSeries t)) (:= mu (mean t)) (:= sig (std t)) (:= new_t (/ (- t mu) sig)) (output new_t) }"""
program_ast = parser.parse(standardize_source, lexer=lexer)
syms = program_ast.walk(SymbolTableVisitor())
ir = program_ast.mod_walk(LoweringVisitor(syms))
standardize_graph = ir.graphs['standardize']


def test_nodes():
    """Spot-check ids, wiring and refs of the lowered flowgraph nodes."""
    n0 = standardize_graph.nodes['@N0']
    assert n0.nodeid == '@N0'
    assert n0.inputs == []
    assert n0.ref is None  # was `== None`; identity test is the idiom
    assert repr(n0) == '<FGNodeType.input @N0<= : None>'
    n1 = standardize_graph.nodes['@N1']
    assert n1.nodeid == '@N1'
    assert n1.inputs == ['@N0']
    assert n1.ref == TimeSeries.mean
    assert standardize_graph.nodes['@N2'].nodeid == '@N2'