def test_single_component_assignment():
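    # These tests assume parser, lexer and the semantic_analysis module (plus
    # SymbolTableVisitor, used further down) are imported from the pype package;
    # the imports are not shown in these snippets.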
    input = """{ comp1 (input) (output) (:= a 1) }
{ comp1 (input) (output) (:= a 2) }"""
    
    ast = parser.parse(input, lexer=lexer)
    
    # Catch the double component assignment
    try:
        ast.walk( semantic_analysis.CheckSingleAssignment() )
    except Exception as e: 
        e1 = e
    assert str(e1) == 'Component name: comp1 has already been taken'
    assert type(e1).__name__ == 'PypeSyntaxError'

def test_multiple_outputs():
    input="""(import timeseries)

    { standardize
      (input (TimeSeries t1))
      (output t1)
      (output t1)
    }"""
    
    ast = parser.parse(input, lexer=lexer)
    
    # Catch multiple outputs
    try:
        ast.walk( semantic_analysis.CheckSingleIOExpression() )
    except Exception as e: 
        e1 = e
    assert str(e1) == 'Component standardize has multiple output expressions'
    assert type(e1).__name__ == 'PypeSyntaxError'

def test_single_node_input_assignment():
    input="""(import timeseries)

    { standardize
      (input (TimeSeries t1))
      (input (TimeSeries t1))
      (output)
    }"""
    
    ast = parser.parse(input, lexer=lexer)
    
    # Catch the double input assignment
    try:
        ast.walk( semantic_analysis.CheckSingleAssignment() )
    except Exception as e: 
        e1 = e
    assert str(e1) == 'Node name: t1 has already been taken'
    assert type(e1).__name__ == 'PypeSyntaxError'  

def test_check_undefined_vars():
    input="""(import timeseries)

    { standardize
      (input)
      (:= t (std1 t))
      (output)
    }"""
    
    ast = parser.parse(input, lexer=lexer)

    # Catch the undefined var
    try:
        syms = ast.walk( SymbolTableVisitor() )
        print(syms)
        ast.walk( semantic_analysis.CheckUndefinedVariables(syms) )
    except Exception as e: 
        e1 = e
    print(e1)
    assert str(e1) == 'Undefined variable: std1'
    assert type(e1).__name__ == 'PypeSyntaxError'  

Example 5
def test_literal_ast():
    input = "{t 939}"
    ast = parser.parse(input, lexer=lexer)
    pretty = PrettyString()
    ast.walk(pretty)
    assert pretty.text == 'ASTProgramASTComponentASTIDASTLiteral'

Example 6
def test_input_ast():
    input = "{(INPUT xs)}"
    ast = parser.parse(input, lexer=lexer)
    pretty = PrettyString()
    ast.walk(pretty)
    assert pretty.text == 'ASTProgramASTComponentASTIDASTID'

Example 7
def test_basic_ast():
    input = "{sum (a + b)}"
    ast = parser.parse(input, lexer=lexer)
    syms = ast.walk(SymbolTableVisitor())
    assert syms['global'] == {'sum': Symbol(name='sum', type=SymbolType.component, ref=None)}

Example 8
from pype import parser
from pype import ast
from pype.semantic_analysis import CheckSingleAssignment, CheckSingleIOExpression, PrettyPrint, CheckUndefinedVariables
from pype.optimize import *
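# Shared setup for the optimizer tests below: parse a small standardize pipeline,
# build its symbol table, and lower the AST to a flowgraph IR.
# lexer, SymbolTableVisitor and LoweringVisitor are used here but not imported in
# this snippet; they are assumed to come from the pype package as well.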

input1 = """(import timeseries)
{ standardize
  (input (TimeSeries t))
  (:= mu (mean t))
  (:= sig (std t))
  (:= new_t (/ (- t mu) sig))
  (output new_t)
}"""

ast = parser.parse(input1, lexer=lexer)
syms = ast.walk(SymbolTableVisitor())
ir = ast.mod_walk(LoweringVisitor(syms))


# Test that unnecessary assignment nodes are being removed
def test_assignment_ellision():
    ir.flowgraph_pass(AssignmentEllision())
    standardize_graph = ir.graphs['standardize']
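    # Only six nodes should survive the AssignmentEllision pass; the assignment
    # nodes created for the := bindings are the ones expected to be elided.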
    nodes_to_have = ['@N0', '@N1', '@N3', '@N5', '@N6', '@N8']

    assert (len(standardize_graph.nodes) == len(nodes_to_have))

    for node in nodes_to_have:
        assert (node in standardize_graph.nodes)

    for a_var in standardize_graph.variables.keys():

Example 9
from pype import ast
from pype.semantic_analysis import CheckSingleAssignment, CheckSingleIOExpression, PrettyPrint, CheckUndefinedVariables
from timeseries import TimeSeries
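# As in the previous example, parser, lexer, SymbolTableVisitor and LoweringVisitor
# are assumed to be importable from the pype package (their imports are not shown).
# The setup below parses the standardize component, builds its symbol table, and
# lowers it to a flowgraph IR whose nodes the test inspects.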

input = """(import timeseries)

{ standardize
  (input (TimeSeries t))
  (:= mu (mean t))
  (:= sig (std t))
  (:= new_t (/ (- t mu) sig))
  (output new_t)
}"""
    
ast = parser.parse(input, lexer=lexer)
syms = ast.walk( SymbolTableVisitor() )
ir = ast.mod_walk( LoweringVisitor(syms) )    
standardize_graph = ir.graphs['standardize']
    
def test_nodes():
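    # Node ids are assigned sequentially by the lowering pass: @N0 is the input
    # node for t, @N1 the mean operation, and so on through the component body.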
    assert(standardize_graph.nodes['@N0'].nodeid == '@N0')
    assert(standardize_graph.nodes['@N0'].inputs == [])
    assert(standardize_graph.nodes['@N0'].ref is None)
    assert(standardize_graph.nodes['@N0'].__repr__() == '<FGNodeType.input @N0<= : None>')
    
    assert(standardize_graph.nodes['@N1'].nodeid == '@N1')
    assert(standardize_graph.nodes['@N1'].inputs == ['@N0'])
    assert(standardize_graph.nodes['@N1'].ref == TimeSeries.mean)
    
    assert(standardize_graph.nodes['@N2'].nodeid == '@N2')
    assert(standardize_graph.nodes['@N2'].inputs == ['@N1'])