Example #1
 def p_variable_declaration_noin(self, p):
     """variable_declaration_noin : identifier
                                  | identifier initializer_noin
     """
     if len(p) == 2:
         p[0] = ast.VarDecl(p[1])
     else:
         p[0] = ast.VarDecl(p[1], p[2])
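Both branches of the rule above map onto a VarDecl node that takes an identifier and an optional initializer. As a minimal sketch (hypothetical; each project's ast module defines its own VarDecl fields), a compatible node class could look like this:

    class VarDecl:
        """Hypothetical node matching ast.VarDecl(ident) / ast.VarDecl(ident, init) above."""
        def __init__(self, identifier, initializer=None):
            self.identifier = identifier
            self.initializer = initializer

        def __repr__(self):
            return 'VarDecl({!r}, {!r})'.format(self.identifier, self.initializer)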
Example #2
def p_vardecl(p):
    '''vardecl : type identifier
               | type arraydecl identifier'''
    if len(p) == 4:
        # declaration of an array
        p[0] = ast.VarDecl(p[1], p[3]).addloc(p.lineno(1))
        p[0].addArray(p[2])
    else:
        p[0] = ast.VarDecl(p[1], p[2]).addloc(p.lineno(1))
Example #3
    def variable_declaration(self):
        """variable_declaration : ID (COMMA ID)* COLON type_spec
        VAR
            a : INTEGER;
            b : REAL;
        """

        var_nodes = [ast.Var(self.current_token)]
        self.eat(TokenType.ID)

        # consume any further comma-separated identifiers, e.g. VAR a, b, c : INTEGER;
        while self.current_token.type == TokenType.COMMA:
            self.eat(TokenType.COMMA)
            var_nodes.append(ast.Var(self.current_token))
            self.eat(TokenType.ID)
        # :
        self.eat(TokenType.COLON)

        # the type specification, e.g. INTEGER or REAL
        type_node = self.type_spec()
        # one VarDecl per identifier, e.g. (a, INTEGER), (b, INTEGER), (c, INTEGER)
        var_declarations = [
            ast.VarDecl(var_node, type_node) for var_node in var_nodes
        ]
        return var_declarations
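This one is a hand-written recursive-descent parser rather than a PLY grammar: eat() consumes the expected token and advances, and the method returns one VarDecl per declared identifier. A hedged illustration of the result shape, using simplified stand-in classes (Var, Type and VarDecl below are not the tutorial's actual ast module):

    # Simplified stand-ins, only to show what variable_declaration() returns.
    class Var:
        def __init__(self, name): self.name = name
    class Type:
        def __init__(self, name): self.name = name
    class VarDecl:
        def __init__(self, var_node, type_node):
            self.var_node, self.type_node = var_node, type_node

    # For the source fragment "a, b : INTEGER" the method returns, roughly:
    int_type = Type('INTEGER')
    decls = [VarDecl(Var('a'), int_type), VarDecl(Var('b'), int_type)]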
Example #4
 def p_direct_declarator_1(self, p):
     """ direct_declarator : identifier
                           | LPAREN declarator RPAREN
     """
     if len(p) == 2:
         p[0] = ast.VarDecl(None, p[1], coord=self._token_coord(p, 1))
     elif len(p) == 4:
         p[0] = p[2]
Example #5
 def p_iteration_statement_6(self, p):
     """
     iteration_statement \
       : FOR LPAREN VAR identifier initializer_noin IN expr RPAREN statement
     """
     p[0] = ast.ForIn(item=ast.VarDecl(identifier=p[4], initializer=p[5]),
                      iterable=p[7],
                      statement=p[9])
Example #6
    def create_decls(self, tab, scope, chansets):
        """
    Given 'chansets' (a list of ChanElemSets), convert them into
    chanend declarations.
    """
        decls = []
        for x in chansets:
            #if tab.lookup(x.name, None, base=scope, scoped=True) != None:
            if x.symbol.scope == T_SCOPE_PROC:
                d = ast.VarDecl(x.chanend, T_CHANEND_SINGLE, None)
                d.symbol = Symbol(x.chanend,
                                  T_CHANEND_SINGLE,
                                  None,
                                  scope=T_SCOPE_PROC)
                decls.append(d)

            elif x.symbol.scope == T_SCOPE_BLOCK:
                d = ast.VarDecl(x.chanend, T_CHANEND_SINGLE, None)
                d.symbol = Symbol(x.chanend,
                                  T_CHANEND_SINGLE,
                                  None,
                                  scope=T_SCOPE_BLOCK)
                decls.append(d)

            elif x.symbol.scope == T_SCOPE_SERVER:
                d = ast.VarDecl(x.chanend, T_CHANEND_SERVER_SINGLE, None)
                d.symbol = Symbol(x.chanend,
                                  T_CHANEND_SERVER_SINGLE,
                                  None,
                                  scope=T_SCOPE_SERVER)
                decls.append(d)

            elif x.symbol.scope == T_SCOPE_CLIENT:
                d = ast.VarDecl(x.chanend, T_CHANEND_CLIENT_SINGLE, None)
                d.symbol = Symbol(x.chanend,
                                  T_CHANEND_CLIENT_SINGLE,
                                  None,
                                  scope=T_SCOPE_CLIENT)
                decls.append(d)

            else:
                assert 0
        return decls
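The four branches of create_decls differ only in the channel-end type constant, so the same effect can be sketched as a table lookup (a sketch reusing only the names visible above; it assumes the module's T_* constants, Symbol and ast imports, so it is not a drop-in replacement):

    # Sketch: same behaviour as the if/elif chain above, expressed as a table.
    CHANEND_TYPE_BY_SCOPE = {
        T_SCOPE_PROC:   T_CHANEND_SINGLE,
        T_SCOPE_BLOCK:  T_CHANEND_SINGLE,
        T_SCOPE_SERVER: T_CHANEND_SERVER_SINGLE,
        T_SCOPE_CLIENT: T_CHANEND_CLIENT_SINGLE,
    }

    def create_decls_compact(chansets):
        decls = []
        for x in chansets:
            chanend_type = CHANEND_TYPE_BY_SCOPE[x.symbol.scope]  # KeyError plays the role of 'assert 0'
            d = ast.VarDecl(x.chanend, chanend_type, None)
            d.symbol = Symbol(x.chanend, chanend_type, None, scope=x.symbol.scope)
            decls.append(d)
        return decls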
Example #7
    def create_decl(self, elem):
        if elem.symbol.type == T_VAR_SINGLE:
            return ast.VarDecl(elem.name, T_VAR_SINGLE, None)

        elif elem.symbol.type == T_VAR_ARRAY:
            return ast.VarDecl(elem.name, T_VAR_ARRAY, elem.expr)

        elif elem.symbol.type == T_REF_ARRAY:
            return ast.VarDecl(elem.name, T_REF_ARRAY, None)

        elif elem.symbol.type == T_CHANEND_SINGLE:
            return ast.VarDecl(elem.name, T_CHANEND_SINGLE, None)

        elif elem.symbol.type == T_CHANEND_SERVER_SINGLE:
            return ast.VarDecl(elem.name, T_CHANEND_SERVER_SINGLE, None)

        elif elem.symbol.type == T_CHANEND_CLIENT_SINGLE:
            return ast.VarDecl(elem.name, T_CHANEND_CLIENT_SINGLE, None)

        else:
            print(elem.symbol.type)
            print(elem.name)
            assert 0
Example #8
 def p_val_def(self, p):
   'val_def : VAL name IS expr'
   p[0] = ast.VarDecl(p[2], T_VAL_SINGLE, p[4], self.coord(p))
Example #9
 def p_var_decl_chanend(self, p):
   'var_decl : CHANEND name'
   p[0] = ast.VarDecl(p[2], T_CHANEND_SINGLE, None, self.coord(p))
Example #10
 def p_iteration_statement_5(self, p):
     """
     iteration_statement : \
         FOR LPAREN VAR identifier IN expr RPAREN statement
     """
     p[0] = ast.ForIn(item=ast.VarDecl(p[4]), iterable=p[6], statement=p[8])
Example #11
 def p_var_decl_chanend_server(self, p):
   'var_decl : CHANEND SERVER name'
   p[0] = ast.VarDecl(p[2], T_CHANEND_SERVER_SINGLE, None, self.coord(p))
Example #12
def p_expression_3(p):
    'expression : ID ID'
    p[0] = ast.VarDecl(p[1], [p[2]], p.lineno(1) + __start_line_no - 1)
Example #13
    def distribute_stmt(self, m, elem_t, elem_n, elem_m, base, indices,
                        proc_actuals, formals, pcall):
        """
    Create the distribution process body statement.
    """

        # Set up some useful expressions
        name = self.sig.unique_process_name()
        elem_x = ast.ElemId('_x')
        expr_x = ast.ExprSingle(elem_x)
        expr_t = ast.ExprSingle(elem_t)
        expr_n = ast.ExprSingle(elem_n)
        expr_m = ast.ExprSingle(elem_m)
        elem_base = ast.ElemNumber(base)
        expr_base = ast.ExprSingle(elem_base)

        # Replace occurrences of index variables i with i = f(_t)
        divisor = m
        for x in indices:
            divisor = floor(divisor / x.count_value)
            # Calculate the index i as a function of _t and the dimensions.
            e = ast.ExprBinop(
                'rem',
                ast.ElemGroup(
                    ast.ExprBinop('/', elem_t,
                                  ast.ExprSingle(ast.ElemNumber(divisor)))),
                ast.ExprSingle(ast.ElemNumber(x.count_value)))
            if x.base_value > 0:
                e = ast.ExprBinop('+', ast.ElemNumber(x.base_value),
                                  ast.ExprSingle(ast.ElemGroup(e)))
            # Then substitute it for each occurrence of i
            for y in pcall.args:
                y.accept(SubElem(ast.ElemId(x.name), ast.ElemGroup(e)))

        d = ast.ExprBinop('+', elem_t, ast.ExprSingle(elem_x))
        d = form_location(self.sym, elem_base, d, 1)

        # Create the 'on' statement
        on_stmt = ast.StmtOn(
            d,
            ast.StmtPcall(name, [
                ast.ExprBinop('+', elem_t, ast.ExprSingle(elem_x)), expr_x,
                ast.ExprBinop('-', elem_m, ast.ExprSingle(elem_x))
            ] + proc_actuals))
        on_stmt.location = None

        # Conditionally recurse {d()|d()} or d()
        s1 = ast.StmtIf(
            # if m > n/2
            ast.ExprBinop('>', elem_m, ast.ExprSingle(elem_x)),
            # then
            ast.StmtPar(
                [],
                [
                    # on id()+t+n/2 do d(t+n/2, n/2, m-n/2, ...)
                    on_stmt,
                    # d(t, n/2, n/2)
                    ast.StmtPcall(name,
                                  [expr_t, expr_x, expr_x] + proc_actuals),
                ],
                False),
            # else d(t, n/2, m)
            ast.StmtPcall(name, [expr_t, expr_x, expr_m] + proc_actuals))

        # _x = n/2 ; s1
        n_div_2 = ast.ExprBinop('>>', elem_n,
                                ast.ExprSingle(ast.ElemNumber(1)))
        s2 = ast.StmtSeq([], [ast.StmtAss(elem_x, n_div_2), s1])

        # if n = 1 then process() else s1
        s3 = ast.StmtIf(
            ast.ExprBinop('=', elem_n, ast.ExprSingle(ast.ElemNumber(1))),
            pcall, s2)

        # Create the local declarations
        decls = [ast.VarDecl(elem_x.name, T_VAR_SINGLE, None)]

        s4 = ast.StmtSeq(decls, [s3])

        # Create the definition
        d = ast.ProcDef(name, T_PROC, formals, s4)

        return d
Example #14
 def p_var_decl_chanend_client(self, p):
   'var_decl : CHANEND CLIENT name'
   p[0] = ast.VarDecl(p[2], T_CHANEND_CLIENT_SINGLE, None, self.coord(p))
Example #15
 def p_direct_declarator_1(self, p):
     """ direct_declarator : identifier
     """
     p[0] = ast.VarDecl(p[1], None, self._token_coord(p, 1))
Example #16
 def p_var_decl_array_ref(self, p):
   'var_decl : VAR name LBRACKET RBRACKET'
   p[0] = ast.VarDecl(p[2], T_REF_ARRAY, None, self.coord(p))
Example #17
def p_expression_3(p):
    "expression : ID ID"
    p[0] = ast.VarDecl(p[1], [p[2]], line_no=str(p.lineno(1) + __start_line_no - 1))
Example #18
 def p_var_decl_var(self, p):
   'var_decl : VAR name'
   p[0] = ast.VarDecl(p[2], T_VAR_SINGLE, None, self.coord(p))
Example #19
    def gen_array_conn(self, tab, scope, chan):
        """
    Generate a connection for an array channel declaration. We must analyse
    the subscript by generating nested conditional statements. 'chan' is a
    ChanElemSet with multiple elements.
    """
        def target_loc(chan, elem, scope):
            master = tab.lookup_is_master(chan, elem, scope)
            return (tab.lookup_slave_location(chan.name, elem.index, scope)
                    if master else tab.lookup_master_location(
                        chan.name, elem.index, scope))

        def create_single_conn(s, chan, scope, i_elem, elem):
            debug(self.debug, 'New connection for index {}'.format(elem.index))
            master = tab.lookup_is_master(chan, elem, scope)
            if master:
                location = ast.ExprSingle(
                    ast.ElemNumber(
                        tab.lookup_slave_location(chan.name, elem.index,
                                                  scope)))
            else:
                location = ast.ExprSingle(
                    ast.ElemNumber(
                        tab.lookup_master_location(chan.name, elem.index,
                                                   scope)))
            chanend = ast.ElemId(chan.chanend)
            chanend.symbol = Symbol(chan.chanend,
                                    self.chanend_type(chan),
                                    None,
                                    scope=T_SCOPE_PROC)
            connid = tab.lookup_connid(chan.name, elem.index, scope)
            chanid = ast.ExprSingle(ast.ElemNumber(connid))
            cond = ast.ExprBinop(
                '=', i_elem,
                ast.ExprSingle(ast.ElemNumber(elem.indices_value)))
            conn = ast.StmtConnect(chanend, chanid, location,
                                   self.connect_type(chan, master))
            return ast.StmtIf(cond, conn, s) if s != None else conn

        def create_range_conn(s, chan, i_elem, group):
            diff2 = group[0]
            elem0 = group[1][0][0]
            offset = target_loc(chan, elem0, scope)

            # Form the location expression
            if elem0.indices_value > 0:
                location = ast.ElemGroup(
                    ast.ExprBinop(
                        '-', i_elem,
                        ast.ExprSingle(ast.ElemNumber(elem0.indices_value))))
            else:
                location = i_elem
            location = ast.ExprBinop('*', ast.ElemNumber(diff2 + 1),
                                     ast.ExprSingle(location))
            location = ast.ExprBinop('+', ast.ElemGroup(location),
                                     ast.ExprSingle(ast.ElemNumber(offset)))

            chanend = ast.ElemId(chan.chanend)
            chanend.symbol = Symbol(chan.chanend,
                                    self.chanend_type(chan),
                                    None,
                                    scope=T_SCOPE_PROC)
            connid = tab.lookup_connid(chan.name, elem0.index, scope)
            chanid = ast.ExprSingle(ast.ElemNumber(connid))
            begin = elem0.indices_value
            end = group[1][-1][0].indices_value
            cond = ast.ExprBinop(
                '>=', i_elem, ast.ExprSingle(ast.ElemNumber(min(begin, end))))
            master = tab.lookup_is_master(chan, elem0, scope)
            conn = ast.StmtConnect(chanend, chanid, location,
                                   self.connect_type(chan, master))
            return ast.StmtIf(cond, conn, s) if s else conn

        def create_tree_conn(tab, scope, chan, phase, group_size,
                             base_indices_value, loc_base, loc_diff,
                             connid_min, connid_offset, connid_diff, i_elem):
            location = ast.ExprBinop(
                '-', i_elem,
                ast.ExprSingle(ast.ElemNumber(base_indices_value)))
            location = ast.ExprBinop(
                '/', ast.ElemGroup(location),
                ast.ExprSingle(ast.ElemNumber(group_size)))
            location = ast.ExprBinop('*', ast.ElemNumber(loc_diff),
                                     ast.ExprSingle(ast.ElemGroup(location)))
            location = ast.ExprBinop('+', ast.ElemNumber(loc_base),
                                     ast.ExprSingle(ast.ElemGroup(location)))
            chanend = ast.ElemId(chan.chanend)
            chanend.symbol = Symbol(chan.chanend,
                                    self.chanend_type(chan),
                                    None,
                                    scope=T_SCOPE_PROC)
            elem0 = chan.elems[phase]
            #connid = ast.ExprBinop('+', i_elem,
            #    ast.ExprSingle(ast.ElemNumber(connid_offset)))
            connid = ast.ExprBinop('-', i_elem,
                                   ast.ExprSingle(ast.ElemNumber(phase)))
            connid = ast.ExprBinop('rem', ast.ElemGroup(connid),
                                   ast.ExprSingle(ast.ElemNumber(group_size)))
            connid = ast.ExprBinop('*', ast.ElemGroup(connid),
                                   ast.ExprSingle(ast.ElemNumber(connid_diff)))
            connid = ast.ExprBinop('+', ast.ElemGroup(connid),
                                   ast.ExprSingle(ast.ElemNumber(connid_min)))
            master = tab.lookup_is_master(chan, elem0, scope)
            return ast.StmtConnect(chanend, connid, location,
                                   self.connect_type(chan, master))

        def conn_diff_groups(chan, s, i_elem, d=DEBUG_COMPRESSION):
            """
      Compress connections based on the difference between differences of
      indices value and destination.
      """
            # Build a list of channel elements and index-dest differences
            diffs = []
            for x in chan.elems:
                diff = target_loc(chan, x, scope) - x.indices_value
                diffs.append((x, diff))

            debug(d, 'Differences for chan {}:'.format(chan.name))
            for (elem, diff) in diffs:
                connid = tab.lookup_connid(chan.name, elem.index, scope)
                debug(
                    d,
                    '  {:>3}: [{}]:{} - {}'.format(elem.indices_value,
                                                   elem.index, connid, diff))

            # Group consecutive elements with a constant second difference
            groups = []
            newgroup = True
            for ((elemA, diffA), (elemB, diffB)) in zip(diffs[:-1], diffs[1:]):
                diff2 = diffB - diffA
                connid = tab.lookup_connid(chan.name, elemB.index, scope)
                master = tab.lookup_is_master(chan, elemB, scope)
                if newgroup:
                    groups.append((diff2, [(elemA, diffA)]))
                    groupdiff2 = diff2
                    groupconnid = tab.lookup_connid(chan.name, elemA.index,
                                                    scope)
                    groupmaster = tab.lookup_is_master(chan, elemA, scope)
                    newgroup = False
                if (groupdiff2 == diff2 and groupconnid == connid
                        and groupmaster == master):
                    groups[-1][1].append((elemB, diffB))
                else:
                    newgroup = True
            if newgroup:
                groups.append((None, [diffs[-1]]))

            debug(d, 'Groups:')
            for x in groups:
                diff2 = x[0]
                elem0 = x[1][0][0]
                offset = target_loc(chan, elem0, scope)
                debug(d, '  diff2:  {}'.format(diff2))
                debug(d, '  offset: {}'.format(offset))
                debug(d, '  base:   {}'.format(elem0.indices_value))
                if len(x[1]) > 1:
                    for (i, (elem, diff)) in enumerate(x[1]):
                        loc = target_loc(chan, elem, scope)
                        computed = ((diff2 + 1) *
                                    (elem.indices_value -
                                     elem0.indices_value)) + offset
                        assert computed == loc
                        debug(
                            d,
                            '    {:>3}: [{:>3}]->{:>3}, diff: {:>3}, computed: {}'
                            .format(elem.indices_value, elem.index, loc, diff,
                                    computed))
                else:
                    debug(
                        d, '    {:>3}: [{:>3}]->{:>3}'.format(
                            elem0.indices_value, elem0.index, offset))

            # If compression was ineffective then abort
            if len(groups) == len(chan.elems):
                debug(d, 'Aborting group diff compression.')
                return None

            debug(d, 'Diff compression successful.')

            # Construct connection syntax
            s = None
            for x in groups:
                elem0 = x[1][0][0]
                if len(x[1]) == 1:
                    s = create_single_conn(s, chan, scope, i_elem, elem0)
                else:
                    s = create_range_conn(s, chan, i_elem, x)
            return s

        def conn_tree_groups(chan, s, i_elem, d=DEBUG_COMPRESSION):
            """
      Compress connections based on monotonically increasing or decreasing
      sets with the same target destination. Within a set, connection IDs can
      be monotonically increasing or decreasing.
      """
            locs = []
            debug(d, 'Locations:')
            for x in chan.elems:
                loc = target_loc(chan, x, scope)
                locs.append((x, loc))
                debug(
                    d,
                    '  {:>4} : {}[{}] -> {}'.format(x.indices_value, chan.name,
                                                    x.index, loc))

            # Separate the first odd element if there is one
            phase = 0
            if locs[0][1] != locs[1][1]:
                odd_elem = locs[0][0]
                locs = locs[1:]
                phase = 1

            # Count the group size
            group_size = 1
            while group_size < len(locs) and locs[group_size -
                                                  1][1] == locs[group_size][1]:
                group_size += 1

            # Only consider connections with more than one group
            if len(locs) <= group_size + 1:
                debug(d, 'Aborting tree compression.')
                return None

            # Set parameters
            loc_diff = locs[group_size][1] - locs[group_size - 1][1]
            loc_base = locs[0][1]
            base_indices_value = locs[0][0].indices_value
            connidA = tab.lookup_connid(chan.name, locs[0][0].index, scope)
            connidB = tab.lookup_connid(chan.name, locs[1][0].index, scope)
            connid_min = min(connidA, connidB)
            connid_diff = max(connidA, connidB) - connid_min
            connid_offset = (phase + (1 if connidA > connidB else 0)) % 2

            # Print some debug info
            debug(d, 'Attempting tree compression.')
            debug(d, '  Group size:    {}'.format(group_size))
            debug(d, '  Location base: {}'.format(loc_base))
            debug(d, '  Location diff: {}'.format(loc_diff))
            debug(d, '  Base ival:     {}'.format(base_indices_value))
            debug(d, '  ConnID base:   {}'.format(connidA))
            debug(d, '  ConnID diff:   {}'.format(connid_diff))

            # Check each group contains the same location
            debug(d, 'Checking groups...')
            i = 0
            while i * group_size < len(locs):
                debug(d, '  Group {}'.format(i))
                for j in range(1, group_size):
                    if locs[i * group_size][1] != locs[(i * group_size) +
                                                       j][1]:
                        debug(d, 'Aborting tree compression.')
                        return None
                i += 1

            # Check each step between groups has the same location and connection diffs
            debug(d, 'Checking diffs...')
            for (i, (x, y)) in enumerate(
                    zip(locs[1::group_size], locs[group_size::group_size])):
                debug(
                    d, '  Group {} and {}: {} and {}'.format(
                        i, i + 1, y[1], x[1]))
                connidX = tab.lookup_connid(chan.name, x[0].index, scope)
                connidY = tab.lookup_connid(chan.name, y[0].index, scope)
                connid_diff_ = connidX - connidY
                if y[1] - x[1] != loc_diff or connid_diff != connid_diff_:
                    debug(d, 'Aborting tree compression.')
                    return None

            # Check matching computed location
            debug(d, 'Checking computed...')
            debug(d, 'connid_min = {}'.format(connid_min))
            debug(d, 'connid_off = {}'.format(connid_offset))
            if phase == 1:
                connid = tab.lookup_connid(chan.name, odd_elem.index, scope)
                debug(d, '  {}: connid={}'.format(odd_elem.indices_value,
                                                  connid))
            for (elem, loc) in locs:
                computed_loc = loc_base + (loc_diff * (math.floor(
                    ((elem.indices_value - base_indices_value)) / group_size)))
                connid = tab.lookup_connid(chan.name, elem.index, scope)
                #computed_connid = (connid_min +
                #    ((elem.indices_value + connid_offset) % group_size) * connid_diff)
                computed_connid = (connid_min + (
                    (elem.indices_value - phase) % group_size) * connid_diff)
                debug(
                    d, '  {}: connid={}, loc={} computed({}, {})'.format(
                        elem.indices_value, connid, loc, computed_connid,
                        computed_loc))
                assert computed_loc == loc
                assert computed_connid == connid

            debug(d, 'Tree compression successful.')

            # Construct connection syntax
            if phase == 0:
                return create_tree_conn(tab, scope, chan, phase, group_size,
                                        base_indices_value, loc_base, loc_diff,
                                        connid_min, connid_offset, connid_diff,
                                        i_elem)
            else:
                s = create_tree_conn(tab, scope, chan, phase, group_size,
                                     base_indices_value, loc_base, loc_diff,
                                     connid_min, connid_offset, connid_diff,
                                     i_elem)
                return create_single_conn(s, chan, scope, i_elem, odd_elem)

        def conn_singles(chan, s, i_elem, d=DEBUG_COMPRESSION):
            """
      Create (uncompressed) connections for each case.
      """
            debug(d, 'Creating uncompressed connection range.')
            for x in chan.elems:
                s = create_single_conn(s, chan, scope, i_elem, x)
                debug(
                    d, '  {}: {}[{}]'.format(x.indices_value, chan.name,
                                             x.index))
            return s

        # Sort the channel elements into increasing order of indices
        chan.elems = sorted(chan.elems, key=lambda x: x.indices_value)

        # Compress conditional connections and return the AST construction
        i_expr = indices_expr(chan.indices)
        i_elem = ast.ElemId('_i')
        i_elem.symbol = Symbol(i_elem.name, T_VAR_SINGLE, scope=T_SCOPE_BLOCK)
        s = None
        if COMPRESS:
            s = conn_tree_groups(chan, s, i_elem)
            s = conn_diff_groups(chan, s, i_elem) if s == None else s
        s = conn_singles(chan, s, i_elem) if s == None else s
        s = [ast.StmtAss(i_elem, i_expr), s]
        s = ast.StmtSeq([ast.VarDecl(i_elem.name, T_VAR_SINGLE, None)], s)
        return s
Example #20
 def p_var_decl_chan_array(self, p):
   'var_decl : CHAN name LBRACKET expr RBRACKET'
   p[0] = ast.VarDecl(p[2], T_CHAN_ARRAY, p[4], self.coord(p))
Example #21
 def p_var_decl_array(self, p):
   'var_decl : VAR name LBRACKET expr RBRACKET'
   p[0] = ast.VarDecl(p[2], T_VAR_ARRAY, p[4], self.coord(p))
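Examples #8, #9, #11, #14, #16, #18, #20 and #21 all come from the same project and share one constructor pattern: a name, a type constant, an optional expression (array size or initial value) and a source coordinate. A hypothetical node with that signature, only to summarize the pattern:

    class VarDecl:
        """Hypothetical sketch of a node matching the VarDecl(name, type, expr, coord) calls above."""
        def __init__(self, name, type, expr=None, coord=None):
            self.name = name
            self.type = type
            self.expr = expr
            self.coord = coord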