Example #1
def _MatchOshToken_Fast(lex_mode, line, start_pos):
  # type: (lex_mode_t, str, int) -> Tuple[Id_t, int]
  """Returns (Id, end_pos)."""
  tok_type, end_pos = fastlex.MatchOshToken(lex_mode.enum_id, line, start_pos)
  # IMPORTANT: We're reusing Id instances here.  Ids are very common, so this
  # saves memory.
  return IdInstance(tok_type), end_pos
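
The comment above is the whole point of this helper: fastlex returns a plain integer, and IdInstance maps it back to a shared Id object instead of allocating a fresh one per token. A minimal sketch of how such a cache could look (an illustration, not Oil's actual core/meta.py; _Id and _MAX_CODE are made-up names):

class _Id(object):
    """Stand-in for the generated Id type; illustration only."""
    def __init__(self, code):
        self.code = code

_MAX_CODE = 256                        # assumed upper bound on integer ids
_ID_CACHE = [_Id(i) for i in range(_MAX_CODE)]

def IdInstance(i):
    # type: (int) -> _Id
    # Return the one shared object for this integer code, so hot paths like
    # the lexer don't allocate a new Id per token.
    return _ID_CACHE[i]
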
Example #2
def _ThreeArgs(w_parser):
    """Returns an expression tree to be evaluated."""
    w0 = w_parser.Read()
    w1 = w_parser.Read()
    w2 = w_parser.Read()

    # NOTE: Order is important here.

    binary_id = _BINARY_LOOKUP.get(w1.s)
    if binary_id is not None:
        return bool_expr.BoolBinary(IdInstance(binary_id), w0, w2)

    if w1.s == '-a':
        return bool_expr.LogicalAnd(bool_expr.WordTest(w0),
                                    bool_expr.WordTest(w2))

    if w1.s == '-o':
        return bool_expr.LogicalOr(bool_expr.WordTest(w0),
                                   bool_expr.WordTest(w2))

    if w0.s == '!':
        w_parser.Rewind(2)
        child = _TwoArgs(w_parser)
        return bool_expr.LogicalNot(child)

    if w0.s == '(' and w2.s == ')':
        return bool_expr.WordTest(w1)

    p_die('Expected binary operator, got %r (3 args)', w1.s, word=w1)
Example #3
def _ThreeArgs(argv):
    """Returns an expression tree to be evaluated."""
    a0, a1, a2 = argv

    # NOTE: Order is important here.

    binary_id = _BINARY_LOOKUP.get(a1)
    if binary_id is not None:
        left = word.StringWord(Id.Word_Compound, a0)
        right = word.StringWord(Id.Word_Compound, a2)
        return bool_expr.BoolBinary(IdInstance(binary_id), left, right)

    if a1 == '-a':
        left = _StringWordTest(a0)
        right = _StringWordTest(a2)
        return bool_expr.LogicalAnd(left, right)

    if a1 == '-o':
        left = _StringWordTest(a0)
        right = _StringWordTest(a2)
        return bool_expr.LogicalOr(left, right)

    if a0 == '!':
        child = _TwoArgs(argv[1:])
        return bool_expr.LogicalNot(child)

    if a0 == '(' and a2 == ')':
        return _StringWordTest(a1)

    p_die('Syntax error: binary operator expected, got %r (3 args)', a1)
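
For orientation, two hypothetical calls into Example #3 (assuming '-eq' is registered in _BINARY_LOOKUP and '-f' in _UNARY_LOOKUP, as the test builtin's usual operators suggest):

node = _ThreeArgs(['3', '-eq', '4'])
# Hits the binary branch: node is a bool_expr.BoolBinary whose operands are
# Id.Word_Compound StringWords wrapping '3' and '4'.

node = _ThreeArgs(['!', '-f', 'path'])
# '-f' is not a binary operator, so this falls through to the '!' branch and
# wraps the two-arg parse of ['-f', 'path'] in bool_expr.LogicalNot.
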
Example #4
    def ReadWord(self, unused_lex_mode):
        """Interface for bool_parse.py."""
        if self.i == self.n:
            # Does it make sense to define Eof_Argv or something?
            w = word.StringWord(Id.Eof_Real, '')
            # TODO: Add a way to show this.  Show 1 char past the right-most spid of
            # the last word?  But we only have the left-most spid.
            w.spids.append(const.NO_INTEGER)
            return w

        #log('ARGV %s i %d', self.argv, self.i)
        s = self.arg_vec.strs[self.i]
        left_spid = self.arg_vec.spids[self.i]
        self.i += 1

        # default is an operand word
        id_int = (_UNARY_LOOKUP.get(s) or _BINARY_LOOKUP.get(s)
                  or _OTHER_LOOKUP.get(s))

        id_ = Id.Word_Compound if id_int is None else IdInstance(id_int)

        # NOTE: We only have the left spid now.  It might be useful to add the
        # right one.
        w = word.StringWord(id_, s)
        w.spids.append(left_spid)
        return w
Example #5
def _TwoArgs(w_parser):
    """Returns an expression tree to be evaluated."""
    w0 = w_parser.Read()
    w1 = w_parser.Read()
    if w0.s == '!':
        return bool_expr.LogicalNot(bool_expr.WordTest(w1))
    unary_id = _UNARY_LOOKUP.get(w0.s)
    if unary_id is None:
        # TODO:
        # - separate lookup by unary
        p_die('Expected unary operator, got %r (2 args)', w0.s, word=w0)
    return bool_expr.BoolUnary(IdInstance(unary_id), w1)
Example #6
def _TwoArgs(argv):
    """Returns an expression tree to be evaluated."""
    a0, a1 = argv
    if a0 == '!':
        return bool_expr.LogicalNot(_StringWordTest(a1))
    unary_id = _UNARY_LOOKUP.get(a0)
    if unary_id is None:
        # TODO:
        # - syntax error
        # - separate lookup by unary
        p_die('Expected unary operator, got %r (2 args)', a0)
    child = word.StringWord(Id.Word_Compound, a1)
    return bool_expr.BoolUnary(IdInstance(unary_id), child)
Example #7
    def ReadWord(self, unused_lex_mode):
        if self.i == self.n:
            # NOTE: Could define something special
            return word.StringWord(Id.Eof_Real, '')

        #log('ARGV %s i %d', self.argv, self.i)
        s = self.argv[self.i]
        self.i += 1

        # default is an operand word
        id_int = (_UNARY_LOOKUP.get(s) or _BINARY_LOOKUP.get(s)
                  or _OTHER_LOOKUP.get(s))

        id_ = Id.Word_Compound if id_int is None else IdInstance(id_int)
        return word.StringWord(id_, s)
Example #8
def _MatchHistoryToken_Fast(line, start_pos):
  # type: (str, int) -> Tuple[Id_t, int]
  """Returns (id, end_pos)."""
  tok_type, end_pos = fastlex.MatchHistoryToken(line, start_pos)
  return IdInstance(tok_type), end_pos
Example #9
def PrintBoolTable():
    for i, arg_type in BOOL_ARG_TYPES.items():
        row = (IdInstance(i), arg_type)
        print('\t'.join(str(c) for c in row))
Example #10
    def testEquality(self):
        left = IdInstance(198)
        right = IdInstance(198)
        print(left, right)
        print(left == right)
        self.assertEqual(left, right)
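
If IdInstance really does return one cached object per integer code (the assumption sketched under Example #1), the two lookups above are not merely equal but the same object, so an identity check would pass as well:

left = IdInstance(198)
right = IdInstance(198)
assert left is right   # identity, under the instance-reuse assumption
assert left == right   # equality, which is what the test above asserts
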
Example #11
def _MatchHistoryToken_Fast(line, start_pos):
    """Returns (id, end_pos)."""
    tok_type, end_pos = fastlex.MatchHistoryToken(line, start_pos)
    return IdInstance(tok_type), end_pos
Example #12
def PrintBoolTable():
    for i, arg_type in BOOL_ARG_TYPES.items():
        print('%-40s %s' % (IdInstance(i), arg_type))
Example #13
  def Expr(self, pnode):
    # type: (PNode) -> expr_t
    """Transform expressions (as opposed to statements)."""
    typ = pnode.typ
    tok = pnode.tok
    children = pnode.children

    if ISNONTERMINAL(typ):

      #
      # Oil Entry Points / Additions
      #

      if typ == grammar_nt.oil_expr:  # for if/while
        # oil_expr: '(' testlist ')'
        return self.Expr(children[1])

      if typ == grammar_nt.return_expr:
        # return_expr: testlist end_stmt
        return self.Expr(children[0])

      if typ == grammar_nt.place_list:
        return self._AssocBinary(children)

      if typ == grammar_nt.place:
        # place: NAME place_trailer*
        if len(pnode.children) == 1:
          return self.Expr(pnode.children[0])
        # TODO: Called _Trailer but don't handle ( )?
        # only [] . -> :: ?
        raise NotImplementedError

      #
      # Python-like Expressions / Operators
      #

      if typ == grammar_nt.atom:
        if len(children) == 1:
          return self.Expr(children[0])
        return self._Atom(children)

      if typ == grammar_nt.testlist:
        # testlist: test (',' test)* [',']
        # We need tuples for Python's 'var a, b = x' and 'for (a, b in x) {'
        return self._Tuple(children)

      if typ == grammar_nt.test:
        # test: or_test ['if' or_test 'else' test] | lambdef
        if len(children) == 1:
          return self.Expr(children[0])

        # TODO: Handle lambdef

        test = self.Expr(children[2])
        body = self.Expr(children[0])
        orelse = self.Expr(children[4])
        return expr.IfExp(test, body, orelse)

      if typ == grammar_nt.test_nocond:
        # test_nocond: or_test | lambdef_nocond
        assert len(children) == 1
        return self.Expr(children[0])

      if typ == grammar_nt.argument:
        # argument: ( test [comp_for] |
        #             test '=' test |
        #             '**' test |
        #             '*' test )
        if len(pnode.children) == 1:
          return self.Expr(children[0])
        # TODO:
        raise NotImplementedError

      if typ == grammar_nt.subscript:
        # subscript: test | [test] ':' [test] [sliceop]
        if len(pnode.children) == 1:
          return self.Expr(children[0])
        # TODO:
        raise NotImplementedError

      if typ == grammar_nt.testlist_comp:
        # testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
        if children[1].typ == grammar_nt.comp_for:
          elt = self.Expr(children[0])
          comp = self._CompFor(children[1])
          return expr.ListComp(elt, [comp])

        # (1,)  (1, 2)  etc.
        if children[1].tok.id == Id.Arith_Comma:
          return self._Tuple(children)
        raise NotImplementedError('testlist_comp')

      elif typ == grammar_nt.exprlist:
        # exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']

        if len(children) == 1:
          return self.Expr(children[0])

        # used in for loop, genexpr.
        # TODO: This should be placelist?  for x, *y ?
        raise NotImplementedError('exprlist')

      #
      # Operators with Precedence
      #

      if typ == grammar_nt.or_test:
        # or_test: and_test ('or' and_test)*
        return self._AssocBinary(children)

      if typ == grammar_nt.and_test:
        # and_test: not_test ('and' not_test)*
        return self._AssocBinary(children)

      if typ == grammar_nt.not_test:
        # not_test: 'not' not_test | comparison
        if len(children) == 1:
          return self.Expr(children[0])

        op_tok = children[0].tok  # not
        return expr.Unary(op_tok, self.Expr(children[1]))

      elif typ == grammar_nt.comparison:
        if len(children) == 1:
          return self.Expr(children[0])

        return self._CompareChain(children)

      elif typ == grammar_nt.expr:
        # expr: xor_expr ('|' xor_expr)*
        return self._AssocBinary(children)

      if typ == grammar_nt.xor_expr:
        # xor_expr: and_expr ('xor' and_expr)*
        return self._AssocBinary(children)

      if typ == grammar_nt.and_expr:  # a & b
        # and_expr: shift_expr ('&' shift_expr)*
        return self._AssocBinary(children)

      elif typ == grammar_nt.shift_expr:
        # shift_expr: arith_expr (('<<'|'>>') arith_expr)*
        return self._AssocBinary(children)

      elif typ == grammar_nt.arith_expr:
        # arith_expr: term (('+'|'-') term)*
        return self._AssocBinary(children)

      elif typ == grammar_nt.term:
        # term: factor (('*'|'/'|'div'|'mod') factor)*
        return self._AssocBinary(children)

      elif typ == grammar_nt.factor:
        # factor: ('+'|'-'|'~') factor | power
        # the power would have already been reduced
        if len(children) == 1:
          return self.Expr(children[0])
        op, e = children
        assert isinstance(op.tok, token)
        return expr.Unary(op.tok, self.Expr(e))

      elif typ == grammar_nt.power:
        # power: atom trailer* ['^' factor]

        node = self.Expr(children[0])
        if len(children) == 1:  # No trailers
          return node

        n = len(children)
        i = 1
        while i < n and ISNONTERMINAL(children[i].typ):
          node = self._Trailer(node, children[i])
          i += 1

        if i != n:  # ['^' factor]
          op_tok = children[i].tok
          assert op_tok.id == Id.Arith_Caret, op_tok
          factor = self.Expr(children[i+1])
          node = expr.Binary(op_tok, node, factor)

        return node

      #
      # Oil Lexer Modes
      #

      elif typ == grammar_nt.array_literal:
        left_tok = children[0].tok

        # Approximation for now.
        tokens = [
            pnode.tok for pnode in children[1:-1] if pnode.tok.id ==
            Id.Lit_Chars
        ]
        items = [expr.Const(t) for t in tokens]  # type: List[expr_t]
        return expr.ArrayLiteral(left_tok, items)

      elif typ == grammar_nt.sh_array_literal:
        left_tok = children[0].tok

        # HACK: When typ is Id.Expr_CastedDummy, the 'tok' field ('opaque')
        # actually has a list of words!
        typ1 = children[1].typ
        assert typ1 == Id.Expr_CastedDummy.enum_id, typ1
        array_words = cast('List[word_t]', children[1].tok)

        return sh_array_literal(left_tok, array_words)

      elif typ == grammar_nt.sh_command_sub:
        return cast(command_sub, children[1].tok)

      elif typ == grammar_nt.braced_var_sub:
        return cast(braced_var_sub, children[1].tok)

      elif typ == grammar_nt.dq_string:
        return cast(double_quoted, children[1].tok)

      elif typ == grammar_nt.sq_string:
        return cast(single_quoted, children[1].tok)

      elif typ == grammar_nt.simple_var_sub:
        return simple_var_sub(children[0].tok)

      else:
        nt_name = self.number2symbol[typ]
        raise AssertionError(
            "PNode type %d (%s) wasn't handled" % (typ, nt_name))

    else:  # Terminals should have a token
      id_ = tok.id

      if id_ == Id.Expr_Name:
        return expr.Var(tok)

      if id_ in (
          Id.Expr_DecInt, Id.Expr_BinInt, Id.Expr_OctInt, Id.Expr_HexInt,
          Id.Expr_Float):
        return expr.Const(tok)

      if id_ in (Id.Expr_Null, Id.Expr_True, Id.Expr_False):
        return expr.Const(tok)

      from core.meta import IdInstance
      raise NotImplementedError(IdInstance(typ))
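
Examples #13 and #15 delegate every left-associative rule (or_test, arith_expr, term, and so on) to self._AssocBinary, which isn't shown on this page. A minimal sketch of the usual left-associative fold, assuming the PNode children alternate operand, operator token, operand:

def _AssocBinary(self, children):
    # type: (List[PNode]) -> expr_t
    """Fold (a, op1, b, op2, c, ...) into Binary(op2, Binary(op1, a, b), c)."""
    node = self.Expr(children[0])
    i = 1
    while i < len(children):
        op_tok = children[i].tok
        right = self.Expr(children[i + 1])
        node = expr.Binary(op_tok, node, right)
        i += 2
    return node
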
Example #14
def MatchOshToken(lex_mode, line, start_pos):
    tok_type, end_pos = fastlex.MatchOshToken(lex_mode.enum_id, line,
                                              start_pos)
    return IdInstance(tok_type), end_pos
Example #15
    def Expr(self, pnode):
        # type: (PNode) -> expr_t
        """Transform expressions (as opposed to statements)."""
        typ = pnode.typ
        tok = pnode.tok
        children = pnode.children

        if ISNONTERMINAL(typ):

            #
            # Oil Entry Points / Additions
            #

            if typ == grammar_nt.oil_expr:  # for if/while
                # oil_expr: '(' testlist ')'
                return self.Expr(children[1])

            if typ == grammar_nt.command_expr:
                # return_expr: testlist end_stmt
                return self.Expr(children[0])

            #
            # Python-like Expressions / Operators
            #

            if typ == grammar_nt.atom:
                if len(children) == 1:
                    return self.Expr(children[0])
                return self._Atom(children)

            if typ == grammar_nt.testlist:
                # testlist: test (',' test)* [',']
                return self._Tuple(children)

            if typ == grammar_nt.test:
                # test: or_test ['if' or_test 'else' test] | lambdef
                if len(children) == 1:
                    return self.Expr(children[0])

                # TODO: Handle lambdef

                test = self.Expr(children[2])
                body = self.Expr(children[0])
                orelse = self.Expr(children[4])
                return expr.IfExp(test, body, orelse)

            if typ == grammar_nt.lambdef:
                # lambdef: '|' [name_type_list] '|' test

                n = len(children)
                if n == 4:
                    params = self._NameTypeList(children[1])
                else:
                    params = []

                body = self.Expr(children[n - 1])
                return expr.Lambda(params, body)

            #
            # Operators with Precedence
            #

            if typ == grammar_nt.or_test:
                # or_test: and_test ('or' and_test)*
                return self._AssocBinary(children)

            if typ == grammar_nt.and_test:
                # and_test: not_test ('and' not_test)*
                return self._AssocBinary(children)

            if typ == grammar_nt.not_test:
                # not_test: 'not' not_test | comparison
                if len(children) == 1:
                    return self.Expr(children[0])

                op_tok = children[0].tok  # not
                return expr.Unary(op_tok, self.Expr(children[1]))

            elif typ == grammar_nt.comparison:
                if len(children) == 1:
                    return self.Expr(children[0])

                return self._CompareChain(children)

            elif typ == grammar_nt.range_expr:
                if len(children) == 1:
                    return self.Expr(children[0])

                if len(children) == 3:
                    return expr.Range(self.Expr(children[0]),
                                      self.Expr(children[2]))

                raise AssertionError(children)

            elif typ == grammar_nt.expr:
                # expr: xor_expr ('|' xor_expr)*
                return self._AssocBinary(children)

            if typ == grammar_nt.xor_expr:
                # xor_expr: and_expr ('xor' and_expr)*
                return self._AssocBinary(children)

            if typ == grammar_nt.and_expr:  # a & b
                # and_expr: shift_expr ('&' shift_expr)*
                return self._AssocBinary(children)

            elif typ == grammar_nt.shift_expr:
                # shift_expr: arith_expr (('<<'|'>>') arith_expr)*
                return self._AssocBinary(children)

            elif typ == grammar_nt.arith_expr:
                # arith_expr: term (('+'|'-') term)*
                return self._AssocBinary(children)

            elif typ == grammar_nt.term:
                # term: factor (('*'|'/'|'div'|'mod') factor)*
                return self._AssocBinary(children)

            elif typ == grammar_nt.factor:
                # factor: ('+'|'-'|'~') factor | power
                # the power would have already been reduced
                if len(children) == 1:
                    return self.Expr(children[0])
                op, e = children
                assert isinstance(op.tok, token)
                return expr.Unary(op.tok, self.Expr(e))

            elif typ == grammar_nt.power:
                # power: atom trailer* ['^' factor]

                node = self.Expr(children[0])
                if len(children) == 1:  # No trailers
                    return node

                n = len(children)
                i = 1
                while i < n and ISNONTERMINAL(children[i].typ):
                    node = self._Trailer(node, children[i])
                    i += 1

                if i != n:  # ['^' factor]
                    op_tok = children[i].tok
                    assert op_tok.id == Id.Arith_Caret, op_tok
                    factor = self.Expr(children[i + 1])
                    node = expr.Binary(op_tok, node, factor)

                return node

            elif typ == grammar_nt.array_literal:
                left_tok = children[0].tok

                items = [self._ArrayItem(p) for p in children[1:-1]]
                return expr.ArrayLiteral(left_tok, items)

            elif typ == grammar_nt.oil_expr_sub:
                return self.Expr(children[0])

            #
            # Oil Lexer Modes
            #

            elif typ == grammar_nt.sh_array_literal:
                left_tok = children[0].tok

                # HACK: When typ is Id.Expr_CastedDummy, the 'tok' field ('opaque')
                # actually has a list of words!
                typ1 = children[1].typ
                assert typ1 == Id.Expr_CastedDummy.enum_id, typ1
                array_words = cast('List[word_t]', children[1].tok)

                return sh_array_literal(left_tok, array_words)

            elif typ == grammar_nt.sh_command_sub:
                return cast(command_sub, children[1].tok)

            elif typ == grammar_nt.braced_var_sub:
                return cast(braced_var_sub, children[1].tok)

            elif typ == grammar_nt.dq_string:
                return cast(double_quoted, children[1].tok)

            elif typ == grammar_nt.sq_string:
                return cast(single_quoted, children[1].tok)

            elif typ == grammar_nt.simple_var_sub:
                return simple_var_sub(children[0].tok)

            else:
                nt_name = self.number2symbol[typ]
                raise AssertionError("PNode type %d (%s) wasn't handled" %
                                     (typ, nt_name))

        else:  # Terminals should have a token
            id_ = tok.id

            if id_ == Id.Expr_Name:
                return expr.Var(tok)

            if id_ in (Id.Expr_DecInt, Id.Expr_BinInt, Id.Expr_OctInt,
                       Id.Expr_HexInt, Id.Expr_Float):
                return expr.Const(tok)

            if id_ in (Id.Expr_Null, Id.Expr_True, Id.Expr_False):
                return expr.Const(tok)

            from core.meta import IdInstance
            raise NotImplementedError(IdInstance(typ))
Example #16
def MatchOshToken(lex_mode, line, start_pos):
  tok_type, end_pos = fastlex.MatchOshToken(lex_mode.enum_id, line, start_pos)
  #log('tok_type = %d, id = %s', tok_type, IdInstance(tok_type))
  return IdInstance(tok_type), end_pos