Example #1
0
    def _ReadArithWord(self):
        """Helper function for ReadArithWord.

        Dispatches on the Kind of the current token while in arithmetic lex
        mode and builds the corresponding word node.

        Returns:
          2-tuple (word, need_more):
            word: a word node, or None on error or when need_more is set
            need_more: True if the caller should call us again (the token was
              consumed without producing a word, e.g. ignored whitespace)
        """
        #assert self.token_type != Id.Undefined_Tok
        self._Peek()  # make cur_token / token_kind reflect the current token
        #print('_ReadArithWord', self.cur_token)

        if self.token_kind == Kind.Unknown:
            # Lexer could not classify the token: report and fail.
            self.AddErrorContext("Unknown token in arith context: %s",
                                 self.cur_token,
                                 token=self.cur_token)
            return None, False

        elif self.token_kind == Kind.Eof:
            # Just return EOF token
            w = ast.TokenWord(self.cur_token)
            return w, False
            #self.AddErrorContext("Unexpected EOF in arith context: %s",
            #    self.cur_token, token=self.cur_token)
            #return None, False

        elif self.token_kind == Kind.Ignored:
            # Space should be ignored.  TODO: change this to SPACE_SPACE and
            # SPACE_NEWLINE?  or SPACE_TOK.
            self._Next(LexMode.ARITH)
            return None, True  # Tell wrapper to try again

        elif self.token_kind in (Kind.Arith, Kind.Right):
            # Id.Right_ArithSub IS just a normal token, handled by ArithParser
            # NOTE(review): cur_token is read *after* _Next(); presumably
            # _Next() only schedules the advance and _Peek() performs it
            # (the VSub branch below reads cur_token before _Next) -- confirm
            # against the lexer implementation.
            self._Next(LexMode.ARITH)
            w = ast.TokenWord(self.cur_token)
            return w, False

        elif self.token_kind in (Kind.Lit, Kind.Left):
            # Start of a compound word (literal chars or a left bracket token).
            w = self._ReadCompoundWord(lex_mode=LexMode.ARITH)
            if not w:
                return None, True
            return w, False

        elif self.token_kind == Kind.VSub:
            # A simple variable substitution becomes a one-part compound word.
            part = ast.SimpleVarSub(self.cur_token)
            self._Next(LexMode.ARITH)
            w = ast.CompoundWord([part])
            return w, False

        else:
            # Any other token kind is an error in arithmetic context.
            self._BadToken("Unexpected token parsing arith sub: %s",
                           self.cur_token)
            return None, False

        # Defensive: every branch above returns, so this is unreachable.
        raise AssertionError("Shouldn't get here")
Example #2
0
    def testMultiLine(self):
        w_parser = InitWordParser("""\
ls foo

# Multiple newlines and comments should be ignored

ls bar
""")

        print('--MULTI')
        w = w_parser.ReadWord(LexMode.OUTER)
        parts = [ast.LiteralPart(ast.token(Id.Lit_Chars, 'ls'))]
        self.assertEqual(ast.CompoundWord(parts), w)

        w = w_parser.ReadWord(LexMode.OUTER)
        parts = [ast.LiteralPart(ast.token(Id.Lit_Chars, 'foo'))]
        self.assertEqual(ast.CompoundWord(parts), w)

        w = w_parser.ReadWord(LexMode.OUTER)
        t = ast.token(Id.Op_Newline, '\n')
        self.assertEqual(ast.TokenWord(t), w)

        w = w_parser.ReadWord(LexMode.OUTER)
        parts = [ast.LiteralPart(ast.token(Id.Lit_Chars, 'ls'))]
        self.assertEqual(ast.CompoundWord(parts), w)

        w = w_parser.ReadWord(LexMode.OUTER)
        parts = [ast.LiteralPart(ast.token(Id.Lit_Chars, 'bar'))]
        self.assertEqual(ast.CompoundWord(parts), w)

        w = w_parser.ReadWord(LexMode.OUTER)
        t = ast.token(Id.Op_Newline, '\n')
        self.assertEqual(ast.TokenWord(t), w)

        w = w_parser.ReadWord(LexMode.OUTER)
        t = ast.token(Id.Eof_Real, '')
        self.assertEqual(ast.TokenWord(t), w)
Example #3
0
def _assertReadWordWithArena(test, word_str):
    """Parse one word from word_str, assert a trailing newline follows.

    Fails the given test case if parsing produced no word.

    Returns:
      2-tuple (arena, word) for further assertions by the caller.
    """
    print('\n---', word_str)
    arena, w_parser = _InitWordParserWithArena(word_str)

    word = w_parser.ReadWord(LexMode.OUTER)
    if not word:
        err = w_parser.Error()
        test.fail("Couldn't parse %r: %s" % (word_str, err))
    ast.PrettyPrint(word)

    # The word must be followed by a newline token.
    expected = ast.TokenWord(ast.token(Id.Op_Newline, '\n'))
    test.assertTrue(
        TokenWordsEqual(expected, w_parser.ReadWord(LexMode.OUTER)))

    return arena, word
Example #4
0
def _assertReadWordWithArena(test, word_str):
  """Parse one word from word_str and assert the input is then exhausted.

  Fails the given test case if parsing produced no word.

  Returns:
    2-tuple (arena, word) for further assertions by the caller.
  """
  print('\n---', word_str)
  arena, w_parser = _InitWordParserWithArena(word_str)

  word = w_parser.ReadWord(lex_mode_e.OUTER)
  if not word:
    err = w_parser.Error()
    test.fail("Couldn't parse %r: %s" % (word_str, err))
  ast.PrettyPrint(word)

  # After the word, the parser must be at end of input (Eof_Real).
  eof = w_parser.ReadWord(lex_mode_e.OUTER)
  expected = ast.TokenWord(ast.token(Id.Eof_Real, ''))
  test.assertTrue(test_lib.TokenWordsEqual(expected, eof), eof)

  return arena, word
Example #5
0
    def _ReadWord(self, lex_mode):
        """Helper function for Read().

        Dispatches on the Kind of the current token in the given lex mode.

        Returns:
          2-tuple (word, need_more)
            word: Word, or None if there was an error, or need_more is set
            need_more: True if the caller should call us again
        """
        #print('_Read', lex_mode, self.cur_token)
        self._Peek()  # make cur_token / token_kind / token_type current

        if self.token_kind == Kind.Eof:
            # No advance
            return ast.TokenWord(self.cur_token), False

        # Allow Arith for ) at end of for loop?
        elif self.token_kind in (Kind.Op, Kind.Redir, Kind.Arith):
            self._Next(lex_mode)
            if self.token_type == Id.Op_Newline:
                # Collapse runs of newlines: if the previous word was already
                # a newline, skip this one instead of emitting another token.
                if self.cursor_was_newline:
                    #print('SKIP(nl)', self.cur_token)
                    return None, True

            return ast.TokenWord(self.cur_token), False

        elif self.token_kind == Kind.Right:
            #print('WordParser.Read: Kind.Right', self.cur_token)
            # Only these specific right-delimiters are legal here; anything
            # else indicates a lexer/parser inconsistency.
            if self.token_type not in (Id.Right_Subshell, Id.Right_FuncDef,
                                       Id.Right_CasePat,
                                       Id.Right_ArrayLiteral):
                raise AssertionError(self.cur_token)

            self._Next(lex_mode)
            return ast.TokenWord(self.cur_token), False

        elif self.token_kind in (Kind.Ignored, Kind.WS):
            # Whitespace and ignored tokens produce no word.
            self._Next(lex_mode)
            return None, True  # tell Read() to try again

        elif self.token_kind in (Kind.VSub, Kind.Lit, Kind.Left, Kind.KW,
                                 Kind.Assign, Kind.ControlFlow, Kind.BoolUnary,
                                 Kind.BoolBinary):
            # We're beginning a word.  If we see Id.Lit_Pound, change to
            # LexMode.COMMENT and read until end of line.  (TODO: How to add comments
            # to AST?)

            # TODO: Can we do the same thing for Tilde here?  Enter a state where we
            # look for / too.
            if self.token_type == Id.Lit_Pound:
                self._Next(LexMode.COMMENT)
                self._Peek()
                assert self.token_type == Id.Ignored_Comment, self.cur_token
                # The next iteration will go into Kind.Ignored and set lex state
                # to LexMode.OUTER/etc.
                return None, True  # tell Read() to try again after comment

            else:
                w = self._ReadCompoundWord(lex_mode=lex_mode)
                if not w:
                    self.AddErrorContext('Error reading command word',
                                         token=self.cur_token)
                    return None, False
                return w, False

        else:
            # Unhandled token kind: programming error, not user error.
            raise AssertionError('Unhandled: %s (%s)' %
                                 (self.cur_token, self.token_kind))

        # Defensive: every branch above returns or raises; unreachable.
        raise AssertionError("Shouldn't get here")