Example #1
    def __init__(self,
                 s,
                 strictmode=True,
                 expansionlimit=None,
                 tokenizerargs=None):
        # expansionlimit must be either None or an integer
        assert expansionlimit is None or isinstance(expansionlimit, int)

        # Save the input string and parsing options
        self.s = s
        self._strictmode = strictmode
        self._expansionlimit = expansionlimit
        # Default to an empty dict when no tokenizer arguments are given
        if tokenizerargs is None:
            tokenizerargs = {}
        # dict.pop() removes and returns 'parserstate' from tokenizerargs,
        # falling back to a fresh state.parserstate() if the key is absent.
        self.parserstate = tokenizerargs.pop('parserstate',
                                             state.parserstate())

        self.tok = tokenizer.tokenizer(s,
                                       parserstate=self.parserstate,
                                       strictmode=strictmode,
                                       **tokenizerargs)

        self.redirstack = self.tok.redirstack
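
The constructor above hands the input string and a shared parser state to the tokenizer. For a quick sanity check, the tokenizer can also be driven on its own, exactly as the test examples further down do. A minimal sketch, using only the names visible in the later examples:

from bashlex import tokenizer, state

# Tokenize a short command with a fresh parser state, mirroring the call
# made in the constructor above (and the tokenize() helper in the tests).
tokens = list(tokenizer.tokenizer('a b', state.parserstate()))
for tok in tokens:
    print(tok.value)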
Example #2
    def __init__(self, s, strictmode=True, expansionlimit=None, tokenizerargs=None):
        assert expansionlimit is None or isinstance(expansionlimit, int)

        self.s = s
        self._strictmode = strictmode
        self._expansionlimit = expansionlimit

        if tokenizerargs is None:
            tokenizerargs = {}
        self.parserstate = tokenizerargs.pop('parserstate', state.parserstate())

        self.tok = tokenizer.tokenizer(s,
                                       parserstate=self.parserstate,
                                       strictmode=strictmode,
                                       **tokenizerargs)

        self.redirstack = self.tok.redirstack
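
Examples #1 and #2 show the same constructor. As an end-to-end sketch, and assuming the class above is bashlex's parser, this constructor is normally reached through the package's top-level parse() helper rather than instantiated directly:

import bashlex

# parse() builds a parser over the input and returns a list of AST
# nodes, one per top-level command in the string.
trees = bashlex.parse('true && echo done')
for tree in trees:
    print(tree.dump())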
Example #3
import unittest

from bashlex import tokenizer, state, flags, errors

from bashlex.tokenizer import token as t
from bashlex.tokenizer import tokentype as tt

tokenize = lambda s: list(tokenizer.tokenizer(s, state.parserstate()))

hasdollarset = set([flags.word.HASDOLLAR])


class test_tokenizer(unittest.TestCase):
    def setUp(self):
        if not hasattr(self, 'assertRaisesRegex'):
            self.assertRaisesRegex = self.assertRaisesRegexp

    def assertTokens(self, s, tokens):
        result = tokenize(s)

        # pop the last token if it's a new line since that gets appended
        # to the input string by default and we don't really care about
        # that here
        if result[-1].value == '\n':
            result.pop()

        self.assertEqual(result, tokens)

        for t in tokens:
            self.assertEqual(str(t.value), s[t.lexpos:t.endlexpos])
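
A hypothetical test built on assertTokens, assuming the token helper t takes a token type, a value and a [lexpos, endlexpos] pair, which is the layout the slicing check above relies on:

    def test_single_word(self):
        # hypothetical: a single WORD token spanning the whole input
        self.assertTokens('cat', [t(tt.WORD, 'cat', [0, 3])])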
Example #4
import unittest

from bashlex import tokenizer, state, flags, errors

from bashlex.tokenizer import token as t
from bashlex.tokenizer import tokentype as tt

tokenize = lambda s: list(tokenizer.tokenizer(s, state.parserstate()))

hasdollarset = set([flags.word.HASDOLLAR])

class test_tokenizer(unittest.TestCase):
    def assertTokens(self, s, tokens):
        result = tokenize(s)

        # pop the last token if it's a new line since that gets appended
        # to the input string by default and we don't really care about
        # that here
        if result[-1].value == '\n':
            result.pop()

        self.assertEqual(result, tokens)

        for t in tokens:
            self.assertEqual(str(t.value), s[t.lexpos:t.endlexpos])

    def test_empty_string(self):
        self.assertEqual(len(tokenize('')), 0)

    def test_simple(self):
        s = 'a b'