示例#1
0
    def on_tokenize(self, shell, tokens, origin):
        """Expand variable references inside string tokens.

        Tokens that are not strings, or that contain no variable prefix,
        pass through untouched.  String subtokens are kept as-is; each
        variable subtoken is expanded, and — when the original token was
        unquoted — the expansion is split on whitespace with whitespace
        tokens re-inserted between the parts.
        """
        result = []
        for token in tokens:
            expandable = (isinstance(token, StringToken)
                          and self.prefix in token.text)
            if not expandable:
                result.append(token)
                continue

            for sub in get_subtokens(token, self.prefix):
                if isinstance(sub, StringToken):
                    result.append(sub)
                    continue

                value = self.expand(shell, sub)
                if token.quote:
                    # Quoted token: keep the expansion as one string.
                    result.append(StringToken(sub.index, value, token.quote))
                else:
                    # Unquoted: re-split the expansion on whitespace.
                    for i, part in enumerate(value.split()):
                        if i:
                            result.append(WhitespaceToken(sub.index))
                        result.append(StringToken(sub.index, part))

        return result
示例#2
0
 def test_get_subtokens_dbl_escape(self):
     # A doubled backslash does not escape the prefix: both backslashes
     # stay in the string subtoken and '$name' is parsed as a variable.
     actual = list(get_subtokens(StringToken(0, 'hello, \\\\$name'), '$',
                                 self.features))
     expected = [
         StringToken(0, 'hello, \\\\'),
         VariableToken(0, '$', 'name'),
     ]
     assert actual == expected
示例#3
0
 def test_get_subtokens_var_escape_next(self):
     # A backslash immediately after a variable terminates the variable
     # and begins a new string subtoken.
     actual = list(get_subtokens(StringToken(0, 'hello $name\\n'), '$',
                                 self.features))
     expected = [
         StringToken(0, 'hello '),
         VariableToken(0, '$', 'name'),
         StringToken(0, '\\n'),
     ]
     assert actual == expected
示例#4
0
 def test_get_subtokens_repeat(self):
     # Back-to-back variable references produce consecutive variable
     # subtokens with no string token between them.
     actual = list(get_subtokens(StringToken(0, 'hello $name$name'), '$',
                                 self.features))
     expected = [
         StringToken(0, 'hello '),
         VariableToken(0, '$', 'name'),
         VariableToken(0, '$', 'name'),
     ]
     assert actual == expected
示例#5
0
 def test_get_subtokens_multi(self):
     # Two separated variable references: string and variable subtokens
     # alternate in input order.
     actual = list(get_subtokens(StringToken(0, '$name, welcome to $city'),
                                 '$', self.features))
     expected = [
         VariableToken(0, '$', 'name'),
         StringToken(1, ', welcome to '),
         VariableToken(0, '$', 'city'),
     ]
     assert actual == expected
示例#6
0
 def test_get_subtokens_single(self):
     # A single embedded variable splits the text into string, variable,
     # string subtokens.
     actual = list(get_subtokens(StringToken(0, 'hello, $name, welcome'),
                                 '$', self.features))
     expected = [
         StringToken(0, 'hello, '),
         VariableToken(0, '$', 'name'),
         StringToken(1, ', welcome'),
     ]
     assert actual == expected
示例#7
0
File: variable.py  Project: lnenov/pypsi
    def on_tokenize(self, shell, tokens, origin):
        """Expand variable references inside string tokens.

        Non-string tokens, and strings that contain no variable prefix,
        are passed through unchanged.  String subtokens are kept as-is;
        each variable subtoken is expanded and appended as a double-quoted
        string token so the expanded value is not re-split on whitespace.
        """
        ret = []
        for token in tokens:
            if (not isinstance(token, StringToken) or
                    self.prefix not in token.text):
                ret.append(token)
                continue

            for subt in get_subtokens(token, self.prefix):
                if isinstance(subt, StringToken):
                    ret.append(subt)
                    continue

                expanded = self.expand(shell, subt)
                # Removed the commented-out quote/whitespace-splitting
                # implementation that was left here as a triple-quoted
                # string: it was an executed no-op expression statement,
                # not a comment.  Quote the expansion so downstream
                # parsing treats it as a single string.
                ret.append(StringToken(subt.index, expanded, '"'))

        return ret
示例#8
0
def get_subtokens(token, prefix, features):
    """Split a StringToken into string and variable subtokens.

    Yields ``StringToken`` and ``VariableToken`` instances in the order
    they occur in ``token.text``.  A backslash escapes the character
    after it: an escaped ``prefix`` is emitted literally with the
    backslash dropped, while any other escaped character keeps its
    backslash.

    :param StringToken token: the token to split
    :param str prefix: the variable-prefix character (e.g. ``'$'``)
    :param features: parser features forwarded to new StringTokens
    """
    # True when the previous character was a backslash whose escape has
    # not yet been applied.
    escape = False
    index = token.index
    # String subtoken currently being accumulated (or None).
    subt = None
    # Variable subtoken currently being accumulated (or None).  At most
    # one of subt / var is active at any time.
    var = None
    for c in token.text:
        if escape:
            escape = False
            # Keep the backslash unless it escaped the prefix character.
            if c != prefix:
                subt.text += '\\'
            subt.text += c
        elif var:
            rc = var.add_char(c)
            if rc == TokenEnd:
                # The variable ended at this character; emit it, then
                # start whatever the terminating character begins.
                yield var
                var = None
                if c == prefix:
                    var = VariableToken(index, c)
                else:
                    if c == '\\':
                        escape = True
                        c = ''
                    subt = StringToken(index,
                                       c,
                                       token.quote,
                                       features=features)
        elif c == prefix:
            # Prefix starts a variable; flush the pending string first.
            if subt:
                yield subt
                subt = None
            var = VariableToken(index, c)
        else:
            if c == '\\':
                escape = True
                c = ''

            if not subt:
                subt = StringToken(index, c, token.quote, features=features)
            else:
                subt.text += c
        index += 1

    # NOTE(review): a trailing lone backslash (escape still True here) is
    # silently dropped — confirm that is the intended behavior.
    if subt:
        yield subt
    elif var:
        yield var
示例#9
0
    def preprocess_single(self, raw, origin):
        """Expand *raw* through this preprocessor and return the result.

        The raw string is wrapped in a double-quoted StringToken,
        tokenized, escape-cleaned, and joined back into a single string.
        Returns '' when tokenizing produces nothing.
        """
        tokens = self.on_tokenize([StringToken(0, raw, quote='"')], origin)
        if not tokens:
            return ''

        parser = StatementParser(self.features)
        parser.clean_escapes(tokens)
        return ''.join(token.text for token in tokens)
示例#10
0
File: shell.py  Project: lnenov/pypsi
    def preprocess_single(self, raw, origin):
        """Run every registered preprocessor over *raw* and return the
        expanded text.

        Stops as soon as any preprocessor yields no tokens, returning ''.
        Otherwise, escapes are cleaned and the token texts are joined.
        """
        tokens = [StringToken(0, raw, quote='"')]
        for preprocessor in self.preprocessors:
            tokens = preprocessor.on_tokenize(self, tokens, origin)
            if not tokens:
                return ''

        self.parser.clean_escapes(tokens)
        return ''.join(token.text for token in tokens)
示例#11
0
    def on_tokenize(self, shell, tokens, origin):
        """Expand variable references inside string tokens.

        Non-string tokens and strings without the prefix pass through
        unchanged; variable subtokens are expanded and emitted as
        double-quoted string tokens.
        """
        output = []
        for token in tokens:
            expandable = (isinstance(token, StringToken)
                          and self.prefix in token.text)
            if not expandable:
                output.append(token)
                continue

            for sub in get_subtokens(token, self.prefix, shell.features):
                if isinstance(sub, StringToken):
                    output.append(sub)
                else:
                    value = self.expand(shell, sub)
                    # Quote the expansion so it stays a single string.
                    output.append(StringToken(sub.index, value, '"'))

        return output
示例#12
0
 def test_on_tokenize(self):
     # Each variable reference becomes a double-quoted string token
     # holding its value; surrounding tokens are preserved in order.
     self.shell.ctx.vars['token'] = 'first_token'
     self.shell.ctx.vars['message'] = 'second_message'
     source = [
         StringToken(0, 'first $token'),
         WhitespaceToken(1),
         StringToken(2, 'second $message'),
     ]
     expected = [
         StringToken(0, 'first '),
         StringToken(1, 'first_token', quote='"'),
         WhitespaceToken(2),
         StringToken(3, 'second '),
         StringToken(4, 'second_message', quote='"'),
     ]
     assert self.plugin.on_tokenize(self.shell, source, 'input') == expected
示例#13
0
 def test_get_subtokens_none(self):
     # Without the prefix, the whole text comes back as one string
     # subtoken.
     actual = list(get_subtokens(StringToken(0, 'hello, adam'), '$',
                                 self.features))
     assert actual == [StringToken(0, 'hello, adam')]