def testBoldRightArrow(self):
    """'=>' tokenizes as BOLD_RIGHT_ARROW, with or without surrounding spaces."""
    tok = Tokenizer('echo=>x')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(
        [(LITERAL, 'echo'), (BOLD_RIGHT_ARROW, '=>'), (LITERAL, 'x'), (EOF, '')],
        list(tok))
    tok = Tokenizer('echo => x')
    self.assertEqual(
        [(LITERAL, 'echo'), (BOLD_RIGHT_ARROW, '=>'), (LITERAL, 'x'), (EOF, '')],
        list(tok))
def testRightArrow(self):
    """'->' tokenizes as RIGHT_ARROW, with or without surrounding spaces."""
    tok = Tokenizer('echo->x')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(
        [(LITERAL, 'echo'), (RIGHT_ARROW, '->'), (LITERAL, 'x'), (EOF, '')],
        list(tok))
    tok = Tokenizer('echo -> x')
    self.assertEqual(
        [(LITERAL, 'echo'), (RIGHT_ARROW, '->'), (LITERAL, 'x'), (EOF, '')],
        list(tok))
def testHyphenEqualInLiteral(self):
    """Bare '-'/'=' sequences that do not form arrows stay inside LITERAL tokens."""
    tok = Tokenizer('echo x- -- -')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(
        [(LITERAL, 'echo'), (SPACE, ' '), (LITERAL, 'x-'), (SPACE, ' '),
         (LITERAL, '--'), (SPACE, ' '), (LITERAL, '-'), (EOF, '')],
        list(tok))
    tok = Tokenizer('echo x= == =')
    self.assertEqual(
        [(LITERAL, 'echo'), (SPACE, ' '), (LITERAL, 'x='), (SPACE, ' '),
         (LITERAL, '=='), (SPACE, ' '), (LITERAL, '='), (EOF, '')],
        list(tok))
def testPipeWithoutSpace(self):
    """'|' is recognized as PIPE even with no whitespace around it."""
    tok = Tokenizer('cat|/tmp/out')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'cat'),
        (PIPE, '|'),
        (LITERAL, '/tmp/out'),
        (EOF, ''),
    ], list(tok))
def testSubstitutionUnderscore(self):
    """Underscores are valid inside a $-substitution name."""
    tok = Tokenizer('echo $__init__')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (SUBSTITUTION, '$__init__'),
        (EOF, ''),
    ], list(tok))
def testExpression(self):
    """${{...}} expression syntax is captured whole as one SUBSTITUTION token."""
    tok = Tokenizer('echo ${{1: 10}}')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (SUBSTITUTION, '${{1: 10}}'),
        (EOF, ''),
    ], list(tok))
def testSubstitutionSuffix(self):
    """A non-global alias is not expanded when immediately followed by a substitution."""
    alias_map = {'ls': ('ls -la', False)}
    tok = Tokenizer('ls$x', alias_map=alias_map)
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'ls'),
        (SUBSTITUTION, '$x'),
        (EOF, ''),
    ], list(tok))
def testSubstitutionPrefix(self):
    """A global alias is not expanded when immediately preceded by a substitution."""
    alias_map = {'GR': ('| grep', True)}
    tok = Tokenizer('${x}GR', alias_map=alias_map)
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (SUBSTITUTION, '${x}'),
        (LITERAL, 'GR'),
        (EOF, ''),
    ], list(tok))
def testSemicolon(self):
    """';' parses into a BinaryOp(';') whose operands are both Process nodes."""
    # Renamed local from 'input' to avoid shadowing the builtin.
    source = 'echo; cat;'
    parser = Parser(Tokenizer(source))
    ast = parser.parse()
    self.assertTrue(isinstance(ast, BinaryOp))
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(';', ast.op)
    self.assertTrue(isinstance(ast.left, Process))
    self.assertTrue(isinstance(ast.right, Process))
def test2_2(self):
    """A tab separator is normalized to a single SPACE token."""
    tok = Tokenizer('cat\t/tmp/www/foo.txt')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'cat'),
        (SPACE, ' '),
        (LITERAL, '/tmp/www/foo.txt'),
        (EOF, ''),
    ], list(tok))
def testAdjacentQuotedStrings(self):
    """Adjacent single- and double-quoted strings tokenize as separate tokens.

    Renamed from 'testRedirectAppend': that name duplicated another method in
    this class (which does test '>>'), so this test was silently shadowed and
    never ran; the new name also matches what it actually checks.
    """
    tok = Tokenizer('echo \'abc\'"def"')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (SINGLE_QUOTED_STRING, '\'abc\''),
        (DOUBLE_QUOTED_STRING, '"def"'),
        (EOF, ''),
    ], list(tok))
def testRedirectInvalidName(self):
    """'$' followed by digits is not a valid substitution name; both stay LITERAL."""
    tok = Tokenizer('echo $10')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (LITERAL, '$'),
        (LITERAL, '10'),
        (EOF, ''),
    ], list(tok))
def testBraceSubstitution(self):
    """${name} substitution is split off from a preceding literal."""
    tok = Tokenizer('echo hoge${a}')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (LITERAL, 'hoge'),
        (SUBSTITUTION, '${a}'),
        (EOF, ''),
    ], list(tok))
def testSubstitutionWithoutSpace(self):
    """$name substitution is split off from a preceding literal without spaces."""
    tok = Tokenizer('echo hoge$a')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (LITERAL, 'hoge'),
        (SUBSTITUTION, '$a'),
        (EOF, ''),
    ], list(tok))
def testBackquoteWithSemicolon(self):
    """A backquoted command containing '|' parses into a nested BinaryOp arg.

    NOTE(review): despite the name, the input exercises a pipe, not a
    semicolon, inside the backquotes — consider renaming.
    """
    # Renamed local from 'input' to avoid shadowing the builtin.
    source = 'echo `echo foo | cat`'
    parser = Parser(Tokenizer(source))
    ast = parser.parse()
    self.assertTrue(isinstance(ast, Process))
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(2, len(ast.args))
    self.assertEqual(1, len(ast.args[1]))
    self.assertEqual(BACKQUOTE, ast.args[1][0][0])
    self.assertEqual('|', ast.args[1][0][1].op)
def testRecursiveNotGlobal(self):
    """A global alias expands in argument position, but its expansion does not
    recursively trigger the non-global 'ls' alias."""
    alias_map = {'ls': ('ls -la', False), 'sl': ('ls', True)}
    tok = Tokenizer('echo sl', alias_map=alias_map)
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (LITERAL, 'ls'),
        (EOF, ''),
    ], list(tok))
def testRedirectAppend(self):
    """'>>' tokenizes as a REDIRECT token, splitting the preceding literal."""
    tok = Tokenizer('echo a>>/tmp/out')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (LITERAL, 'a'),
        (REDIRECT, '>>'),
        (LITERAL, '/tmp/out'),
        (EOF, ''),
    ], list(tok))
def testBackquoteWithSpace(self):
    """Whitespace just inside backquotes is ignored; the inner command parses
    into a Process stored as a BACKQUOTE argument."""
    # Renamed local from 'input' to avoid shadowing the builtin.
    source = 'echo ` echo foo `'
    parser = Parser(Tokenizer(source))
    ast = parser.parse()
    self.assertTrue(isinstance(ast, Process))
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(2, len(ast.args))
    self.assertEqual(1, len(ast.args[1]))
    self.assertEqual(BACKQUOTE, ast.args[1][0][0])
    self.assertTrue(isinstance(ast.args[1][0][1], Process))
    self.assertEqual([[(LITERAL, 'echo')], [(LITERAL, 'foo')]],
                     ast.args[1][0][1].args)
def testGlobal(self):
    """A global alias expands in argument position, including operator tokens
    ('|', '&&') contained in its expansion."""
    alias_map = {'GR': ('| grep &&', True)}
    tok = Tokenizer('ls GR echo', alias_map=alias_map)
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'ls'),
        (PIPE, '|'),
        (LITERAL, 'grep'),
        (AND_OP, '&&'),
        (LITERAL, 'echo'),
        (EOF, ''),
    ], list(tok))
def testNotGlobal(self):
    """A non-global alias does NOT expand in argument (non-command) position."""
    alias_map = {'ls': ('ls -la', False)}
    tok = Tokenizer('echo ls /tmp/www/foo.txt', alias_map=alias_map)
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (LITERAL, 'ls'),
        (SPACE, ' '),
        (LITERAL, '/tmp/www/foo.txt'),
        (EOF, ''),
    ], list(tok))
def testSubstitution(self):
    """Consecutive $-substitutions and interleaved literals tokenize separately."""
    tok = Tokenizer('echo $a$b/$c')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'echo'),
        (SPACE, ' '),
        (SUBSTITUTION, '$a'),
        (SUBSTITUTION, '$b'),
        (LITERAL, '/'),
        (SUBSTITUTION, '$c'),
        (EOF, ''),
    ], list(tok))
def testAndOrOperator(self):
    """'&&' and '||' tokenize as AND_OP/OR_OP, with or without spaces."""
    tok = Tokenizer('foo && bar || a&&b||c')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual([
        (LITERAL, 'foo'),
        (AND_OP, '&&'),
        (LITERAL, 'bar'),
        (OR_OP, '||'),
        (LITERAL, 'a'),
        (AND_OP, '&&'),
        (LITERAL, 'b'),
        (OR_OP, '||'),
        (LITERAL, 'c'),
        (EOF, ''),
    ], list(tok))
def test(self):
    """'||' binds tighter than '&&' here: the tree is (echo.. || echo..) && cat,
    with each leaf a Process carrying its tokenized args and no redirects."""
    # Renamed local from 'input' to avoid shadowing the builtin.
    source = 'echo hoge$foo || echo piyo && cat'
    parser = Parser(Tokenizer(source))
    ast = parser.parse()
    self.assertTrue(isinstance(ast, BinaryOp))
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual('&&', ast.op)
    self.assertTrue(isinstance(ast.left, BinaryOp))
    self.assertEqual('||', ast.left.op)
    proc0 = ast.left.left
    self.assertTrue(isinstance(proc0, Process))
    self.assertEqual(
        [[('literal', 'echo')], [('literal', 'hoge'), ('substitution', '$foo')]],
        proc0.args)
    self.assertFalse(proc0.redirects)
    proc1 = ast.left.right
    self.assertTrue(isinstance(proc1, Process))
    self.assertEqual([[('literal', 'echo')], [('literal', 'piyo')]], proc1.args)
    self.assertFalse(proc1.redirects)
    proc2 = ast.right
    self.assertTrue(isinstance(proc2, Process))
    self.assertEqual([[('literal', 'cat')]], proc2.args)
    self.assertFalse(proc2.redirects)
def extractResponseNames(self, content):
    """Parse *content* and return the response names collected from its AST."""
    ast = Parser(Tokenizer(content)).parse()
    names = []
    # The internal walker appends into the list in place.
    self.extractResponseNamesInternal(ast, names)
    return names
def testBackQuoteWithSpace(self):
    """At the token level, backquotes and the inner spaces are all emitted."""
    tok = Tokenizer('echo ` echo foo `')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(
        [(LITERAL, 'echo'), (SPACE, ' '), (BACKQUOTE, '`'), (SPACE, ' '),
         (LITERAL, 'echo'), (SPACE, ' '), (LITERAL, 'foo'), (SPACE, ' '),
         (BACKQUOTE, '`'), (EOF, '')],
        list(tok))
def testSemicolonWithSpace(self):
    """';' emits SEMICOLON tokens and surrounding/trailing spaces are dropped."""
    tok = Tokenizer('a ; b ; ')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(
        [(LITERAL, 'a'), (SEMICOLON, ';'), (LITERAL, 'b'), (SEMICOLON, ';'),
         (EOF, '')],
        list(tok))
def parse(self, input):
    """Convenience wrapper: tokenize *input* and return the parsed AST root."""
    return Parser(Tokenizer(input)).parse()
def testParenthesis(self):
    """'(' and ')' tokenize as PARENTHESIS_START/END, even adjacent to literals."""
    tok = Tokenizer('() a(b)')
    # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
    self.assertEqual(
        [(PARENTHESIS_START, '('), (PARENTHESIS_END, ')'), (LITERAL, 'a'),
         (PARENTHESIS_START, '('), (LITERAL, 'b'), (PARENTHESIS_END, ')'),
         (EOF, '')],
        list(tok))