Example #1
 def testBoldRightArrow(self):
     tok = Tokenizer('echo=>x')
     self.assertEquals([(LITERAL, 'echo'), (BOLD_RIGHT_ARROW, '=>'),
                        (LITERAL, 'x'), (EOF, '')], list(tok))
     tok = Tokenizer('echo => x')
     self.assertEquals([(LITERAL, 'echo'), (BOLD_RIGHT_ARROW, '=>'),
                        (LITERAL, 'x'), (EOF, '')], list(tok))
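These tokenizer tests compare the full token stream against a list of (TOKEN_TYPE, text) pairs, always terminated by (EOF, ''). A minimal sketch of consuming the stream directly by iteration; the module path is an assumption for illustration and is not shown in these examples:

 # Sketch only: Tokenizer is iterable and yields (TOKEN_TYPE, text) tuples,
 # ending with (EOF, ''). The import path is assumed.
 from tokenizer import Tokenizer

 for token_type, text in Tokenizer('echo => x'):
     print('%s %r' % (token_type, text))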
Example #2
 def testRightArrow(self):
     tok = Tokenizer('echo->x')
     self.assertEquals([(LITERAL, 'echo'), (RIGHT_ARROW, '->'),
                        (LITERAL, 'x'), (EOF, '')], list(tok))
     tok = Tokenizer('echo -> x')
     self.assertEquals([(LITERAL, 'echo'), (RIGHT_ARROW, '->'),
                        (LITERAL, 'x'), (EOF, '')], list(tok))
Example #3
 def testHyphenEqualInLiteral(self):
     tok = Tokenizer('echo x- -- -')
     self.assertEquals([(LITERAL, 'echo'), (SPACE, ' '), (LITERAL, 'x-'),
                        (SPACE, ' '), (LITERAL, '--'), (SPACE, ' '),
                        (LITERAL, '-'), (EOF, '')], list(tok))
     tok = Tokenizer('echo x= == =')
     self.assertEquals([(LITERAL, 'echo'), (SPACE, ' '), (LITERAL, 'x='),
                        (SPACE, ' '), (LITERAL, '=='), (SPACE, ' '),
                        (LITERAL, '='), (EOF, '')], list(tok))
Example #4
 def testPipeWithoutSpace(self):
     tok = Tokenizer('cat|/tmp/out')
     self.assertEquals([
         (LITERAL, 'cat'),
         (PIPE, '|'),
         (LITERAL, '/tmp/out'),
         (EOF, ''),
     ], list(tok))
Example #5
 def testSubstitutionUnderscore(self):
     tok = Tokenizer('echo $__init__')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SUBSTITUTION, '$__init__'),
         (EOF, ''),
     ], list(tok))
Example #6
 def testExpression(self):
     tok = Tokenizer('echo ${{1: 10}}')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SUBSTITUTION, '${{1: 10}}'),
         (EOF, ''),
     ], list(tok))
Example #7
 def testSubstitutionSuffix(self):
     alias_map = {'ls': ('ls -la', False)}
     tok = Tokenizer('ls$x', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'ls'),
         (SUBSTITUTION, '$x'),
         (EOF, ''),
     ], list(tok))
Example #8
 def testSubstitutionPrefix(self):
     alias_map = {'GR': ('| grep', True)}
     tok = Tokenizer('${x}GR', alias_map=alias_map)
     self.assertEquals([
         (SUBSTITUTION, '${x}'),
         (LITERAL, 'GR'),
         (EOF, ''),
     ], list(tok))
Example #9
 def testSemicolon(self):
     input = 'echo; cat;'
     parser = Parser(Tokenizer(input))
     ast = parser.parse()
     self.assertTrue(isinstance(ast, BinaryOp))
     self.assertEquals(';', ast.op)
     self.assertTrue(isinstance(ast.left, Process))
     self.assertTrue(isinstance(ast.right, Process))
Example #10
 def test2_2(self):
     tok = Tokenizer('cat\t/tmp/www/foo.txt')
     self.assertEquals([
         (LITERAL, 'cat'),
         (SPACE, ' '),
         (LITERAL, '/tmp/www/foo.txt'),
         (EOF, ''),
     ], list(tok))
Example #11
 def testRedirectAppend(self):
     # Despite the name, this checks that adjacent quoted strings are emitted
     # as separate SINGLE_QUOTED_STRING and DOUBLE_QUOTED_STRING tokens.
     tok = Tokenizer('echo \'abc\'"def"')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SINGLE_QUOTED_STRING, '\'abc\''),
         (DOUBLE_QUOTED_STRING, '"def"'),
         (EOF, ''),
     ], list(tok))
Example #12
 def testRedirectInvalidName(self):
     # '$10' is not a valid substitution name, so '$' and '10' are emitted as
     # separate LITERAL tokens rather than a SUBSTITUTION.
     tok = Tokenizer('echo $10')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, '$'),
         (LITERAL, '10'),
         (EOF, ''),
     ], list(tok))
Example #13
 def testBraceSubstitution(self):
     tok = Tokenizer('echo hoge${a}')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'hoge'),
         (SUBSTITUTION, '${a}'),
         (EOF, ''),
     ], list(tok))
Example #14
 def testSubstitutionWithoutSpace(self):
     tok = Tokenizer('echo hoge$a')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'hoge'),
         (SUBSTITUTION, '$a'),
         (EOF, ''),
     ], list(tok))
Example #15
 def testBackquoteWithSemicolon(self):
     input = 'echo `echo foo | cat`'
     parser = Parser(Tokenizer(input))
     ast = parser.parse()
     self.assertTrue(isinstance(ast, Process))
     self.assertEquals(2, len(ast.args))
     self.assertEquals(1, len(ast.args[1]))
     self.assertEquals(BACKQUOTE, ast.args[1][0][0])
     self.assertEquals('|', ast.args[1][0][1].op)
Example #16
 def testRecursiveNotGlobal(self):
     # The global alias 'sl' expands to 'ls'; the result is not expanded again
     # because the 'ls' alias is non-global and the word is not in command position.
     alias_map = {'ls': ('ls -la', False), 'sl': ('ls', True)}
     tok = Tokenizer('echo sl', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'ls'),
         (EOF, ''),
     ], list(tok))
Example #17
 def testRedirectAppend(self):
     tok = Tokenizer('echo a>>/tmp/out')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'a'),
         (REDIRECT, '>>'),
         (LITERAL, '/tmp/out'),
         (EOF, ''),
     ], list(tok))
Example #18
 def testBackquoteWithSpace(self):
     input = 'echo ` echo foo `'
     parser = Parser(Tokenizer(input))
     ast = parser.parse()
     self.assertTrue(isinstance(ast, Process))
     self.assertEquals(2, len(ast.args))
     self.assertEquals(1, len(ast.args[1]))
     self.assertEquals(BACKQUOTE, ast.args[1][0][0])
     self.assertTrue(isinstance(ast.args[1][0][1], Process))
     self.assertEquals([[(LITERAL, 'echo')], [(LITERAL, 'foo')]],
                       ast.args[1][0][1].args)
Example #19
 def testGlobal(self):
     alias_map = {'GR': ('| grep &&', True)}
     tok = Tokenizer('ls GR echo', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'ls'),
         (PIPE, '|'),
         (LITERAL, 'grep'),
         (AND_OP, '&&'),
         (LITERAL, 'echo'),
         (EOF, ''),
     ], list(tok))
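The alias examples suggest that alias_map maps an alias name to an (expansion, is_global) pair: a global alias (True) is expanded wherever it appears on the line, while a non-global alias (False) is left alone outside command position (see testNotGlobal below). A minimal sketch under those assumptions; the import path is assumed for illustration:

 # Sketch only: alias_map maps name -> (expansion, is_global), following the
 # tests above; the import path is assumed.
 from tokenizer import Tokenizer

 alias_map = {
     'GR': ('| grep', True),    # global: expanded anywhere on the line
     'ls': ('ls -la', False),   # non-global: left alone outside command position
 }
 print(list(Tokenizer('ls GR foo', alias_map=alias_map)))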
Example #20
 def testNotGlobal(self):
     alias_map = {'ls': ('ls -la', False)}
     tok = Tokenizer('echo ls /tmp/www/foo.txt', alias_map=alias_map)
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (LITERAL, 'ls'),
         (SPACE, ' '),
         (LITERAL, '/tmp/www/foo.txt'),
         (EOF, ''),
     ], list(tok))
Example #21
 def testSubstitution(self):
     tok = Tokenizer('echo $a$b/$c')
     self.assertEquals([
         (LITERAL, 'echo'),
         (SPACE, ' '),
         (SUBSTITUTION, '$a'),
         (SUBSTITUTION, '$b'),
         (LITERAL, '/'),
         (SUBSTITUTION, '$c'),
         (EOF, ''),
     ], list(tok))
Example #22
 def testAndOrOperator(self):
     tok = Tokenizer('foo && bar || a&&b||c')
     self.assertEquals([
         (LITERAL, 'foo'),
         (AND_OP, '&&'),
         (LITERAL, 'bar'),
         (OR_OP, '||'),
         (LITERAL, 'a'),
         (AND_OP, '&&'),
         (LITERAL, 'b'),
         (OR_OP, '||'),
         (LITERAL, 'c'),
         (EOF, ''),
     ], list(tok))
Example #23
 def test(self):
     input = 'echo hoge$foo || echo piyo && cat'
     parser = Parser(Tokenizer(input))
     ast = parser.parse()
     self.assertTrue(isinstance(ast, BinaryOp))
     self.assertEquals('&&', ast.op)
     self.assertTrue(isinstance(ast.left, BinaryOp))
     self.assertEquals('||', ast.left.op)
     proc0 = ast.left.left
     self.assertTrue(isinstance(proc0, Process))
     self.assertEquals([[('literal', 'echo')],
                        [('literal', 'hoge'), ('substitution', '$foo')]],
                       proc0.args)
     self.assertFalse(proc0.redirects)
     proc1 = ast.left.right
     self.assertTrue(isinstance(proc1, Process))
     self.assertEquals([[('literal', 'echo')], [('literal', 'piyo')]],
                       proc1.args)
     self.assertFalse(proc1.redirects)
     proc2 = ast.right
     self.assertTrue(isinstance(proc2, Process))
     self.assertEquals([[('literal', 'cat')]], proc2.args)
     self.assertFalse(proc2.redirects)
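The parser tests show the AST shape: Parser.parse() returns either a Process (with .args, a list of per-argument token lists, and .redirects) or a BinaryOp (with .op, .left, .right). A minimal sketch of recursively dumping that structure; the import paths are assumptions for illustration:

 # Sketch only: walks the AST using the attribute names exercised by the tests
 # above; the import paths are assumed.
 from parser import Parser, BinaryOp, Process
 from tokenizer import Tokenizer

 def dump(node, indent=0):
     pad = ' ' * indent
     if isinstance(node, BinaryOp):
         print(pad + 'BinaryOp ' + node.op)
         dump(node.left, indent + 2)
         dump(node.right, indent + 2)
     elif isinstance(node, Process):
         print(pad + 'Process ' + repr(node.args))

 dump(Parser(Tokenizer('echo hoge$foo || echo piyo && cat')).parse())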
Example #24
 def extractResponseNames(self, content):
   parser = Parser(Tokenizer(content))
   ast = parser.parse()
   names = []
   self.extractResponseNamesInternal(ast, names)
   return names
Example #25
 def testBackQuoteWithSpace(self):
     tok = Tokenizer('echo ` echo foo `')
     self.assertEquals([(LITERAL, 'echo'), (SPACE, ' '), (BACKQUOTE, '`'),
                        (SPACE, ' '), (LITERAL, 'echo'), (SPACE, ' '),
                        (LITERAL, 'foo'), (SPACE, ' '), (BACKQUOTE, '`'),
                        (EOF, '')], list(tok))
Example #26
 def testSemicolonWithSpace(self):
     tok = Tokenizer('a ; b ; ')
     self.assertEquals([(LITERAL, 'a'), (SEMICOLON, ';'), (LITERAL, 'b'),
                        (SEMICOLON, ';'), (EOF, '')], list(tok))
Example #27
 def parse(self, input):
     tok = Tokenizer(input)
     parser = Parser(tok)
     return parser.parse()
Example #28
 def testParenthesis(self):
     tok = Tokenizer('() a(b)')
     self.assertEquals([(PARENTHESIS_START, '('), (PARENTHESIS_END, ')'),
                        (LITERAL, 'a'), (PARENTHESIS_START, '('),
                        (LITERAL, 'b'), (PARENTHESIS_END, ')'), (EOF, '')],
                       list(tok))