import unittest

# Module under test; adjust this import to wherever strip_js_comments lives
# in your checkout.
import strip_js_comments


class TokenizeJSTest(unittest.TestCase):
  """Unit tests for the strip_js_comments._TokenizeJS helper."""

  def test_tokenize_slashslash_comment(self):
    tokens = list(strip_js_comments._TokenizeJS('A // foo'))
    self.assertEqual(['A ', '//', ' foo'], tokens)

  def test_tokenize_empty(self):
    tokens = list(strip_js_comments._TokenizeJS(''))
    self.assertEqual([], tokens)

  def test_tokenize_nl(self):
    tokens = list(strip_js_comments._TokenizeJS('\n'))
    self.assertEqual(['\n'], tokens)

  def test_tokenize_cstyle_comment_multi_line(self):
    tokens = list(strip_js_comments._TokenizeJS('A /* foo\n*bar\n*/'))
    self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens)

  def test_tokenize_cstyle_comment_one_line(self):
    tokens = list(strip_js_comments._TokenizeJS('A /* foo */'))
    self.assertEqual(['A ', '/*', ' foo ', '*/'], tokens)

  def test_tokenize_slashslash_comment_then_newline(self):
    tokens = list(strip_js_comments._TokenizeJS('A // foo\nbar'))
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)


if __name__ == '__main__':
  unittest.main()
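
The expected token streams above imply a tokenizer that splits JavaScript source on newlines and the comment delimiters '//', '/*', and '*/', yielding every piece in order without dropping any text. The real strip_js_comments implementation is not shown here; a minimal sketch consistent with these tests, assuming a simple regex split, could look like this:

import re

# Delimiters the tests expect to appear as standalone tokens.
_DELIMITER_RE = re.compile(r'(\n|//|/\*|\*/)')


def _TokenizeJS(text):
  """Yields text runs, comment delimiters, and newlines in source order."""
  for token in _DELIMITER_RE.split(text):
    if token:  # re.split() yields empty strings around adjacent delimiters.
      yield token

With this sketch, list(_TokenizeJS('A /* foo\n*bar\n*/')) produces ['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], matching test_tokenize_cstyle_comment_multi_line above.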