Example #1
 def get_tokens_unprocessed(self, text):
     # Delegate to JavascriptLexer, then promote plain identifiers that are
     # dialect-specific keywords to Keyword tokens.
     for index, token, value in JavascriptLexer.get_tokens_unprocessed(self, text):
         if token is Name.Other and value in self.EXTRA_KEYWORDS:
             yield index, Keyword, value
         else:
             yield index, token, value
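These examples appear to be methods of Pygments lexer subclasses, so they are not runnable on their own. A minimal self-contained sketch of the context they assume follows; the class name and the EXTRA_KEYWORDS values are placeholders, not taken from any real lexer:

 from pygments.lexers import JavascriptLexer
 from pygments.token import Keyword, Name

 class ExtraKeywordsLexer(JavascriptLexer):
     """Hypothetical subclass illustrating the keyword-promotion pattern."""

     EXTRA_KEYWORDS = {'task', 'defer'}  # placeholder dialect keywords

     def get_tokens_unprocessed(self, text):
         for index, token, value in JavascriptLexer.get_tokens_unprocessed(self, text):
             if token is Name.Other and value in self.EXTRA_KEYWORDS:
                 yield index, Keyword, value
             else:
                 yield index, token, value

 if __name__ == '__main__':
     for index, token, value in ExtraKeywordsLexer().get_tokens_unprocessed('task f() { defer; }'):
         print(index, token, value)

The remaining examples follow the same delegation idea, remapping different token types or preprocessing the text before handing it to JavascriptLexer.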
Example #2
 def get_tokens_unprocessed(self, text):
     # JavascriptLexer reports '#' and '@' as Error tokens; re-tag them so
     # they highlight usefully instead of as errors.
     for index, token, value in JavascriptLexer.get_tokens_unprocessed(self, text):
         if token is Error and value in ["#", "@"]:
             token_type = Name.Tag if value == "#" else Keyword
             yield index, token_type, value
         else:
             yield index, token, value
Example #3
 def get_tokens_unprocessed(self, text):
     # Track whether we are inside an input (example) line or in the output
     # that follows it, and re-tag tokens accordingly.
     is_example = False
     is_output = False
     for item in JavascriptLexer.get_tokens_unprocessed(self, text):
         if item[1] is Generic.Prompt:
             # A prompt token starts an input line.
             is_example = True
             is_output = False
         elif is_example and item[2].endswith(u"\n"):
             # The newline ends the input line; what follows is output.
             is_example = False
             is_output = True
         elif is_output:
             item = item[0], Generic.Output, item[2]
         elif item[2] in self.EXCEPTIONS:
             item = item[0], Name.Exception, item[2]
         yield item
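The base JavascriptLexer never emits Generic.Prompt on its own, so this pattern assumes a console-style subclass that adds its own prompt rule. A sketch of that assumed context follows; the prompt regex, the class name, and the EXCEPTIONS values are illustrative, not taken from the original source:

 from pygments.lexer import inherit
 from pygments.lexers import JavascriptLexer
 from pygments.token import Generic, Name

 class JavascriptConsoleLexer(JavascriptLexer):
     """Hypothetical console lexer: '> ' starts an input line, what follows is output."""

     EXCEPTIONS = {'TypeError', 'RangeError', 'ReferenceError'}  # assumed names

     tokens = {
         'root': [
             (r'^> ', Generic.Prompt),  # assumed prompt marker
             inherit,                   # keep all of JavascriptLexer's rules
         ],
     }

     def get_tokens_unprocessed(self, text):
         # Same state machine as the example above.
         is_example = False
         is_output = False
         for item in JavascriptLexer.get_tokens_unprocessed(self, text):
             if item[1] is Generic.Prompt:
                 is_example = True
                 is_output = False
             elif is_example and item[2].endswith('\n'):
                 is_example = False
                 is_output = True
             elif is_output:
                 item = item[0], Generic.Output, item[2]
             elif item[2] in self.EXCEPTIONS:
                 item = item[0], Name.Exception, item[2]
             yield item

 if __name__ == '__main__':
     sample = '> 1 + 1\n2\n'
     for index, token, value in JavascriptConsoleLexer().get_tokens_unprocessed(sample):
         print(index, token, repr(value))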
Example #4
 def get_tokens_unprocessed(self, text):
     # Munge tokens for IDL
     for index, token, value in JavascriptLexer.get_tokens_unprocessed(self, text):
         if value.find('UCS') != -1:
             # print 'UCS is a %s'%(repr(token))
             pass
         if value.find('@') != -1:
             # @ is a Token.Error, need to extend lexer to recognize
             # annotations?
             # print '%s is a %s'%(value,repr(token))
             pass
         if token is Name.Other and value in self.RESERVED_KEYWORDS:
             yield index, Keyword.Reserved, value
         elif token is Name.Other and value in self.PSEUDO_KEYWORDS:
             yield index, Keyword.Pseudo, value
         else:
             yield index, token, value
Example #5
 def get_tokens_unprocessed(self, text, stack=('root',)):
     # Strip the surrounding quote characters and turn literal '\n' escapes
     # into real newlines before handing the string body to JavascriptLexer.
     text = text[1:-1]
     text = text.replace('\\n', '\n')
     return JavascriptLexer.get_tokens_unprocessed(self, text, stack)
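This variant assumes the incoming text is itself a quoted JavaScript string literal (for example, code embedded in a host-language string). A minimal sketch of that assumed context, with an illustrative class name and sample input:

 from pygments.lexers import JavascriptLexer

 class QuotedJavascriptLexer(JavascriptLexer):
     """Hypothetical lexer for JavaScript held inside a quoted string literal."""

     def get_tokens_unprocessed(self, text, stack=('root',)):
         text = text[1:-1]                 # drop the surrounding quote characters
         text = text.replace('\\n', '\n')  # turn literal \n escapes into real newlines
         return JavascriptLexer.get_tokens_unprocessed(self, text, stack)

 if __name__ == '__main__':
     sample = '"var x = 1;\\nconsole.log(x);"'
     for index, token, value in QuotedJavascriptLexer().get_tokens_unprocessed(sample):
         print(index, token, repr(value))

Note that the yielded offsets refer to the stripped, unescaped text rather than to the original input.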