def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
    input_text = document.text
    try:
        command, _ = split_command_args(input_text, all_commands)
        # compile grammar for this command
        grammar = get_command_grammar(command)
        self._current_lexer = GrammarLexer(grammar, lexers=lexers_mapping)
    except InvalidArguments:
        self._current_lexer = self._dummy
    return self._current_lexer.lex_document(document)
class IRedisLexer(Lexer):
    """
    Lexer that dynamically picks a GrammarLexer for the command currently
    being typed, falling back to a plain SimpleLexer when the input cannot
    be parsed.
    """

    def __init__(self) -> None:
        self._current_lexer = self._dummy = SimpleLexer()

    def lex_document(self, document: Document) -> Callable[[int], StyleAndTextTuples]:
        input_text = document.text
        try:
            command, _ = split_command_args(input_text, all_commands)
            # compile grammar for this command
            grammar = get_command_grammar(command)
            self._current_lexer = GrammarLexer(grammar, lexers=lexers_mapping)
        except InvalidArguments:
            self._current_lexer = self._dummy
        return self._current_lexer.lex_document(document)

    def invalidation_hash(self) -> Hashable:
        lexer = self._current_lexer or self._dummy
        return id(lexer)
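
# Usage sketch (not part of the class above; the PromptSession wiring below is
# an assumption about how such a dynamic lexer is typically attached).
# prompt_toolkit calls lex_document() whenever the input changes, so the lexer
# can swap in a per-command grammar as the user types.
from prompt_toolkit import PromptSession

session = PromptSession(lexer=IRedisLexer())
# text = session.prompt("> ")  # input is highlighted per the detected command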
def create_prompt():
    lexer = {
        name: token
        for token, level in ((Token.Command, COMMANDS),
                             (Token.Operator, SUBCOMMANDS),
                             (Token.Other, ARGUMENTS))
        for names in level.values()
        for name in names
    }
    # print('LEXER:', lexer)
    lexer = GrammarLexer(GRAMMAR, lexer)

    completer = {
        k: WordCompleter(v)
        for item in LEVELS.values()
        for k, v in item.items()
    }
    # print('COMPLETER:', completer)
    completer = GrammarCompleter(GRAMMAR, completer)

    return functools.partial(
        get_input,
        message=DEFAULT_PROMPT,   # text at the beginning
        lexer=lexer,
        completer=completer,      # cf above
        style=ExampleStyle,       # pygmentation
        patch_stdout=True,        # printing occurs above the prompt line
    )
def get_lexer(command_groups, redis_grammar):
    """
    Map each token of the input command to a lexer so it is rendered in color.
    The style classes used here are converted to concrete styles in style.py.
    """
    # pygments token
    # http://pygments.org/docs/tokens/
    lexers_dict = {
        "key": SimpleLexer("class:key"),
        "keys": SimpleLexer("class:key"),
        "newkey": SimpleLexer("class:important-key"),
        "destination": SimpleLexer("class:important-key"),
        "member": SimpleLexer("class:member"),
        "members": SimpleLexer("class:member"),
        "value": SimpleLexer("class:string"),
        "values": SimpleLexer("class:string"),
        "lexmin": SimpleLexer("class:string"),
        "lexmax": SimpleLexer("class:string"),
        "bit": SimpleLexer("class:bit"),
        "expiration": SimpleLexer("class:integer"),
        "second": SimpleLexer("class:integer"),
        "millisecond": SimpleLexer("class:integer"),
        "start": SimpleLexer("class:integer"),
        "float": SimpleLexer("class:integer"),
        "end": SimpleLexer("class:integer"),
        "delta": SimpleLexer("class:integer"),
        "offset": SimpleLexer("class:integer"),
        "count": SimpleLexer("class:integer"),
        "index": SimpleLexer("class:index"),
        "password": SimpleLexer("class:password"),
        "min": SimpleLexer("class:integer"),
        "max": SimpleLexer("class:integer"),
        "score": SimpleLexer("class:integer"),
        "timeout": SimpleLexer("class:integer"),
        "position": SimpleLexer("class:integer"),
        "cursor": SimpleLexer("class:integer"),
        "pattern": SimpleLexer("class:pattern"),
        "type": SimpleLexer("class:string"),
        "fields": SimpleLexer("class:field"),
        "field": SimpleLexer("class:field"),
        # const
        "condition": SimpleLexer("class:const"),
        "operation": SimpleLexer("class:const"),
        "withscores": SimpleLexer("class:const"),
        "limit": SimpleLexer("class:const"),
        "match": SimpleLexer("class:const"),
        "count_const": SimpleLexer("class:const"),
        "type_const": SimpleLexer("class:const"),
        "position_choice": SimpleLexer("class:const"),
    }
    lexers_dict.update({key: SimpleLexer("class:command") for key in command_groups})
    lexer = GrammarLexer(redis_grammar, lexers=lexers_dict)
    return lexer
def create_lexer():
    g = create_ipython_grammar()

    return GrammarLexer(g, lexers={
        'percent': SimpleLexer('class:pygments.operator'),
        'magic': SimpleLexer('class:pygments.keyword'),
        'filename': SimpleLexer('class:pygments.name'),
        'python': PygmentsLexer(PythonLexer),
        'system': PygmentsLexer(BashLexer),
    })
def create_command_lexer():
    """
    Lexer for highlighting of the command line.
    """
    return GrammarLexer(COMMAND_GRAMMAR, lexers={
        'command': SimpleLexer(Token.CommandLine.Command),
        'location': SimpleLexer(Token.CommandLine.Location),
        'shell_command': PygmentsLexer(BashLexer),
    })
def create_lexer():
    g = create_ipython_grammar()

    return GrammarLexer(g, lexers={
        'percent': SimpleLexer(Token.Operator),
        'magic': SimpleLexer(Token.Keyword),
        'filename': SimpleLexer(Token.Name),
        'python': PygmentsLexer(PythonLexer),
        'system': PygmentsLexer(BashLexer),
    })
def create_command_lexer():
    """
    Lexer for highlighting of the command line.
    """
    return GrammarLexer(COMMAND_GRAMMAR, lexers={
        'command': SimpleLexer('class:commandline.command'),
        'location': SimpleLexer('class:commandline.location'),
        'shell_command': PygmentsLexer(BashLexer),
    })
def create_command_lexer():
    """
    Lexer for highlighting of the command line.
    """
    return GrammarLexer(COMMAND_GRAMMAR, tokens={
        'command': Token.CommandLine.Command,
        'filename': Token.CommandLine.Filename,
    }, lexers={
        'shell_command': BashLexer,
    })
def create_lexer():
    g = create_ipython_grammar()

    return GrammarLexer(
        g,
        lexers={
            "percent": SimpleLexer("class:pygments.operator"),
            "magic": SimpleLexer("class:pygments.keyword"),
            "filename": SimpleLexer("class:pygments.name"),
            "python": PygmentsLexer(PythonLexer),
            "system": PygmentsLexer(BashLexer),
        },
    )
def create_lexer():
    g = create_ipython_grammar()

    return GrammarLexer(g, tokens={
        'percent': Token.Operator,
        'magic': Token.Keyword,
        'filename': Token.Name,
    }, lexers={
        'python': PythonLexer,
        'system': BashLexer,
    })
def get_lexer(command_groups, redis_grammar):
    # pygments token
    # http://pygments.org/docs/tokens/
    lexers_dict = {
        "key": SimpleLexer("class:key"),
        "keys": SimpleLexer("class:key"),
        "index": SimpleLexer("class:integer"),
        "password": SimpleLexer("class:password"),
    }
    lexers_dict.update(
        {key: SimpleLexer("class:pygments.keyword") for key in command_groups})
    lexer = GrammarLexer(redis_grammar, lexers=lexers_dict)
    return lexer
def start():
    g = create_grammar()

    lexer = GrammarLexer(g, lexers={
        'op_mac': SimpleLexer(Token.Operator),
        'op_main': SimpleLexer(Token.Operator),
        'op_instance': SimpleLexer(Token.Operator),
        'op_configuration': SimpleLexer(Token.Operator),
        'op_infrastructure': SimpleLexer(Token.Operator),
        'op_parameter': SimpleLexer(Token.Text),
    })

    completer = GrammarCompleter(g, {
        'op_main': WordCompleter(op_main),
        'op_instance': WordCompleter(op_instance),
        'op_configuration': WordCompleter(op_configuration),
        'op_infrastructure': WordCompleter(op_infrastructure),
    })

    history = InMemoryHistory()
    parser = maccli.mac_cli.initialize_parser()

    show("Start typing 'mac', CTRL+C to exit")

    user_aborted = False
    program_running = True
    while program_running:
        try:
            text = prompt('> ', lexer=lexer, completer=completer,
                          style=MacStyle, history=history,
                          auto_suggest=AutoSuggestFromHistory())
            argv_raw = shlex.split(text)
            argv = maccli.mac_cli.patch_help_option(argv_raw)
            args = parser.parse_args(argv)
            maccli.mac_cli.dispatch_cmds(args)
            user_aborted = False
        except InternalError as e:
            maccli.logger.debug("Code raised InternalError: %s", e)
        except EOFError as e:
            maccli.logger.debug("Code raised EOFError: %s", e)
        except KeyboardInterrupt as e:
            maccli.logger.debug("Code raised KeyboardInterrupt: %s", e)
            if user_aborted:
                program_running = False
            else:
                user_aborted = True
                show("Press CTRL+C again to exit")
def get_lexer(command_groups, redis_grammar):
    """
    Map each token of the input command to a lexer so it is rendered in color.
    """
    # pygments token
    # http://pygments.org/docs/tokens/
    lexers_dict = {
        "key": SimpleLexer("class:key"),
        "keys": SimpleLexer("class:key"),
        "value": SimpleLexer("class:string"),
        "expiration": SimpleLexer("class:integer"),
        "condition": SimpleLexer("class:const"),
        "index": SimpleLexer("class:index"),
        "password": SimpleLexer("class:password"),
    }
    lexers_dict.update({key: SimpleLexer("class:command") for key in command_groups})
    lexer = GrammarLexer(redis_grammar, lexers=lexers_dict)
    return lexer
def apply_transformation(
    self, transformation_input: TransformationInput
) -> Transformation:
    input_text = transformation_input.document.text
    try:
        command, _ = split_command_args(input_text, all_commands)
    except InvalidArguments:
        self.command_holder.command = None
        self.session.completer = default_completer
        self.session.lexer = default_lexer
    else:
        self.command_holder.command = command.upper()
        # compile grammar for this command
        grammar = get_command_grammar(command)
        lexer = GrammarLexer(grammar, lexers=lexers_mapping)
        completer = GrammarCompleter(grammar, completer_mapping)
        self.session.completer = completer
        self.session.lexer = lexer
    return Transformation(transformation_input.fragments)
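
# Sketch of how a processor with an apply_transformation() like the one above
# is usually attached (assumptions: the enclosing class is a prompt_toolkit
# Processor subclass, called UserInputCommand here purely for illustration).
# PromptSession accepts a list of input processors; while the input is being
# rendered, the processor inspects the text and swaps session.lexer and
# session.completer to match the detected command.
from prompt_toolkit import PromptSession

# session = PromptSession(input_processors=[UserInputCommand(command_holder)])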
def __init__(self, config, db, scanner):
    """class object initialization"""
    self.log = Log(name='shell')
    self.config = config
    self.db = db
    self.scan = scanner
    self.parser = Parser()
    self.collors = (
        '#000000', '#800000', '#008000', '#808000', '#000080', '#800080',
        '#008080', '#c0c0c0', '#808080', '#ff0000', '#00ff00', '#ffff00',
        '#0000ff', '#ff00ff', '#00ffff', '#ffffff', '#000000', '#00005f',
        '#000087', '#0000af', '#0000d7', '#0000ff', '#005f00', '#005f5f',
        '#005f87', '#005faf', '#005fd7', '#005fff', '#008700', '#00875f',
        '#008787', '#0087af', '#0087d7', '#0087ff', '#00af00', '#00af5f',
        '#00af87', '#00afaf', '#00afd7', '#00afff', '#00d700', '#00d75f',
        '#00d787', '#00d7af', '#00d7d7', '#00d7ff', '#00ff00', '#00ff5f',
        '#00ff87', '#00ffaf', '#00ffd7', '#00ffff', '#5f0000', '#5f005f',
        '#5f5fd7', '#5faf5f', '#5f0087', '#5f00af', '#5f00d7', '#5f00ff',
        '#5f5f00', '#5f5f5f', '#5f5f87', '#5f5faf', '#5f5fff', '#5f8700',
        '#5f875f', '#5f8787', '#5f87af', '#5f87d7', '#5f87ff', '#5faf00',
        '#5faf87', '#5fafaf', '#5fafd7', '#5fafff', '#5fd700', '#5fd75f',
        '#5fd787', '#5fd7af', '#5fd7ff', '#5fff00', '#5fff5f', '#5fff87',
        '#5fffaf', '#5fffd7', '#5fffff', '#870000', '#870087', '#8700af',
        '#8700d7', '#8700ff', '#875f00', '#875f5f', '#875f87', '#875faf',
        '#875fff', '#878700', '#87875f', '#878787', '#8787af', '#8787d7',
        '#8787ff', '#87af00', '#87af87', '#87afaf', '#87afd7', '#87afff',
        '#87d700', '#87d75f', '#87d787', '#87d7af', '#87d7ff', '#87ff00',
        '#87ff5f', '#87ff87', '#87ffaf', '#87ffd7', '#87ffff', '#af0000',
        '#af0087', '#af00af', '#af00d7', '#af00ff', '#af5f00', '#af5f5f',
        '#af5f87', '#af5faf', '#af5fff', '#af8700', '#af875f', '#af8787',
        '#af87af', '#af87d7', '#af87ff', '#afaf00', '#afaf87', '#afafaf',
        '#afafd7', '#afafff', '#afd700', '#afd75f', '#afd787', '#afd7af',
        '#afd7ff', '#afff00', '#afff5f', '#afff87', '#afffaf', '#afffd7',
        '#afffff', '#d70000', '#d70087', '#d700af', '#d700d7', '#d700ff',
        '#d75f00', '#d75f5f', '#d75f87', '#d75faf', '#d75fff', '#d78700',
        '#d7875f', '#d78787', '#d787af', '#d787d7', '#d787ff', '#d7af00',
        '#d7af87', '#d7afaf', '#d7afd7', '#d7afff', '#d7d700', '#d7d75f',
        '#d7d787', '#d7d7af', '#d7d7ff', '#d7ff00', '#d7ff5f', '#d7ff87',
        '#d7ffaf', '#d7ffd7', '#d7ffff', '#ff0000', '#ff0087', '#ff00af',
        '#ff00d7', '#ff00ff', '#ff5f00', '#ff5f5f', '#ff5f87', '#ff5faf',
        '#ff5fff', '#ff8700', '#ff875f', '#ff8787', '#ff87af', '#ff87d7',
        '#ff87ff', '#ffaf00', '#ffaf87', '#ffafaf', '#ffafd7', '#ffafff',
        '#ffd700', '#ffd75f', '#ffd787', '#ffd7af', '#ffd7ff', '#ffff00',
        '#ffff5f', '#ffff87', '#ffffaf', '#ffffd7', '#ffffff', '#080808',
        '#1c1c1c', '#262626', '#303030', '#3a3a3a', '#444444', '#4e4e4e',
        '#585858', '#626262', '#767676', '#808080', '#8a8a8a', '#949494',
        '#9e9e9e', '#a8a8a8', '#b2b2b2', '#bcbcbc', '#d0d0d0', '#dadada',
        '#e4e4e4', '#eeeeee', '#5fd7d7', '#87005f', '#875fd7', '#875fd7',
        '#87af5f', '#87d7d7', '#af005f', '#af5fd7', '#afaf5f', '#afd7d7',
        '#d7005f', '#d75fd7', '#d7af5f', '#d7d7d7', '#ff005f', '#ff5fd7',
        '#ffaf5f', '#ffd7d7', '#121212', '#6c6c6c', '#c6c6c6',
    )
    self.commands = {
        'set': self.config.get_all_start_config_key(),
        'show': {
            'config': 'print current config (takes param)',
            'host': 'print host table (takes param)',
            'port': 'print port table',
            'task': 'print running tasks',
            'log': 'print the last n lines of the log file'
        },
        'sync': {
            'config': 'synchronizes the configuration file'
        },
        'run': self.get_scanner_methods(self.scan),
        'workspase': self.get_all_workspase(),
        'flush': {},
        'kill': {},
        'help': {},
        'exit': {}
    }
    self.c_function = {
        'set': self.f_set,
        'show': self.f_show,
        'sync': self.f_sync,
        'run': self.f_run,
        'workspase': self.f_workspase,
        'flush': self.f_flush,
        'kill': self.f_kill,
        'help': self.f_help,
        'exit': self.f_exit
    }
    self.grammar = compile(r"""
        (\s* (?P<command>[a-z]+) \s*) |
        (\s* (?P<command>[a-z]+) \s+ (?P<operator>[A-Za-z0-9_-]+) \s*) |
        (\s* (?P<command>[a-z]+) \s+ (?P<operator>[A-Za-z0-9_-]+) \s+ (?P<parameter>[A-Za-z0-9.,-_/+*]+) \s*)
    """)
    self.style = Style.from_dict({
        'command': '#216f21 bold',
        'operator': '#6f216f bold',
        'parameter': '#ff0000 bold',
        'trailing-input': 'bg:#662222 #ffffff',
        'bottom-toolbar': '#6f216f bg:#ffffff',
        # Logo.
        'bear': random.choice(self.collors),
        'text': random.choice(self.collors),
        # User input (default text).
        '': '#ff0066',
        # Prompt.
        'prompt_for_input': '#6f216f',
    })
    self.lexer = GrammarLexer(self.grammar, lexers={
        'command': SimpleLexer('class:command'),
        'operator': SimpleLexer('class:operator'),
        'parameter': SimpleLexer('class:parameter')
    })
    self.completer = GCompleter(self.commands)
    self.history_path = Path(
        os.path.abspath(
            os.path.join(os.path.dirname(__file__), '../.history')))
    self.history = FileHistory(self.history_path)

    version_str = ''.join(['v', gummy.__version__, ' '])
    self.logo = HTML(f'''
<text> </text><bear> _ _ </bear>
<text> _____ _ _ __ __ __ ____ __</text><bear> (c).-.(c) </bear>
<text> / ____| | | | \/ | \/ \ \ / /</text><bear> / ._. \ </bear>
<text> | | __| | | | \ / | \ / |\ \_/ / </text><bear> __\( Y )/__ </bear>
<text> | | |_ | | | | |\/| | |\/| | \ / </text><bear> (_.-/'-'\-._)</bear>
<text> | |__| | |__| | | | | | | | | | </text><bear> || </bear><text>G</text><bear> || </bear>
<text> \_____|\____/|_| |_|_| |_| |_| </text><bear> _.' `-' '._ </bear>
<text> </text><bear> (.-./`-'\.-.)</bear>
<text>{version_str:>38}</text><bear> `-' `-'</bear>
''')
    self.prompt_str = [('class:prompt_for_input', '>>> ')]
    self.counter = 0
    self.sync_config_stat = 0
return compile(r""" (\s* (?P<operator>[a-z]+) (\s+ (?P<var>[0-9.]+) )*) """) example_style = Style.from_dict({ 'filter': '#33aa33 bold', 'trailing-input': 'bg:#662222 #ffffff', }) if __name__ == '__main__': g = create_grammar() lexer = GrammarLexer(g, lexers={ 'operator': SimpleLexer('class:operator'), 'var': SimpleLexer('class:number'), }) completer = GrammarCompleter(g, { 'operator': WordCompleter(operators), }) try: # REPL loop. while True: # Read input and parse the result. text = prompt('Calculate: ', lexer=lexer, completer=completer, style=example_style)
    Token.Operator: '#33aa33 bold',
    Token.Number: '#aa3333 bold',
    Token.Menu.Completions.Completion.Current: 'bg:#00aaaa #000000',
    Token.Menu.Completions.Completion: 'bg:#008888 #ffffff',
    Token.Menu.Completions.ProgressButton: 'bg:#003333',
    Token.Menu.Completions.ProgressBar: 'bg:#00aaaa',
}


if __name__ == '__main__':
    g = create_grammar()

    lexer = GrammarLexer(g, tokens={
        'operator1': Token.Operator,
        'operator2': Token.Operator,
        'var1': Token.Number,
        'var2': Token.Number,
    })

    completer = GrammarCompleter(g, {
        'operator1': WordCompleter(operators1),
        'operator2': WordCompleter(operators2),
    })

    try:
        # REPL loop.
        while True:
            # Read input and parse the result.
            text = get_input('Calculate: ',
""") example_style = style_from_dict({ Token.Operator: '#33aa33 bold', Token.Number: '#aa3333 bold', Token.TrailingInput: 'bg:#662222 #ffffff', }) if __name__ == '__main__': g = create_grammar() lexer = GrammarLexer(g, lexers={ 'operator1': SimpleLexer(Token.Operator), 'operator2': SimpleLexer(Token.Operator), 'var1': SimpleLexer(Token.Number), 'var2': SimpleLexer(Token.Number), }) completer = GrammarCompleter( g, { 'operator1': WordCompleter(operators1), 'operator2': WordCompleter(operators2), }) try: # REPL loop. while True: # Read input and parse the result. text = prompt('Calculate: ',
example_style = Style.from_dict({
    "operator": "#33aa33 bold",
    "number": "#ff0000 bold",
    "trailing-input": "bg:#662222 #ffffff",
})


if __name__ == "__main__":
    g = create_grammar()

    lexer = GrammarLexer(
        g,
        lexers={
            "operator1": SimpleLexer("class:operator"),
            "operator2": SimpleLexer("class:operator"),
            "var1": SimpleLexer("class:number"),
            "var2": SimpleLexer("class:number"),
        },
    )

    completer = GrammarCompleter(
        g,
        {
            "operator1": WordCompleter(operators1),
            "operator2": WordCompleter(operators2),
        },
    )

    try:
        # REPL loop.
"end": SimpleLexer("class:integer"), "delta": SimpleLexer("class:integer"), "offset": SimpleLexer("class:integer"), "count": SimpleLexer("class:integer"), "index": SimpleLexer("class:index"), "clientid": SimpleLexer("class:integer"), "password": SimpleLexer("class:password"), "min": SimpleLexer("class:integer"), "max": SimpleLexer("class:integer"), "score": SimpleLexer("class:integer"), "timeout": SimpleLexer("class:integer"), "position": SimpleLexer("class:integer"), "cursor": SimpleLexer("class:integer"), "pattern": SimpleLexer("class:pattern"), "type": SimpleLexer("class:string"), "fields": SimpleLexer("class:field"), "field": SimpleLexer("class:field"), "parameter": SimpleLexer("class:field"), "channel": SimpleLexer("class:channel"), "double_lua": PygmentsLexer(LuaLexer), "single_lua": PygmentsLexer(LuaLexer), "command": SimpleLexer("class:command"), } lexers_dict.update({key: SimpleLexer("class:const") for key in CONST}) return lexers_dict lexers_mapping = get_lexer_mapping() default_lexer = GrammarLexer(command_grammar, lexers=lexers_mapping)
def initializeGrammarLexer(self):
    self.grammarLexer = GrammarLexer(self.grammar, lexers=self.lexers)