def createLexers(self):
    """Build the lookup table from file extension to Pygments lexer.

    Returns:
        dict: maps a file extension (or a filename fragment such as
        'make' / 'CMake') to a freshly constructed lexer instance.
    """
    # One row per lexer class; a separate lexer instance is still created
    # for every extension, matching the previous behaviour of this method.
    table = [
        (CFamilyLexer, ['.c', '.h']),
        (CppLexer, ['.cpp', '.hpp']),
        (CssLexer, ['.css']),
        (SassLexer, ['.sass']),
        (YamlLexer, ['.yaml', '.yml']),
        (JsonLexer, ['.json']),
        (CSharpLexer, ['.cs']),
        (FSharpLexer, ['.fs']),
        (EiffelLexer, ['.e']),
        (ErlangLexer, ['.erl', '.hrl', '.es']),
        (FortranLexer, ['.f03', '.f90', '.F03', '.F90']),
        (GoLexer, ['.go']),
        (HaskellLexer, ['.hs']),
        (VerilogLexer, ['.v']),
        (VhdlLexer, ['.vhdl', '.vhd']),
        (HtmlLexer, ['.html', '.htm', '.xhtml']),
        (XmlLexer, ['.xml']),
        (JavascriptLexer, ['.js']),
        # BUGFIX: TypeScript was previously registered under '.tex' (a LaTeX
        # extension); TypeScript sources use '.ts'.
        (TypeScriptLexer, ['.ts']),
        (CoffeeScriptLexer, ['.coffee']),
        (JavaLexer, ['.java']),
        (ScalaLexer, ['.scala']),
        (KotlinLexer, ['.kt', '.ktm', '.kts']),
        (CommonLispLexer, ['.lisp']),
        # Makefiles / CMake files are matched by name fragment, not extension.
        (MakefileLexer, ['make', 'Make']),
        (CMakeLexer, ['CMake', 'cmake']),
        (MatlabLexer, ['.m', '.mat']),
        (DelphiLexer, ['.dpr']),
        (PerlLexer, ['.perl']),
        (PhpLexer, ['.php']),
        (PrologLexer, ['.pr']),
        (Python3Lexer, ['.py']),
        (RubyLexer, ['.rb']),
        (BashLexer, ['.sh']),
        (MySqlLexer, ['.sql', '.mysql']),
        (TclLexer, ['.tcl']),
        (AwkLexer, ['.awk']),
    ]
    lex = {}
    for lexer_cls, extensions in table:
        for ext in extensions:
            lex[ext] = lexer_cls()
    return lex
def connections_definition():
    """Page the contents of connections.yml with YAML syntax highlighting."""
    connections_file = Settings.typhoon_home / 'connections.yml'
    highlighted = pygments.highlight(
        code=connections_file.read_text(),
        lexer=YamlLexer(),
        formatter=Terminal256Formatter(),
    )
    pydoc.pager(highlighted)
def dump_object(data, format: str = "json"):
    """Serialize *data* for terminal display.

    API errors are rendered in red; otherwise the requested *format*
    ("json" or "yaml") is produced, with a fallback message for anything else.
    """
    # Duck-typed error check: avoids importing the APIError class here.
    if type(data).__name__ == "APIError":
        return "".join((bcolors.FAIL, str(data), bcolors.ENDC))
    if format == "yaml":
        return pygments.highlight(yaml.safe_dump(data), YamlLexer(), TerminalFormatter())
    if format == "json":
        return json.dumps(data)
    return "Unknown output format"
def print_yaml(fname, return_lines=False, no_numbers=False):
    """Read a YAML file and print (or return) it with terminal highlighting."""
    with open(fname, 'r') as source:
        raw_text = source.read()
    highlighted = highlight(raw_text, YamlLexer(), Terminal256Formatter())
    if return_lines:
        return highlighted
    # Either dump the text directly or delegate to the numbered printer.
    printer = print if no_numbers else print_lines_numbered
    printer(highlighted)
def connections_get(args):
    """Get a connection."""
    try:
        connection = BaseHook.get_connection(args.conn_id)
    except AirflowNotFoundException:
        raise SystemExit("Connection not found.")
    rendered = _yamulate_connection(connection)
    # Colorize only when the CLI flags ask for it.
    if should_use_colors(args):
        rendered = pygments.highlight(
            code=rendered,
            lexer=YamlLexer(),
            formatter=get_terminal_formatter(),
        )
    print(rendered)
def admin_nat64_data(self, measurement):
    """Render measurement.nat64_data as syntax-highlighted HTML for the admin."""
    dumped = yaml.dump(measurement.nat64_data)
    html_formatter = HtmlFormatter(style='colorful')
    highlighted = highlight(dumped, YamlLexer(), html_formatter)
    # Inline the Pygments stylesheet so the page needs no external CSS file.
    stylesheet = "<style>" + html_formatter.get_style_defs() + "</style><br>"
    # Mark as safe so Django does not escape the generated markup.
    return mark_safe(stylesheet + highlighted)
def dag_definition(dag_name: str):
    """Show definition of DAG

    Locates ``<dag_name>.yml`` under the DAGs directory and pages its
    syntax-highlighted contents. Exits with status -1 when zero or more
    than one matching file is found.
    """
    matching_dags = list(Settings.dags_directory.rglob(f'*{dag_name}.yml'))
    if not matching_dags:
        print(f'FATAL: No DAGs found matching {dag_name}.yml', file=sys.stderr)
        sys.exit(-1)
    elif len(matching_dags) > 1:
        print(
            f'FATAL: Expected one matching DAG for {dag_name}.yml. Found {len(matching_dags)}',
            file=sys.stderr)
        # BUGFIX: previously fell through after printing the error and paged
        # an arbitrary match; an ambiguous name must abort like the no-match case.
        sys.exit(-1)
    out = colored(ascii_art_logo, 'cyan') + '\n' + pygments.highlight(
        code=matching_dags[0].read_text(),
        lexer=YamlLexer(),
        formatter=Terminal256Formatter())
    pydoc.pager(out)
    print(matching_dags[0])
def lexer(self):
    """Return a lexer instance whose registered filename patterns match
    ``self.extension``; fall back to NullLexer."""
    def extensions_of(lexer_cls):
        # Pygments filename globs look like '*.ext'; drop the leading '*.'.
        return [pattern[2:] for pattern in lexer_cls.filenames]

    candidates = (RenpyLexer, DiffLexer, JsonLexer, YamlLexer,
                  MarkdownLexer, XmlLexer)
    # Same precedence order as the original if/elif chain.
    for lexer_cls in candidates:
        if self.extension in extensions_of(lexer_cls):
            return lexer_cls()
    return NullLexer()
def provider_get(args):
    """Get a provider info."""
    providers = ProvidersManager().providers
    # Guard clause: unknown provider aborts the command.
    if args.provider_name not in providers:
        raise SystemExit(f"No such provider installed: {args.provider_name}")
    provider_version, provider_info = providers[args.provider_name]
    # Short comment-style header identifying the provider.
    for header_line in ("#",
                        f"# Provider: {args.provider_name}",
                        f"# Version: {provider_version}",
                        "#"):
        print(header_line)
    if not args.full:
        return
    yaml_content = yaml.dump(provider_info)
    if should_use_colors(args):
        yaml_content = pygments.highlight(
            code=yaml_content,
            lexer=YamlLexer(),
            formatter=get_terminal_formatter())
    print(yaml_content)
def highlight_syntax(
    stuff: str, theme: Optional[str], lang: str = JSON
) -> str:
    """Colorize stuff with syntax highlighting"""
    # Pick the lexer up front, exactly as before.
    lexer = JsonLexer() if lang == JSON else YamlLexer()
    if theme is None:
        # No theme configured: return the text untouched.
        return stuff
    if theme == SHELLECTRIC:
        pygment_theme = Shellectric
    else:
        # theme already validated in preferences loading
        pygment_theme = get_style_by_name(theme)
    formatter = Terminal256Formatter(style=pygment_theme)
    return cast(str, highlight(stuff, lexer, formatter))
def lexer_yaml():
    # Yields a fresh YamlLexer instance per call.
    # NOTE(review): generator form suggests a pytest fixture; any decorator
    # is outside this view — confirm at the definition site.
    yield YamlLexer()
# NOTE(review): this chunk begins mid-function — `repo_name`, `package_name`
# and `link_name` below are parameters of an enclosing definition (apparently
# the `setup_tests_link(...)` helper called further down) whose `def` line is
# outside this view; these three statements are its tail.
tests_root = os.path.join(get_repo_path(repo_name), package_name, 'tests', 'machine_files')
print("Creating '{}' link to {}".format(link_name, tests_root))
os.symlink(tests_root, link_name)

def build_event_references():
    # Make the mpf checkout importable so its doc tooling can be loaded.
    sys.path.append(get_repo_path("mpf"))
    from _doc_tools.build_events_reference_docs import run
    run(os.path.join(os.getcwd(), "events"), get_repo_path("mpf"), get_repo_path("mpf-mc"))

# Top-level build steps: link the example trees, then generate the
# event-reference docs.
setup_tests_link(mpf_examples, 'mpf', 'mpf')
setup_tests_link(mpfmc_examples, 'mpf-mc', 'mpfmc')
build_event_references()

# Source directory -> URL-path mapping consumed by ExampleBuilder.
source_dirs = {
    os.path.join(os.getcwd(), "mpf_examples"): "/mpf_examples",
    os.path.join(os.getcwd(), "mpfmc_examples"): "/mpfmc_examples"
}
examples_root = os.path.join(os.getcwd(), 'examples')
b = ExampleBuilder(source_dirs, examples_root)
b.build()

# Register MPF-specific Pygments lexers for Sphinx code blocks.
lexers['mpf-config'] = MpfLexer(startinline=True)
lexers['mpf-mc-config'] = MpfLexer(startinline=True)
lexers['test'] = YamlLexer(startinline=True)
def pretty_yaml(val: dict, indentation: int = 0) -> str:
    """Dump *val* as highlighted YAML (keys kept in insertion order),
    stripped and indented by *indentation*."""
    dumped = yaml.dump(val, sort_keys=False)
    colorized = highlight(dumped, YamlLexer(), _get_formatter())
    return _indent(colorized.strip(), indentation)
# NOTE(review): this chunk opens mid-list — the opening '[' and its
# assignment target (apparently Sphinx's exclude_patterns) are outside
# this view.
    'venv',
    'bin',
    'README.md',
    '.git',
    '.idea',
    'CONTRIBUTING.md',
    'aio.md',
    '_includes/aio-script.md'
]

# File extensions Sphinx treats as source documents.
source_suffix = ['.rst', '.md']

# -- Options for Pygments ----------------------------------------------------
# TODO: maybe write our own lexer to customize tokens, keywords, etc?
# Highlight ``cwl`` code blocks with the YAML lexer (CWL files are YAML).
lexers['cwl'] = YamlLexer()

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'pydata_sphinx_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_logo = '_static/images/logos/cwl/CWL-Logo-HD-cropped2.png'
html_favicon = '_static/images/favicons/cwl/favicon.ico'
# Sphinx conf.py fragment for the Polyphonic Polymer Bundle docs.
import sys
import os
import shlex
# Sphinx's registry of Pygments lexers, keyed by code-block language name.
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
from pygments.lexers.data import YamlLexer
from pygments.lexers.data import JsonLexer
from pygments.lexers.templates import TwigHtmlLexer
from pygments.lexers.templates import CheetahHtmlLexer

# Map code-block languages to custom lexers; PHP blocks are highlighted
# without requiring a leading '<?php' tag and get line numbers.
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
lexers['yaml'] = YamlLexer()
lexers['twig'] = TwigHtmlLexer()
# NOTE(review): 'html' blocks use the Cheetah template lexer rather than a
# plain HTML lexer — presumably intentional for templated examples; confirm.
lexers['html'] = CheetahHtmlLexer()
lexers['json'] = JsonLexer()

extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.ifconfig',
    'sphinxcontrib.inlinesyntaxhighlight'
]

# -- Project information -----------------------------------------------------
project = u'Polyphonic Polymer Bundle'
copyright = u'2015, Sean Hickey'
author = u'Sean Hickey'
version = '0.0.4'
release = '0.0.4'
language = "en"
templates_path = ['_templates']
def pprint_yaml(obj):
    """Print *obj* (YAML text) to stdout with 256-color terminal highlighting."""
    colorized = highlight(obj, YamlLexer(), Terminal256Formatter(style=STYLE))
    print(colorized)
def highlight_yaml_example(example_text: str) -> str:
    """Filter. Return a highlighted YAML version of the provided JSON text"""
    yaml_text = yaml_example(example_text)
    return highlight(yaml_text, YamlLexer(), HtmlFormatter())
class MpfLexer(YamlLexer):
    """Pygments lexer for MPF machine-config files.

    Subclasses the stock YAML lexer; the token table below is a copy of
    the YamlLexer table with extra 'root' rules that suppress MPF doc
    directives (``#!`` and most ``##!`` comment lines).  The regex rules
    and their order are load-bearing — do not reorder.
    """
    name = 'mpf-config'

    def nothing(token_class):
        """Do not produce empty tokens."""
        def callback(lexer, match, context):
            # Emit a zero-length token and advance past the match, so the
            # matched text is swallowed from the output entirely.
            yield match.start(), token_class, ""
            context.pos = match.end()
        return callback

    tokens = {
        # the root rules
        'root': [
            # ignored whitespaces
            (r'[ ]+(?=#|$)', Text),
            # line breaks
            (r'\n+', Text),
            # doc ignore comment (MPF-specific: '#!' lines are swallowed)
            (r'#![^\n]*\n', nothing(Text)),
            # MPF-specific: '##!' lines swallowed unless mode:/show: follows
            (r'##! (?!mode:|show:)[^\n]*\n', nothing(Text)),
            # a comment
            (r'#[^\n]*', Comment.Single),
            # the '%YAML' directive
            (r'^%YAML(?=[ ]|$)', YamlLexer.reset_indent(Name.Tag),
             'yaml-directive'),
            # the %TAG directive
            (r'^%TAG(?=[ ]|$)', YamlLexer.reset_indent(Name.Tag),
             'tag-directive'),
            # document start and document end indicators
            (r'^(?:---|\.\.\.)(?=[ ]|$)',
             YamlLexer.reset_indent(Name.Namespace), 'block-line'),
            # indentation spaces
            (r'[ ]*(?!\s|$)', YamlLexer.save_indent(Text, start=True),
             ('block-line', 'indentation')),
        ],
        # trailing whitespaces after directives or a block scalar indicator
        'ignored-line': [
            (r'#[^\n]*', YamlLexer.something(Text)),
            # ignored whitespaces
            (r'[ ]+(?=#|$)', Text),
            # a comment
            (r'#[^\n]*', Comment.Single),
            # line break
            (r'\n', Text, '#pop:2'),
        ],
        # the %YAML directive
        'yaml-directive': [
            # the version number
            (r'([ ]+)([0-9]+\.[0-9]+)', bygroups(Text, Number),
             'ignored-line'),
        ],
        # the %TAG directive
        'tag-directive': [
            # a tag handle and the corresponding prefix
            (r'([ ]+)(!|![\w-]*!)'
             r'([ ]+)(!|!?[\w;/?:@&=+$,.!~*\'()\[\]%-]+)',
             bygroups(Text, Keyword.Type, Text, Keyword.Type),
             'ignored-line'),
        ],
        # block scalar indicators and indentation spaces
        'indentation': [
            # trailing whitespaces are ignored
            (r'[ ]*$', YamlLexer.something(Text), '#pop:2'),
            # whitespaces preceding block collection indicators
            (r'[ ]+(?=[?:-](?:[ ]|$))', YamlLexer.save_indent(Text)),
            # block collection indicators
            (r'[?:-](?=[ ]|$)', YamlLexer.set_indent(Punctuation.Indicator)),
            # the beginning a block line
            (r'[ ]*', YamlLexer.save_indent(Text), '#pop'),
        ],
        # an indented line in the block context
        'block-line': [
            # the line end
            (r'[ ]*(?=#|$)', YamlLexer.something(Text), '#pop'),
            # whitespaces separating tokens
            (r'[ ]+', Text),
            # tags, anchors and aliases,
            include('descriptors'),
            # block collections and scalars
            include('block-nodes'),
            # flow collections and quoted scalars
            include('flow-nodes'),
            # a plain scalar
            (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`-]|[?:-]\S)',
             YamlLexer.something(Name.Variable),
             'plain-scalar-in-block-context'),
        ],
        # tags, anchors, aliases
        'descriptors': [
            # a full-form tag
            (r'!<[\w#;/?:@&=+$,.!~*\'()\[\]%-]+>', Keyword.Type),
            # a tag in the form '!', '!suffix' or '!handle!suffix'
            (r'!(?:[\w-]+!)?'
             r'[\w#;/?:@&=+$,.!~*\'()\[\]%-]+', Keyword.Type),
            # an anchor
            (r'&[\w-]+', Name.Label),
            # an alias
            (r'\*[\w-]+', Name.Variable),
        ],
        # block collections and scalars
        'block-nodes': [
            # implicit key
            (r':(?=[ ]|$)', YamlLexer.set_indent(Punctuation.Indicator,
                                                 implicit=True)),
            # literal and folded scalars
            (r'[|>]', Punctuation.Indicator,
             ('block-scalar-content', 'block-scalar-header')),
        ],
        # flow collections and quoted scalars
        'flow-nodes': [
            # a flow sequence
            (r'\[', Punctuation.Indicator, 'flow-sequence'),
            # a flow mapping
            (r'\{', Punctuation.Indicator, 'flow-mapping'),
            # a single-quoted scalar
            (r'\'', String, 'single-quoted-scalar'),
            # a double-quoted scalar
            (r'\"', String, 'double-quoted-scalar'),
        ],
        # the content of a flow collection
        'flow-collection': [
            # whitespaces
            (r'[ ]+', Text),
            # line breaks
            (r'\n+', Text),
            # a comment
            (r'#[^\n]*', Comment.Single),
            # simple indicators
            (r'[?:,]', Punctuation.Indicator),
            # tags, anchors and aliases
            include('descriptors'),
            # nested collections and quoted scalars
            include('flow-nodes'),
            # a plain scalar
            (r'(?=[^\s?:,\[\]{}#&*!|>\'"%@`])',
             YamlLexer.something(Name.Variable),
             'plain-scalar-in-flow-context'),
        ],
        # a flow sequence indicated by '[' and ']'
        'flow-sequence': [
            # include flow collection rules
            include('flow-collection'),
            # the closing indicator
            (r'\]', Punctuation.Indicator, '#pop'),
        ],
        # a flow mapping indicated by '{' and '}'
        'flow-mapping': [
            # include flow collection rules
            include('flow-collection'),
            # the closing indicator
            (r'\}', Punctuation.Indicator, '#pop'),
        ],
        # block scalar lines
        'block-scalar-content': [
            # line break
            (r'\n', Text),
            # empty line
            (r'^[ ]+$',
             YamlLexer.parse_block_scalar_empty_line(Text, Name.Constant)),
            # indentation spaces (we may leave the state here)
            (r'^[ ]*', YamlLexer.parse_block_scalar_indent(Text)),
            # line content
            (r'[\S\t ]+', Name.Constant),
        ],
        # the content of a literal or folded scalar
        'block-scalar-header': [
            # indentation indicator followed by chomping flag
            (r'([1-9])?[+-]?(?=[ ]|$)',
             YamlLexer.set_block_scalar_indent(Punctuation.Indicator),
             'ignored-line'),
            # chomping flag followed by indentation indicator
            (r'[+-]?([1-9])?(?=[ ]|$)',
             YamlLexer.set_block_scalar_indent(Punctuation.Indicator),
             'ignored-line'),
        ],
        # ignored and regular whitespaces in quoted scalars
        'quoted-scalar-whitespaces': [
            # leading and trailing whitespaces are ignored
            (r'^[ ]+', Text),
            (r'[ ]+$', Text),
            # line breaks are ignored
            (r'\n+', Text),
            # other whitespaces are a part of the value
            (r'[ ]+', Name.Variable),
        ],
        # single-quoted scalars
        'single-quoted-scalar': [
            # include whitespace and line break rules
            include('quoted-scalar-whitespaces'),
            # escaping of the quote character
            (r'\'\'', String.Escape),
            # regular non-whitespace characters
            (r'[^\s\']+', String),
            # the closing quote
            (r'\'', String, '#pop'),
        ],
        # double-quoted scalars
        'double-quoted-scalar': [
            # include whitespace and line break rules
            include('quoted-scalar-whitespaces'),
            # escaping of special characters
            (r'\\[0abt\tn\nvfre "\\N_LP]', String),
            # escape codes
            (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})',
             String.Escape),
            # regular non-whitespace characters
            (r'[^\s"\\]+', String),
            # the closing quote
            (r'"', String, '#pop'),
        ],
        # the beginning of a new line while scanning a plain scalar
        'plain-scalar-in-block-context-new-line': [
            # empty lines
            (r'^[ ]+$', Text),
            # line breaks
            (r'\n+', Text),
            # document start and document end indicators
            (r'^(?=---|\.\.\.)', YamlLexer.something(Name.Namespace),
             '#pop:3'),
            # indentation spaces (we may leave the block line state here)
            (r'^[ ]*', YamlLexer.parse_plain_scalar_indent(Text), '#pop'),
        ],
        # a plain scalar in the block context
        'plain-scalar-in-block-context': [
            # the scalar ends with the ':' indicator
            (r'[ ]*(?=:[ ]|:$)', YamlLexer.something(Text), '#pop'),
            # the scalar ends with whitespaces followed by a comment
            (r'[ ]+(?=#)', Text, '#pop'),
            # trailing whitespaces are ignored
            (r'[ ]+$', Text),
            # line breaks are ignored
            (r'\n+', Text, 'plain-scalar-in-block-context-new-line'),
            # other whitespaces are a part of the value
            (r'[ ]+', Literal.Scalar.Plain),
            # regular non-whitespace characters
            (r'(?::(?!\s)|[^\s:])+', Literal.Scalar.Plain),
        ],
        # a plain scalar is the flow context
        'plain-scalar-in-flow-context': [
            # the scalar ends with an indicator character
            (r'[ ]*(?=[,:?\[\]{}])', YamlLexer.something(Text), '#pop'),
            # the scalar ends with a comment
            (r'[ ]+(?=#)', Text, '#pop'),
            # leading and trailing whitespaces are ignored
            (r'^[ ]+', Text),
            (r'[ ]+$', Text),
            # line breaks are ignored
            (r'\n+', Text),
            # other whitespaces are a part of the value
            (r'[ ]+', Name.Variable),
            # regular non-whitespace characters
            (r'[^\s,:?\[\]{}]+', Name.Variable),
        ],
    }
def pretty_yaml(val: dict, indentation: int = 0) -> str:
    """Render *val* as true-color highlighted YAML, stripped and indented
    by *indentation*."""
    colorized = highlight(yaml.dump(val), YamlLexer(), TerminalTrueColorFormatter())
    return indent(colorized.strip(), indentation)
def print_formatted_yaml_text(yaml_text: str) -> None:
    """Lex *yaml_text* as YAML and print it via prompt_toolkit using prompt_style."""
    yaml_tokens = list(lex(yaml_text, lexer=YamlLexer()))
    print_formatted_text(PygmentsTokens(yaml_tokens), style=prompt_style)