Example #1
    def __export_api_call(self, api_call):
        last_exchange = app_settings.app_data_cache.get_last_exchange(api_call.id)
        doc = f"""# {api_call.title}
# 
{to_slow_cooker(api_call, last_exchange)}
"""
        return highlight(doc, BashLexer(), HtmlFormatter())
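
The HTML that HtmlFormatter returns only carries CSS class names; the matching stylesheet has to be emitted separately. A minimal sketch, assuming the default `.highlight` wrapper class and an illustrative output path:

from pygments.formatters import HtmlFormatter

# emit the CSS rules that style the classes used in the highlighted HTML above
css = HtmlFormatter().get_style_defs('.highlight')
with open('pygments.css', 'w') as fh:  # illustrative path
    fh.write(css)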
Example #2
def main(zipfile, source):
    '''(str, str) Accepts 2 file paths
    Returns nothing
    For each version of source, store code and output on disk in HTML format
    '''
    zipfile = zipfile.replace('.zip', '')
    zipdir = pathjoin(ziproot, basename(zipfile))
    savedir = pathjoin(saveroot, basename(zipfile))

    if not isdir(zipdir):
        makedirs(zipdir)
    if not isdir(savedir):
        makedirs(savedir)

    run([unzip_cmd, '-u', '-o', '-q', '-d', zipdir, zipfile])  # unzip automatically adds extension
#    run(['chmod', '-R', '777', zipdir])

    previous = None
    for version in sorted(listdir(zipdir)):
        original = pathjoin(zipdir, version)
        saves = pathjoin(savedir, version)

        # overwrite existing
        if isdir(saves):
            rmtree(saves)
        makedirs(saves)

        # not required
        compile_output = ''
        if pathexists(pathjoin(original, compile_snap)):
            try:
                compile_output = run([sh_cmd, compile_snap],
                        stdout=PIPE, cwd=original).stdout.decode()
            except FileNotFoundError:
                pass  # Shouldn't happen but just in case

        output = ''
        if pathexists(pathjoin(original, run_snap)):
            try:
                output = run([sh_cmd, run_snap],
                        stdout=PIPE, cwd=original).stdout.decode()
                # output = '\n'.join(i.decode() for i in output)
            except FileNotFoundError:
                pass  # Shouldn't happen but just in case
            except OSError as e:
                output = 'OSError in snap %s\n%s' % (version, e)

        pygments.highlight(output, BashLexer(), HtmlFormatter(),
                open(pathjoin(saves, 'output.html'), 'x'))
        code = ''.join(open(pathjoin(original, source)).readlines())
        code = pygments.highlight(code, get_lexer_for_filename(source),
                HtmlFormatter(), open(pathjoin(saves, 'code.html'), 'x'))

        # add bolding
        if previous is not None:
            diff_result = file_diff(pathjoin(previous, source), pathjoin(original, source))
            add_strongs(diff_result, pathjoin(saves, 'code.html'))

        previous = original
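
file_diff and add_strongs are project helpers that the excerpt does not show. A hedged sketch of what file_diff plausibly does, using difflib; the real helper may return a different structure, since add_strongs consumes its output:

import difflib

def file_diff(old_path, new_path):
    # assumed helper: line-level diff between two versions of the source file
    with open(old_path) as old, open(new_path) as new:
        return list(difflib.unified_diff(old.readlines(), new.readlines(),
                                         fromfile=old_path, tofile=new_path))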
Example #3
    def createLexers(self):

        lex = {}
        lex['.c'] = CFamilyLexer()
        lex['.h'] = CFamilyLexer()
        lex['.cpp'] = CppLexer()
        lex['.hpp'] = CppLexer()
        lex['.css'] = CssLexer()
        lex['.sass'] = SassLexer()
        lex['.yaml'] = YamlLexer()
        lex['.yml'] = YamlLexer()
        lex['.json'] = JsonLexer()
        lex['.cs'] = CSharpLexer()
        lex['.fs'] = FSharpLexer()
        lex['.e'] = EiffelLexer()
        lex['.erl'] = ErlangLexer()
        lex['.hrl'] = ErlangLexer()
        lex['.es'] = ErlangLexer()
        lex['.f03'] = FortranLexer()
        lex['.f90'] = FortranLexer()
        lex['.F03'] = FortranLexer()
        lex['.F90'] = FortranLexer()
        lex['.go'] = GoLexer()
        lex['.hs'] = HaskellLexer()
        lex['.v'] = VerilogLexer()
        lex['.vhdl'] = VhdlLexer()
        lex['.vhd'] = VhdlLexer()
        lex['.html'] = HtmlLexer()
        lex['.htm'] = HtmlLexer()
        lex['.xhtml'] = HtmlLexer()
        lex['.xml'] = XmlLexer()
        lex['.js'] = JavascriptLexer()
        lex['.ts'] = TypeScriptLexer()
        lex['.coffee'] = CoffeeScriptLexer()
        lex['.java'] = JavaLexer()
        lex['.scala'] = ScalaLexer()
        lex['.kt'] = KotlinLexer()
        lex['.ktm'] = KotlinLexer()
        lex['.kts'] = KotlinLexer()
        lex['.lisp'] = CommonLispLexer()
        lex['make'] = MakefileLexer()
        lex['Make'] = MakefileLexer()
        lex['CMake'] = CMakeLexer()
        lex['cmake'] = CMakeLexer()
        lex['.m'] = MatlabLexer()
        lex['.mat'] = MatlabLexer()
        lex['.dpr'] = DelphiLexer()
        lex['.perl'] = PerlLexer()
        lex['.php'] = PhpLexer()
        lex['.pr'] = PrologLexer()
        lex['.py'] = Python3Lexer()
        lex['.rb'] = RubyLexer()
        lex['.sh'] = BashLexer()
        lex['.sql'] = MySqlLexer()
        lex['.mysql'] = MySqlLexer()
        lex['.tcl'] = TclLexer()
        lex['.awk'] = AwkLexer()

        return lex
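
A short sketch of how a table like the one above might be consumed, with a fallback to Pygments' own filename-based guess; the helper name and the Bash fallback are assumptions:

from pathlib import Path
from pygments.lexers import get_lexer_for_filename
from pygments.lexers.shell import BashLexer
from pygments.util import ClassNotFound

def lexer_for(path, table):
    # look up by suffix first, then let Pygments guess, then fall back to Bash
    lexer = table.get(Path(path).suffix)
    if lexer is None:
        try:
            lexer = get_lexer_for_filename(path)
        except ClassNotFound:
            lexer = BashLexer()
    return lexer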
Example #4
def get_tokens_unprocessed(self, text: str) -> (int, Token, str):
    for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
        if token is Text and value in self.EXTRA_KEYWORDS:
            yield index, Name.Builtin, value
        elif token is Text and re.search(self.ADDRESS, value):
            yield index, Keyword, value
        else:
            yield index, token, value
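
Example #4 is a method cut out of a BashLexer subclass; a minimal sketch of the class context it assumes. The class name, EXTRA_KEYWORDS set, and ADDRESS pattern are hypothetical stand-ins for whatever the surrounding project defines:

import re

from pygments.lexers.shell import BashLexer
from pygments.token import Text, Name, Keyword

class ExtendedBashLexer(BashLexer):
    EXTRA_KEYWORDS = {'mytool', 'mytool-ctl'}           # hypothetical commands
    ADDRESS = r'\b\d{1,3}(?:\.\d{1,3}){3}(?::\d+)?\b'   # hypothetical host:port pattern

    def get_tokens_unprocessed(self, text):
        # re-tokenize Bash output, promoting known words and addresses
        for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
            if token is Text and value in self.EXTRA_KEYWORDS:
                yield index, Name.Builtin, value
            elif token is Text and re.search(self.ADDRESS, value):
                yield index, Keyword, value
            else:
                yield index, token, value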
Example #5
def find_lexer(file: Path, content: str):
    try:
        if file.name in ('.gitignore', '.dockerignore'):
            lexer = BashLexer()
        else:
            lexer = lexers.guess_lexer_for_filename(str(file),
                                                    content,
                                                    stripnl=False,
                                                    ensurenl=False)
        logger.debug('Detected lexer by filename')
    except pygments.util.ClassNotFound:
        try:
            lexer = lexers.guess_lexer(content, stripnl=False, ensurenl=False)
            logger.debug('Detected lexer by file content')
        except pygments.util.ClassNotFound:
            lexer = BashLexer(stripnl=False, ensurenl=False)
            logger.debug('Using fallback lexer')
    return lexer
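
A hedged usage sketch for find_lexer: read a file, detect its lexer, and render it to the terminal. The file path and formatter choice are illustrative:

from pathlib import Path
from pygments import highlight
from pygments.formatters import TerminalFormatter

path = Path('deploy.sh')          # illustrative input file
text = path.read_text()
lexer = find_lexer(path, text)
print(highlight(text, lexer, TerminalFormatter()))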
Example #6
def humanize(cheat):
    """
    Renders a human readable cheat.
    """
    description = cheat["description"]
    command = cheat["command"]

    print(description)
    print()
    print("\t" + highlight(command, BashLexer(), TerminalFormatter()))
    print()
Example #7
def upload_file_remote(dest_file: Path, ext: str, file_path: Path, zip_flag,
                       plain, in_file, ppb_path_on_host, ppb_target_host,
                       metadata: dict) -> None:
    """
    dest_file: destination filename + extension (example: 77a80349869f.png)
    ext: file extension, like dest_file.suffix
    file_path: path to input file.
    """
    with tempfile.TemporaryDirectory() as tmpdirname:
        send_metadata(dest_file, metadata, ppb_path_on_host, ppb_target_host,
                      tmpdirname)
        tempfile_path = os.path.join(tmpdirname, in_file)
        if zip_flag:

            zip_command = f'zip -r {tempfile_path} {file_path}'
            log.info(zip_command)
            os.system(zip_command)
            file_path = tempfile_path

        elif not plain and not check_extension(
                ext, {
                    *IMAGE_EXTENSIONS, *VIDEO_EXTENSIONS,
                    *COMPRESSION_EXTENSIONS, '', 'pdf'
                }):
            with open(file_path, mode='r') as c:
                code = c.read()
            with open(tempfile_path, mode='w') as f:
                if ext == '.log':
                    lexer = BashLexer()
                else:
                    try:
                        lexer = guess_lexer_for_filename(file_path, code)
                    except Exception as e:
                        print(e)
                try:
                    highlight(code,
                              lexer,
                              HtmlFormatter(linenos=True, full=True),
                              outfile=f)
                except Exception:
                    # if pygments fails to find a lexer, continue without highlighting
                    tempfile_path = file_path
                    lexer = None
            file_path = tempfile_path
            log.info(f'Highlighted {file_path} with lexer {lexer}.')

        rsync_command = f'rsync -avP {file_path} {ppb_target_host}:{ppb_path_on_host}/{dest_file}'
        log.info(rsync_command)
        os.system(rsync_command)
Example #8
def execute_files(file_dir):
    files = []
    for file in os.listdir(os.path.join(app.config['UPLOAD_FOLDER'],
                                        file_dir)):
        filepath = os.path.join(app.config['UPLOAD_FOLDER'], file_dir, file)
        with open(filepath, "r") as py:
            code = Markup(highlight(py.read(), Python3Lexer(),
                                    HtmlFormatter()))
        outputs = []

        for test in TEST_CASES[file]:
            out = Markup(
                highlight(run_file(filepath, test), BashLexer(),
                          HtmlFormatter()))
            outputs.append(out)

        files.append((file, code, outputs))

    return files
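
run_file is not shown in the excerpt; judging by how its return value is fed to highlight, it presumably runs the uploaded script and returns its output as text. A hedged sketch of such a helper (interpreter choice, timeout, and stderr handling are assumptions):

import subprocess
import sys

def run_file(filepath, test_input):
    # assumed helper: execute the script with the test input and capture its output
    proc = subprocess.run([sys.executable, filepath], input=test_input,
                          capture_output=True, text=True, timeout=10)
    return proc.stdout + proc.stderr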
Example #9
def run():
    arguments = docopt(__doc__, version=VERSION)

    f = Clf(format="json",
            order=arguments['--order'],
            proxy=arguments['--proxy'])

    if arguments['--browse']:
        commands = f.browse()
    elif arguments['<command>']:
        commands = f.command(arguments['<command>'])
    elif arguments['<keyword>']:
        commands = f.search(arguments['<keyword>'])

    for command in commands:
        if (arguments['--color']) or (os.getenv('CLF_COLOR')):
            output = '{}# {}{}\n'.format(BLUE, command.summary, END)
            output += highlight(command.command, BashLexer(),
                                TerminalFormatter(bg="dark"))
        else:
            output = '# {}\n'.format(command.summary)
            output += command.command + "\n"

        print(output)
Example #10
# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.todo',
]

# Add Pygments lexers
lexers['JSON'] = JsonLexer()
lexers['Bash'] = BashLexer()
lexers['Javascript'] = JavascriptLexer()

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'
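
The `lexers` mapping assigned to above is not a builtin; conf.py snippets like this one pull it from sphinx.highlighting and import the lexer classes from Pygments. A likely set of imports for those assignments (the module paths shown are the standard Pygments locations):

from sphinx.highlighting import lexers
from pygments.lexers.data import JsonLexer
from pygments.lexers.javascript import JavascriptLexer
from pygments.lexers.shell import BashLexer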
Example #11
dust_output_format = "Written on {written_on}, proofread on {proofread_on}."
dust_datetime_format = "%Y-%m-%d"
dust_node_classes = ['reviewed']

# number configurations
numfig = True
numfig_format = {
    'figure': '<b>Fig. %s:</b>',
    'code-block': '<b>Example %s:</b>',
    'table': '<b>Table %s:</b>',
    'section': '<b>§%s:</b>',
}

# languages
highlight_language = 'none'
lexers['bash'] = BashLexer()
lexers['console'] = BashLexer()
lexers['hcl'] = TerraformLexer()
lexers['javascript'] = JavascriptLexer()
lexers['json'] = JavascriptLexer()
lexers['php'] = PhpLexer(startinline=True, funcnamehighlighting=True)
lexers['php-annotations'] = PhpLexer(startinline=True,
                                     funcnamehighlighting=True)
lexers['python'] = PythonLexer()

#templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'SimplePie NG'
copyright = u'2017 Ryan Parman'
version = '2.0'
Example #12
def get_output(command):
    detail = highlight(command.command,
                       BashLexer(), TerminalFormatter(bg="dark"))
    return '{}#{} {}{}\n{}'.format(BLUE, sid(command),
                                   command.summary, END, detail)
Example #13
        pass

    if extens in ("py", "pyw", "sc", "sage", "tac"):
        ui_core = CoreUI(lexer=PythonLexer())
    elif extens in ("txt", "README", "text"):
        ui_core = CoreUI(lexer=TextLexer())
    elif extens in ("htm", "html", "css", "js", "md"):
        ui_core = CoreUI(lexer=HtmlLexer())
    elif extens in ("xml", "xsl", "rss", "xslt", "xsd", "wsdl", "wsf"):
        ui_core = CoreUI(lexer=XmlLexer())
    elif extens in ("php", "php5"):
        ui_core = CoreUI(lexer=HtmlPhpLexer())
    elif extens in ("pl", "pm", "nqp", "p6", "6pl", "p6l", "pl6", "p6m", "pm6", "t"):
        ui_core = CoreUI(lexer=Perl6Lexer())
    elif extens in ("rb", "rbw", "rake", "rbx", "duby", "gemspec"):
        ui_core = CoreUI(lexer=RubyLexer())
    elif extens in ("ini", "init"):
        ui_core = CoreUI(lexer=IniLexer())
    elif extens in ("conf", "cnf", "config"):
        ui_core = CoreUI(lexer=ApacheConfLexer())
    elif extens in ("sh", "cmd", "bashrc", "bash_profile"):
        ui_core = CoreUI(lexer=BashLexer())
    elif extens in ("diff", "patch"):
        ui_core = CoreUI(lexer=DiffLexer())
    elif extens == "cs":
        ui_core = CoreUI(lexer=CSharpLexer())
    elif extens == "sql":
        ui_core = CoreUI(lexer=MySqlLexer())
    else:
        ui_core = CoreUI(lexer=PythonLexer())  # default (no extension) lexer is python
    ui_core.mainloop()
Example #14
    'p6': Perl6Lexer(),
    '6pl': Perl6Lexer(),
    'p6l': Perl6Lexer(),
    'pl6': Perl6Lexer(),
    'p6m': Perl6Lexer(),
    'pm6': Perl6Lexer(),
    't': Perl6Lexer(),
    'rb': RubyLexer(),
    'rbw': RubyLexer(),
    'rake': RubyLexer(),
    'rbx': RubyLexer(),
    'duby': RubyLexer(),
    'gemspec': RubyLexer(),
    'ini': IniLexer(),
    'init': IniLexer(),
    'sh': BashLexer(),
    'diff': DiffLexer(),
    'patch': DiffLexer(),
    'cs': CSharpLexer(),
    'md': MarkdownLexer(),  # WAIT: works poorly
}

known_extensions = list(lexer_from_ext.keys())

# class EditorFrame(ScrollableNotebook):


class EditorFrame(tk.ttk.Frame):
    """ A container for the notebook, including bottom console """
    def __init__(
        self,
Example #15
#latex_show_urls = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', 'vulcandrestapi', u'Vulcand REST API Documentation',
              [u'Vulcand Inc'], 1)]

# Normally, the Pygments PHP lexer requires PHP code to be between '<?php' '?>'
# delimiters. The hack below allows highlighting for PHP code which is not
# between those tags.
from sphinx.highlighting import lexers
from pygments.lexers import PhpLexer, TextLexer
from pygments.lexers.shell import BashLexer

lexers['php'] = PhpLexer(startinline=True)
lexers['url'] = TextLexer()
lexers['etcd'] = BashLexer()
lexers['api'] = BashLexer()
lexers['cli'] = BashLexer()
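
Mutating sphinx.highlighting.lexers still works, but recent Sphinx releases expose the same registration through the extension API. A hedged alternative sketch using app.add_lexer (Sphinx 2.1+ accepts the lexer class directly):

from pygments.lexers.shell import BashLexer

def setup(app):
    # register the same custom aliases through the documented Sphinx hook
    app.add_lexer('etcd', BashLexer)
    app.add_lexer('api', BashLexer)
    app.add_lexer('cli', BashLexer)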
Example #16
from pygments.lexers.web import PhpLexer
from pygments.lexers.web import HtmlLexer
from pygments.lexers.shell import BashLexer

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sensio.sphinx.refinclude', 'sensio.sphinx.configurationblock',
    'sensio.sphinx.phpcode', 'sphinx.ext.autodoc', 'sphinx.ext.doctest',
    'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig',
    'sphinxcontrib.youtube'
]
lexers['php'] = PhpLexer(startinline=True)
lexers['php-annotations'] = PhpLexer(startinline=True)
lexers['html'] = HtmlLexer()
lexers['bash'] = BashLexer()

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Akeneo PIM'
Example #17
# Re-write index.html so that the code block comments are replaced by the
# Pygment-ized code.
blocks = get_blocks()
with codecs.open('index-src.html', 'r', 'utf-8') as fi:
    with codecs.open('index.html', 'w', 'utf-8') as fo:
        for line in fi:
            line_els = line.strip().split()
            if len(line_els) > 1 and line_els[1] == 'code-block:':
                block_name = line_els[2]
                if block_name in blocks:
                    block = blocks[block_name]['block']
                    lang = blocks[block_name]['lang']
                    if lang == 'python':
                        lexer = PythonLexer()
                        formatter = pyformatter
                    elif lang == 'shell':
                        lexer = BashLexer()
                        formatter = shformatter
                    elif lang == 'sql':
                        lexer = MySqlLexer()
                        formatter = sqlformatter
                    elif lang == 'ini':
                        lexer = IniLexer()
                        formatter = iniformatter
                    else:
                        lexer = JsonLexer()
                        formatter = jsonformatter
                    fo.write(highlight(block, lexer, formatter))
            else:
                fo.write(line)
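
The formatter objects referenced in the loop (pyformatter, shformatter, and so on) are defined elsewhere in the script. A plausible reconstruction, assuming they are HtmlFormatter instances distinguished only by CSS class; the option values are guesses:

from pygments.formatters import HtmlFormatter

# assumed definitions for the per-language formatters used above
pyformatter = HtmlFormatter(cssclass='highlight-python')
shformatter = HtmlFormatter(cssclass='highlight-shell')
sqlformatter = HtmlFormatter(cssclass='highlight-sql')
iniformatter = HtmlFormatter(cssclass='highlight-ini')
jsonformatter = HtmlFormatter(cssclass='highlight-json')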
Example #18
# -- Settings for symfony doc extension ---------------------------------------------------

# enable highlighting for PHP code not between ``<?php ... ?>`` by default
lexers['markdown'] = TextLexer()
lexers['php'] = PhpLexer(startinline=True)
lexers['php-annotations'] = PhpLexer(startinline=True)
lexers['php-attributes'] = PhpLexer(startinline=True)
lexers['php-standalone'] = PhpLexer(startinline=True)
lexers['php-symfony'] = PhpLexer(startinline=True)
lexers['rst'] = RstLexer()
lexers['varnish2'] = CLexer()
lexers['varnish3'] = CLexer()
lexers['varnish4'] = CLexer()
lexers['terminal'] = TerminalLexer()
lexers['env'] = BashLexer()

config_block = {
    'apache': 'Apache',
    'markdown': 'Markdown',
    'nginx': 'Nginx',
    'rst': 'reStructuredText',
    'varnish2': 'Varnish 2',
    'varnish3': 'Varnish 3',
    'varnish4': 'Varnish 4',
    'env': '.env'
}

# don't enable Sphinx Domains
primary_domain = None
Example #19
    tokens = {
        'root': [
            (r'^(.*)(-- .*)$', bygroups(token.Text, token.Comment)),
            (r'{-(?:.|\n)*?-}', token.Comment),
            (r"'[^']+'", token.String.Char),
            (r'"[^"]+"', token.String.Char),
            # '/' is a \b word breaker; this rule keeps Prelude URLs from being highlighted
            (r'(http[:a-zA-Z/\.-]+)', token.Text),
            (r'(\'\'(?:.|\n)*?\'\')', token.String.Char),
            (r'\b(\+\d+|-\d+|\d+)', token.Number.Integer),
            (r'\b(None|Some|Bool|Natural|Integer|Double|Text|Type|List|Optional)\b',
             token.Keyword.Type),
            (r'\b(%s)\b' % DhallKeywords, token.Keyword),
            (r'(%s)' % DhallWords, token.Operator.Word),
            (r'\b(True|False)\b', token.Name.Builtin.Pseudo),
            (r'-- .*$', token.Comment),
            (r',', token.Punctuation),
            (r'.', token.Text),
        ]
    }


lexers['dhall'] = DhallLexer(startinline=True)

# Alias for compat with github syntax highligher name
from pygments.lexers.shell import BashLexer
from pygments.lexers.haskell import HaskellLexer
lexers['bash'] = BashLexer(startinline=True)
lexers['haskell'] = HaskellLexer(startinline=True)
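
The tokens table at the top of Example #19 belongs to a RegexLexer subclass whose header is not shown. A minimal sketch of the class context it assumes; the name, aliases, and filename pattern are guesses, and DhallKeywords/DhallWords come from the surrounding module:

from pygments.lexer import RegexLexer, bygroups
from pygments import token

class DhallLexer(RegexLexer):
    # hypothetical header for the lexer that owns the tokens table above
    name = 'Dhall'
    aliases = ['dhall']
    filenames = ['*.dhall']

    # tokens = {'root': [...]}  # the dictionary shown at the start of Example #19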