Пример #1
0
    def inner():
        """Generator: run scripts/<script>.py and stream its output.

        Yields each stdout line, then each stderr line, HTML-highlighted,
        followed by a final ``<script>`` snippet telling the parent frame
        whether the run succeeded.  Relies on ``script``, ``re``,
        ``subprocess``, ``highlight``, ``BashLexer`` and ``HtmlFormatter``
        from the enclosing scope.
        """
        # Whitelist check keeps `script` from escaping the scripts/ dir.
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # raise would be safer for a security check.
        assert re.match(r'^[a-zA-Z._-]+$', script)
        exec_path = "scripts/" + script + ".py"
        cmd = ["python3", "-u", exec_path]  # -u: don't buffer output

        error = False

        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )

        # NOTE(review): draining stdout fully before touching stderr can
        # deadlock if the child fills the stderr pipe buffer — presumably
        # the scripts emit little stderr; confirm.
        for line in proc.stdout:
            yield highlight(line, BashLexer(), HtmlFormatter())

        # Maybe there is more stdout after an error...
        for line in proc.stderr:
            error = True
            yield highlight(line, BashLexer(), HtmlFormatter())

        # Any stderr output at all is treated as failure.
        if error:
            yield "<script>parent.stream_error()</script>"
        else:
            yield "<script>parent.stream_success()</script>"
Пример #2
0
def bash(txt):
    """Return *txt* rendered as an HTML-highlighted shell command.

    The text is prefixed with "$ " to look like a prompt, highlighted with
    pygments' Bash lexer, and returned with a trailing newline.
    """
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import BashLexer

    command = "$ " + txt
    return "%s\n" % highlight(command, BashLexer(), HtmlFormatter())
Пример #3
0
def _service_status(service):
    """Return 0 if the given init service reports running, otherwise the
    most informative captured output (stderr if non-empty, else stdout)."""
    out = subprocess.Popen(["service", service, "status"],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE).communicate()
    LOG.debug(out)
    running = "start" in out[0]
    LOG.debug("{0} running {1}".format(service, running))
    if running:
        return 0
    return out[1] if out[1] else out[0]


def serverstatus(request):
    """Server Status Page.

    Builds the template context: page chrome links, the current user, the
    status of the ch-sf and ch-base services (0 == running, otherwise the
    raw service output), a tail of the base logger log, and the exit code
    of pinging the cogentee host.
    """
    out_dict = {}  # was initialised twice in the original
    out_dict["headLinks"] = homepage.genHeadUrls(request)
    out_dict["sideLinks"] = homepage.genSideUrls(request)
    the_user = homepage.getUser(request)
    out_dict["user"] = the_user.username

    out_dict["pgTitle"] = "Server Status"

    # Check both managed services via the shared helper (the original
    # duplicated this logic inline for each service).
    out_dict["chsf"] = _service_status("ch-sf")
    out_dict["chbase"] = _service_status("ch-base")

    try:
        # check_output only in 2.7.  Note: Popen never raises
        # CalledProcessError; a missing `tail` binary raises OSError, which
        # the original let escape — catch it so logpass is reported False.
        out = subprocess.Popen(["tail", "/var/log/ch/BaseLogger.log"],
                               stdout=subprocess.PIPE).communicate()[0]
        out_dict["logpass"] = True
        out_dict["logtail"] = highlight(out, BashLexer(), HtmlFormatter())
    except (OSError, subprocess.CalledProcessError):
        out_dict["logpass"] = False

    # Connectivity check: ping exit code (0 == reachable).
    out_dict["ping"] = subprocess.call(
        ["ping", "cogentee.coventry.ac.uk", "-c", "4"])

    out_dict["ifconfig"] = "None"

    return out_dict
Пример #4
0
 def load(self):
     """Read self.name and populate raw_lines / lines.

     raw_lines holds the tab-expanded text split into lines; lines holds
     the same text highlighted as bash for a dark terminal.
     """
     with open(self.name, 'r', encoding="utf-8") as src:
         text = src.read()
     text = text.expandtabs(4)
     self.raw_lines = text.splitlines()
     highlighted = highlight(text, BashLexer(), TerminalFormatter(bg="dark"))
     self.lines = highlighted.splitlines()
Пример #5
0
class BashTest(unittest.TestCase):
    """Checks that BashLexer tokenizes tricky shell fragments exactly."""

    def setUp(self):
        self.maxDiff = None  # show full diffs for long token lists
        self.lexer = BashLexer()

    def _assert_tokens(self, fragment, expected):
        """Lex `fragment` and compare the whole token stream to `expected`."""
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))

    def testCurlyNoEscapeAndQuotes(self):
        """Pattern substitution with an inner quoted class, all in quotes."""
        expected = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u'echo "${a//["b"]/}"\n', expected)

    def testCurlyWithEscape(self):
        """Pattern substitution containing an escaped double quote."""
        expected = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u'echo ${a//[\\"]/}\n', expected)

    def testParsedSingle(self):
        """$'...' strings may contain an escaped single quote."""
        expected = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u"a=$'abc\\''\n", expected)
Пример #6
0
def syntax_highlight_file(input_filename,
                          to_stdout=False,
                          bg='light',
                          colors_file=None,
                          style=None):
    """Highlight `input_filename` (or stdin) as bash for terminal display.

    Output goes to stdout when `to_stdout` is true, otherwise to a fresh
    '.term' temp file whose name is printed on success.  `bg` selects the
    light/dark colorscheme, `style` an optional pygments 256-color style,
    and `colors_file` an optional Python file executed to customize colors.
    Always terminates via sys.exit (0 on success).
    """
    if to_stdout:
        outfile = sys.stdout
        out_filename = None
    else:
        basename = os.path.basename(input_filename)
        # NOTE(review): mktemp() is race-prone; NamedTemporaryFile would be
        # safer, but the name is handed to the caller so keeping behavior.
        out_filename = mktemp('.term', basename + '_')
        try:
            outfile = open(out_filename, 'w')
        except OSError:  # was a bare except; file errors are OSError
            print("Unexpected error in opening output file %s" % out_filename)
            sys.exit(1)

    if input_filename:
        if not os.path.exists(input_filename):
            sys.stderr.write("input file %s doesn't exist\n" % input_filename)
            sys.exit(2)
        try:
            infile = open(input_filename)
        except OSError:  # was a bare except
            print("Unexpected error in opening input file %s" % input_filename)
            sys.exit(2)
    else:
        infile = sys.stdin

    if style:
        formatter = Terminal256Formatter(bg=bg, style=style)
    else:
        formatter = TerminalFormatter(bg=bg)
        formatter.colorscheme = TERMINAL_COLORS

    if colors_file is not None and os.path.isfile(colors_file):
        try:
            with open(colors_file) as f:
                code = compile(f.read(), colors_file, 'exec')
                # NOTE(review): executes arbitrary code from colors_file —
                # only safe for trusted, user-owned config.
                exec(code)
        except Exception:  # was a bare except (also caught SystemExit)
            sys.exit(10)

    for code_line in infile.readlines():
        line = highlight(code_line, BashLexer(), formatter).strip("\r\n")
        outfile.write(line + "\n")

    # Bug fix: the original said `outfile.close` (attribute access, never
    # called), so the temp file was never flushed/closed explicitly.
    if outfile is not sys.stdout:
        outfile.close()
    if out_filename:
        print(out_filename)
    sys.exit(0)
Пример #7
0
class BashTest(unittest.TestCase):
    """Regression tests for BashLexer's handling of awkward fragments."""

    def setUp(self):
        self.lexer = BashLexer()
        self.maxDiff = None  # never truncate diffs of token lists

    def testCurlyNoEscapeAndQuotes(self):
        """Double-quoted ${a//["b"]/}: inner quotes stay string tokens."""
        want = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        source = u'echo "${a//["b"]/}"\n'
        self.assertEqual(want, list(self.lexer.get_tokens(source)))

    def testCurlyWithEscape(self):
        """${a//[\\"]/}: the escaped quote lexes as a string escape."""
        want = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ]
        source = u'echo ${a//[\\"]/}\n'
        self.assertEqual(want, list(self.lexer.get_tokens(source)))

    def testParsedSingle(self):
        """$'abc\\'' parses as one ANSI-C quoted string."""
        want = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ]
        source = u"a=$'abc\\''\n"
        self.assertEqual(want, list(self.lexer.get_tokens(source)))
    def inner():
        """Generator: run the git APK installer and stream its output.

        Yields each stdout line of the installer, HTML-highlighted as bash.
        `git_url`, `io`, `subprocess`, `highlight`, `BashLexer` and
        `HtmlFormatter` come from the enclosing scope.
        """
        command = ['sh', 'apk_installer_from_git.sh', git_url]
        # Bug fix: the original ran the installer TWICE — a blocking
        # subprocess.call(command) followed by Popen of the same command.
        # Run it once and stream the output.
        proc = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
        )

        for line in io.TextIOWrapper(proc.stdout, encoding="utf-8"):
            yield highlight(line, BashLexer(), HtmlFormatter())
Пример #9
0
    def publish_build_output(self):
        """Stream the build container's log to redis and store a highlighted copy.

        Reads the docker container log line-by-line, de-duplicates and
        cleans each line, publishes it on the live-output channel, then —
        after waiting (up to ~5 min) for the build's final status — trims
        and persists the whole log as monokai-highlighted HTML in
        ``self.log_str``.
        """
        if not self.container:
            logger.error('Unable to publish build output. (Container is None)')
            return

        # Follow the container's combined log stream (blocking iterator).
        output = doc.logs(container=self.container, stream=True, follow=True)
        nodup = CustomSet()
        content = []

        for line in output:
            line = line.decode('UTF-8').rstrip()
            if not line or 'makepkg]# PS1="' in line:
                continue
            # Skip the first 25 chars for dedup — presumably a timestamp
            # prefix; TODO confirm against the actual log format.
            end = line[25:]
            if nodup.add(end):
                # Strip quotes so the line is safe to embed, then prepend
                # a human-readable local timestamp.
                line = line.replace("'", '')
                line = line.replace('"', '')
                line = '[{0}]: {1}'.format(datetime.now().strftime("%m/%d/%Y %I:%M%p"), line)

                content.append(line)
                self.db.publish(self.live_output_key, line)
                self.db.setex(self.last_line_key, 1800, line)

        # NOTE(review): `completed != self.failed` is used as the
        # "final status known" signal — confirm that exactly one of the
        # two flags flips when the build finishes.
        result_ready = self.completed != self.failed

        if not result_ready:
            waiting = 0

            # Poll every 5s, giving up after ~300s.
            while not result_ready:
                waiting += 5
                result_ready = self.completed != self.failed

                gevent.sleep(5)

                if waiting > 300:
                    logger.error('timed out will waiting for this build\'s final status')
                    self.failed = True
                    break

        if self.failed:
            self.db.publish(self.live_output_key, 'ENDOFLOG')

        # Keep very long logs bounded: head + tail only.
        if len(content) > 9001:
            content = content[:3000] + content[-3000:]
        elif len(content) > 6001:
            content = content[:1500] + content[-1500:]

        for line in content:
            self.log.rpush(line)

        log_content = '\n '.join(self.log)
        self.log_str = highlight(log_content, BashLexer(),
                                 HtmlFormatter(style='monokai', linenos='inline',
                                               prestyles="background:#272822;color:#fff;"))
Пример #10
0
def highlight_bash(data=None, file=None):
    """Display bash source highlighted inside an IPython notebook.

    Either pass the source directly as `data`, or a path via `file`
    (which takes precedence and is read as text).  Returns an
    IPython HTML display object with a full standalone document.
    """
    from pygments import highlight
    from pygments.lexers import BashLexer
    from pygments.formatters import HtmlFormatter
    from IPython.core.display import HTML

    if file:
        with open(file, "r") as handle:
            data = handle.read()

    rendered = highlight(data, BashLexer(), HtmlFormatter(full=True))
    return HTML(rendered)
Пример #11
0
 def install_script(self, instance: MinecraftServerConfig) -> str:
     """Get the install script HTML formatted for display."""
     formatter = HtmlFormatter(cssclass="install-script")
     highlighted = highlight(
         instance.get_install_script(),
         BashLexer(),
         formatter,
     )
     pieces = [
         highlighted,
         "<style type=\"text/css\">",
         ".install-script { padding: 5px 10px; border-radius: 4px; }",
         formatter.get_style_defs(),
         "</style>",
     ]
     return mark_safe("".join(pieces))
Пример #12
0
def colorize(sheet_content):
    """Colorizes cheatsheet content if so configured.

    Returns `sheet_content` unchanged when the CHEATCOLORS environment
    variable is unset or when pygments is not importable; otherwise
    returns the content highlighted as bash for terminal display.
    """
    # only colorize if so configured (idiom fix: `X not in Y`,
    # not `not X in Y`)
    if 'CHEATCOLORS' not in os.environ:
        return sheet_content

    try:
        from pygments import highlight
        from pygments.lexers import BashLexer
        from pygments.formatters import TerminalFormatter

    # if pygments can't load, just return the uncolorized text
    except ImportError:
        return sheet_content

    return highlight(sheet_content, BashLexer(), TerminalFormatter())
Пример #13
0
    def show(self, alias):
        """Print alias and alias command definition to stdout."""
        cmd_string = self.alias_dict[alias]
        if '\n' not in cmd_string:
            # One-liner: single-line function definition.
            show_output = '{0}() {{ {1}; }}'.format(alias, cmd_string)
        else:
            # Multi-line: indent the body with tabs.
            show_output = '{0}() {{\n\t{1}\n}}'.format(
                alias, cmd_string.replace('\n', '\n\t'))

        if not self.color:
            log.logger.debug('Showing normal output.')
            print(show_output)
            return

        log.logger.debug('Showing colorized output.')
        print(highlight(show_output, BashLexer(), TerminalFormatter()).strip())
Пример #14
0
def bash_to_html(snippet: str) -> str:
    """Highlight *snippet* as bash, rewriting pygments' terse CSS class
    names into descriptive ones."""
    output = highlight(snippet, BashLexer(), HtmlFormatter(nowrap=True))

    # From https://github.com/richleland/pygments-css/blob/master/default.css
    renames = [
        ('class="ch"', 'class="comment-hashbang"'),
        ('class="c1"', 'class="comment-single"'),
        ('class="k"', 'class="keyword"'),
        ('class="m"', 'class="literal-number"'),
        ('class="nb"', 'class="name-builtin"'),
        ('class="nv"', 'class="name-variable"'),
        ('class="o"', 'class="operator"'),
        ('class="s2"', 'class="literal-string-double"'),
        ('class="si"', 'class="literal-string-interpol"'),
    ]
    for terse, descriptive in renames:
        output = output.replace(terse, descriptive)

    # FIXME: Simulate docker as built-in
    for word in ["docker", "wait_for_url", "sleep"]:
        output = output.replace(word,
                                f'<span class="name-builtin">{word}</span>')
    return output
Пример #15
0
def inchlib_clust(req):
    """Render the inchlib_clust documentation page.

    Builds two pre-highlighted snippets — a Python usage example and the
    equivalent command line — and hands them to the template.
    """
    # Verbatim Python usage example shown on the page (runtime string —
    # must not be reformatted).
    code = """
import inchlib_clust

#instantiate the Cluster object
c = inchlib_clust.Cluster()

# read csv data file with specified delimiter, also specify whether there is a header row, the type of the data (numeric/binary) and the string representation of missing/unknown values
c.read_csv(filename="/path/to/file.csv", delimiter=",", header=bool, missing_value=str/False, datatype="numeric/binary")
# c.read_data(data, header=bool, missing_value=str/False, datatype="numeric/binary") use read_data() for list of lists instead of a data file

# normalize data to (0,1) scale, but after clustering write the original data to the heatmap
c.normalize_data(feature_range=(0,1), write_original=bool)

# cluster data according to the parameters
c.cluster_data(row_distance="euclidean", row_linkage="single", axis="row", column_distance="euclidean", column_linkage="ward")

# instantiate the Dendrogram class with the Cluster instance as an input
d = inchlib_clust.Dendrogram(c)

# create the cluster heatmap representation and define whether you want to compress the data by defining the maximum number of heatmap rows, the resulted value of compressed (merged) rows and whether you want to write the features
d.create_cluster_heatmap(compress=int, compressed_value="median", write_data=bool)

# read metadata file with specified delimiter, also specify whether there is a header row
d.add_metadata_from_file(metadata_file="/path/to/file.csv", delimiter=",", header=bool, metadata_compressed_value="frequency")

# read column metadata file with specified delimiter, also specify whether there is a 'header' column
d.add_column_metadata_from_file(column_metadata_file="/path/to/file.csv", delimiter=",", header=bool)

# export the cluster heatmap on the standard output or to the file if filename specified
d.export_cluster_heatmap_as_json("filename.json")
#d.export_cluster_heatmap_as_html("/path/to/directory") function exports simple HTML page with embedded cluster heatmap and dependencies to given directory 
"""

    # Equivalent command-line invocation, shown alongside the API example.
    bash = "python inchlib_clust.py input_file.csv -m metadata.csv -cm column_metadata.csv -dh -mh -cmh -d euclidean -l ward -a both -dd , -md , -cmd ,"

    # Pre-render both snippets as HTML for direct embedding in the template.
    code = highlight(code, PythonLexer(), HtmlFormatter())
    bash = highlight(bash, BashLexer(), HtmlFormatter())

    return render_to_response("inchlib_clust.html", {
        "code": code,
        "bash": bash
    })
Пример #16
0
    def show(self, funk):
        """Print funk and funk command definition to stdout."""
        cmd_string = self.funk_dict[funk]
        if '\n' in cmd_string:
            body = cmd_string.replace('\n', '\n\t')
            show_output = '{0}() {{\n\t{1}\n}}'.format(funk, body)
        else:
            show_output = '{0}() {{ {1}; }}'.format(funk, cmd_string)

        if self.verbose:
            # Prepend an unalias so a same-named alias cannot shadow the funk.
            prefix = 'unalias {} &> /dev/null\n'.format(funk)
            show_output = prefix + show_output

        if self.color:
            final_output = highlight(show_output, BashLexer(),
                                     TerminalFormatter()).strip()
        else:
            final_output = show_output

        print(final_output)
Пример #17
0
def db_filter_and_add(output=None, this_log=None):
    """Dedup and clean log lines, then store them highlighted in redis.

    De-duplicates on everything after the first 20 characters (presumably
    a timestamp prefix — TODO confirm), rewrites awkward dates and very
    long lines, joins the result, HTML-highlights it (monokai), and stores
    it under '<this_log>:content'.  No-op when either argument is None.
    """
    if output is None or this_log is None:
        return
    nodup = set()
    part2 = None
    filtered = []
    for line in output:
        if not line or line == '':
            continue
        line = line.rstrip()
        # Dedup key: strip the assumed 20-char timestamp prefix.
        end = line[20:]
        if end not in nodup:
            nodup.add(end)
            line = line.replace("can't", "can not")
            # Rewrite ISO-style "....Z" timestamps as mm/dd/yyyy hh:mm AM/PM.
            bad_date = re.search(r"\d{4}-.+Z(?=\s)", line)
            if bad_date:
                logger.info('The bad_date is %s' % bad_date)
                py_date = parser.parse(bad_date.group(0))
                logger.info('The py_date is %s' % py_date)
                good_date = py_date.strftime("%m/%d/%Y %I:%M%p")
                line = line.replace(bad_date.group(0), good_date)
            # Over-long lines are split; the tail is emitted later.
            # NOTE(review): when part2 is pending, the CURRENT line is
            # silently dropped, and the [:210]/[211:] split skips char 210
            # — both look unintended; confirm before relying on this.
            if len(line) > 210:
                part1 = line[:210]
                part2 = line[211:]
                filtered.append(part1)
                #db.rpush('%s:content' % this_log, part1)
                continue
            elif part2:
                #db.rpush('%s:content' % this_log, part2)
                filtered.append(part2)
                part2 = None
                continue
            else:
                filtered.append(line)
    filtered_string = '\n '.join(filtered)
    #db.rpush('%s:content' % this_log, line)
    pretty = highlight(
        filtered_string, BashLexer(),
        HtmlFormatter(style='monokai',
                      linenos='inline',
                      prestyles="background:#272822;color:#fff;"))
    db.set('%s:content' % this_log, pretty)
Пример #18
0
    def show(self, funk: str) -> None:
        """Print funk and funk command definition to stdout."""
        cmd_string = self.funk_dict[funk]
        if "\n" in cmd_string:
            indented = cmd_string.replace("\n", "\n\t")
            show_output = "{0}() {{\n\t{1}\n}}".format(funk, indented)
        else:
            show_output = "{0}() {{ {1}; }}".format(funk, cmd_string)

        if self.verbose:
            # Prepend an unalias so a same-named alias cannot shadow it.
            show_output = "unalias {} &> /dev/null\n".format(funk) + show_output

        if not self.color:
            print(show_output)
            return

        print(highlight(show_output, BashLexer(), TerminalFormatter()).strip())
Пример #19
0
    def show(self, alias):
        """Print alias and alias command definition to stdout."""
        cmd_string = self.alias_dict[alias]
        multiline = '\n' in cmd_string
        if multiline:
            show_output = '{0}() {{\n\t{1}\n}}'.format(
                alias, cmd_string.replace('\n', '\n\t'))
        else:
            show_output = '{0}() {{ {1}; }}'.format(alias, cmd_string)

        if self.verbose:
            unalias_out = 'unalias {} &> /dev/null\n'.format(alias)

            compdef_out = ''
            if not multiline:
                # Mirror completion from the last command in the chain
                # that references a positional/parameter ('$').
                cmd_chain = re.split(' *(?:&&?|;) *', cmd_string)

                index = -1
                for ineg in range(-1, -len(cmd_chain) - 1, -1):
                    if '$' in cmd_chain[ineg]:
                        index = ineg
                        break

                mirrored_cmd = cmd_chain[index].split(None, 1)[0]
                compdef_out = 'compdef {}={} &> /dev/null\n'.format(
                    alias, mirrored_cmd)

            show_output = unalias_out + compdef_out + show_output

        if self.color:
            final_output = highlight(show_output, BashLexer(),
                                     TerminalFormatter()).strip()
        else:
            final_output = show_output

        print(final_output)
Пример #20
0
def pigmentize(text):
    """Searches for <pre></pre> and replaces it with HTML-pigmented code.

    The code block's leading word selects the lexer.  Supported languages:
    python ; html ; css ; emacs ; bash ; hexdump ; DjangoLexer.  Returns
    `text` unchanged when no supported language prefix is found.

    Bug fixes vs. the original: `str.replace` was called with a single
    argument (TypeError) and its result discarded, and the prefix tests
    compared fixed-width slices against strings of a different length
    (`code[0:5] == 'python'` can never be true).
    """
    start_code = text.find('<pre>') + 5
    end_code = text.find('</pre')
    code = text[start_code:end_code]

    if code.startswith('python'):
        lexer = PythonLexer()
    elif code.startswith('html'):
        lexer = HtmlLexer()
    elif code.startswith('css'):
        lexer = CssLexer()
    elif code.startswith('emacs'):
        lexer = EmacsLispLexer()
    elif code.startswith('bash'):
        lexer = BashLexer()
    elif code.startswith('hexdump'):
        lexer = HexdumpLexer()
    elif code.startswith('django'):
        lexer = DjangoLexer()
    else:
        return text

    return text.replace(
        '<pre>' + code + '</pre>',
        '<div class="highlight"><pre>' +
        highlight(code, lexer, HtmlFormatter()) +
        '</pre></div>')
Пример #21
0
 def setUp(self):
     """Create a fresh BashLexer; disable assertEqual diff truncation."""
     self.maxDiff = None
     self.lexer = BashLexer()
Пример #22
0
class TestLexers(TestCase):
    """Collection of lexers tests"""
    def setUp(self):
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
        """IPythonLexer: shell escapes, magics, help operators, cell magics."""
        # Shell escape delegates everything after '!' to the bash lexer.
        fragment = '!echo $HOME\n'
        tokens = [
            (Token.Operator, '!'),
        ]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment_2 = '!' + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment = '%%writefile -a foo.py\nif a == b:\n    pass'
        tokens = [
            (Token.Operator, '%%writefile'),
            (Token.Text, ' -a foo.py\n'),
            (Token.Keyword, 'if'),
            (Token.Text, ' '),
            (Token.Name, 'a'),
            (Token.Text, ' '),
            (Token.Operator, '=='),
            (Token.Text, ' '),
            (Token.Name, 'b'),
            (Token.Punctuation, ':'),
            (Token.Text, '\n'),
            (Token.Text, '    '),
            (Token.Keyword, 'pass'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment = '%%timeit\nmath.sin(0)'
        tokens = [
            (Token.Operator, '%%timeit\n'),
            (Token.Name, 'math'),
            (Token.Operator, '.'),
            (Token.Name, 'sin'),
            (Token.Punctuation, '('),
            (Token.Literal.Number.Integer, '0'),
            (Token.Punctuation, ')'),
            (Token.Text, '\n'),
        ]
        # Bug fix: this expectation was built but never checked in the
        # original — the assertEqual below was missing.
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        fragment = '%%HTML\n<div>foo</div>'
        tokens = [
            (Token.Operator, '%%HTML'),
            (Token.Text, '\n'),
            (Token.Punctuation, '<'),
            (Token.Name.Tag, 'div'),
            (Token.Punctuation, '>'),
            (Token.Text, 'foo'),
            (Token.Punctuation, '<'),
            (Token.Punctuation, '/'),
            (Token.Name.Tag, 'div'),
            (Token.Punctuation, '>'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
Пример #23
0
class BashTest(unittest.TestCase):
    """Pin BashLexer's exact token output for a range of shell constructs."""

    def setUp(self):
        self.maxDiff = None  # full diffs for long token lists
        self.lexer = BashLexer()

    def _assert_tokens(self, fragment, expected):
        """Lex `fragment` and compare the complete stream to `expected`."""
        self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))

    def testCurlyNoEscapeAndQuotes(self):
        """Quoted substitution with an inner quoted bracket class."""
        expected = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u'echo "${a//["b"]/}"\n', expected)

    def testCurlyWithEscape(self):
        """Substitution containing an escaped double quote."""
        expected = [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u'echo ${a//[\\"]/}\n', expected)

    def testParsedSingle(self):
        """ANSI-C $'...' string with an escaped single quote."""
        expected = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u"a=$'abc\\''\n", expected)

    def testShortVariableNames(self):
        """Lone $, single-letter, and multi-letter variables in quotes."""
        expected = [
            # single lone $
            (Token.Name.Variable, u'x'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'$'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
            # single letter shell var
            (Token.Name.Variable, u'y'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Name.Variable, u'$_'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
            # multi-letter user var
            (Token.Name.Variable, u'z'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Name.Variable, u'$abc'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u'x="$"\ny="$_"\nz="$abc"\n', expected)

    def testArrayNums(self):
        """Array literal of numbers tokenizes each element as a number."""
        expected = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Operator, u'('),
            (Token.Literal.Number, u'1'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'2'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'3'),
            (Token.Operator, u')'),
            (Token.Text, u'\n'),
        ]
        self._assert_tokens(u'a=(1 2 3)\n', expected)

    def testEndOfLineNums(self):
        """Numbers at end of line and before a trailing comment."""
        expected = [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.Number, u'1'),
            (Token.Text, u'\n'),
            (Token.Name.Variable, u'b'),
            (Token.Operator, u'='),
            (Token.Literal.Number, u'2'),
            (Token.Text, u' '),
            (Token.Comment.Single, u'# comment\n'),
        ]
        self._assert_tokens(u'a=1\nb=2 # comment\n', expected)
Пример #24
0
def lexer_bash():
    """Yield a fresh BashLexer instance (pytest-fixture style generator)."""
    lexer = BashLexer()
    yield lexer
class BashTest(unittest.TestCase):
    """Token-level checks for the Pygments Bash lexer."""

    def setUp(self):
        # Fresh lexer per test; maxDiff=None makes assertEqual print the
        # full token-list diff on failure instead of truncating it.
        self.lexer = BashLexer()
        self.maxDiff = None

    def _assertTokens(self, source, expected):
        # Shared assertion: lex *source* and compare the whole token stream.
        self.assertEqual(expected, list(self.lexer.get_tokens(source)))

    def testCurlyNoEscapeAndQuotes(self):
        """Quoted ${...//...} substitution inside a double-quoted string."""
        self._assertTokens(u'echo "${a//["b"]/}"\n', [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.Literal.String.Double, u'"'),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Double, u'"b"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ])

    def testCurlyWithEscape(self):
        """Backslash escape inside an unquoted ${...//...} substitution."""
        self._assertTokens(u'echo ${a//[\\"]/}\n', [
            (Token.Name.Builtin, u'echo'),
            (Token.Text, u' '),
            (Token.String.Interpol, u'${'),
            (Token.Name.Variable, u'a'),
            (Token.Punctuation, u'//['),
            (Token.Literal.String.Escape, u'\\"'),
            (Token.Punctuation, u']/'),
            (Token.String.Interpol, u'}'),
            (Token.Text, u'\n'),
        ])

    def testParsedSingle(self):
        """$'...' ANSI-C quoting is one single-quoted string token."""
        self._assertTokens(u"a=$'abc\\''\n", [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.String.Single, u"$'abc\\''"),
            (Token.Text, u'\n'),
        ])

    def testShortVariableNames(self):
        """A lone $, a one-char shell var, and a multi-char user var."""
        self._assertTokens(u'x="$"\ny="$_"\nz="$abc"\n', [
            # single lone $
            (Token.Name.Variable, u'x'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'$'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
            # single letter shell var
            (Token.Name.Variable, u'y'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Name.Variable, u'$_'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
            # multi-letter user var
            (Token.Name.Variable, u'z'),
            (Token.Operator, u'='),
            (Token.Literal.String.Double, u'"'),
            (Token.Name.Variable, u'$abc'),
            (Token.Literal.String.Double, u'"'),
            (Token.Text, u'\n'),
        ])

    def testArrayNums(self):
        """Array assignment lexes as variable, '=', parens and numbers."""
        self._assertTokens(u'a=(1 2 3)\n', [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Operator, u'('),
            (Token.Literal.Number, u'1'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'2'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'3'),
            (Token.Operator, u')'),
            (Token.Text, u'\n'),
        ])

    def testEndOfLineNums(self):
        """Numbers at end of line / before a comment still lex as Number."""
        self._assertTokens(u'a=1\nb=2 # comment\n', [
            (Token.Name.Variable, u'a'),
            (Token.Operator, u'='),
            (Token.Literal.Number, u'1'),
            (Token.Text, u'\n'),
            (Token.Name.Variable, u'b'),
            (Token.Operator, u'='),
            (Token.Literal.Number, u'2'),
            (Token.Text, u' '),
            (Token.Comment.Single, u'# comment\n'),
        ])
Пример #26
0
 def setUp(self):
     """Create a fresh BashLexer for each test.

     maxDiff=None lets assertEqual show the full token-list diff on failure.
     """
     self.lexer = BashLexer()
     self.maxDiff = None
Пример #27
0
def beautify_task_cmd(cmd: str):
    """Echo *cmd* to the terminal with Bash syntax highlighting.

    Trailing whitespace/newlines added by the highlighter are stripped
    before printing.
    """
    rendered = highlight(cmd, BashLexer(), TerminalFormatter()).rstrip()
    click.echo(rendered)
Пример #28
0
            if not val: continue
            if isinstance(val, bool):
                args_strings.append('--' + arg)
            else:
                args_strings.append('--' + arg + ' \'' + str(val) + '\'')

        for package in args.usepackage:
            if package in {'amsmath', 'amssymb'}: continue
            args_strings.append('--usepackage \'' + str(package) + '\'')

        environment['args'] = ' '.join(args_strings)

        print('')
        script = post_commit_template % environment
        try:
            from pygments import highlight
            from pygments.lexers import BashLexer
            from pygments.formatters import TerminalFormatter
            print(highlight(script, BashLexer(), TerminalFormatter()))
        except NameError:
            print(script)

        response = input("Would you like to write this to %s? [y/N] " %
                         '.git/hooks/post-commit')
        if response.lower() != 'y':
            exit(1)

        if args.add_git_hook:
            with open('.git/hooks/post-commit', 'w') as f:
                f.write(script)
Пример #29
0
 def setUp(self):
     """Build the lexers under test: an IPythonLexer plus a plain BashLexer
     (used to construct expected tokens for shell-escape fragments)."""
     self.lexer = lexers.IPythonLexer()
     self.bash_lexer = BashLexer()
Пример #30
0
    def get(self, ident):
        """Render the stored proxy transaction *ident* as the "one.html" page.

        Loads the request/response pair from MongoDB, formats bodies and
        headers for display, and builds a highlighted curl command.
        NOTE(review): this is Python 2 code (`print e`, `iteritems`) and it
        uses `yield motor.Op(...)` — presumably the method is wrapped in a
        tornado coroutine/asynchronous decorator upstream; confirm.
        """
        # Optional query parameters passed straight through to the template.
        origin = self.get_argument('origin', None)
        host = self.get_argument('host', None)
        collection = self.settings['db'].proxyservice['documentation']

        # Reject identifiers that are not valid BSON ObjectIds.
        try:
            oid = objectid.ObjectId(ident)
        except objectid.InvalidId as e:
            print e
            self.send_error(500)
            return
        #raise tornado.web.HTTPError(400)

        entry = yield motor.Op(collection.find_one, {'_id': oid})
        if not entry:
            raise tornado.web.HTTPError(404)

        # Pretty-print the query string and normalize both header dicts.
        requestquery = util.nice_body(entry['request']['query'],
                                      'application/x-www-form-urlencoded')
        requestheaders = self.nice_headers(entry['request']['headers'])
        responseheaders = self.nice_headers(entry['response']['headers'])
        respctype = util.get_content_type(self.nice_headers(responseheaders))
        reqctype = util.get_content_type(self.nice_headers(requestheaders))
        # raw bodies (req/resbody) vs. display-formatted bodies (request/responsebody)
        requestbody = None
        responsebody = None
        reqbody = None
        resbody = None

        # Large response bodies live in GridFS ('fileid'); small ones inline ('body').
        if 'fileid' in entry['response']:
            resbody, ctype = yield self.get_gridfs_body(
                entry['response']['fileid'], responseheaders)
            responsebody = self.get_formatted_body(resbody, ctype)
        elif 'body' in entry['response']:
            # Fall back to sniffing the content type from the body itself.
            if not respctype:
                respctype = util.get_body_content_type(
                    entry['response']['body'])
            resbody = entry['response']['body']
            responsebody = self.get_formatted_body(resbody, respctype)

        # Same GridFS-vs-inline handling for the request side.
        if 'fileid' in entry['request']:
            reqbody, ctype = yield self.get_gridfs_body(
                entry['request']['fileid'], requestheaders)
            requestbody = self.get_formatted_body(reqbody, ctype)
        elif 'body' in entry['request']:
            reqbody = entry['request']['body']
            if not reqctype:
                reqctype = util.get_body_content_type(reqbody)
            requestbody = self.get_formatted_body(reqbody, reqctype)

        # Pretty-print the Cookie header as form-encoded key/value pairs.
        for key, value in requestheaders.iteritems():
            if key == 'Cookie':
                requestheaders[key] = util.nice_body(
                    value, 'application/x-www-form-urlencoded')

        # Reconstruct an equivalent curl invocation and highlight it as HTML.
        cmd = yield self.get_curl_cmd(entry, reqbody)

        cmd = highlight(cmd, BashLexer(),
                        HtmlFormatter(cssclass='codehilite curl'))

        fmt = util.get_format(respctype) if respctype else None

        self.render("one.html",
                    item=entry,
                    body=resbody,
                    fmt=fmt,
                    cmd=cmd,
                    requestheaders=requestheaders,
                    responseheaders=responseheaders,
                    requestbody=requestbody,
                    responsebody=responsebody,
                    requestquery=requestquery,
                    finished=True,
                    socketuuid=None,
                    origin=origin,
                    host=host,
                    relativedelta=relativedelta,
                    from_doc=True)
Пример #31
0
        return """\
		<img src="http://status.rc.fas.harvard.edu/ganglia/holyoke_compute/graph.php?r=week&z=xlarge&g=holyoke_compute&s=by+name&mc=2&g=allocation_vs_utilization_cpu_report" />
		"""
    else:
        return ""


#--- misc

# syntax highlighting: prefer Pygments; degrade to a plain <pre> block.
try:
    from pygments import highlight
    from pygments.lexers import BashLexer
    from pygments.formatters import HtmlFormatter

    lexer = BashLexer()
    formatter = HtmlFormatter()

    # CSS rules for the highlighted markup, to be embedded in the page.
    syntax_highlight_css = formatter.get_style_defs('.highlight')

    def syntax_highlight(sh):
        """Return *sh* rendered as Pygments-highlighted HTML ('' for None)."""
        if sh is not None:
            return highlight(sh, lexer, formatter)
        return ''
except ImportError:
    # Pygments is unavailable: no CSS, and a plain escaped <pre> fallback.
    import html

    syntax_highlight_css = ''

    def syntax_highlight(sh):
        """Fallback renderer: escaped text in a <pre> block ('' for None,
        matching the highlighted version's behavior)."""
        if sh is None:
            return ''
        # Escape so markup in *sh* cannot be injected into the page.
        return '<pre>%s</pre>' % html.escape(sh)
Пример #32
0
 def setUp(self):
     """Build the lexers under test: an IPythonLexer plus a plain BashLexer
     (used to construct expected tokens for shell-escape fragments)."""
     self.lexer = lexers.IPythonLexer()
     self.bash_lexer = BashLexer()
Пример #33
0
class TestLexers(TestCase):
    """Collection of lexers tests"""
    def setUp(self):
        # IPythonLexer hands '!' shell-escape payloads off to a Bash lexer,
        # so keep a BashLexer around to build the expected token streams.
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
        """Exercise IPython-specific syntax: shell escapes (!, !!),
        line/cell magics (%, %%), and the help operator (?)."""
        # '!cmd': a '!' operator followed by the command lexed as Bash.
        fragment = '!echo $HOME\n'
        tokens = [
            (Token.Operator, '!'),
        ]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        # '!!cmd': both bangs fold into a single '!!' operator token.
        fragment_2 = '!' + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # '%%!' cell magic (leading whitespace allowed) runs the cell as shell.
        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Assigning shell output: 'x = !cmd'.
        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Tuple-unpacking assignment from shell output: 'x, = !cmd'.
        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Assignment from the %sx magic: '%' operator + 'sx' keyword.
        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # A line magic's argument is plain text (not re-lexed as Python).
        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # A cell magic consumes the rest of the cell as raw text.
        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Trailing '?' requests help on a magic.
        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # '!=' is an ordinary comparison, not a shell escape.
        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Leading '?' requests help on the following name.
        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Wildcard help: '*int*?'.
        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
Пример #34
0
        pass
    else:
        infile = sys.stdin
        pass

    formatter = TerminalFormatter(bg=bg)
    if colors_file is not None and os.path.isfile(colors_file):
        try:
            execfile(colors_file)
        except:
            sys.exit(10)
            pass
        pass
    formatter.colorscheme = TERMINAL_COLORS
    for code_line in infile.readlines():
        line = highlight(code_line, BashLexer(), formatter).strip("\r\n")
        outfile.write(line + "\n")
        # print line,
        pass
    outfile.close
    if out_filename: print out_filename
    sys.exit(0)
    pass


def main():
    try:
        opts, args = getopt(sys.argv[1:], "hb:", ["help", "bg="])
    except GetoptError as err:
        # print help information and exit:
        print str(err)  # will print something like "option -a not recognized"
Пример #35
0
class TestLexers(TestCase):
    """Collection of lexers tests"""
    def setUp(self):
        # IPythonLexer hands '!' shell-escape payloads off to a Bash lexer,
        # so keep a BashLexer around to build the expected token streams.
        self.lexer = lexers.IPythonLexer()
        self.bash_lexer = BashLexer()

    def testIPythonLexer(self):
        """Exercise IPython-specific syntax: shell escapes (!, !!),
        line/cell magics (%, %%), and the help operator (?)."""
        # '!cmd': a '!' operator followed by the command lexed as Bash.
        fragment = '!echo $HOME\n'
        tokens = [
            (Token.Operator, '!'),
        ]
        tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

        # '!!cmd': both bangs fold into a single '!!' operator token.
        fragment_2 = '!' + fragment
        tokens_2 = [
            (Token.Operator, '!!'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # '%%!' cell magic (leading whitespace allowed) runs the cell as shell.
        fragment_2 = '\t %%!\n' + fragment[1:]
        tokens_2 = [
            (Token.Text, '\t '),
            (Token.Operator, '%%!'),
            (Token.Text, '\n'),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Assigning shell output: 'x = !cmd'.
        fragment_2 = 'x = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Tuple-unpacking assignment from shell output: 'x, = !cmd'.
        fragment_2 = 'x, = ' + fragment
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
        ] + tokens
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Assignment from the %sx magic: '%' operator + 'sx' keyword.
        fragment_2 = 'x, = %sx ' + fragment[1:]
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Punctuation, ','),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'sx'),
            (Token.Text, ' '),
        ] + tokens[1:]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # A line magic's argument is plain text (not re-lexed as Python).
        fragment_2 = 'f = %R function () {}\n'
        tokens_2 = [
            (Token.Name, 'f'),
            (Token.Text, ' '),
            (Token.Operator, '='),
            (Token.Text, ' '),
            (Token.Operator, '%'),
            (Token.Keyword, 'R'),
            (Token.Text, ' function () {}\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # A cell magic consumes the rest of the cell as raw text.
        fragment_2 = '\t%%xyz\n$foo\n'
        tokens_2 = [
            (Token.Text, '\t'),
            (Token.Operator, '%%'),
            (Token.Keyword, 'xyz'),
            (Token.Text, '\n$foo\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Trailing '?' requests help on a magic.
        fragment_2 = '%system?\n'
        tokens_2 = [
            (Token.Operator, '%'),
            (Token.Keyword, 'system'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # '!=' is an ordinary comparison, not a shell escape.
        fragment_2 = 'x != y\n'
        tokens_2 = [
            (Token.Name, 'x'),
            (Token.Text, ' '),
            (Token.Operator, '!='),
            (Token.Text, ' '),
            (Token.Name, 'y'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Leading '?' requests help on the following name.
        fragment_2 = ' ?math.sin\n'
        tokens_2 = [
            (Token.Text, ' '),
            (Token.Operator, '?'),
            (Token.Text, 'math.sin'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))

        # Wildcard help: '*int*?'.
        fragment = ' *int*?\n'
        tokens = [
            (Token.Text, ' *int*'),
            (Token.Operator, '?'),
            (Token.Text, '\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
Пример #36
0
def show_script(str_script):
    """Display *str_script* as a complete, styled HTML page of
    Bash-highlighted code (e.g. inline in a Jupyter notebook)."""
    formatter = HtmlFormatter(full=True)
    page = highlight(str_script, BashLexer(), formatter)
    display(HTML(page))