Example #1
def disable_inside(item, *elems, **kwargs):
    """Prevent elems from matching inside of item.

    Returns (item with elem disabled, *new versions of elems).
    """
    _invert = kwargs.get("_invert", False)
    internal_assert(
        set(kwargs.keys()) <= set(("_invert", )),
        "excess keyword arguments passed to disable_inside")

    level = [
        0
    ]  # number of wrapped items deep we are; in a list to allow modification

    @contextmanager
    def manage_item(self, instring, loc):
        level[0] += 1
        try:
            yield
        finally:
            level[0] -= 1

    yield Wrap(item, manage_item)

    @contextmanager
    def manage_elem(self, instring, loc):
        if level[0] == 0 if not _invert else level[0] > 0:
            yield
        else:
            raise ParseException(instring, loc, self.errmsg, self)

    for elem in elems:
        yield Wrap(elem, manage_elem)
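disable_inside is a generator: callers unpack it to get the wrapped item first, followed by the new versions of each elem. The underlying technique, a shared mutable nesting counter consulted by context managers, can be sketched standalone; the names below are illustrative and not taken from the Coconut source.

# Minimal self-contained sketch of the nesting-counter technique above.
from contextlib import contextmanager

level = [0]  # nesting depth, kept in a list so the closures can mutate it

@contextmanager
def inside_item():
    level[0] += 1
    try:
        yield
    finally:
        level[0] -= 1

def elem_allowed():
    # mirrors the non-inverted case: the elem may only match at depth 0
    return level[0] == 0

print(elem_allowed())        # True: not currently inside the item
with inside_item():
    print(elem_allowed())    # False: disabled while nested inside the item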
Example #2
def longest(*args):
    """Match the longest of the given grammar elements."""
    internal_assert(len(args) >= 2, "longest expects at least two args")
    matcher = args[0] + skip_whitespace
    for elem in args[1:]:
        matcher ^= elem + skip_whitespace
    return matcher
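The ^= above builds a pyparsing Or element, which tries every alternative and keeps the longest match; skip_whitespace is a Coconut-internal element and is omitted from this illustrative sketch.

# Illustrative pyparsing sketch (not Coconut code) of longest-match alternation.
from pyparsing import Literal, Word, alphas

keyword = Literal("for")
name = Word(alphas)
matcher = keyword ^ name  # roughly what longest(keyword, name) assembles

print(matcher.parseString("forward")[0])  # 'forward': the longer alternative wins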
Example #3
def transform(grammar, text):
    """Transform text by replacing matches to grammar."""
    results = []
    intervals = []
    for result, start, stop in all_matches(grammar, text):
        if result is not ignore_transform:
            internal_assert(isinstance(result, str), "got non-string transform result", result)
            if start == 0 and stop == len(text):
                return result
            results.append(result)
            intervals.append((start, stop))

    if not results:
        return None

    split_indices = [0]
    split_indices.extend(start for start, _ in intervals)
    split_indices.extend(stop for _, stop in intervals)
    split_indices.sort()
    split_indices.append(None)

    out = []
    for i in range(len(split_indices) - 1):
        if i % 2 == 0:
            start, stop = split_indices[i], split_indices[i + 1]
            out.append(text[start:stop])
        else:
            out.append(results[i // 2])
    if i // 2 < len(results) - 1:
        raise CoconutInternalException("unused transform results", results[i // 2 + 1:])
    if stop is not None:
        raise CoconutInternalException("failed to properly split text to be transformed")
    return "".join(out)
Example #4
def disable_inside(item, *elems, **kwargs):
    """Prevent elems from matching inside of item.

    Returns (item with elem disabled, *new versions of elems).
    """
    _invert = kwargs.get("_invert", False)
    internal_assert(set(kwargs.keys()) <= set(("_invert",)), "excess keyword arguments passed to disable_inside")

    level = [0]  # number of wrapped items deep we are; in a list to allow modification

    @contextmanager
    def manage_item(self, instring, loc):
        level[0] += 1
        try:
            yield
        finally:
            level[0] -= 1

    yield Wrap(item, manage_item)

    @contextmanager
    def manage_elem(self, instring, loc):
        if level[0] == 0 if not _invert else level[0] > 0:
            yield
        else:
            raise ParseException(instring, loc, self.errmsg, self)

    for elem in elems:
        yield Wrap(elem, manage_elem)
Example #5
def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs):
    """Run a console command.

    When show_output=True, prints output and returns exit code, otherwise returns output.
    When raise_errs=True, raises a subprocess.CalledProcessError if the command fails.
    """
    internal_assert(cmd and isinstance(cmd, list), "console commands must be passed as non-empty lists")
    try:
        from shutil import which
    except ImportError:
        pass
    else:
        cmd[0] = which(cmd[0]) or cmd[0]
    logger.log_cmd(cmd)
    try:
        if show_output and raise_errs:
            return subprocess.check_call(cmd, **kwargs)
        elif show_output:
            return subprocess.call(cmd, **kwargs)
        else:
            stdout, stderr, retcode = call_output(cmd, **kwargs)
            output = "".join(stdout + stderr)
            if retcode and raise_errs:
                raise subprocess.CalledProcessError(retcode, cmd, output=output)
            return output
    except OSError:
        logger.log_exc()
        if raise_errs:
            raise subprocess.CalledProcessError(oserror_retcode, cmd)
        elif show_output:
            return oserror_retcode
        else:
            return ""
Example #6
def transform(grammar, text):
    """Transform text by replacing matches to grammar."""
    results = []
    intervals = []
    for tokens, start, stop in grammar.parseWithTabs().scanString(text):
        internal_assert(
            len(tokens) == 1, "invalid transform result tokens", tokens)
        if tokens[0] is not ignore_transform:
            results.append(tokens[0])
            intervals.append((start, stop))

    if not results:
        return None

    split_indices = [0]
    split_indices.extend(start for start, _ in intervals)
    split_indices.extend(stop for _, stop in intervals)
    split_indices.sort()
    split_indices.append(None)

    out = []
    for i in range(len(split_indices) - 1):
        if i % 2 == 0:
            start, stop = split_indices[i], split_indices[i + 1]
            out.append(text[start:stop])
        else:
            out.append(results[i // 2])
    if i // 2 < len(results) - 1:
        raise CoconutInternalException("unused transform results",
                                       results[i // 2 + 1:])
    if stop is not None:
        raise CoconutInternalException(
            "failed to properly split text to be transformed")
    return "".join(out)
Example #7
def _combine(self, original, loc, tokens):
    """Implement the parse action for Combine."""
    combined_tokens = super(CombineNode,
                            self).postParse(original, loc, tokens)
    internal_assert(
        len(combined_tokens) == 1, "Combine produced multiple tokens",
        combined_tokens)
    return combined_tokens[0]
Example #8
def interleaved_join(outer_list, inner_list):
    """Interleaves two lists of strings and joins the result.

    Example: interleaved_join(['1', '3'], ['2']) == '123'
    The first list must be 1 longer than the second list.
    """
    internal_assert(len(outer_list) == len(inner_list) + 1, "invalid list lengths to interleaved_join", (outer_list, inner_list))
    interleaved = []
    for xx in zip(outer_list, inner_list):
        interleaved.extend(xx)
    interleaved.append(outer_list[-1])
    return "".join(interleaved)
Example #9
def match_trailer(self, tokens, item):
    """Matches typedefs and as patterns."""
    internal_assert(len(tokens) > 1 and len(tokens) % 2 == 1, "invalid trailer match tokens", tokens)
    match, trailers = tokens[0], tokens[1:]
    for i in range(0, len(trailers), 2):
        op, arg = trailers[i], trailers[i + 1]
        if op == "is":
            self.add_check("_coconut.isinstance(" + item + ", " + arg + ")")
        elif op == "as":
            if arg in self.names:
                self.add_check(self.names[arg] + " == " + item)
            elif arg != wildcard:
                self.add_def(arg + " = " + item)
                self.register_name(arg, item)
        else:
            raise CoconutInternalException("invalid trailer match operation", op)
    self.match(match, item)
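For illustration, the code strings assembled by the two branches above look like the following; the item name is a stand-in, not a real compiler temporary.

# Worked sketch (not compiler code) of the strings match_trailer builds.
item = "candidate"                                               # illustrative matched-value expression
type_check = "_coconut.isinstance(" + item + ", " + "int" + ")"  # produced for an `is int` trailer
name_def = "x" + " = " + item                                    # produced for an `as x` trailer
print(type_check)  # _coconut.isinstance(candidate, int)
print(name_def)    # x = candidate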
Example #10
def memoized_parse_block(code):
    """Memoized version of parse_block."""
    internal_assert(lambda: code not in parse_block_memo.values(),
                    "attempted recompilation of", code)
    success, result = parse_block_memo.get(code, (None, None))
    if success is None:
        try:
            parsed = COMPILER.parse_block(code)
        except Exception as err:
            success, result = False, err
        else:
            success, result = True, parsed
        parse_block_memo[code] = (success, result)
    if success:
        return result
    else:
        raise result
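The pattern of caching (success, result) pairs, so that failures are replayed as exceptions rather than recomputed, can be shown standalone. The sketch below is simplified (it keys only on the argument) and is not the Coconut helper itself.

# Standalone sketch of memoizing both results and raised exceptions.
import math

_memo = {}

def memoized_call(func, arg):
    success, result = _memo.get(arg, (None, None))
    if success is None:
        try:
            result = func(arg)
        except Exception as err:
            success, result = False, err
        else:
            success = True
        _memo[arg] = (success, result)
    if success:
        return result
    else:
        raise result

print(memoized_call(math.sqrt, 4.0))  # 2.0 the first time; replayed from the cache afterwards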
Example #11
def match_trailer(self, tokens, item):
    """Matches typedefs and as patterns."""
    internal_assert(len(tokens) > 1 and len(tokens) % 2 == 1, "invalid trailer match tokens", tokens)
    match, trailers = tokens[0], tokens[1:]
    for i in range(0, len(trailers), 2):
        op, arg = trailers[i], trailers[i + 1]
        if op == "is":
            self.add_check("_coconut.isinstance(" + item + ", " + arg + ")")
        elif op == "as":
            if arg in self.names:
                self.add_check(self.names[arg] + " == " + item)
            elif arg != wildcard:
                self.add_def(arg + " = " + item)
                self.names[arg] = item
        else:
            raise CoconutInternalException("invalid trailer match operation", op)
    self.match(match, item)
Example #12
def input(self, more=False):
    """Prompt for code input."""
    sys.stdout.flush()
    if more:
        msg = more_prompt
    else:
        msg = main_prompt
    if self.style is not None:
        internal_assert(prompt_toolkit is not None, "without prompt_toolkit cannot highlight style", self.style)
        try:
            return self.prompt(msg)
        except EOFError:
            raise  # issubclass(EOFError, Exception), so we have to do this
        except (Exception, AssertionError):
            logger.display_exc()
            logger.show_sig("Syntax highlighting failed; switching to --style none.")
            self.style = None
    return input(msg)
Example #13
def evaluate(self):
    """Get the result of evaluating the computation graph at this node."""
    if DEVELOP:
        internal_assert(not self.been_called, "inefficient reevaluation of action " + self.name + " with tokens", self.tokens)
        self.been_called = True
    evaluated_toks = evaluate_tokens(self.tokens)
    if logger.tracing:  # avoid the overhead of the call if not tracing
        logger.log_trace(self.name, self.original, self.loc, evaluated_toks, self.tokens)
    try:
        return _trim_arity(self.action)(
            self.original,
            self.loc,
            evaluated_toks,
        )
    except CoconutException:
        raise
    except (Exception, AssertionError):
        traceback.print_exc()
        raise CoconutInternalException("error computing action " + self.name + " of evaluated tokens", evaluated_toks)
Example #14
def minify(compiled):
    """Perform basic minifications.

    Fails on non-tabideal indentation or a string with a #.
    """
    compiled = compiled.strip()
    if compiled:
        out = []
        for line in compiled.splitlines():
            line = line.split("#", 1)[0].rstrip()
            if line:
                ind = 0
                while line.startswith(" "):
                    line = line[1:]
                    ind += 1
                internal_assert(ind % tabideal == 0, "invalid indentation in", line)
                out.append(" " * (ind // tabideal) + line)
        compiled = "\n".join(out) + "\n"
    return compiled
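A self-contained sketch of the same minification, with tabideal passed explicitly; 4 is only an assumed value here, since Coconut defines the constant elsewhere.

# Sketch: strip comments and shrink each indentation level to one space.
def minify_sketch(compiled, tabideal=4):
    out = []
    for line in compiled.strip().splitlines():
        line = line.split("#", 1)[0].rstrip()  # drop comments and trailing whitespace
        if line:
            ind = len(line) - len(line.lstrip(" "))
            assert ind % tabideal == 0, "invalid indentation"
            out.append(" " * (ind // tabideal) + line.lstrip(" "))
    return "\n".join(out) + "\n"

print(minify_sketch("if x:\n    y = 1  # comment\n"), end="")
# if x:
#  y = 1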
Example #15
def handle_indentation(inputstr, add_newline=False):
    """Replace tabideal indentation with openindent and closeindent."""
    out_lines = []
    prev_ind = None
    for line in inputstr.splitlines():
        new_ind_str, _ = split_leading_indent(line)
        internal_assert(new_ind_str.strip(" ") == "", "invalid indentation characters for handle_indentation", new_ind_str)
        internal_assert(len(new_ind_str) % tabideal == 0, "invalid indentation level for handle_indentation", len(new_ind_str))
        new_ind = len(new_ind_str) // tabideal
        if prev_ind is None:  # first line
            indent = ""
        elif new_ind > prev_ind:  # indent
            indent = openindent * (new_ind - prev_ind)
        elif new_ind < prev_ind:  # dedent
            indent = closeindent * (prev_ind - new_ind)
        else:
            indent = ""
        out_lines.append(indent + line)
        prev_ind = new_ind
    if add_newline:
        out_lines.append("")
    if prev_ind > 0:
        out_lines[-1] += closeindent * prev_ind
    return "\n".join(out_lines)
Example #16
def show_tabulated(self, begin, middle, end):
    """Shows a tabulated message."""
    internal_assert(
        len(begin) < info_tabulation, "info message too long", begin)
    self.show(begin + " " * (info_tabulation - len(begin)) + middle + " " +
              end)
Example #17
def getheader(which, target="", use_hash=None, no_tco=False, strict=False):
    """Generate the specified header."""
    internal_assert(
        which.startswith("package") or which in (
            "none",
            "initial",
            "__coconut__",
            "sys",
            "code",
            "file",
        ),
        "invalid header type",
        which,
    )

    if which == "none":
        return ""

    # initial, __coconut__, package:n, sys, code, file

    format_dict, target_startswith, target_info = process_header_args(
        which, target, use_hash, no_tco, strict)

    if which == "initial" or which == "__coconut__":
        header = '''#!/usr/bin/env python{target_startswith}
# -*- coding: {default_encoding} -*-
{hash_line}{typing_line}
# Compiled with Coconut version {VERSION_STR}

{module_docstring}'''.format(**format_dict)
    elif use_hash is not None:
        raise CoconutInternalException(
            "can only add a hash to an initial or __coconut__ header, not",
            which)
    else:
        header = ""

    if which == "initial":
        return header

    # __coconut__, package:n, sys, code, file

    header += section("Coconut Header")

    if target_startswith != "3":
        header += "from __future__ import print_function, absolute_import, unicode_literals, division\n"
    elif target_info >= (3, 7):
        header += "from __future__ import generator_stop, annotations\n"
    elif target_info >= (3, 5):
        header += "from __future__ import generator_stop\n"

    if which.startswith("package"):
        levels_up = int(which[len("package:"):])
        coconut_file_path = "_coconut_os_path.dirname(_coconut_os_path.abspath(__file__))"
        for _ in range(levels_up):
            coconut_file_path = "_coconut_os_path.dirname(" + coconut_file_path + ")"
        return header + '''import sys as _coconut_sys, os.path as _coconut_os_path
_coconut_file_path = {coconut_file_path}
_coconut_cached_module = _coconut_sys.modules.get({__coconut__})
if _coconut_cached_module is not None and _coconut_os_path.dirname(_coconut_cached_module.__file__) != _coconut_file_path:
    del _coconut_sys.modules[{__coconut__}]
_coconut_sys.path.insert(0, _coconut_file_path)
from __coconut__ import *
from __coconut__ import {underscore_imports}
{sys_path_pop}

'''.format(
            coconut_file_path=coconut_file_path,
            __coconut__=('"__coconut__"'
                         if target_startswith == "3" else 'b"__coconut__"' if
                         target_startswith == "2" else 'str("__coconut__")'),
            sys_path_pop=(
                # we can't pop on Python 2 if we want __coconut__ objects to be pickleable
                "_coconut_sys.path.pop(0)"
                if target_startswith == "3" else "" if target_startswith == "2"
                else '''if _coconut_sys.version_info >= (3,):
    _coconut_sys.path.pop(0)'''),
            **format_dict) + section("Compiled Coconut")

    if which == "sys":
        return header + '''import sys as _coconut_sys
from coconut.__coconut__ import *
from coconut.__coconut__ import {underscore_imports}
'''.format(**format_dict)

    # __coconut__, code, file

    header += "import sys as _coconut_sys\n"

    if target_startswith == "3":
        header += PY3_HEADER
    elif target_info >= (2, 7):
        header += PY27_HEADER
    elif target_startswith == "2":
        header += PY2_HEADER
    else:
        header += PYCHECK_HEADER

    header += get_template("header").format(**format_dict)

    if which == "file":
        header += "\n" + section("Compiled Coconut")

    return header
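A small standalone sketch of just the __future__ selection step above; target_startswith and target_info normally come from process_header_args, and the values used here are illustrative.

# Sketch of the version-dependent __future__ import selection.
def future_imports(target_startswith, target_info):
    if target_startswith != "3":
        return "from __future__ import print_function, absolute_import, unicode_literals, division\n"
    elif target_info >= (3, 7):
        return "from __future__ import generator_stop, annotations\n"
    elif target_info >= (3, 5):
        return "from __future__ import generator_stop\n"
    return ""

print(future_imports("3", (3, 8)), end="")
# from __future__ import generator_stop, annotations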
Example #18
def show_tabulated(self, begin, middle, end):
    """Shows a tabulated message."""
    internal_assert(len(begin) < info_tabulation, "info message too long", begin)
    self.show(begin + " " * (info_tabulation - len(begin)) + middle + " " + end)
Example #19
def _combine(self, original, loc, tokens):
    """Implement the parse action for Combine."""
    combined_tokens = super(CombineNode, self).postParse(original, loc, tokens)
    internal_assert(len(combined_tokens) == 1, "Combine produced multiple tokens", combined_tokens)
    return combined_tokens[0]
Example #20
def getheader(which, target="", use_hash=None, no_tco=False, strict=False):
    """Generate the specified header."""
    internal_assert(which in allowed_headers, "invalid header type", which)

    if which == "none":
        return ""

    # initial, __coconut__, package, sys, code, file

    format_dict, target_startswith, target_info = process_header_args(which, target, use_hash, no_tco, strict)

    if which == "initial" or which == "__coconut__":
        header = '''#!/usr/bin/env python{target_startswith}
# -*- coding: {default_encoding} -*-
{hash_line}{typing_line}
# Compiled with Coconut version {VERSION_STR}

{module_docstring}'''.format(**format_dict)
    elif use_hash is not None:
        raise CoconutInternalException("can only add a hash to an initial or __coconut__ header, not", which)
    else:
        header = ""

    if which == "initial":
        return header

    # __coconut__, package, sys, code, file

    header += section("Coconut Header")

    if target_startswith != "3":
        header += "from __future__ import print_function, absolute_import, unicode_literals, division\n"
    elif target_info >= (3, 5):
        header += "from __future__ import generator_stop\n"

    if which == "package":
        return header + '''import sys as _coconut_sys, os.path as _coconut_os_path
_coconut_file_path = _coconut_os_path.dirname(_coconut_os_path.abspath(__file__))
_coconut_cached_module = _coconut_sys.modules.get({__coconut__})
if _coconut_cached_module is not None and _coconut_os_path.dirname(_coconut_cached_module.__file__) != _coconut_file_path:
    del _coconut_sys.modules[{__coconut__}]
_coconut_sys.path.insert(0, _coconut_file_path)
from __coconut__ import {underscore_imports}
from __coconut__ import *
_coconut_sys.path.remove(_coconut_file_path)

'''.format(**format_dict) + section("Compiled Coconut")

    if which == "sys":
        return header + '''import sys as _coconut_sys
from coconut.__coconut__ import {underscore_imports}
from coconut.__coconut__ import *
'''.format(**format_dict)

    # __coconut__, code, file

    header += "import sys as _coconut_sys\n"

    if target_startswith == "3":
        header += PY3_HEADER
    elif target_info >= (2, 7):
        header += PY27_HEADER
    elif target_startswith == "2":
        header += PY2_HEADER
    else:
        header += PYCHECK_HEADER

    header += get_template("header").format(**format_dict)

    if which == "file":
        header += "\n" + section("Compiled Coconut")

    return header