Example #1
def transform(grammar, text):
    """Transform text by replacing matches to grammar."""
    results = []
    intervals = []
    for result, start, stop in all_matches(grammar, text):
        if result is not ignore_transform:
            internal_assert(isinstance(result, str), "got non-string transform result", result)
            if start == 0 and stop == len(text):
                return result
            results.append(result)
            intervals.append((start, stop))

    if not results:
        return None

    split_indices = [0]
    split_indices.extend(start for start, _ in intervals)
    split_indices.extend(stop for _, stop in intervals)
    split_indices.sort()
    split_indices.append(None)

    out = []
    for i in range(len(split_indices) - 1):
        if i % 2 == 0:
            # even positions are untouched spans of the original text
            start, stop = split_indices[i], split_indices[i + 1]
            out.append(text[start:stop])
        else:
            # odd positions correspond to the transform results, in order
            out.append(results[i // 2])
    if i // 2 < len(results) - 1:
        raise CoconutInternalException("unused transform results", results[i // 2 + 1:])
    if stop is not None:
        raise CoconutInternalException("failed to properly split text to be transformed")
    return "".join(out)
Example #2
    def match_star(self, tokens, item):
        """Matches starred assignment."""
        internal_assert(1 <= len(tokens) <= 3, "invalid star match tokens", tokens)
        head_matches, last_matches = None, None
        if len(tokens) == 1:
            middle = tokens[0]
        elif len(tokens) == 2:
            if isinstance(tokens[0], str):
                middle, last_matches = tokens
            else:
                head_matches, middle = tokens
        else:
            head_matches, middle, last_matches = tokens
        self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Iterable)")
        if head_matches is None and last_matches is None:
            if middle != wildcard:
                self.add_def(middle + " = _coconut.list(" + item + ")")
        else:
            itervar = self.get_temp_var()
            self.add_def(itervar + " = _coconut.list(" + item + ")")
            with self.down_a_level():
                req_length = (len(head_matches) if head_matches is not None else 0) + (len(last_matches) if last_matches is not None else 0)
                self.add_check("_coconut.len(" + itervar + ") >= " + str(req_length))
                if middle != wildcard:
                    head_splice = str(len(head_matches)) if head_matches is not None else ""
                    last_splice = "-" + str(len(last_matches)) if last_matches is not None else ""
                    self.add_def(middle + " = " + itervar + "[" + head_splice + ":" + last_splice + "]")
                if head_matches is not None:
                    self.match_all_in(head_matches, itervar)
                if last_matches is not None:
                    for x in range(1, len(last_matches) + 1):
                        self.match(last_matches[-x], itervar + "[-" + str(x) + "]")
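
For orientation, the checks and definitions this emits for a pattern like (head, *middle, last) boil down to the following plain-Python shape (a hedged sketch; the real generated code uses _coconut.* aliases and temp vars):

from collections.abc import Iterable

def star_match(item):
    if not isinstance(item, Iterable):
        return None
    seq = list(item)
    if len(seq) < 2:  # req_length: one head match plus one last match
        return None
    # head_splice = "1", last_splice = "-1"
    return seq[0], seq[1:-1], seq[-1]

assert star_match([1, 2, 3, 4]) == (1, [2, 3], 4)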
Example #3
    def match_iterator(self, tokens, item):
        """Matches a lazy list or a chain."""
        internal_assert(2 <= len(tokens) <= 3, "invalid iterator match tokens", tokens)
        tail = None
        if len(tokens) == 2:
            _, matches = tokens
        else:
            _, matches, tail = tokens
        self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Iterable)")
        if tail is None:
            itervar = self.get_temp_var()
            self.add_def(itervar + " = _coconut.tuple(" + item + ")")
        elif matches:
            itervar = self.get_temp_var()
            if tail == wildcard:
                tail = item
            else:
                self.add_def(tail + " = _coconut.iter(" + item + ")")
            self.add_def(itervar + " = _coconut.tuple(_coconut_igetitem(" + tail + ", _coconut.slice(None, " + str(len(matches)) + ")))")
        else:
            itervar = None
            if tail != wildcard:
                self.add_def(tail + " = " + item)
        if itervar is not None:
            with self.down_a_level():
                self.add_check("_coconut.len(" + itervar + ") == " + str(len(matches)))
                self.match_all_in(matches, itervar)
Example #4
def disable_inside(item, *elems, **kwargs):
    """Prevent elems from matching inside of item.

    Returns (item with elems disabled, *new versions of elems).
    """
    _invert = kwargs.pop("_invert", False)
    internal_assert(not kwargs, "excess keyword arguments passed to disable_inside")

    level = [0]  # number of wrapped items deep we are; in a list to allow modification

    @contextmanager
    def manage_item(self, instring, loc):
        level[0] += 1
        try:
            yield
        finally:
            level[0] -= 1

    yield Wrap(item, manage_item)

    @contextmanager
    def manage_elem(self, instring, loc):
        if (level[0] == 0) if not _invert else (level[0] > 0):
            yield
        else:
            raise ParseException(instring, loc, self.errmsg, self)

    for elem in elems:
        yield Wrap(elem, manage_elem)
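
Note that disable_inside is itself a generator: callers unpack the wrapped item first, then the wrapped elems. The level-counting trick works independently of pyparsing; a standalone sketch with illustrative names:

from contextlib import contextmanager

level = [0]  # same mutable-cell trick as above

@contextmanager
def inside_item():
    level[0] += 1
    try:
        yield
    finally:
        level[0] -= 1

def elem_allowed(invert=False):
    return (level[0] == 0) if not invert else (level[0] > 0)

assert elem_allowed()
with inside_item():
    assert not elem_allowed()          # disabled inside the item
    assert elem_allowed(invert=True)   # unless inverted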
Example #5
def longest(*args):
    """Match the longest of the given grammar elements."""
    internal_assert(len(args) >= 2, "longest expects at least two args")
    matcher = args[0] + skip_whitespace
    for elem in args[1:]:
        matcher ^= elem + skip_whitespace
    return matcher
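
The ^ here is pyparsing's Or combinator, which tries every alternative and keeps the longest match. A minimal check with plain pyparsing (skip_whitespace is a Coconut internal and omitted):

from pyparsing import Literal

matcher = Literal("in") ^ Literal("int")  # Or: longest alternative wins
assert matcher.parseString("int")[0] == "int"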
Example #6
def add_action(item, action):
    """Add a parse action to the given item."""
    item_ref_count = sys.getrefcount(item) if CPYTHON else float("inf")
    internal_assert(item_ref_count >= temp_grammar_item_ref_count, "add_action got item with too low ref count", (item, type(item), item_ref_count))
    if item_ref_count > temp_grammar_item_ref_count:
        item = item.copy()
    return item.addParseAction(action)
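
The refcount test is a copy-on-write guard: if item only has the references expected of a throwaway grammar fragment, it is safe to attach the action in place; any extra reference means the item is shared, so a copy gets the action instead. The same idea in isolation (illustrative threshold; CPython-only, since sys.getrefcount is a CPython detail):

import sys

def mutate_or_copy(obj, temp_ref_count):
    # getrefcount includes the temporary reference created by the call itself
    if sys.getrefcount(obj) > temp_ref_count:
        obj = obj.copy()  # shared elsewhere: don't mutate the original
    return obj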
Example #7
    def split_data_or_class_match(self, tokens):
        """Split data/class match tokens into cls_name, pos_matches, name_matches, star_match."""
        cls_name, matches = tokens

        pos_matches = []
        name_matches = {}
        star_match = None
        for match_arg in matches:
            if len(match_arg) == 1:
                match, = match_arg
                if star_match is not None:
                    raise CoconutDeferredSyntaxError("positional arg after starred arg in data/class match", self.loc)
                if name_matches:
                    raise CoconutDeferredSyntaxError("positional arg after named arg in data/class match", self.loc)
                pos_matches.append(match)
            elif len(match_arg) == 2:
                internal_assert(match_arg[0] == "*", "invalid starred data/class match arg tokens", match_arg)
                _, match = match_arg
                if star_match is not None:
                    raise CoconutDeferredSyntaxError("duplicate starred arg in data/class match", self.loc)
                if name_matches:
                    raise CoconutDeferredSyntaxError("both starred arg and named arg in data/class match", self.loc)
                star_match = match
            elif len(match_arg) == 3:
                internal_assert(match_arg[1] == "=", "invalid named data/class match arg tokens", match_arg)
                name, _, match = match_arg
                if star_match is not None:
                    raise CoconutDeferredSyntaxError("both named arg and starred arg in data/class match", self.loc)
                if name in name_matches:
                    raise CoconutDeferredSyntaxError("duplicate named arg {name!r} in data/class match".format(name=name), self.loc)
                name_matches[name] = match
            else:
                raise CoconutInternalException("invalid data/class match arg", match_arg)

        return cls_name, pos_matches, name_matches, star_match
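
The match_arg token shapes this distinguishes, by length (hypothetical example patterns):

# len 1: ("pat",)             -> positional match
# len 2: ("*", "pat")         -> starred match
# len 3: ("name", "=", "pat") -> named match
# e.g. C(x, y=pat) arrives roughly as ("C", [("x",), ("y", "=", "pat")])
#      C(x, *rest) arrives roughly as ("C", [("x",), ("*", "rest")])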
Example #8
def minify(compiled):
    """Perform basic minification of the header.

    Fails on non-tabideal indentation, strings with #s, or multi-line strings.
    (So don't do those things in the header.)
    """
    compiled = compiled.strip()
    if compiled:
        out = []
        for line in compiled.splitlines():
            new_line, comment = split_comment(line)
            new_line = new_line.rstrip()
            if new_line:
                ind = 0
                while new_line.startswith(" "):
                    new_line = new_line[1:]
                    ind += 1
                internal_assert(ind % tabideal == 0, "invalid indentation in",
                                line)
                new_line = " " * (ind // tabideal) + new_line
            comment = comment.strip()
            if comment:
                new_line += "#" + comment
            if new_line:
                out.append(new_line)
        compiled = "\n".join(out) + "\n"
    return compiled
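
A worked sketch of the indentation collapse (assuming tabideal == 4; split_comment is a Coconut internal and elided): each tabideal-sized indentation level becomes a single space.

tabideal = 4  # assumed value for illustration
line = "        return 1"
ind = len(line) - len(line.lstrip(" "))
assert ind % tabideal == 0
minified = " " * (ind // tabideal) + line.lstrip(" ")
assert minified == "  return 1"  # two levels -> two single spaces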
Example #9
def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs):
    """Run a console command.

    When show_output=True, prints output and returns exit code, otherwise returns output.
    When raise_errs=True, raises a subprocess.CalledProcessError if the command fails.
    """
    internal_assert(cmd and isinstance(cmd, list),
                    "console commands must be passed as non-empty lists")
    if hasattr(shutil, "which"):
        cmd[0] = shutil.which(cmd[0]) or cmd[0]
    logger.log_cmd(cmd)
    try:
        if show_output and raise_errs:
            return subprocess.check_call(cmd, **kwargs)
        elif show_output:
            return subprocess.call(cmd, **kwargs)
        else:
            stdout, stderr, retcode = call_output(cmd, **kwargs)
            output = "".join(stdout + stderr)
            if retcode and raise_errs:
                raise subprocess.CalledProcessError(retcode,
                                                    cmd,
                                                    output=output)
            return output
    except OSError:
        logger.log_exc()
        if raise_errs:
            raise subprocess.CalledProcessError(oserror_retcode, cmd)
        elif show_output:
            return oserror_retcode
        else:
            return ""
Example #10
    def match_infix(self, tokens, item):
        """Matches infix patterns."""
        internal_assert(len(tokens) > 1 and len(tokens) % 2 == 1, "invalid infix match tokens", tokens)
        match = tokens[0]
        for i in range(1, len(tokens), 2):
            op, arg = tokens[i], tokens[i + 1]
            self.add_check("(" + op + ")(" + item + ", " + arg + ")")
        self.match(match, item)
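
Each (op, arg) pair becomes a boolean check that calls the operator on the item, so a pattern like x `f` a `g` b emits (f)(item, a) and (g)(item, b) before matching x against item. The emitted shape, standalone (illustrative predicate):

def divisible_by(item, n):  # illustrative infix operator
    return item % n == 0

item = 12
assert (divisible_by)(item, 3) and (divisible_by)(item, 4)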
Example #11
def base_pycondition(target,
                     ver,
                     if_lt=None,
                     if_ge=None,
                     indent=None,
                     newline=False,
                     fallback=""):
    """Produce code that depends on the Python version for the given target."""
    internal_assert(isinstance(ver, tuple), "invalid pycondition version")
    internal_assert(if_lt or if_ge, "either if_lt or if_ge must be specified")

    if if_lt:
        if_lt = if_lt.strip()
    if if_ge:
        if_ge = if_ge.strip()

    target_supported_vers = get_vers_for_target(target)

    if all(tar_ver < ver for tar_ver in target_supported_vers):
        if not if_lt:
            return fallback
        out = if_lt

    elif all(tar_ver >= ver for tar_ver in target_supported_vers):
        if not if_ge:
            return fallback
        out = if_ge

    else:
        if if_lt and if_ge:
            out = """if _coconut_sys.version_info < {ver}:
{lt_block}
else:
{ge_block}""".format(
                ver=repr(ver),
                lt_block=_indent(if_lt, by=1),
                ge_block=_indent(if_ge, by=1),
            )
        elif if_lt:
            out = """if _coconut_sys.version_info < {ver}:
{lt_block}""".format(
                ver=repr(ver),
                lt_block=_indent(if_lt, by=1),
            )
        else:
            out = """if _coconut_sys.version_info >= {ver}:
{ge_block}""".format(
                ver=repr(ver),
                ge_block=_indent(if_ge, by=1),
            )

    if indent is not None:
        out = _indent(out, by=indent)
    if newline:
        out += "\n"
    return out
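
For example, when the target's supported versions straddle ver, neither branch can be selected statically and a runtime check is emitted; with ver=(3,) and both blocks given, out looks like:

# if _coconut_sys.version_info < (3,):
#     <if_lt, indented one level>
# else:
#     <if_ge, indented one level>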
Example #12
def interleaved_join(first_list, second_list):
    """Interleaves two lists of strings and joins the result.

    Example: interleaved_join(['1', '3'], ['2']) == '123'
    The first list must be 1 longer than the second list.
    """
    internal_assert(len(first_list) == len(second_list) + 1, "invalid list lengths to interleaved_join", (first_list, second_list))
    interleaved = []
    for first_second in zip(first_list, second_list):
        interleaved.extend(first_second)
    interleaved.append(first_list[-1])
    return "".join(interleaved)
Example #13
    def match_trailer(self, tokens, item):
        """Matches typedefs and as patterns."""
        internal_assert(len(tokens) > 1 and len(tokens) % 2 == 1, "invalid trailer match tokens", tokens)
        match, trailers = tokens[0], tokens[1:]
        for i in range(0, len(trailers), 2):
            op, arg = trailers[i], trailers[i + 1]
            if op == "as":
                self.match_var([arg], item, bind_wildcard=True)
            elif op == "is":
                self.add_check("_coconut.isinstance(" + item + ", " + arg + ")")
            else:
                raise CoconutInternalException("invalid trailer match operation", op)
        self.match(match, item)
Example #14
def keyword(name, explicit_prefix=None):
    """Construct a grammar which matches name as a Python keyword."""
    if explicit_prefix is not False:
        internal_assert(
            (name in reserved_vars) is (explicit_prefix is not None),
            "invalid keyword call for", name,
            extra="pass explicit_prefix to keyword for all reserved_vars and only reserved_vars",
        )

    base_kwd = regex_item(name + r"\b")
    if explicit_prefix in (None, False):
        return base_kwd
    else:
        return Optional(explicit_prefix.suppress()) + base_kwd
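
The \b in the regex keeps name from matching as a prefix of a longer identifier. A minimal check with plain pyparsing (regex_item is a Coconut internal; Regex is the primitive underneath it):

from pyparsing import Regex, ParseException

kwd = Regex(r"match\b")
assert kwd.parseString("match x")[0] == "match"
try:
    kwd.parseString("matches")  # an identifier, not the keyword
except ParseException:
    pass
else:
    raise AssertionError("keyword should not match a prefix")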
Example #15
def memoized_parse_block(code):
    """Memoized version of parse_block."""
    internal_assert(lambda: code not in parse_block_memo.values(), "attempted recompilation of", code)
    success, result = parse_block_memo.get(code, (None, None))
    if success is None:
        try:
            parsed = COMPILER.parse_block(code)
        except Exception as err:
            success, result = False, err
        else:
            success, result = True, parsed
        parse_block_memo[code] = (success, result)
    if success:
        return result
    else:
        raise result
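
The (success, result) pairing is a general pattern for memoizing functions that may raise: failures are cached as exception objects and re-raised on later hits. The pattern in isolation (illustrative names):

memo = {}

def memoized(f, arg):
    success, result = memo.get(arg, (None, None))
    if success is None:
        try:
            success, result = True, f(arg)
        except Exception as err:
            success, result = False, err
        memo[arg] = (success, result)
    if success:
        return result
    raise result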
Example #16
    def match_dict(self, tokens, item):
        """Matches a dictionary."""
        internal_assert(1 <= len(tokens) <= 2, "invalid dict match tokens", tokens)
        if len(tokens) == 1:
            matches, rest = tokens[0], None
        else:
            matches, rest = tokens

        self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Mapping)")

        if rest is None:
            self.rule_conflict_warn(
                "ambiguous pattern; could be Coconut-style len-checking dict match or Python-style len-ignoring dict match",
                if_coconut='resolving to Coconut-style len-checking dict match by default',
                if_python='resolving to Python-style len-ignoring dict match due to Python-style "match: case" block',
                extra="use explicit '{..., **_}' or '{..., **{}}' syntax to dismiss",
            )
            check_len = not self.using_python_rules
        elif rest == "{}":
            check_len = True
            rest = None
        else:
            check_len = False

        if check_len:
            self.add_check("_coconut.len(" + item + ") == " + str(len(matches)))

        seen_keys = set()
        for k, v in matches:
            if k in seen_keys:
                raise CoconutDeferredSyntaxError("duplicate key {k!r} in dictionary pattern".format(k=k), self.loc)
            seen_keys.add(k)
            key_var = self.get_temp_var()
            self.add_def(key_var + " = " + item + ".get(" + k + ", _coconut_sentinel)")
            with self.down_a_level():
                self.add_check(key_var + " is not _coconut_sentinel")
                self.match(v, key_var)

        if rest is not None and rest != wildcard:
            match_keys = [k for k, v in matches]
            with self.down_a_level():
                self.add_def(
                    rest + " = dict((k, v) for k, v in "
                    + item + ".items() if k not in set(("
                    + ", ".join(match_keys) + ("," if len(match_keys) == 1 else "")
                    + ")))",
                )
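
Roughly, a pattern like {"a": x, **rest} compiles down to the following shape (a hedged sketch; the real output uses _coconut.* aliases and a sentinel per key):

# isinstance(item, Mapping) check, then per key:
#   key_var = item.get("a", sentinel)
#   check key_var is not sentinel, then match x against key_var
# and for the rest capture:
#   rest = dict((k, v) for k, v in item.items() if k not in set(("a",)))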
Example #17
    def match_sequence(self, tokens, item):
        """Matches a sequence."""
        internal_assert(2 <= len(tokens) <= 3, "invalid sequence match tokens", tokens)
        tail = None
        if len(tokens) == 2:
            series_type, matches = tokens
        else:
            series_type, matches, tail = tokens
        self.add_check("_coconut.isinstance(" + item + ", _coconut.abc.Sequence)")
        if tail is None:
            self.add_check("_coconut.len(" + item + ") == " + str(len(matches)))
        else:
            self.add_check("_coconut.len(" + item + ") >= " + str(len(matches)))
            if tail != wildcard:
                if len(matches) > 0:
                    splice = "[" + str(len(matches)) + ":]"
                else:
                    splice = ""
                self.assign_to_series(tail, series_type, item + splice)
        self.match_all_in(matches, item)
Example #18
    def input(self, more=False):
        """Prompt for code input."""
        sys.stdout.flush()
        if more:
            msg = more_prompt
        else:
            msg = main_prompt
        if self.style is not None:
            internal_assert(prompt_toolkit is not None,
                            "without prompt_toolkit cannot highlight style",
                            self.style)
            try:
                return self.prompt(msg)
            except EOFError:
                raise  # issubclass(EOFError, Exception), so we have to do this
            except (Exception, AssertionError):
                logger.print_exc()
                logger.show_sig(
                    "Syntax highlighting failed; switching to --style none.")
                self.style = None
        return input(msg)
Example #19
def handle_indentation(inputstr, add_newline=False):
    """Replace tabideal indentation with openindent and closeindent.
    Ignores whitespace-only lines."""
    out_lines = []
    prev_ind = None
    for line in inputstr.splitlines():
        line = line.rstrip()
        if line:
            new_ind_str, _ = split_leading_indent(line)
            internal_assert(new_ind_str.strip(" ") == "", "invalid indentation characters for handle_indentation", new_ind_str)
            internal_assert(len(new_ind_str) % tabideal == 0, "invalid indentation level for handle_indentation", len(new_ind_str))
            new_ind = len(new_ind_str) // tabideal
            if prev_ind is None:  # first line
                indent = ""
            elif new_ind > prev_ind:  # indent
                indent = openindent * (new_ind - prev_ind)
            elif new_ind < prev_ind:  # dedent
                indent = closeindent * (prev_ind - new_ind)
            else:
                indent = ""
            out_lines.append(indent + line)
            prev_ind = new_ind
    if add_newline:
        out_lines.append("")
    if prev_ind is not None and prev_ind > 0:  # guard against whitespace-only input
        out_lines[-1] += closeindent * prev_ind
    out = "\n".join(out_lines)
    internal_assert(lambda: out.count(openindent) == out.count(closeindent), "failed to properly handle indentation in", out)
    return out
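
A worked sketch (openindent and closeindent are Coconut-internal marker characters, shown symbolically; tabideal assumed to be 4):

# handle_indentation("if x:\n    y")
#   -> "if x:\n" + openindent + "y" + closeindent
# each indent step becomes one openindent, each dedent step one
# closeindent, and any still-open levels are closed on the last line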
Example #20
    def __init__(self, comp, original, loc, check_var, style="coconut", name_list=None, checkdefs=None, parent_names={}, var_index_obj=None):
        """Creates the matcher."""
        self.comp = comp
        self.original = original
        self.loc = loc
        self.check_var = check_var
        internal_assert(style in self.valid_styles, "invalid Matcher style", style)
        self.style = style
        self.name_list = name_list
        self.position = 0
        self.checkdefs = []
        if checkdefs is None:
            self.increment()
        else:
            for checks, defs in checkdefs:
                self.checkdefs.append((checks[:], defs[:]))
            self.set_position(-1)
        self.parent_names = parent_names
        self.names = OrderedDict()  # ensures deterministic ordering of name setting code
        self.var_index_obj = [0] if var_index_obj is None else var_index_obj
        self.guards = []
        self.child_groups = []
Example #21
    def evaluate(self):
        """Get the result of evaluating the computation graph at this node."""
        if DEVELOP:
            internal_assert(not self.been_called, "inefficient reevaluation of action " + self.name + " with tokens", self.tokens)
            self.been_called = True
        evaluated_toks = evaluate_tokens(self.tokens)
        if logger.tracing:  # avoid the overhead of the call if not tracing
            logger.log_trace(self.name, self.original, self.loc, evaluated_toks, self.tokens)
        try:
            return _trim_arity(self.action)(
                self.original,
                self.loc,
                evaluated_toks,
            )
        except CoconutException:
            raise
        except (Exception, AssertionError):
            logger.print_exc()
            error = CoconutInternalException("error computing action " + self.name + " of evaluated tokens", evaluated_toks)
            if embed_on_internal_exc:
                logger.warn_err(error)
                embed(depth=2)
            else:
                raise error
Example #22
def add_timing_to_method(cls, method_name, method):
    """Add timing collection to the given method.
    It's a monstrosity, but it's only used for profiling."""
    from coconut.terminal import internal_assert  # hide to avoid circular import

    # note: inspect.getargspec was removed in Python 3.11; inspect.getfullargspec is the modern equivalent
    args, varargs, keywords, defaults = inspect.getargspec(method)
    internal_assert(args[:1] == ["self"], "cannot add timing to method",
                    method_name)

    if not defaults:
        defaults = []
    num_undefaulted_args = len(args) - len(defaults)
    def_args = []
    call_args = []
    fix_arg_defaults = []
    defaults_dict = {}
    for i, arg in enumerate(args):
        if i >= num_undefaulted_args:
            default = defaults[i - num_undefaulted_args]
            def_args.append(arg + "=_timing_sentinel")
            defaults_dict[arg] = default
            fix_arg_defaults.append(
                """
    if {arg} is _timing_sentinel:
        {arg} = _exec_dict["defaults_dict"]["{arg}"]
""".strip("\n").format(arg=arg),
            )
        else:
            def_args.append(arg)
        call_args.append(arg)
    if varargs:
        def_args.append("*" + varargs)
        call_args.append("*" + varargs)
    if keywords:
        def_args.append("**" + keywords)
        call_args.append("**" + keywords)

    new_method_name = "new_" + method_name + "_func"
    _exec_dict = globals().copy()
    _exec_dict.update(locals())
    new_method_code = """
def {new_method_name}({def_args}):
{fix_arg_defaults}

    _all_args = (lambda *args, **kwargs: args + tuple(kwargs.values()))({call_args})
    _exec_dict["internal_assert"](not any(_arg is _timing_sentinel for _arg in _all_args), "error handling arguments in timed method {new_method_name}({def_args}); got", _all_args)

    _start_time = _exec_dict["get_clock_time"]()
    try:
        return _exec_dict["method"]({call_args})
    finally:
        _timing_info[0][str(self)] += _exec_dict["get_clock_time"]() - _start_time
{new_method_name}._timed = True
    """.format(
        fix_arg_defaults="\n".join(fix_arg_defaults),
        new_method_name=new_method_name,
        def_args=", ".join(def_args),
        call_args=", ".join(call_args),
    )
    exec(new_method_code, _exec_dict)

    setattr(cls, method_name, _exec_dict[new_method_name])
    return True
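
The exec dance above exists to preserve the method's exact signature and argument defaults for introspection; a plain wrapper is much simpler but loses both, which is the trade-off being avoided. A hedged contrast sketch (not part of the profiler):

import functools
import time

def add_simple_timing(method, timing_info):
    """Time method calls, at the cost of hiding the real signature."""
    @functools.wraps(method)
    def timed(self, *args, **kwargs):
        start = time.process_time()
        try:
            return method(self, *args, **kwargs)
        finally:
            key = str(self)
            timing_info[key] = timing_info.get(key, 0.0) + time.process_time() - start
    return timed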
Example #23
    def _combine(self, original, loc, tokens):
        """Implement the parse action for Combine."""
        combined_tokens = super(CombineNode, self).postParse(original, loc, tokens)
        if DEVELOP:  # avoid the overhead of the call if not develop
            internal_assert(len(combined_tokens) == 1, "Combine produced multiple tokens", combined_tokens)
        return combined_tokens[0]
Example #24
def stores_loc_action(loc, tokens):
    """Action that just parses to loc."""
    internal_assert(len(tokens) == 0, "invalid store loc tokens", tokens)
    return str(loc)
Example #25
    def increment(self, by=1):
        """Advances the if-statement position."""
        new_pos = self.position + by
        internal_assert(new_pos > 0, "invalid increment/decrement call to set pos to", new_pos)
        self.set_position(new_pos)
Example #26
def getheader(which, target, use_hash, no_tco, strict, no_wrap):
    """Generate the specified header."""
    internal_assert(
        which.startswith("package") or which in (
            "none",
            "initial",
            "__coconut__",
            "sys",
            "code",
            "file",
        ),
        "invalid header type",
        which,
    )

    if which == "none":
        return ""

    target_startswith = one_num_ver(target)
    target_info = get_target_info(target)

    # initial, __coconut__, package:n, sys, code, file

    format_dict = process_header_args(which, target, use_hash, no_tco, strict,
                                      no_wrap)

    if which == "initial" or which == "__coconut__":
        header = '''#!/usr/bin/env python{target_startswith}
# -*- coding: {default_encoding} -*-
{hash_line}{typing_line}
# Compiled with Coconut version {VERSION_STR}

{module_docstring}'''.format(**format_dict)
    elif use_hash is not None:
        raise CoconutInternalException(
            "can only add a hash to an initial or __coconut__ header, not",
            which)
    else:
        header = ""

    if which == "initial":
        return header

    # __coconut__, package:n, sys, code, file

    header += section("Coconut Header", newline_before=False)

    if target_startswith != "3":
        header += "from __future__ import print_function, absolute_import, unicode_literals, division\n"
    elif target_info >= (3, 7):
        if no_wrap:
            header += "from __future__ import generator_stop\n"
        else:
            header += "from __future__ import generator_stop, annotations\n"
    elif target_info >= (3, 5):
        header += "from __future__ import generator_stop\n"

    if which.startswith("package"):
        levels_up = int(which[len("package:"):])
        coconut_file_dir = "_coconut_os.path.dirname(_coconut_os.path.abspath(__file__))"
        for _ in range(levels_up):
            coconut_file_dir = "_coconut_os.path.dirname(" + coconut_file_dir + ")"
        return header + '''import sys as _coconut_sys, os as _coconut_os
_coconut_file_dir = {coconut_file_dir}
_coconut_cached_module = _coconut_sys.modules.get({__coconut__})
if _coconut_cached_module is not None and _coconut_os.path.dirname(_coconut_cached_module.__file__) != _coconut_file_dir:
    del _coconut_sys.modules[{__coconut__}]
_coconut_sys.path.insert(0, _coconut_file_dir)
_coconut_module_name = _coconut_os.path.splitext(_coconut_os.path.basename(_coconut_file_dir))[0]
if _coconut_module_name and _coconut_module_name[0].isalpha() and all(c.isalpha() or c.isdigit() for c in _coconut_module_name) and "__init__.py" in _coconut_os.listdir(_coconut_file_dir):
    _coconut_full_module_name = str(_coconut_module_name + ".__coconut__")
    import __coconut__ as _coconut__coconut__
    _coconut__coconut__.__name__ = _coconut_full_module_name
    for _coconut_v in vars(_coconut__coconut__).values():
        if getattr(_coconut_v, "__module__", None) == {__coconut__}:
            try:
                _coconut_v.__module__ = _coconut_full_module_name
            except AttributeError:
                _coconut_v_type = type(_coconut_v)
                if getattr(_coconut_v_type, "__module__", None) == {__coconut__}:
                    _coconut_v_type.__module__ = _coconut_full_module_name
    _coconut_sys.modules[_coconut_full_module_name] = _coconut__coconut__
from __coconut__ import *
from __coconut__ import {underscore_imports}
_coconut_sys.path.pop(0)
'''.format(coconut_file_dir=coconut_file_dir,
           __coconut__=('"__coconut__"'
                        if target_startswith == "3" else 'b"__coconut__"'
                        if target_startswith == "2" else 'str("__coconut__")'),
           **format_dict) + section("Compiled Coconut")

    if which == "sys":
        return header + '''import sys as _coconut_sys
from coconut.__coconut__ import *
from coconut.__coconut__ import {underscore_imports}
'''.format(**format_dict)

    # __coconut__, code, file

    header += "import sys as _coconut_sys\n"

    if target_info >= (3, 7):
        header += PY37_HEADER
    elif target_startswith == "3":
        header += PY3_HEADER
    elif target_info >= (2, 7):
        header += PY27_HEADER
    elif target_startswith == "2":
        header += PY2_HEADER
    else:
        header += PYCHECK_HEADER

    header += get_template("header").format(**format_dict)

    if which == "file":
        header += section("Compiled Coconut")

    return header
Example #27
def evaluate_tokens(tokens, **kwargs):
    """Evaluate the given tokens in the computation graph."""
    # can't have this be a normal kwarg to make evaluate_tokens a valid parse action
    evaluated_toklists = kwargs.pop("evaluated_toklists", ())
    internal_assert(not kwargs, "invalid keyword arguments to evaluate_tokens", kwargs)

    if isinstance(tokens, ParseResults):

        # evaluate the list portion of the ParseResults
        old_toklist, name, asList, modal = tokens.__getnewargs__()
        new_toklist = None
        for eval_old_toklist, eval_new_toklist in evaluated_toklists:
            if old_toklist == eval_old_toklist:
                new_toklist = eval_new_toklist
                break
        if new_toklist is None:
            new_toklist = [evaluate_tokens(toks, evaluated_toklists=evaluated_toklists) for toks in old_toklist]
            # overwrite evaluated toklists rather than appending, since this
            #  should be all the information we need for evaluating the dictionary
            evaluated_toklists = ((old_toklist, new_toklist),)
        new_tokens = ParseResults(new_toklist, name, asList, modal)
        new_tokens._ParseResults__accumNames.update(tokens._ParseResults__accumNames)

        # evaluate the dictionary portion of the ParseResults
        new_tokdict = {}
        for name, occurrences in tokens._ParseResults__tokdict.items():
            new_occurrences = []
            for value, position in occurrences:
                new_value = evaluate_tokens(value, evaluated_toklists=evaluated_toklists)
                new_occurrences.append(_ParseResultsWithOffset(new_value, position))
            new_tokdict[name] = new_occurrences
        new_tokens._ParseResults__tokdict.update(new_tokdict)

        return new_tokens

    else:

        if evaluated_toklists:
            for eval_old_toklist, eval_new_toklist in evaluated_toklists:
                indices = multi_index_lookup(eval_old_toklist, tokens, indexable_types=indexable_evaluated_tokens_types)
                if indices is not None:
                    new_tokens = eval_new_toklist
                    for ind in indices:
                        new_tokens = new_tokens[ind]
                    return new_tokens
            complain(
                lambda: CoconutInternalException(
                    "inefficient reevaluation of tokens: {tokens} not in:\n{toklists}".format(
                        tokens=tokens,
                        toklists=pformat([eval_old_toklist for eval_old_toklist, eval_new_toklist in evaluated_toklists]),
                    ),
                ),
            )

        if isinstance(tokens, str):
            return tokens

        elif isinstance(tokens, ComputationNode):
            return tokens.evaluate()

        elif isinstance(tokens, list):
            return [evaluate_tokens(inner_toks, evaluated_toklists=evaluated_toklists) for inner_toks in tokens]

        elif isinstance(tokens, tuple):
            return tuple(evaluate_tokens(inner_toks, evaluated_toklists=evaluated_toklists) for inner_toks in tokens)

        else:
            raise CoconutInternalException("invalid computation graph tokens", tokens)