コード例 #1
0
ファイル: parsing.py プロジェクト: rawlins/lambda-notebook
def parse_te(line, env=None, use_env=False):
    """Parse `line` as a typed expression via `lamb.meta.te`.

    A leading ``"reduce "`` directive requests full beta-reduction of the
    parsed expression.  Comments are stripped first with `remove_comments`.

    Returns a pair ``(result, accum)`` where `accum` maps ``"_llast"`` to
    the result.  On a parse failure the exception is logged and
    ``(None, dict())`` is returned.
    """
    from lamb import meta
    line = remove_comments(line)
    # Named `do_reduce` so we don't shadow the builtin `reduce` name, and
    # derive the slice length from the prefix instead of a magic `7`.
    do_reduce = False
    prefix = "reduce "
    if line.startswith(prefix):
        line = line[len(prefix):]
        do_reduce = True
    if env is None or not use_env:
        env = dict()
    var_env = vars_only(env)
    try:
        result = meta.te(line, assignment=var_env)
        if isinstance(result, meta.TypedExpr):
            result = result.regularize_type_env(var_env, constants=True)
            if do_reduce:
                result = result.reduce_all()
        else:
            pass # warning here?
    except Exception as e:
        meta.logger.error("Parsing of typed expression failed with exception:")
        meta.logger.error(e)
        return (None, dict())

    accum = dict()
    accum["_llast"] = result
    return (result, accum)
コード例 #2
0
def continuize_app_combinator(ftype, argtype):
    """Build the combinator for continuized function application.

    `ftype` should be a continuized function type and `argtype` a
    continuation type; pieces of both are used to assemble and parse the
    combinator ``L f : L arg : L abar : f(L b : arg(L c : abar(b(c))))``.

    Raises types.TypeMismatch when the types do not have the expected shape.
    """
    # Note that exceptions other than TypeMismatch will halt composition; a
    # TypeMismatch just signals that a particular attempt won't succeed.
    # `except Exception` (not a bare `except`) keeps that contract without
    # swallowing KeyboardInterrupt/SystemExit.
    try:
        b = ftype.left.left
    except Exception:
        raise types.TypeMismatch(ftype, None, "Not a continuized function type")
    try:
        abar = types.FunType(b.right, types.type_t)
    except Exception:
        raise types.TypeMismatch(ftype, None, "Not a continuized function type")
    try:
        c = argtype.left.left
    except Exception:
        raise types.TypeMismatch(argtype, None, "Not a continuation type")
    comb_s = ("L f_%s : L arg_%s : L abar_%s : f(L b_%s : arg(L c_%s : abar(b(c))))"
              % (repr(ftype), repr(argtype), repr(abar), repr(b), repr(c)))
    comb = te(comb_s) # parse the combinator string
    return comb
コード例 #3
0
def continuize_app_combinator2(ftype, argtype):
    """Build an alternative combinator for continuized function application.

    Like `continuize_app_combinator`, but the argument's continuation is
    consumed first: ``L f : L arg : L abar : arg(L c : f(L b : abar(b(c))))``.

    Raises types.TypeMismatch when the types do not have the expected shape.
    """
    # A TypeMismatch signals that this attempt won't succeed; narrowed from a
    # bare `except` so KeyboardInterrupt/SystemExit are not swallowed.
    try:
        b = ftype.left.left
    except Exception:
        raise types.TypeMismatch(ftype, None, "Not a continuation type")
    try:
        abar = types.FunType(b.right, types.type_t)
    except Exception:
        raise types.TypeMismatch(ftype, None, "Not a continuized function type")
    try:
        c = argtype.left.left
    except Exception:
        raise types.TypeMismatch(argtype, None, "Not a continuation type")
    comb_s = ("L f_%s : L arg_%s : L abar_%s : arg(L c_%s : f(L b_%s : abar(b(c))))"
              % (repr(ftype), repr(argtype), repr(abar), repr(c), repr(b)))
    comb = te(comb_s)
    return comb
コード例 #4
0
def continuize_app_combinator2(ftype, argtype):
    """Build an alternative combinator for continuized function application.

    The argument's continuation is consumed before the function's:
    ``L f : L arg : L abar : arg(L c : f(L b : abar(b(c))))``.

    Raises types.TypeMismatch when the types do not have the expected shape.
    """
    # A TypeMismatch signals that this attempt won't succeed; narrowed from a
    # bare `except` so KeyboardInterrupt/SystemExit are not swallowed.
    try:
        b = ftype.left.left
    except Exception:
        raise types.TypeMismatch(ftype, None, "Not a continuation type")
    try:
        abar = types.FunType(b.right, types.type_t)
    except Exception:
        raise types.TypeMismatch(ftype, None,
                                 "Not a continuized function type")
    try:
        c = argtype.left.left
    except Exception:
        raise types.TypeMismatch(argtype, None, "Not a continuation type")
    comb_s = (
        "L f_%s : L arg_%s : L abar_%s : arg(L c_%s : f(L b_%s : abar(b(c))))"
        % (repr(ftype), repr(argtype), repr(abar), repr(c), repr(b)))
    comb = te(comb_s)
    return comb
コード例 #5
0
def continuize_app_combinator(ftype, argtype):
    """Build the combinator for continuized function application.

    `ftype` should be a continuized function type and `argtype` a
    continuation type; pieces of both are used to assemble and parse the
    combinator ``L f : L arg : L abar : f(L b : arg(L c : abar(b(c))))``.

    Raises types.TypeMismatch when the types do not have the expected shape.
    """
    # note that exceptions other than TypeMismatch will halt
    # composition; a TypeMismatch just signals that a particular attempt
    # won't succeed.
    # `except Exception` (not a bare `except`) keeps that contract without
    # swallowing KeyboardInterrupt/SystemExit.
    try:
        b = ftype.left.left
    except Exception:
        raise types.TypeMismatch(ftype, None,
                                 "Not a continuized function type")
    try:
        abar = types.FunType(b.right, types.type_t)
    except Exception:
        raise types.TypeMismatch(ftype, None,
                                 "Not a continuized function type")
    try:
        c = argtype.left.left
    except Exception:
        raise types.TypeMismatch(argtype, None, "Not a continuation type")
    comb_s = (
        "L f_%s : L arg_%s : L abar_%s : f(L b_%s : arg(L c_%s : abar(b(c))))"
        % (repr(ftype), repr(argtype), repr(abar), repr(b), repr(c)))
    comb = te(comb_s)  # parse the combinator string
    return comb
コード例 #6
0
ファイル: parsing.py プロジェクト: rawlins/lambda-notebook
def parse_equality_line(s, env=None, transforms=None, ambiguity=False):
    """Parse one assignment line of the form ``name = expression``.

    Two cases are handled, keyed on the left-hand side:

    * a lexical-item name (recognized by `try_parse_item_name`): the right
      side is parsed as a typed expression and stored in `env` as a
      `lang.Item` (or merged into a `lang.Items` when the entry already
      exists and ambiguity is in play);
    * otherwise, a variable assignment: the parsed expression is bound
      directly in `env` under the variable's name, unifying its type with
      the declared/guessed type of the variable term.

    The right side may begin with an optional ``<name>`` prefix selecting
    an entry of `transforms` to post-process the parsed expression; when
    absent, ``transforms["default"]`` is used if present.

    Returns a pair ``(changed, env)`` where `changed` maps the assigned
    name to its new value.  On a parse failure the error is logged and
    ``(dict(), env)`` is returned.  NOTE: `env` is mutated in place.
    """
    from lamb import meta, lang, types
    # TODO should this go by lines....
    if env is None:
        env = dict()
    if transforms is None:
        transforms = dict()
    var_env = vars_only(env)
    system = lang.get_system()
    a_ctl = system.assign_controller
    # split only on the first "=", so the expression itself may contain "="
    l = s.split("=", 1)
    if len(l) != 2:
        raise ParseError("Missing =") # TODO expand
    transform = None
    right_str = l[1]
    # optional "<name>" prefix on the right-hand side selects a transform
    if right_str[0] == "<":
        trans_match = re.match(r'^\<([a-zA-Z0-9_]*)\>', right_str)
        if trans_match:
            trans_name = trans_match.group(1)
            if transforms and trans_name in transforms:
                transform = transforms[trans_name]
                right_str = right_str[trans_match.end(0):]
            else:
                raise ParseError("Unknown transform '<%s>'" % (trans_name))
    if transform is None and "default" in transforms:
        transform = transforms["default"]


    # right side should be typed expr no matter what
    left_s = l[0].strip()
    lex_name, item_index = try_parse_item_name(left_s, env=env,
                                                        ambiguity=ambiguity)
    if lex_name:
        # lexical-item case: parse under the assignment controller's default
        # assignment layered over the current variable environment
        default = a_ctl.default()
        db_env = default.modify(var_env)
        try:
            right_side = meta.te(right_str.strip(), assignment=db_env)
            right_side = right_side.regularize_type_env(db_env)
            right_side = right_side.under_assignment(db_env)
        except Exception as e:
            meta.logger.error(
                "Parsing of assignment to '%s' failed with exception:" % left_s)
            meta.logger.error(e)
            return (dict(), env)

        # lexical assignment
        if transform:
            right_side = transform(right_side)

        item = lang.Item(lex_name, right_side)
        # TODO: add to composition system's lexicon?  Different way of tracking
        # lexicons?
        if item_index is None:
            env[lex_name] = item
        else:
            # item_index is only set to a value if the item already exists in
            # env.
            if isinstance(env[lex_name], lang.Item):
                # promote the existing single Item to an Items list so the
                # new entry can be appended or replace a specific index
                tmp_list = list([env[lex_name]])
                if item_index is True:
                    tmp_list.append(item)
                else:
                    tmp_list[item_index] = item # may throw an exception
                item = lang.Items(tmp_list)
                env[lex_name] = item
            else:
                # already a multi-entry container: append or replace in place
                if item_index is True:
                    env[lex_name].add_result(item)
                else:
                    env[lex_name][item_index] = item
                item = env[lex_name]
        return ({lex_name: item}, env)
    else: # assignment to variable
        try:
            right_side = meta.te(right_str.strip(), assignment=var_env)
            right_side = right_side.regularize_type_env(var_env, constants=True)
            right_side = right_side.under_assignment(var_env)
        except Exception as e:
            meta.logger.error(
                "Parsing of assignment to '%s' failed with exception:" % left_s)
            meta.logger.error(e)
            #raise e
            return (dict(), env)

        # variable assignment case
        # don't pass assignment here, to allow for redefinition.  TODO: revisit
        term = meta.TypedExpr.term_factory(left_s)
        if not term.variable():
            raise ParseError("Assignment to non-variable term '%s'" % term)
        ts = meta.get_type_system()
        u_result = ts.unify(term.type, right_side.type)
        # there are two ways in which unify could fail.  One is the built-in ad
        # hoc type_guessed flag, and one is a genuine type mismatch. We want to
        # silently override guessed types here.
        if u_result is None:
            if term.type_guessed:
                term.type = right_side.type
            else:
                raise types.TypeMismatch(term, right_side,
                                                        "Variable assignment")
        else:
            # brute force
            term.type = u_result
        if transform:
            right_side = transform(right_side)
        # NOTE side-effect here
        env[term.op] = right_side
        return ({term.op : right_side}, env)
コード例 #7
0
from lamb import meta, types, lang
from lamb.meta import te, tp

# combinators for predicate modification
pm_combinator = te("L f_<e,t> : L g_<e,t> : L x_e : f(x) & g(x)")
# generalized version: X here is presumably a type variable (cf. the
# polymorphic Geach combinator below), so this covers <X,t> predicates for
# any X, not just type e -- TODO confirm against lamb's type-variable syntax
pm_generalized_combinator = te("L f_<X,t> : L g_<X,t> : L x_X : f(x) & g(x)")



# the Geach combinator -- for the g rule, and for function composition
# (X, Y, Z appear to be type variables, making this polymorphic)
geach_combinator = te("L g_<Y,Z> : L f_<X,Y> : L x_X : g(f(x))")

# for the sake of posterity, here is a way of constructing the Geach combinator
# without polymorphism:

def build_geach_combinator(gtype, ftype):
    """Construct the Geach combinator for the fixed types `gtype`, `ftype`.

    Builds ``L g . L f . L x . g(f(x))`` directly out of `meta` terms and
    `LFun` wrappers, without relying on type polymorphism.
    """
    g_term = meta.term("g", gtype)
    f_term = meta.term("f", ftype)
    x_term = meta.term("x", ftype.left)
    # innermost application first, then wrap in the three lambdas outward
    application = g_term(f_term(x_term))
    lam_x = meta.LFun(ftype.left, application, varname="x")
    lam_f = meta.LFun(ftype, lam_x, varname="f")
    return meta.LFun(gtype, lam_f, varname="g")

def function_composition_nopoly(g, f):
    """Compose two functional typed expressions, without polymorphism.

    Requires both `g` and `f` to have functional types with f's output type
    equal to g's input type; raises types.TypeMismatch otherwise.  Returns
    the fully reduced composition g(f(x)) wrapped as a function of x.
    """
    if (not (g.type.functional() and f.type.functional()
             and g.type.left == f.type.right)):
        raise types.TypeMismatch(g, f, "Function composition")
    # Use the monomorphic constructor: these arguments are *types*, not
    # TypedExprs, so applying the polymorphic `geach_combinator` expression
    # to them (as the original did) was a category error -- the "nopoly"
    # path is exactly what `build_geach_combinator` provides.
    combinator = build_geach_combinator(g.type, f.type)
    result = (combinator(g)(f)).reduce_all()
    return result
コード例 #8
0
from lamb import meta, types, lang
from lamb.meta import te, tp

# combinators for predicate modification
pm_combinator = te("L f_<e,t> : L g_<e,t> : L x_e : f(x) & g(x)")
# generalized version: X is presumably a type variable, covering <X,t>
# predicates for any X -- TODO confirm against lamb's type-variable syntax
pm_generalized_combinator = te("L f_<X,t> : L g_<X,t> : L x_X : f(x) & g(x)")

# the Geach combinator -- for the g rule, and for function composition
# (X, Y, Z appear to be type variables, making this polymorphic)
geach_combinator = te("L g_<Y,Z> : L f_<X,Y> : L x_X : g(f(x))")

# for the sake of posterity, here is a way of constructing the Geach combinator
# without polymorphism:


def build_geach_combinator(gtype, ftype):
    """Construct the Geach combinator for the fixed types `gtype`, `ftype`.

    Monomorphic counterpart of the polymorphic combinator: builds
    ``L g . L f . L x . g(f(x))`` from `meta` terms and nested `LFun`s.
    """
    x = meta.term("x", ftype.left)
    # g applied to (f applied to x) -- the core composed body
    composed = meta.term("g", gtype)(meta.term("f", ftype)(x))
    lam_x = meta.LFun(ftype.left, composed, varname="x")
    lam_f = meta.LFun(ftype, lam_x, varname="f")
    return meta.LFun(gtype, lam_f, varname="g")


def function_composition_nopoly(g, f):
    if (not (g.type.functional() and f.type.functional()
             and g.type.left == f.type.right)):
        raise types.TypeMismatch(g, f, "Function composition")
    combinator = geach_combinator(g.type, f.type)
    result = (combinator(g)(f)).reduce_all()