def test_multiple_args(self):
    """The placeholder may appear more than once in a single expression."""
    add_both = fn(_ + _)
    assert add_both(2) == 4

    quadratic = fn(_ ** 2 + 2 * _ + 1)
    for arg, expected in [(0, 1), (1, 4)]:
        assert quadratic(arg) == expected
def test_nested_algebraic_expresions(self):
    """Attribute access composes with arithmetic inside fn expressions."""
    total = fn(_.real + _.imag)
    assert total(42) == 42
    assert total(21 + 21j) == 42

    real_fraction = fn(_.real / (_.real + _.imag))
    assert real_fraction(42) == 1.0
    assert real_fraction(21 + 21j) == 0.5

    reciprocal_real = fn(_.real / (_.real * _.real))
    assert reciprocal_real(2) == 0.5
    assert reciprocal_real(2 + 2j) == 0.5
def lexer_from_grammar(grammar: str, functions, token_names=None) -> Lexer:
    """
    Create lexer from an incomplete Lark grammar.

    Args:
        grammar:
            Token declarations in Lark syntax (no start rule required).
        functions:
            Mapping from token name to a callback applied to each token.
        token_names:
            Optional explicit list of token names. When omitted, names are
            extracted from the grammar with :func:`get_tokens`.

    Returns:
        An fn-wrapped function mapping a source string to a token stream.
        The returned function exposes ``grammar`` and ``lexer_callbacks``
        attributes for introspection.

    Raises:
        ValueError: if the token declarations do not form a valid grammar.
    """
    if token_names is None:
        token_names = get_tokens(grammar)
    token_names = " | ".join(token_names)

    # Wrap the bare token declarations in a minimal start rule so Lark
    # accepts the fragment as a complete LALR grammar.
    full_grammar = "start : tk*\ntk : {}\n\n{}".format(token_names, grammar)

    callbacks = {name: token_callback(fn) for name, fn in functions.items()}
    try:
        lark = Lark(full_grammar, parser="lalr", lexer_callbacks=callbacks)
    except UnexpectedToken as exc:
        # Dump the synthesized grammar to help debug the declaration error.
        print("Error creating grammar:")
        print(full_grammar)
        print()
        # Chain the original Lark error so the traceback keeps its context.
        raise ValueError(f"invalid token declarations: {exc}") from exc

    lex_function = lark.lex

    def lex(src):
        return lex_function(src)

    lex.grammar = grammar
    lex.lexer_callbacks = callbacks
    return sk.fn(lex)
def make_lexer(rules, which='auto'):
    """
    A lexer factory.

    This function expects to receive a list of (tok_type, regex) strings and
    returns a function that tokenizes a input string into a sequence of
    tokens.

    Args:
        rules: A list of rules.
        which ('auto', 'ply' or 'simple'): lexer factory type.

    Returns:
        An fn-wrapped function mapping an input string to a list of tokens.

    Raises:
        ValueError: if ``which`` is not one of the recognized factory types.
    """
    if which == 'auto':
        # The default is the ply lexer, unless PLY is not installed.
        # Catch only ImportError: a bare except would also hide unrelated
        # failures inside _import.
        try:
            _import('ply')
            which = 'ply'
        except ImportError:
            which = 'simple'

    if which == 'ply':
        lexer = ply_lexer(rules)
    elif which == 'simple':
        lexer = simple_lexer(rules)
    else:
        raise ValueError('invalid lexer: %r' % which)

    # Eagerly materialize the token stream; wraps() keeps the inner lexer's
    # metadata on the returned callable.
    return fn(wraps(lexer)(lambda expr: list(lexer(expr))))
def test_with_math_operators(self):
    """Arithmetic on the placeholder builds single-argument functions."""
    # Removed leftover debug statement: print(dir(_))
    inc = fn(_ + 1)
    assert inc(1) == 2
    assert inc(2) == 3

    half = fn(_ / 2)
    assert half(2) == 1.0
    assert half(4) == 2.0

    inv = fn(1 / _)
    assert inv(2) == 0.5
    assert inv(0.5) == 2.0

    expr = fn(+(2 * _) + 1)
    assert expr(0) == 1.0
    assert expr(1) == 3.0
def test_fn_preserves_function_attributes(self):
    """Wrapping with fn keeps __name__ and custom attributes intact."""
    # The inner function must be named `foo`: the test asserts __name__.
    def foo(x):
        return x

    foo.attr = 'foo'
    wrapped = fn(foo)
    assert wrapped.__name__ == 'foo'
    assert wrapped.attr == 'foo'
def test_function_application(self):
    """F applies a regular function to placeholder arguments."""
    absolute = fn(F(abs, _))
    assert absolute(-1) == 1
def test_method_call(self):
    """Method calls on the placeholder are deferred until application."""
    bits = fn(_.bit_length())
    for value, expected in [(2, 2), (42, 6)]:
        assert bits(value) == expected
def test_attr_access(self):
    """Attribute access on the placeholder yields a getter function."""
    get_imag = fn(_.imag)
    assert get_imag(1) == 0
    assert get_imag(1j) == 1
def fn_double(self, double):
    """Fixture: the double function wrapped as a sidekick fn."""
    wrapped = fn(double)
    return wrapped
from sidekick import fn from lazyutils import lazy import importlib fn_property = lambda x: property(fn(x)._) fn_lazy = lambda x: lazy(fn(x)._) class LazyModule: """ A lazy module object. """ def __init__(self, name): self.__path = name self.__mod = None def __load(self): self.__mod = importlib.import_module(self.__path) def __getattr__(self, item): if self.__mod is None: self.__load() value = getattr(self.__mod, item) setattr(self, item, value) return value def lazy_module(mod): """ Load a lazy module.
def test_fn_accepts_attribute_assignment(self, g):
    """Attributes set on an fn-wrapped callable can be read back."""
    wrapped = fn(g)
    wrapped.foo = 'foo'
    assert wrapped.foo == 'foo'
def test_fn_partial_function_application(self, g):
    """Indexing an fn object pre-applies leading positional arguments."""
    wrapped = fn(g)
    full = wrapped(1, 2, 3)
    assert full == (1, 2, 3)
    assert wrapped[1](2, 3) == full
    assert wrapped[1, 2](3) == (1, 2, 3)
    assert wrapped[1, 2, 3]() == (1, 2, 3)
def fn_inc(self):
    """Fixture: an fn-wrapped increment function."""
    # Kept as a lambda so the wrapped __name__ stays '<lambda>'.
    increment = lambda x: x + 1
    return fn(increment)
def test_nested_attribute_access(self):
    """Chained attribute access traverses nested records."""
    data = record(foo=record(bar=42))
    assert fn(_.foo.bar == 42)(data) is True
    assert fn(_.foo.bar == 40)(data) is False
    assert fn(_.foo.bar.bit_length())(data) == 6
from lazyutils import lazy
from sidekick import fn


def fn_property(x):
    """Build a property from a sidekick fn-expression.

    Replaces the previous lambda assignment (PEP 8 E731), which also makes
    the noqa markers unnecessary and gives the helpers real names.
    """
    return property(fn(x)._)


def fn_lazy(x):
    """Build a lazyutils lazy attribute from a sidekick fn-expression."""
    return lazy(fn(x)._)
def make_parser(rules, tokens, start=None):
    """
    Alias to ply_parser.
    """
    parser = ply_parser(rules, tokens, start=start)
    return fn(parser)