def test_normalize_function_dataclass_field_no_repr():
    """A dataclass field with ``repr=False`` must not break tokenization.

    The generated class supplies its own ``__dask_tokenize__``, so tokens
    should still be stable per instance and differ across distinct values.
    """
    klass = dataclasses.make_dataclass(
        "A",
        [("param", float, dataclasses.field(repr=False))],
        namespace={"__dask_tokenize__": lambda self: self.param},
    )
    first = klass(1)
    second = klass(2)
    # Same instance -> same token; different value -> different token.
    assert normalize_function(first) == normalize_function(first)
    assert normalize_function(first) != normalize_function(second)
def test_normalize_function():
    """Plain, lambda, composed, partial'd and curried callables all
    normalize to the documented token shapes."""

    def f1(a, b, c=1):
        pass

    def f2(a, b=1, c=2):
        pass

    def f3(a):
        pass

    cf1 = curry(f1)

    # Plain functions and lambdas tokenize to their str() form.
    assert normalize_function(f2) == str(f2)
    f = lambda a: a
    assert normalize_function(f) == str(f)

    # compose of a partial keeps per-component (func, args, kwargs) tokens.
    comp = compose(partial(f2, b=2), f3)
    expected_comp = ((str(f2), (), (("b", 2),)), str(f3))
    assert normalize_function(comp) == expected_comp

    # curry: bound positional and keyword args appear in the token.
    assert normalize_function(cf1) == (str(f1), (), ())
    assert normalize_function(cf1(2, c=2)) == (str(f1), (2,), (("c", 2),))

    # normalize_token dispatches to normalize_function for curried objects.
    assert normalize_token(cf1) == normalize_function(cf1)
def test_normalize_function_limited_size():
    """The function-token cache stays bounded while many distinct
    callables are normalized (each lambda here is a new object)."""
    for _ in range(1000):
        normalize_function(lambda x: x)
    # Cache should have been trimmed: well below 1000, but not emptied.
    assert 50 < len(function_cache) < 600
def test_normalize_function():
    """Equality/inequality of tokens for toolz partial, compose and curry."""

    def f1(a, b, c=1):
        pass

    def f2(a, b=1, c=2):
        pass

    def f3(a):
        pass

    assert normalize_function(f2)
    assert normalize_function(lambda a: a)

    # Pairs expected to produce identical tokens.
    matching = [
        (tz.partial(f2, b=2), tz.partial(f2, b=2)),
        (tz.compose(f2, f3), tz.compose(f2, f3)),
        (tz.curry(f2), tz.curry(f2)),
        (tz.curry(f2, b=1), tz.curry(f2, b=1)),
    ]
    # Pairs expected to produce distinct tokens.
    differing = [
        (tz.partial(f2, b=2), tz.partial(f2, b=3)),
        (tz.partial(f1, b=2), tz.partial(f2, b=2)),
        (tz.compose(f2, f3), tz.compose(f2, f1)),
        (tz.curry(f2), tz.curry(f1)),
        (tz.curry(f2, b=1), tz.curry(f2, b=2)),
    ]

    for left, right in matching:
        assert normalize_function(left) == normalize_function(right)
    for left, right in differing:
        assert normalize_function(left) != normalize_function(right)
def test_normalize_function():
    """Tokens for wrapped callables are stable and discriminate between
    different functions and different bound arguments."""
    assert normalize_function(f2)
    assert normalize_function(lambda a: a)

    def tokens_match(left, right):
        # True when both callables normalize to the same token.
        return normalize_function(left) == normalize_function(right)

    assert tokens_match(tz.partial(f2, b=2), tz.partial(f2, b=2))
    assert not tokens_match(tz.partial(f2, b=2), tz.partial(f2, b=3))
    assert not tokens_match(tz.partial(f1, b=2), tz.partial(f2, b=2))
    assert tokens_match(tz.compose(f2, f3), tz.compose(f2, f3))
    assert not tokens_match(tz.compose(f2, f3), tz.compose(f2, f1))
    assert tokens_match(tz.curry(f2), tz.curry(f2))
    assert not tokens_match(tz.curry(f2), tz.curry(f1))
    assert tokens_match(tz.curry(f2, b=1), tz.curry(f2, b=1))
    assert not tokens_match(tz.curry(f2, b=1), tz.curry(f2, b=2))
def test_normalize_function():
    """Table-driven check of token (in)equality across toolz wrappers."""
    assert normalize_function(f2)
    assert normalize_function(lambda a: a)

    # (left, right, should_be_equal) — in the original assertion order.
    cases = [
        (tz.partial(f2, b=2), tz.partial(f2, b=2), True),
        (tz.partial(f2, b=2), tz.partial(f2, b=3), False),
        (tz.partial(f1, b=2), tz.partial(f2, b=2), False),
        (tz.compose(f2, f3), tz.compose(f2, f3), True),
        (tz.compose(f2, f3), tz.compose(f2, f1), False),
        (tz.curry(f2), tz.curry(f2), True),
        (tz.curry(f2), tz.curry(f1), False),
        (tz.curry(f2, b=1), tz.curry(f2, b=1), True),
        (tz.curry(f2, b=1), tz.curry(f2, b=2), False),
    ]
    for left, right, should_be_equal in cases:
        equal = normalize_function(left) == normalize_function(right)
        assert equal is should_be_equal
def __dask_tokenize__(self):
    """Deterministic token for this object: the normalized wrapped
    function plus its input/output arity attributes."""
    func_token = normalize_function(self._func)
    return (func_token, self.nin, self.nout)