def test_find_index():
    seq = ({"key": 1}, {"key": 2}, {"key": 3}, {"key": 2})
    assert (
        functional_generic.find_index(
            functional_generic.compose_left(
                dict_utils.itemgetter("key"),
                functional.equals(2),
            ),
        )(iter(seq))
        == 1
    )
    assert (
        functional_generic.find_index(
            functional_generic.compose_left(
                dict_utils.itemgetter("key"),
                functional.equals(4),
            ),
        )(iter(seq))
        == -1
    )
def _do_on_positions(f, predicate: Callable[[int], bool]):
    return functional_generic.compose_left(
        enumerate,
        functional_generic.curried_map(
            sync.ternary(
                functional_generic.compose_left(functional.head, predicate),
                functional_generic.compose_left(functional.second, f),
                functional.second,
            ),
        ),
    )
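# A usage sketch for `_do_on_positions` (hypothetical, for illustration only):
# applies `f` to the elements whose position satisfies the predicate, leaving
# the rest untouched. Here, upper-case the elements at even positions.
# >>> tuple(_do_on_positions(str.upper, lambda i: i % 2 == 0)("abcd"))
# ('A', 'b', 'C', 'd')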
def groupby_many(f: Callable, it: Iterable) -> Dict[Text, Any]:
    """Return a mapping `{y: {x s.t. y in f(x)}}`, where `x` is in `it`.

    Parameters:
    f: Key function (gets an object in the collection and outputs a tuple of keys).
    it: A collection.

    Returns a dictionary where each key has been computed by the key function `f`.

    >>> names = ['alice', 'bob', 'charlie', 'dan', 'edith', 'frank']
    >>> groupby_many(lambda name: (name[0], name[-1]), names)
    {'a': frozenset({'alice'}),
     'e': frozenset({'alice', 'charlie', 'edith'}),
     'b': frozenset({'bob'}),
     'c': frozenset({'charlie'}),
     'd': frozenset({'dan'}),
     'n': frozenset({'dan'}),
     'h': frozenset({'edith'}),
     'f': frozenset({'frank'}),
     'k': frozenset({'frank'})}
    """
    return functional_generic.pipe(
        it,
        functional_generic.mapcat(
            functional_generic.compose_left(
                lambda element: (f(element), [element]),
                sync.star(itertools.product),
            ),
        ),
        edges_to_graph,
    )
async def atraverse_graph_by_radius(
    source: Any,
    aget_neighbors: Callable[[Any], AsyncGenerator],
    radius: int,
    key: Callable = functional.identity,
) -> AsyncGenerator[Any, None]:
    """BFS over a graph from a single source node, given an async function to
    get a node's neighbours; returns an AsyncGenerator of unique nodes.
    Does not traverse farther than the given `radius`.

    >>> g = {'1': ['2', '3'], '2': ['3'], '3': ['4'], '4': []}
    >>> async def get_item(x):
    ...     for a in g.get(x):
    ...         yield a
    >>> async def to_list(ag):
    ...     return [i async for i in ag]
    >>> gamla.run_sync(to_list(gamla.atraverse_graph_by_radius('1', get_item, 1)))
    ['1', '2', '3']
    """

    async def get_neighbors_limiting_radius(
        current_and_distance: Tuple[Text, int],
    ) -> AsyncGenerator[Tuple[Any, int], None]:
        current, distance = current_and_distance
        if distance < radius:
            async for neighbor in aget_neighbors(current):
                yield neighbor, distance + 1

    async for s in agraph_traverse(
        source=(source, 0),
        aget_neighbors=get_neighbors_limiting_radius,
        key=functional_generic.compose_left(functional.head, key),
    ):
        yield functional.head(s)
def explode(*positions: Collection[int]):
    """Flattens a non-homogeneous iterable.

    For an iterable where some positions hold iterables and some do not,
    "explodes" the iterable so that each inner element appears in its own row,
    duplicating the non-iterable elements.

    >>> functional_generic.pipe(
    ...     ["x", ["y1", "y2", "y3"], "z"],
    ...     data.explode(1),
    ...     tuple,
    ... )
    (('x', 'y1', 'z'), ('x', 'y2', 'z'), ('x', 'y3', 'z'))
    """
    return functional_generic.compose_left(
        _do_on_positions(
            functional.wrap_tuple,
            functional_generic.complement(functional.contains(positions)),
        ),
        sync.star(itertools.product),
    )
async def _async_graph_traverse_many_inner(
    seen: Set[_Node],
    get_neighbors: Callable[[_Node], Awaitable[Iterable[_Node]]],
    process_node: Callable[[_Node], None],
    roots: Iterable[_Node],
):
    assert set(roots).isdisjoint(seen)
    # Mark the current layer as seen and process each of its nodes.
    functional_generic.pipe(
        roots,
        functional_generic.juxt(
            seen.update,
            functional_generic.compose_left(
                functional_generic.curried_map(process_node),
                tuple,
            ),
        ),
    )
    # Recurse into the next BFS layer: all not-yet-seen neighbors of the current roots.
    await functional_generic.pipe(
        roots,
        functional_generic.mapcat(get_neighbors),
        functional_generic.remove(functional.contains(seen)),
        frozenset,
        functional_generic.unless(
            functional.empty,
            lambda next_roots: _async_graph_traverse_many_inner(
                seen,
                get_neighbors,
                process_node,
                next_roots,
            ),
        ),
    )
def groupby_many(keys: Callable[[Any], Iterable], reducer: Reducer, initial):
    """Given a `keys` function that maps an element to multiple keys, transduces
    the collection into a dictionary of key to group of matching elements.

    >>> transducer.transduce(
    ...     transducer.groupby_many(
    ...         lambda x: ("even",) if x % 2 == 0 else ("odd",),
    ...         lambda s, x: (*s, x),
    ...         (),
    ...     ),
    ...     lambda s, _: s,
    ...     {},
    ...     [1, 2, 3, 4, 5],
    ... )
    {"even": (2, 4), "odd": (1, 3, 5)}
    """
    return functional_generic.compose(
        mapcat(
            functional_generic.compose_left(
                functional_generic.juxt(keys, functional.wrap_tuple),
                sync.star(itertools.product),
            ),
        ),
        lambda step: lambda s, x: step(
            toolz.assoc(s, x[0], reducer(s.get(x[0], initial), x[1])),
            x,
        ),
    )
def groupby(key: Callable[[Any], Any], reducer: Reducer, initial):
    """Like `groupby_many`, just with a key function that returns a single element."""
    return groupby_many(
        functional_generic.compose_left(key, functional.wrap_tuple),
        reducer,
        initial,
    )
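# A usage sketch, mirroring the `groupby_many` example above with a
# single-key function:
# >>> transducer.transduce(
#     transducer.groupby(
#         lambda x: "even" if x % 2 == 0 else "odd",
#         lambda s, x: (*s, x),
#         (),
#     ),
#     lambda s, _: s,
#     {},
#     [1, 2, 3, 4, 5],
# )
# {"even": (2, 4), "odd": (1, 3, 5)}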
def test_side_effect():
    side_result = []
    side_effect = functional_generic.compose_left(
        functional.multiply(2),
        side_result.append,
    )
    assert functional_generic.side_effect(side_effect)(2) == 2
    assert side_result == [4]
async def test_side_effect_async():
    side_result = []
    side_effect = functional_generic.compose_left(
        currying.curry(_equals)(2),
        side_result.append,
    )
    assert await functional_generic.side_effect(side_effect)(2) == 2
    assert side_result == [True]
def test_async_compositions_have_name():
    async def async_identity(x):
        await asyncio.sleep(1)
        return x

    assert (
        functional_generic.compose_left(
            functional.identity,
            async_identity,
            functional.unique,
        ).__name__
        == "unique_OF_async_identity_OF_identity"
    )
async def test_async_graph_traverse_many():
    graph = {1: (1, 2, 3, 5), 2: (4,), 3: (1, 2)}
    res = []
    await graph_async.async_graph_traverse_many(
        functional_generic.compose_left(
            dict_utils.dict_to_getter_with_default((), graph),
            async_functions.to_awaitable,
        ),
        res.append,
        [1],
    )
    assert sorted(res) == [1, 2, 3, 4, 5]
def graph_traverse_many(
    sources: Any,
    get_neighbors: Callable,
    key: Callable = functional.identity,
) -> Iterable:
    """BFS over a graph, given a function to get a node's neighbours, starting
    from multiple sources; returns an iterator of unique nodes.

    >>> g = {'1': ['2', '3'], '2': ['3'], '3': ['4'], '4': []}
    >>> list(graph_traverse_many(['1', '3'], g.__getitem__))
    ['3', '1', '4', '2']

    Note: `get_neighbors` must return elements without duplicates."""
    seen_set: Set = set()
    remember = functional_generic.compose_left(key, seen_set.add)
    should_traverse = functional_generic.compose_left(
        key,
        functional_generic.complement(functional.contains(seen_set)),
    )
    yield from general_graph_traverse_many(
        sources,
        get_neighbors,
        remember,
        should_traverse,
    )
def _get_children(element):
    return functional_generic.case_dict(
        {
            _is_terminal: functional.just(()),
            functional.is_instance(tuple): functional.identity,
            functional.is_instance(list): functional.identity,
            functional.is_instance(dict): functional_generic.compose_left(
                dict.items,
                functional.curried_map_sync(sync.star(KeyValue)),
            ),
            functional.is_instance(KeyValue): functional_generic.compose_left(
                lambda x: x.value,
                sync.ternary(_is_terminal, functional.wrap_tuple, _get_children),
            ),
        },
    )(element)
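# A sketch of what `_get_children` yields per node type (assuming `KeyValue`
# is this module's key/value pair type and that numbers are terminal):
# >>> _get_children((1, 2))
# (1, 2)
# >>> _get_children(KeyValue("a", 5))
# (5,)
# >>> tuple(_get_children({"a": 5}))
# (KeyValue(key='a', value=5),)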
def get_leaves_by_ancestor_predicate(predicate: Callable):
    """Gets a predicate and builds a function that takes a (potentially nested)
    dictionary and returns an iterable of leaf values. Only leaves where some
    ancestor (possibly indirect) passes the predicate are returned.

    >>> gamla.pipe({"x": {"y": (1, 2, 3)}}, gamla.get_leaves_by_ancestor_predicate(gamla.equals("x")), tuple)
    (1, 2, 3)
    >>> gamla.pipe({"x": {"y": (1, 2, 3)}}, gamla.get_leaves_by_ancestor_predicate(gamla.equals("z")), tuple)
    ()

    Useful for retrieving values from large JSON objects where the exact path is unimportant.
    """
    return functional_generic.compose_left(
        tree_reduce(_get_children, _get_anywhere_reducer(predicate)),
        _get_matched,
    )
def make_index(
    steps: Iterable[Callable[[Iterable], Dict]],
) -> Callable[[Iterable], Any]:
    """Builds an index with an arbitrary number of steps from an iterable.

    >>> index = dict_utils.make_index(map(gamla.groupby, [gamla.head, gamla.second]))(["uri", "dani"])
    >>> index("d")("a")
    frozenset({'dani'})
    """
    steps = tuple(steps)
    if not steps:
        return frozenset
    return functional_generic.compose_left(
        functional.head(steps),
        functional_generic.valmap(make_index(steps[1:])),
        lambda d: lambda x: d.get(
            x,
            _return_after_n_calls(len(steps) - 1, frozenset()),
        ),
    )
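# A conceptual sketch of the index shape built above (hypothetical data): with
# steps [groupby(head), groupby(second)], ["uri", "dani"] becomes, roughly,
# {"u": {"r": frozenset({"uri"})}, "d": {"a": frozenset({"dani"})}},
# wrapped in getters that keep returning callables (and finally an empty
# frozenset) when a key is missing at any level.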
def tuple_of_tuples_to_csv(
    tuple_of_tuples: Tuple[Tuple[Any, ...], ...],
    separator: Text = "\t",
) -> Text:
    """Return a CSV-formatted string given a tuple of tuples.

    Elements are separated by `separator` (default is a tab).

    >>> tuple_of_tuples_to_csv((("name", "age"), ("David", "23"), ("Itay", "26")))
    'name\\tage\\nDavid\\t23\\nItay\\t26'
    >>> tuple_of_tuples_to_csv((("name", "age"), ("David", "23"), ("Itay", "26")), " ")
    'name age\\nDavid 23\\nItay 26'
    """
    return functional_generic.pipe(
        tuple_of_tuples,
        functional_generic.curried_map(
            functional_generic.compose_left(
                functional_generic.curried_map(str),
                tuple,
                separator.join,
            ),
        ),
        "\n".join,
    )
_MATCHED = "matched"
_UNMATCHED = "unmatched"
_get_matched = dict_utils.itemgetter(_MATCHED)
_get_unmatched = dict_utils.itemgetter(_UNMATCHED)


def _make_matched_unmatched(matched, unmatched):
    return {_MATCHED: matched, _UNMATCHED: unmatched}


_merge_children_as_matched = functional_generic.compose_left(
    sync.mapcat(sync.juxtcat(_get_matched, _get_unmatched)),
    tuple,
    functional_generic.pair_right(functional.just(())),
    sync.star(_make_matched_unmatched),
)

_merge_children = sync.compose_left(
    functional_generic.bifurcate(
        sync.compose_left(sync.mapcat(_get_matched), tuple),
        sync.compose_left(sync.mapcat(_get_unmatched), tuple),
    ),
    sync.star(_make_matched_unmatched),
)
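# Usage sketches with hypothetical values: `_merge_children_as_matched` moves
# everything into the matched bucket, while `_merge_children` keeps the two
# buckets apart.
# >>> _merge_children_as_matched([
#     {"matched": (1,), "unmatched": (2,)},
#     {"matched": (), "unmatched": (3,)},
# ])
# {'matched': (1, 2, 3), 'unmatched': ()}
# >>> _merge_children([
#     {"matched": (1,), "unmatched": (2,)},
#     {"matched": (), "unmatched": (3,)},
# ])
# {'matched': (1,), 'unmatched': (2, 3)}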
def test_compositions_have_name():
    assert (
        functional_generic.compose_left(
            functional.identity,
            functional.identity,
            functional.unique,
        ).__name__
        == "unique_OF_identity_OF_identity"
    )
#: Gets an iterable of edges and returns a graph (a mapping from each node to
#: a `frozenset` of the nodes it points to).
#:
#: >>> edges_to_graph([(1, 2), (2, 3), (3, 1), (3, 2)])
#: {1: frozenset({2}), 2: frozenset({3}), 3: frozenset({1, 2})}
edges_to_graph = functional_generic.compose(
    functional_generic.valmap(
        functional_generic.compose(
            frozenset,
            functional_generic.curried_map(functional.second),
        ),
    ),
    sync.groupby(functional.head),
)

#: Gets a graph and returns an iterator of all edges in it.
#:
#: >>> list(graph_to_edges({'1': ['2', '3'], '2': ['3'], '3': ['4'], '4': []}))
#: [('1', '2'), ('1', '3'), ('2', '3'), ('3', '4')]
graph_to_edges = functional_generic.compose_left(
    sync.keymap(functional.wrap_tuple),
    dict.items,
    sync.mapcat(sync.star(itertools.product)),
)

#: Gets a graph and returns the graph with its edges reversed.
#:
#: >>> reverse_graph({'1': ['2', '3'], '2': ['3'], '3': ['4'], '4': []})
#: {'2': frozenset({'1'}), '3': frozenset({'1', '2'}), '4': frozenset({'3'})}
reverse_graph = functional_generic.compose_left(
    graph_to_edges,
    functional_generic.curried_map(
        functional_generic.compose_left(reversed, tuple),
    ),
    edges_to_graph,
)

#: Gets a sequence of nodes (cliques) and returns the bidirectional graph they represent.
def test_try_and_excepts_with_exception():
    assert excepts_decorator.try_and_excepts(
        SomeException,
        functional_generic.compose_left(functional.pack, functional.identity),
        functional.make_raise(SomeException),
    )(1) == (SomeException(), 1)
#: Combines transducers in a `dict` into a transducer that produces a `dict`.
#:
#: >>> transducer.transduce(
#:     transducer.apply_spec(  # This will combine the inner transducers into one new transducer.
#:         {
#:             "incremented": _increment(_append_to_tuple),  # This is a transducer.
#:             "sum": lambda s, x: x + s,  # This is another transducer.
#:         },
#:     ),
#:     lambda s, _: s,
#:     {"incremented": (), "sum": 0},
#:     [1, 2, 3],
#: )
#: {"incremented": (2, 3, 4), "sum": 6}
apply_spec = functional_generic.compose_left(
    dict.items,
    sync.map(sync.star(_transform_by_key(toolz.assoc))),
    sync.star(functional_generic.compose),
)

#: Combines transducers in a `tuple` into a transducer that produces a `tuple`.
#:
#: >>> transducer.transduce(
#:     transducer.juxt(  # This will combine the inner transducers into one new transducer.
#:         _increment(_append_to_tuple),  # This is a transducer.
#:         lambda s, x: x + s,  # This is another transducer.
#:     ),
#:     lambda s, _: s,
#:     [(), 0],
#:     [1, 2, 3],
#: )
#: ((2, 3, 4), 6)
juxt = functional_generic.compose_left(
def log_text(text: Text, level: int = logging.INFO):
    """Logs `text`, formatted with the piped value, and returns the value unchanged.

    >>> gamla.pipe("It's me!", gamla.log_text("hello world {}"))
    INFO hello world It's me!
    "It's me!"
    """
    return functional_generic.side_effect(lambda x: logging.log(level, text.format(x)))


def _is_generator(iterable):
    return hasattr(iterable, "__iter__") and not hasattr(iterable, "__len__")


#: A util to inspect a pipeline by opening a debug prompt.
#: Note:
#: - Materializes generators (since most of the time we want to look inside them), which can have unexpected results.
#: - The current value can be referenced by `x` in the debug prompt.
debug = functional_generic.compose_left(
    functional_generic.when(_is_generator, tuple),
    functional_generic.side_effect(lambda x: builtins.breakpoint()),
)

debug_after = functional_generic.after(debug)
debug_before = functional_generic.before(debug)


def _debug_generic(f):
    return functional_generic.compose_left(
        lambda *funcs: toolz.interleave([funcs, [debug] * len(funcs)]),
        sync.star(f),
    )


#: Replace regular `compose` calls with this to get a breakpoint at each step.
debug_compose = _debug_generic(functional_generic.compose)
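# A usage sketch for `debug_compose` (hypothetical pipeline; it opens an
# interactive prompt, so it is not written as a doctest): a breakpoint fires at
# each step of the composition, with the current value bound to `x`.
# debug_compose(lambda x: x * 2, lambda x: x + 1)(3)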