def test_igraph_to_reference_graph(self):
    graph = references_graph_to_igraph(references_graph)

    nodes_by_path = {node["path"]: node for node in references_graph}

    result = igraph_to_reference_graph(graph)

    self.assertEqual(len(result), len(references_graph))

    pick_preserved_keys = pick_keys([
        "path",
        *reference_graph_node_keys_to_keep
    ])

    for node in result:
        original_node = nodes_by_path[node["path"]]

        self.assertDictEqual(
            pick_preserved_keys(original_node),
            pick_preserved_keys(node)
        )

        # The round trip drops self references, so strip them from the
        # original node before comparing.
        remove_self_ref = tlz.remove(lambda a: a == node["path"])

        self.assertListEqual(
            sorted(node["references"]),
            sorted(remove_self_ref(original_node["references"]))
        )

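# `pick_keys` above is a project helper, not a toolz built-in. A minimal
# sketch of what the test assumes (hypothetical implementation): given a list
# of keys, return a function that projects a dict onto those keys.
import toolz.curried as tlz


@tlz.curry
def pick_keys(keys, d):
    wanted = frozenset(keys)
    return tlz.keyfilter(lambda k: k in wanted, d)
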
def remove_paths(paths, graph):
    # Allow passing a single path.
    if isinstance(paths, str):
        paths = [paths]

    indices_to_remove = tlz.compose(
        list,
        tlz.map(lambda v: v.index),
        tlz.remove(is_None),
        tlz.map(find_vertex_by_name_or_none(graph))
    )(paths)

    return graph - indices_to_remove if len(indices_to_remove) > 0 else graph

def get_children_of(graph, vertex_names):
    return unnest_iterable(map(
        graph.successors,
        tlz.remove(
            is_None,
            map(
                find_vertex_by_name_or_none(graph),
                vertex_names
            )
        )
    ))

def flatten_references_graph(references_graph, pipeline, exclude_paths=None):
    if exclude_paths is not None:
        exclude_paths = frozenset(exclude_paths)
        # Drop excluded nodes, then scrub excluded paths from the
        # remaining nodes' reference lists.
        references_graph = tlz.compose(
            tlz.map(over(
                "references",
                lambda xs: frozenset(xs).difference(exclude_paths)
            )),
            tlz.remove(lambda node: node["path"] in exclude_paths)
        )(references_graph)

    igraph_graph = references_graph_to_igraph(references_graph)

    return create_list_of_lists_of_strings(pipe(pipeline, igraph_graph))

def subcomponent(mode, paths, graph):
    path_indices = tlz.compose(
        tlz.map(attrgetter('index')),
        tlz.remove(is_None),
        tlz.map(find_vertex_by_name_or_none(graph))
    )(paths)

    debug("path_indices", path_indices)

    main_indices = list(subcomponent_multi(graph, path_indices, mode))

    debug('main_indices', main_indices)

    return {
        "main": graph.induced_subgraph(main_indices),
        "rest": graph - main_indices
    }

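# The graph helpers above all lean on `is_None` and
# `find_vertex_by_name_or_none`, which are not shown in this collection.
# A minimal sketch of plausible definitions on top of python-igraph
# (hypothetical; the real project may differ):
import toolz.curried as tlz


def is_None(value):
    # Predicate for tlz.remove: drop lookups that found no vertex.
    return value is None


@tlz.curry
def find_vertex_by_name_or_none(graph, name):
    # igraph's VertexSeq.find raises ValueError for a missing name;
    # translate that into None so pipelines can filter it out.
    try:
        return graph.vs.find(name=name)
    except ValueError:
        return None
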
def read_url_list(yaml_path=TOC_PATH) -> list:
    """
    Generates a flat list of section HTML names from the table of contents.

    The list looks like:

    ['', 'about_this_book.html', 'ch/01/lifecycle_intro.html', ...]
    """
    with open(yaml_path) as f:
        # safe_load avoids arbitrary object construction and works on
        # modern PyYAML, where plain load() requires a Loader argument.
        data = yaml.safe_load(f)

    return t.pipe(
        data,
        t.remove(_not_internal_link),
        flatmap(_flatten_sections),
        t.map(t.get('url')),
        list,
    )

def generate_url_map(yaml_path=TOC_PATH) -> dict:
    """
    Generates a mapping from each URL to its previous and next URLs in the
    textbook. The dictionary looks like:

    {
        'ch/10/some_page.html': {
            'prev': 'ch/09/foo.html',
            'next': 'ch/10/bar.html',
        },
        ...
    }
    """
    with open(yaml_path) as f:
        data = yaml.safe_load(f)

    pipeline = [
        t.remove(_not_internal_link),
        flatmap(_flatten_sections),
        t.map(t.get('url')),
        list,
        _sliding_three,
        t.map(_adj_pages),
        t.merge(),
    ]
    return t.pipe(data, *pipeline)

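# `_sliding_three` and `_adj_pages` are private helpers of this module and
# are not shown here. A plausible sketch, assuming `_sliding_three` pads the
# URL list with None and yields (prev, current, next) windows, and
# `_adj_pages` turns one window into a single-entry dict keyed by the
# current URL:
import toolz.curried as t


def _sliding_three(urls):
    return t.sliding_window(3, [None, *urls, None])


def _adj_pages(window):
    prev, cur, nxt = window
    return {cur: {'prev': prev, 'next': nxt}}
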
def make_lookup(closures):
    return {
        # remove self reference
        node["path"]: over("references", tlz.remove(eq(node["path"])), node)
        for node in closures
    }

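# `over` and `eq` above are project helpers, not toolz built-ins. A minimal
# sketch under that assumption: `eq` is a curried equality predicate and
# `over(key, fn, d)` returns a copy of `d` with `fn` applied to `d[key]`.
import toolz.curried as tlz


@tlz.curry
def eq(a, b):
    return a == b


@tlz.curry
def over(key, fn, d):
    # Non-mutating update: copy the dict and transform a single key.
    return {**d, key: fn(d[key])}
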
def test_toolz_remove(executor):
    actual = executor(remove(lambda x: x % 2 == 1), range(10), npartitions=3)
    assert list(actual) == [0, 2, 4, 6, 8]

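# `toolz.remove` is the complement of `filter`: it keeps the items for which
# the predicate is falsey. A stand-alone check of the behaviour the test
# above expects:
from toolz import remove

assert list(remove(lambda x: x % 2 == 1, range(10))) == [0, 2, 4, 6, 8]
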
    chained(collections.Counter, dict),
    [1, 2, 2, 3, 3, 3, 4, 4, 4],
),
"collections.Counter.elements": (
    chained(collections.Counter, op.methodcaller("elements"), sorted),
    [1, 2, 2, 3, 3, 3, 4, 4, 4, 4],
),
"collections.Counter.most_common": (
    chained(collections.Counter, op.methodcaller("most_common", 2), sorted),
    [1, 2, 2, 3, 3, 3, 4, 4, 4, 4],
),
"set": (chained(set, sorted), [1, 2, 2, 3, 3, 3, 4, 4, 4, 4]),
"map": (chained(curried.map(lambda x: 2 * x), list), [1, 2, 3, 4, 5, 6]),
"filter": (chained(curried.filter(lambda x: x % 2 == 0), list), range(10)),
"remove": (chained(curried.remove(lambda x: x % 2 == 0), list), range(10)),
# example from toolz docs
"pluck-single": (
    chained(curried.pluck("name"), list),
    [{"id": 1, "name": "Cheese"}, {"id": 2, "name": "Pies"}],
),
# example from toolz docs
"pluck-multiple": (chained(curried.pluck([0, 1]), list), [[1, 2, 3], [4, 5, 7]]),
# example from toolz docs

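# `chained` in the table above appears to compose its arguments left to
# right, so chained(f, g)(x) evaluates g(f(x)). It is this test suite's
# helper rather than a toolz built-in; toolz's compose_left is one way to
# get the assumed behaviour:
import collections
import operator as op
from toolz import compose_left as chained

func = chained(collections.Counter, op.methodcaller("most_common", 2), sorted)
print(func([1, 2, 2, 3, 3, 3, 4, 4, 4, 4]))  # [(3, 3), (4, 4)]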