def python_pvector(iterable=()):
    """
    Create a new persistent vector containing the elements in iterable.

    >>> v1 = pvector([1, 2, 3])
    >>> v1
    pvector([1, 2, 3])
    """
    return _EMPTY_PVECTOR.extend(iterable)


try:
    # Use the C extension as underlying trie implementation if it is available
    import os
    if os.environ.get('PYRSISTENT_NO_C_EXTENSION'):
        pvector = python_pvector
    else:
        from pvectorc import pvector
        PVector.register(type(pvector()))
except ImportError:
    pvector = python_pvector


def v(*elements):
    """
    Create a new persistent vector containing all parameters to this function.

    >>> v1 = v(1, 2, 3)
    >>> v1
    pvector([1, 2, 3])
    """
    return pvector(elements)
""" Create a new persistent vector containing the elements in iterable. >>> v1 = pvector([1, 2, 3]) >>> v1 pvector([1, 2, 3]) """ return _EMPTY_PVECTOR.extend(iterable) try: # Use the C extension as underlying trie implementation if it is available import os if os.environ.get('PYRSISTENT_NO_C_EXTENSION'): pvector = python_pvector else: from pvectorc import pvector PVector.register(type(pvector())) except ImportError: pvector = python_pvector def v(*elements): """ Create a new persistent vector containing all parameters to this function. >>> v1 = v(1, 2, 3) >>> v1 pvector([1, 2, 3]) """ return pvector(elements)
def discovering_func(search_nodes: List[Union[PGMStartSearchNode, PGMSearchNode]],
                     args: PGMBeamSearchArgs) -> List[PGMSearchNode]:
    global _logger
    next_nodes: List[PGMSearchNode] = []
    merged_plans = []

    if isinstance(search_nodes[0], PGMStartSearchNode):
        # can only have one starter node
        search_node = search_nodes[0]
        G_explorers: Dict[bytes, GraphExplorer] = {}
        G_terminals: Dict[bytes, Graph] = {}
        G_scored: Dict[bytes, float] = {}

        # create a graph & graph explorer for each terminal
        for terminal in search_node.remained_terminals:
            g: Graph = Graph(index_node_type=True, index_node_label=True)
            g.add_new_node(GraphNodeType.DATA_NODE, terminal)
            G_terminals[terminal] = g
            G_scored[terminal] = 1
            G_explorers[terminal] = args.graph_explorer_builder.build(g)

        search_node.G_terminals = pmap(G_terminals)
        search_node.G_scored = pmap(G_scored)
        search_node.G_explorers = pmap(G_explorers)
        search_node.remained_terminals = pvector(search_node.remained_terminals)

        # find all possible merge points between every terminal pair & release them as terminal nodes
        # TOO EXPENSIVE
        # for T_i, T_j in (
        #         tuple(c)
        #         for c in unique_values(frozenset(c) if c[0] != c[1] else c
        #                                for c in combinations(search_node.remained_terminals, 2))):
        #     G_ti, G_tj = G_terminals[T_i], G_terminals[T_j]
        for T_i in args.top_attributes:
            for T_j in search_node.remained_terminals:
                if T_i == T_j:
                    continue

                G_ti, G_tj = G_terminals[T_i], G_terminals[T_j]
                merged_plans += [(T_i, T_j, plan, search_node,
                                  MergeGraph.create(G_ti, G_tj, plan.int_tree, plan.int_a, plan.int_b))
                                 for plan in py_make_plan4case1(G_ti, G_tj, G_explorers[T_i], G_explorers[T_j])]

        # filter to speed things up: keep only merged graphs with exactly 3 nodes
        # (the good result is usually two data nodes connected to one single class node)
        merged_plans = [x for x in merged_plans if x[-1].get_n_nodes() == 3]
    else:
        for search_node in search_nodes:
            T_i = search_node.working_terminal
            G_ti_explorer = search_node.G_explorers[T_i]
            G_ti = search_node.G_terminals[T_i]

            for T_j in unique_values(search_node.remained_terminals):
                G_tj = search_node.G_terminals[T_j]
                merged_plans += [(T_i, T_j, plan, search_node,
                                  MergeGraph.create(G_ti, G_tj, plan.int_tree, plan.int_a, plan.int_b))
                                 for plan in make_merge_plans(G_ti, G_tj, G_ti_explorer, search_node.G_explorers[T_j])]

    if args.pre_filter_func is not None:
        n_next_states = len(merged_plans)
        filtered_merged_plans = []
        for merged_plan in merged_plans:
            if args.pre_filter_func(merged_plan[-1]):
                filtered_merged_plans.append(merged_plan)
        merged_plans = filtered_merged_plans
        _logger.debug("(%s) #possible next states: %s (filtered down to: %s)",
                      args.source_id, n_next_states, len(merged_plans))
    else:
        _logger.debug("(%s) #possible next states: %s", args.source_id, len(merged_plans))

    # score every candidate merged graph in one batch and keep the best ones
    merged_graphs = [x[-1] for x in merged_plans]
    merged_probs = args.predict_graph_prob_func(merged_graphs)

    best_plans = sorted(
        zip(merged_plans, merged_graphs, merged_probs), key=lambda x: x[-1], reverse=True)[:args.beam_width]

    need_remove_T_i: bool = isinstance(search_nodes[0], PGMStartSearchNode)
    for merged_plan, merged_graph, score in best_plans:
        T_i, T_j, __, search_node, __ = merged_plan
        working_terminal = b'%b---%b' % (T_i, T_j)
        remained_terminals = search_node.remained_terminals.remove(T_j)
        if need_remove_T_i:
            remained_terminals = remained_terminals.remove(T_i)

        g: Graph = merged_graph.proceed_merging()
        current_G_explorers = search_node.G_explorers.set(working_terminal, args.graph_explorer_builder.build(g))
        current_G_terminals = search_node.G_terminals.set(working_terminal, g)
        current_G_scored = search_node.G_scored.set(working_terminal, score)

        next_nodes.append(
            PGMSearchNode(args.get_and_increment_id(), args, working_terminal, remained_terminals,
                          current_G_explorers, current_G_terminals, current_G_scored))

    return next_nodes
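

# For reference, a minimal generic sketch of the beam-search expansion pattern
# that discovering_func implements: expand every frontier node into candidate
# states, score all candidates in one batch (mirroring
# args.predict_graph_prob_func), and keep only the top `beam_width` of them.
# The names below (beam_search_step, expand, score_batch) are hypothetical
# stand-ins for illustration only, not part of this project's API.
def beam_search_step(frontier, expand, score_batch, beam_width):
    # collect every candidate successor of every node currently on the frontier
    candidates = [child for node in frontier for child in expand(node)]
    # score all candidates at once, then rank them from best to worst
    ranked = sorted(zip(candidates, score_batch(candidates)),
                    key=lambda x: x[1], reverse=True)
    # keep only the highest-scoring states, like the [:args.beam_width] slice above
    return [state for state, _ in ranked[:beam_width]]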