def test_sort_args(elements, sort_args, expected_result):
    """Test the sorting function used by the schema."""
    sort = SortArgs()
    sort.keys = sort_args['keys']
    sort.reverse = sort_args['reverse']
    args = {'sort': sort}
    # An exception class as the expected result means the sort must raise it.
    if not isclass(expected_result):
        sort_elements(elements, args)
        # sort_elements mutates the list in place.
        assert elements == expected_result
    else:
        with pytest.raises(expected_result):
            sort_elements(elements, args)
async def get_nodes_all(self, node_type, args):
    """Return nodes from all workflows, filter by args."""
    flows = await self.get_workflows_data(args)
    # Collect every node of the requested type that passes the filter.
    matching_nodes = [
        node
        for flow in flows
        for node in flow.get(node_type).values()
        if node_filter(node, node_type, args)
    ]
    return sort_elements(matching_nodes, args)
async def get_edges_all(self, args):
    """Return edges from all workflows, filter by args."""
    flows = await self.get_workflows_data(args)
    all_edges = []
    # Flatten the per-workflow edge mappings into one list.
    for flow in flows:
        all_edges.extend(flow.get(EDGES).values())
    return sort_elements(all_edges, args)
async def get_nodes_by_ids(self, node_type, args):
    """Return protobuf node objects for given id."""
    nat_ids = set(args.get('native_ids', []))
    # Both cases just as common so 'if' not 'try'
    if 'sub_id' in args and args['delta_store']:
        flow_data = [
            delta[args['delta_type']]
            for delta in get_flow_data_from_ids(
                self.delta_store[args['sub_id']], nat_ids)
        ]
    else:
        flow_data = get_flow_data_from_ids(
            self.data_store_mgr.data, nat_ids)
    # PROXY_NODES is a pseudo-type covering both task and family proxies.
    node_types = (
        [TASK_PROXIES, FAMILY_PROXIES]
        if node_type == PROXY_NODES
        else [node_type]
    )
    selected = []
    for flow in flow_data:
        for n_type in node_types:
            for node in get_data_elements(flow, nat_ids, n_type):
                if node_filter(node, n_type, args):
                    selected.append(node)
    return sort_elements(selected, args)
async def get_nodes_edges(self, root_nodes, args): """Return nodes and edges within a specified distance of root nodes.""" # Initial root node selection. nodes = root_nodes node_ids = set(n.id for n in root_nodes) edges = [] edge_ids = set() # Setup for edgewise search. new_nodes = root_nodes for _ in range(args['distance']): # Gather edges. # Edges should be unique (graph not circular), # but duplicates will be present as node holds all associated. new_edge_ids = set( e_id for n in new_nodes for e_id in n.edges).difference(edge_ids) edge_ids.update(new_edge_ids) new_edges = [ edge for flow in get_flow_data_from_ids( self.data_store_mgr.data, new_edge_ids) for edge in get_data_elements(flow, new_edge_ids, EDGES) ] edges += new_edges # Gather nodes. # One of source or target will be in current set of nodes. new_node_ids = set( [e.source for e in new_edges] + [e.target for e in new_edges]).difference(node_ids) # Stop searching on no new nodes if not new_node_ids: break node_ids.update(new_node_ids) new_nodes = [ node for flow in get_flow_data_from_ids( self.data_store_mgr.data, new_node_ids) for node in get_data_elements(flow, new_node_ids, TASK_PROXIES) ] nodes += new_nodes return NodesEdges( nodes=sort_elements(nodes, args), edges=sort_elements(edges, args))
async def get_edges_by_ids(self, args):
    """Return protobuf edge objects for given id."""
    nat_ids = set(args.get('native_ids', []))
    # Pull from the delta store when a subscription delta is requested,
    # otherwise read straight from the main data store.
    if 'sub_id' in args and args['delta_store']:
        flow_data = [
            delta[args['delta_type']]
            for delta in get_flow_data_from_ids(
                self.delta_store[args['sub_id']], nat_ids)
        ]
    else:
        flow_data = get_flow_data_from_ids(
            self.data_store_mgr.data, nat_ids)
    found_edges = []
    for flow in flow_data:
        found_edges.extend(get_data_elements(flow, nat_ids, EDGES))
    return sort_elements(found_edges, args)
async def get_workflows(self, args):
    """Return workflow elements."""
    flows = await self.get_workflows_data(args)
    workflow_elements = [flow[WORKFLOW] for flow in flows]
    return sort_elements(workflow_elements, args)