# NOTE: this excerpt assumes the module-level names used below (PE, PARALLEL_GATEWAYS,
# STREAM, MAIN_STREAM, P_STREAM, P, blend(), streams_for_parallel(), get_nodes_dict(),
# exceptions and the Queue module) are imported or defined elsewhere in this module.


def flowing(where, to, parallel_converges):
    """
    mark targets' stream from the current node

    :param where: the node whose outgoing flows are being processed
    :param to: dict of all nodes in the pipeline, keyed by node id
    :param parallel_converges: dict mapping converge gateway id -> allowed parallel streams
    :return:
    """
    is_parallel = where[PE.type] in PARALLEL_GATEWAYS

    stream = None
    if is_parallel:
        # add the parallel gateway's own stream to its converge gateway
        parallel_converge = to[where[PE.converge_gateway_id]]
        blend(source=where,
              target=parallel_converge,
              custom_stream=stream)

        if len(parallel_converge[STREAM]) > 1:
            raise exceptions.StreamValidateError(node_id=where[PE.converge_gateway_id])

    # flow to targets
    for i, target_id in enumerate(where[PE.target]):
        target = to[target_id]
        fake = False

        # a parallel gateway generates a different stream for each outgoing branch
        if is_parallel:
            stream = '%s_%s' % (where[PE.id], i)

        if target_id in parallel_converges:
            is_valid_branch = where[STREAM].issubset(parallel_converges[target_id][P_STREAM])
            is_direct_connect = where.get(PE.converge_gateway_id) == target_id

            if is_valid_branch or is_direct_connect:
                # do not flow when a branch of a parallel gateway reaches its converge gateway
                fake = True

        if not fake:
            blend(source=where,
                  target=target,
                  custom_stream=stream)

        # sanity check: every target must end up with exactly one stream
        if len(target[STREAM]) != 1:
            raise exceptions.StreamValidateError(node_id=target_id)
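# A minimal sketch of the blend() helper that flowing() relies on, under the
# assumption that it either stamps an explicit stream label onto the target or
# propagates the source's single stream into the target's stream set.  The real
# blend() lives elsewhere in this module; _blend_sketch is a hypothetical name
# used only for illustration.
def _blend_sketch(source, target, custom_stream=None):
    if custom_stream:
        # an explicit label is handed out, e.g. one per parallel branch
        target.setdefault(STREAM, set()).add(custom_stream)
        return

    # an ordinary node must itself carry exactly one stream before it can flow on
    if len(source[STREAM]) != 1:
        raise exceptions.StreamValidateError(node_id=source[PE.id])

    # propagate the source's stream into the target's stream set
    target.setdefault(STREAM, set()).update(source[STREAM])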
def validate_stream(tree):
    """
    validate flow stream

    :param tree: pipeline tree
    :return: the nodes dict, with each node's stream reduced to a single label
    """
    # data preparation
    start_event_id = tree[PE.start_event][PE.id]
    end_event_id = tree[PE.end_event][PE.id]
    nodes = get_nodes_dict(tree)
    nodes[start_event_id][STREAM] = {MAIN_STREAM}
    nodes[end_event_id][STREAM] = {MAIN_STREAM}

    parallel_converges = {}
    visited = set()

    for nid, node in nodes.items():
        node.setdefault(STREAM, set())

        # set allowed streams for each parallel gateway's converge gateway
        if node[PE.type] in PARALLEL_GATEWAYS:
            parallel_converges[node[PE.converge_gateway_id]] = {
                P_STREAM: streams_for_parallel(node),
                P: nid
            }

    # build streams from the start event with a BFS over the flow graph
    node_queue = Queue.Queue()
    node_queue.put(nodes[start_event_id])
    while not node_queue.empty():
        # get node
        node = node_queue.get()

        if node[PE.id] in visited:
            # flow again to validate the stream, but do not add targets to the queue
            flowing(where=node, to=nodes, parallel_converges=parallel_converges)
            continue

        # add targets to queue
        for target_id in node[PE.target]:
            node_queue.put(nodes[target_id])

        # mark as visited
        visited.add(node[PE.id])

        # flow
        flowing(where=node,
                to=nodes,
                parallel_converges=parallel_converges)

    # data clean: every node must have exactly one stream
    for nid, n in nodes.items():
        if len(n[STREAM]) != 1:
            raise exceptions.StreamValidateError(node_id=nid)

        # replace the set with its single stream label
        n[STREAM] = n[STREAM].pop()

    # isolated node check
    for __, node in nodes.items():
        if not node[STREAM]:
            raise exceptions.IsolateNodeError()

    return nodes
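# Illustrative sketch only (not part of this module): the labelling scheme that
# validate_stream() enforces, shown on a hand-built toy graph
# start -> pg -> (a | b) -> cg -> end, where "pg" is a parallel gateway and "cg"
# its converge gateway.  The node names and the _toy_streams() helper are
# hypothetical and exist purely to make the stream labelling concrete.
def _toy_streams():
    streams = {'start': 'main_stream'}
    # an ordinary node inherits its predecessor's stream
    streams['pg'] = streams['start']
    # the parallel gateway hands each outgoing branch its own stream label ...
    streams['a'] = 'pg_0'
    streams['b'] = 'pg_1'
    # ... but passes its own stream straight to its converge gateway
    streams['cg'] = streams['pg']
    streams['end'] = streams['cg']
    return streams


if __name__ == '__main__':
    # after flowing, every node carries exactly one stream label
    print(_toy_streams())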