def test_crashing_dynlib(self):
    """Like test_multiple_inputs_in_remote_nm, but C crashes"""
    # A and B live in NM #1; the crashing dynlib app C and its output D
    # live in NM #2.
    g1 = [
        {"oid": "A", "type": "plain", "storage": Categories.MEMORY},
        {"oid": "B", "type": "plain", "storage": Categories.MEMORY},
    ]
    g2 = [
        {
            "oid": "C",
            "type": "app",
            "app": self.app,
            "lib": _libpath,
            "print_stats": print_stats,
            "bufsize": bufsize,
            "crash_and_burn": True,
        },
        {
            "oid": "D",
            "type": "plain",
            "storage": Categories.MEMORY,
            "producers": ["C"],
        },
    ]
    rels = [DROPRel(oid, DROPLinkType.INPUT, "C") for oid in ("A", "B")]
    input_data = os.urandom(32)
    # No output is expected: C crashes, so both C and D must end up failed
    self._test_runGraphInTwoNMs(
        g1,
        g2,
        rels,
        input_data,
        None,
        root_oids=("A", "B"),
        leaf_oid="D",
        expected_failures=("C", "D"),
    )
def test_removeUnmetRelationships(self):
    # The relationships that cannot be met within this graph are:
    #   DROPRel(D, CONSUMER, A)
    #   DROPRel(D, STREAMING_CONSUMER, C)
    #   DROPRel(Z, PRODUCER, A)
    #   DROPRel(X, PRODUCER, A)
    graphDesc = [
        {"oid": "A", "consumers": ["B", "D"], "producers": ["Z", "X"]},
        {"oid": "B", "outputs": ["C"]},
        {"oid": "C", "streamingConsumers": ["D"]},
    ]
    unmet = graph_loader.removeUnmetRelationships(graphDesc)
    self.assertEqual(4, len(unmet))
    for expected_rel in (
        DROPRel("D", DROPLinkType.CONSUMER, "A"),
        DROPRel("D", DROPLinkType.STREAMING_CONSUMER, "C"),
        DROPRel("Z", DROPLinkType.PRODUCER, "A"),
        DROPRel("X", DROPLinkType.PRODUCER, "A"),
    ):
        self.assertIn(expected_rel, unmet)
    # The original dropSpecs are modified in place as well
    a, c = graphDesc[0], graphDesc[2]
    self.assertEqual(1, len(a["consumers"]))
    self.assertEqual("B", a["consumers"][0])
    self.assertFalse("producers" in a)
    self.assertFalse("streamingConsumers" in c)
def test_multiple_inputs_in_remote_nm(self):
    """
    Like the above, but with this graph. In this case two inputs are
    located in a remote Node Manager.

    NM #1      NM #2
    =======    ===============
    | A --|----|-|           |
    |     |    | |-> C --> D |
    | B --|----|-|           |
    =======    ===============
    """
    g1 = [
        {"oid": "A", "type": "plain", "storage": Categories.MEMORY},
        {"oid": "B", "type": "plain", "storage": Categories.MEMORY},
    ]
    g2 = [
        {
            "oid": "C",
            "type": "app",
            "app": self.app,
            "lib": _libpath,
            "print_stats": print_stats,
            "bufsize": bufsize,
        },
        {
            "oid": "D",
            "type": "plain",
            "storage": Categories.MEMORY,
            "producers": ["C"],
        },
    ]
    rels = [DROPRel(oid, DROPLinkType.INPUT, "C") for oid in ("A", "B")]
    input_data = os.urandom(32)
    # C receives the same payload twice, so D holds it duplicated
    self._test_runGraphInTwoNMs(
        g1, g2, rels, input_data, input_data * 2, root_oids=("A", "B"), leaf_oid="D"
    )
def test_runGraphSeveralDropsPerDM(self):
    """
    A test that creates several DROPs in two different DMs and runs the
    graph. The graph looks like this

    DM #1                  DM #2
    ===================    ================
    | A --> C --> D --|----|-|            |
    |                 |    | |--> E --> F |
    | B --------------|----|-|            |
    ===================    ================

    :see: `self.test_runGraphSingleDOPerDOM`
    """
    dm1, dm2 = [self._start_dm() for _ in range(2)]

    sessionId = 's1'
    g1 = [
        {"oid": "A", "type": "plain", "storage": "memory", "consumers": ["C"]},
        {"oid": "B", "type": "plain", "storage": "memory"},
        {"oid": "C", "type": "app", "app": "dlg.apps.crc.CRCApp"},
        {"oid": "D", "type": "plain", "storage": "memory", "producers": ["C"]},
    ]
    g2 = [
        {"oid": "E", "type": "app", "app": "test.test_drop.SumupContainerChecksum"},
        {"oid": "F", "type": "plain", "storage": "memory", "producers": ["E"]},
    ]
    rels = [
        DROPRel('D', DROPLinkType.INPUT, 'E'),
        DROPRel('B', DROPLinkType.INPUT, 'E'),
    ]
    quickDeploy(dm1, sessionId, g1, {nm_conninfo(1): rels})
    quickDeploy(dm2, sessionId, g2, {nm_conninfo(0): rels})
    self.assertEqual(4, len(dm1._sessions[sessionId].drops))
    self.assertEqual(2, len(dm2._sessions[sessionId].drops))

    # Run! The sole fact that this doesn't throw exceptions is already
    # a good proof that everything is working as expected
    a, b, c, d = [dm1._sessions[sessionId].drops[x] for x in ('A', 'B', 'C', 'D')]
    e, f = [dm2._sessions[sessionId].drops[x] for x in ('E', 'F')]
    with droputils.DROPWaiterCtx(self, f, 5):
        a.write(b'a')
        a.setCompleted()
        b.write(b'a')
        b.setCompleted()

    for drop in a, b, c, d, e, f:
        self.assertEqual(
            DROPStates.COMPLETED,
            drop.status,
            "DROP %s is not COMPLETED" % (drop.uid),
        )

    self.assertEqual(a.checksum, int(droputils.allDropContents(d)))
    self.assertEqual(b.checksum + d.checksum, int(droputils.allDropContents(f)))

    dm1.destroySession(sessionId)
    dm2.destroySession(sessionId)
def test_output_in_remote_nm(self):
    """
    Like the above, but with this graph. In this case the output
    (instead of the input) is in a remote Node Manager.

    NM #1            NM #2
    =============    =======
    | A --> B --|----|-> C |
    =============    =======
    """
    g1 = [
        {
            "oid": "A",
            "type": "plain",
            "storage": Categories.MEMORY,
            "consumers": ['B'],
        },
        {
            "oid": "B",
            "type": "app",
            "app": "dfms.apps.pyfunc.PyFuncApp",
            "func_name": __name__ + '.func1',
        },
    ]
    g2 = [{"oid": "C", "type": "plain", "storage": Categories.MEMORY}]
    rels = [DROPRel('B', DROPLinkType.PRODUCER, 'C')]
    payload = os.urandom(32)
    # The PyFunc protocol pickles payloads, so unpickle C before comparing
    c_data = self._test_runGraphInTwoNMs(g1, g2, rels, pickle.dumps(payload), None)
    self.assertEqual(payload, pickle.loads(c_data))
def test_input_in_remote_nm(self):
    """
    A test similar in spirit to TestDM.test_runGraphOneDOPerDom, but
    where application B is a PyFuncApp. This makes sure that PyFuncApp
    work fine across Node Managers.

    NM #1      NM #2
    =======    =============
    | A --|----|-> B --> C |
    =======    =============
    """
    g1 = [{"oid": "A", "type": "plain", "storage": Categories.MEMORY}]
    g2 = [
        {
            "oid": "B",
            "type": "app",
            "app": "dfms.apps.pyfunc.PyFuncApp",
            "func_name": __name__ + '.func1',
        },
        {
            "oid": "C",
            "type": "plain",
            "storage": Categories.MEMORY,
            "producers": ["B"],
        },
    ]
    rels = [DROPRel('A', DROPLinkType.INPUT, 'B')]
    payload = os.urandom(32)
    # The PyFunc protocol pickles payloads, so unpickle C before comparing
    c_data = self._test_runGraphInTwoNMs(g1, g2, rels, pickle.dumps(payload), None)
    self.assertEqual(payload, pickle.loads(c_data))
def _test_runGraphOneDOPerDOM(self, repeats=1):
    # A -> B (CRC) -> C, with A in NM #1 and B/C in NM #2; optionally
    # repeated several times over the same pair of node managers, each
    # run under a fresh, randomly-chosen session id.
    g1 = [memory("A")]
    g2 = [
        {"oid": "B", "type": "app", "app": "dlg.apps.crc.CRCApp"},
        memory("C", producers=["B"]),
    ]
    rels = [DROPRel("B", DROPLinkType.CONSUMER, "A")]
    a_data = os.urandom(32)
    c_data = str(crc32c(a_data, 0)).encode("utf8")

    node_managers = [self._start_dm(threads=self.nm_threads) for _ in range(2)]
    used_ids = [0] * repeats
    for i in range(repeats):
        # Draw a session id not used by a previous repetition
        candidate = 0
        while candidate in used_ids:
            candidate = random.randint(0, 1000)
        used_ids[i] = candidate
        self._test_runGraphInTwoNMs(
            copy.deepcopy(g1),
            copy.deepcopy(g2),
            rels,
            a_data,
            c_data,
            sessionId=f"s{candidate}",
            node_managers=node_managers,
        )
def test_output_in_remote_nm(self):
    """
    Like the above, but with this graph. In this case the output
    (instead of the input) is in a remote Node Manager.

    NM #1            NM #2
    =============    =======
    | A --> B --|----|-> C |
    =============    =======
    """
    g1 = [
        {
            "oid": "A",
            "type": "plain",
            "storage": Categories.MEMORY,
            "consumers": ["B"],
        },
        {
            "oid": "B",
            "type": "app",
            "app": self.app,
            "lib": _libpath,
            "print_stats": print_stats,
            "bufsize": bufsize,
        },
    ]
    g2 = [{"oid": "C", "type": "plain", "storage": Categories.MEMORY}]
    rels = [DROPRel("B", DROPLinkType.PRODUCER, "C")]
    payload = os.urandom(32)
    # B copies its input verbatim, so C must hold exactly the input data
    self._test_runGraphInTwoNMs(g1, g2, rels, payload, payload)
def test_input_in_remote_nm(self):
    """
    A test similar in spirit to TestDM.test_runGraphOneDOPerDom, but
    where application B is a DynlibApp. This makes sure that DynlibApps
    work fine across Node Managers.

    NM #1      NM #2
    =======    =============
    | A --|----|-> B --> C |
    =======    =============
    """
    g1 = [{"oid": "A", "type": "plain", "storage": Categories.MEMORY}]
    g2 = [
        {
            "oid": "B",
            "type": "app",
            "app": self.app,
            "lib": _libpath,
            "print_stats": print_stats,
            "bufsize": bufsize,
        },
        {
            "oid": "C",
            "type": "plain",
            "storage": Categories.MEMORY,
            "producers": ["B"],
        },
    ]
    rels = [DROPRel("A", DROPLinkType.INPUT, "B")]
    payload = os.urandom(32)
    # B copies its input verbatim, so C must hold exactly the input data
    self._test_runGraphInTwoNMs(g1, g2, rels, payload, payload)
def test_run_streaming_consumer_remotely2(self):
    """
    Like above, but C is hosted by DM #2.
    """
    g1 = [
        memory("A"),
        {
            "oid": "B",
            "type": "app",
            "app": "dlg.apps.simple.CopyApp",
            "inputs": ["A"],
        },
    ]
    g2 = [
        memory("C"),
        {
            "oid": "D",
            "type": "app",
            "app": "dlg.apps.crc.CRCStreamApp",
            "streamingInputs": ["C"],
            "outputs": ["E"],
        },
        memory("E"),
    ]
    # B (DM #1) writes its output into C, which lives on DM #2
    rels = [DROPRel("C", DROPLinkType.OUTPUT, "B")]
    a_data = os.urandom(32)
    # E ends up holding the CRC of the streamed data
    e_data = str(crc32c(a_data, 0)).encode("utf8")
    self._test_runGraphInTwoNMs(g1, g2, rels, a_data, e_data, leaf_oid="E")
def _test_runGraphOneDOPerDOM(self, repeats=1):
    # A -> B (CRC) -> C, with A in NM #1 and B/C in NM #2; optionally
    # repeated several times over the same pair of node managers.
    g1 = [{"oid": "A", "type": "plain", "storage": Categories.MEMORY}]
    g2 = [
        {"oid": "B", "type": "app", "app": "dlg.apps.crc.CRCApp"},
        {
            "oid": "C",
            "type": "plain",
            "storage": Categories.MEMORY,
            "producers": ["B"],
        },
    ]
    rels = [DROPRel("B", DROPLinkType.CONSUMER, "A")]
    a_data = os.urandom(32)
    c_data = six.b(str(crc32(a_data, 0)))

    node_managers = [self._start_dm() for _ in range(2)]
    for n in range(repeats):
        # One fresh session per repetition
        sessionId = 's%d' % n
        self._test_runGraphInTwoNMs(
            copy.deepcopy(g1),
            copy.deepcopy(g2),
            rels,
            a_data,
            c_data,
            sessionId=sessionId,
            node_managers=node_managers,
        )
def test_many_relationships(self):
    """
    A test in which a drop is related to many other drops that live in a
    separate DM.

    Drop A is accessed by many applications (B1, B2, .., BN), which should
    not exhaust resources on DM #1. We collapse all into C so we can monitor
    only its status to know that the execution is over.

    DM #1       DM #2
    =======     ====================
    |     |     |--> B1 --|        |
    |     |     |--> B2 --|        |
    | A --|-----|--> B3 --|--> C   |
    |     |     |.........|        |
    |     |     |--> BN --|        |
    =======     ====================
    """
    dm1, dm2 = [self._start_dm() for _ in range(2)]

    sessionId = "s1"
    N = 100
    g1 = [{"oid": "A", "type": "plain", "storage": Categories.MEMORY}]
    g2 = [{"oid": "C", "type": "plain", "storage": Categories.MEMORY}]
    rels = []
    for i in range(N):
        b_oid = "B%d" % (i,)
        # SleepAndCopyApp effectively opens the input drop
        g2.append(
            {
                "oid": b_oid,
                "type": "app",
                "app": "dlg.apps.simple.SleepAndCopyApp",
                "outputs": ["C"],
                "sleepTime": 0,
            }
        )
        rels.append(DROPRel("A", DROPLinkType.INPUT, b_oid))
    add_test_reprodata(g1)
    add_test_reprodata(g2)

    quickDeploy(dm1, sessionId, g1, {nm_conninfo(1): rels})
    quickDeploy(dm2, sessionId, g2, {nm_conninfo(0): rels})
    self.assertEqual(1, len(dm1._sessions[sessionId].drops))
    self.assertEqual(1 + N, len(dm2._sessions[sessionId].drops))

    # Run! The sole fact that this doesn't throw exceptions is already
    # a good proof that everything is working as expected
    a = dm1._sessions[sessionId].drops["A"]
    c = dm2._sessions[sessionId].drops["C"]
    with droputils.DROPWaiterCtx(self, c, 10):
        a.write(b"a")
        a.setCompleted()

    for i in range(N):
        drop = dm2._sessions[sessionId].drops["B%d" % (i,)]
        self.assertEqual(DROPStates.COMPLETED, drop.status)

    dm1.destroySession(sessionId)
    dm2.destroySession(sessionId)
def test_removeUnmetRelationships(self):
    # The relationships that cannot be met within this graph are:
    #   DROPRel(D, CONSUMER, A)
    #   DROPRel(D, STREAMING_CONSUMER, C)
    #   DROPRel(Z, PRODUCER, A)
    #   DROPRel(X, PRODUCER, A)
    graphDesc = [
        {"oid": "A", "consumers": ["B", "D"], "producers": ["Z", "X"]},
        {"oid": "B", "outputs": ["C"]},
        {"oid": "C", "streamingConsumers": ["D"]},
    ]
    unmet = graph_loader.removeUnmetRelationships(graphDesc)
    self.assertEqual(4, len(unmet))
    for expected_rel in (
        DROPRel("D", DROPLinkType.CONSUMER, "A"),
        DROPRel("D", DROPLinkType.STREAMING_CONSUMER, "C"),
        DROPRel("Z", DROPLinkType.PRODUCER, "A"),
        DROPRel("X", DROPLinkType.PRODUCER, "A"),
    ):
        self.assertIn(expected_rel, unmet)
    # The original dropSpecs are modified in place as well: only the
    # intra-graph relationships (A->B, B->C) survive
    a, c = graphDesc[0], graphDesc[2]
    self.assertEqual(1, len(a["consumers"]))
    self.assertEqual("B", a["consumers"][0])
    self.assertFalse("producers" in a and len(a["producers"]) > 0)
    self.assertFalse("streamingConsumers" in c and len(c["streamingConsumers"]) > 0)
def test_run_streaming_consumer_remotely(self):
    """
    A test that checks that a streaming consumer works correctly across
    node managers when its input is in a different node, like this:

    DM #1                 DM #2
    ==================    ==============
    | A --> B --> C -|----|--> D --> E |
    ==================    ==============

    Here B is a normal application and D is a streaming consumer of C.
    We use A and E to compare that all data flows correctly.
    """
    g1 = [
        {"oid": "A", "type": "plain", "storage": Categories.MEMORY},
        {
            "oid": "B",
            "type": "app",
            "app": "dlg.apps.simple.CopyApp",
            "inputs": ["A"],
            "outputs": ["C"],
        },
        {"oid": "C", "type": "plain", "storage": Categories.MEMORY},
    ]
    g2 = [
        {
            "oid": "D",
            "type": "app",
            "app": "dlg.apps.crc.CRCStreamApp",
            "outputs": ["E"],
        },
        {"oid": "E", "type": "plain", "storage": Categories.MEMORY},
    ]
    add_test_reprodata(g1)
    add_test_reprodata(g2)
    # D consumes C's data as it is being streamed from DM #1
    rels = [DROPRel("C", DROPLinkType.STREAMING_INPUT, "D")]
    a_data = os.urandom(32)
    e_data = six.b(str(crc32(a_data, 0)))
    self._test_runGraphInTwoNMs(g1, g2, rels, a_data, e_data, leaf_oid="E")
def test_crashing_dynlib(self):
    """Like test_multiple_inputs_in_remote_nm, but C crashes"""
    # A and B live in NM #1; the crashing dynlib app C and its output D
    # live in NM #2.
    g1 = [
        {"oid": "A", "type": "plain", "storage": "memory"},
        {"oid": "B", "type": "plain", "storage": "memory"},
    ]
    g2 = [
        {
            "oid": "C",
            "type": "app",
            "app": self.app,
            "lib": _libpath,
            "print_stats": print_stats,
            "bufsize": bufsize,
            "crash_and_burn": 1,
        },
        {"oid": "D", "type": "plain", "storage": "memory", "producers": ["C"]},
    ]
    rels = [DROPRel(oid, DROPLinkType.INPUT, 'C') for oid in ('A', 'B')]
    input_data = os.urandom(32)
    # No output is expected: C crashes, so both C and D must end up failed
    self._test_runGraphInTwoNMs(
        g1,
        g2,
        rels,
        input_data,
        None,
        root_oids=('A', 'B'),
        leaf_oid='D',
        expected_failures=('C', 'D'),
    )
def test_run_streaming_consumer_remotely2(self):
    """
    Like above, but C is hosted by DM #2.
    """
    g1 = [
        {"oid": "A", "type": "plain", "storage": Categories.MEMORY},
        {
            "oid": "B",
            "type": "app",
            "app": "dlg.apps.simple.CopyApp",
            "inputs": ["A"],
        },
    ]
    g2 = [
        {"oid": "C", "type": "plain", "storage": Categories.MEMORY},
        {
            "oid": "D",
            "type": "app",
            "app": "dlg.apps.crc.CRCStreamApp",
            "streamingInputs": ["C"],
            "outputs": ["E"],
        },
        {"oid": "E", "type": "plain", "storage": Categories.MEMORY},
    ]
    add_test_reprodata(g1)
    add_test_reprodata(g2)
    # B (DM #1) writes its output into C, which lives on DM #2
    rels = [DROPRel("C", DROPLinkType.OUTPUT, "B")]
    a_data = os.urandom(32)
    # E ends up holding the CRC of the streamed data
    e_data = six.b(str(crc32(a_data, 0)))
    self._test_runGraphInTwoNMs(g1, g2, rels, a_data, e_data, leaf_oid="E")
def test_runGraphOneDOPerDOM(self):
    """
    A test that creates three DROPs in two different DMs and runs the graph.
    For this the graphs that are fed into the DMs must *not* express the
    inter-DM relationships, although they are still passed down
    separately. The graph looks like:

    DM #1      DM #2
    =======    =============
    | A --|----|-> B --> C |
    =======    =============
    """
    g1 = [{"oid": "A", "type": "plain", "storage": "memory"}]
    g2 = [
        {"oid": "B", "type": "app", "app": "dlg.apps.crc.CRCApp"},
        {"oid": "C", "type": "plain", "storage": "memory", "producers": ["B"]},
    ]
    rels = [DROPRel('B', DROPLinkType.CONSUMER, 'A')]
    input_data = os.urandom(32)
    # C must end up holding the CRC of A's data
    expected = six.b(str(crc32(input_data, 0)))
    self._test_runGraphInTwoNMs(g1, g2, rels, input_data, expected)
def test_runGraphSeveralDropsPerDM_with_get_consumer_nodes(self):
    """
    A test that creates several DROPs in two different DMs and runs
    the graph. Checks the node address(s) of the consumers in the second
    DM. The graph looks like this

    DM #1                  DM #2
    ===================    ================
    | A --> C --> D --|----|-| --> E      |
    |                 |    | |            |
    |                 |    | | --> F      |
    ===================    ================

    :see: `self.test_runGraphSeveralDropsPerDM`
    """
    # Distinct, recognizable addresses so we can assert that D reports
    # both of its remote consumers' nodes
    ip_addr_1 = "8.8.8.8"
    ip_addr_2 = "8.8.8.9"
    dm1, dm2 = [self._start_dm() for _ in range(2)]

    sessionId = "s1"
    g1 = [
        {
            "oid": "A",
            "type": "plain",
            "storage": Categories.MEMORY,
            "consumers": ["C"],
        },
        {
            "oid": "C",
            "type": "app",
            "app": "dlg.apps.crc.CRCApp",
            "consumers": ["D"],
        },
        {
            "oid": "D",
            "type": "plain",
            "storage": Categories.MEMORY,
            "producers": ["C"],
        },
    ]
    g2 = [
        {
            "oid": "E",
            "type": "app",
            "app": "test.test_drop.SumupContainerChecksum",
            "node": ip_addr_1,
        },
        {
            "oid": "F",
            "type": "app",
            "app": "test.test_drop.SumupContainerChecksum",
            "node": ip_addr_2,
        },
    ]
    rels = [
        DROPRel("D", DROPLinkType.INPUT, "E"),
        DROPRel("D", DROPLinkType.INPUT, "F"),
    ]
    quickDeploy(dm1, sessionId, g1, {nm_conninfo(1): rels})
    quickDeploy(dm2, sessionId, g2, {nm_conninfo(0): rels})
    self.assertEqual(3, len(dm1._sessions[sessionId].drops))
    self.assertEqual(2, len(dm2._sessions[sessionId].drops))

    # D should know the node addresses of both remote consumers (E and F)
    cons_nodes = dm1._sessions[sessionId].drops["D"].get_consumers_nodes()
    self.assertTrue(ip_addr_1 in cons_nodes)
    self.assertTrue(ip_addr_2 in cons_nodes)

    dm1.destroySession(sessionId)
    dm2.destroySession(sessionId)
def test_runWithFourDMs(self):
    """
    A test that creates several DROPs in two different DMs and  runs the graph.
    The graph looks like this

                     DM #2
                 +--------------------------+
                 |        |--> C --|        |
             +---|--> B --|--> D --|--> F --|--|
             |   |        |--> E --|        |  |
     DM #1   |   +--------------------------+  |   DM #4
    +-----+  |                                 |  +---------------------+
    |     |  |                                 |--|--> L --|            |
    | A --|--+                                    |        |--> N --> O |
    |     |  |                                 |--|--> M --|            |
    +-----+  |   DM #3                         |  +---------------------+
             |   +--------------------------+  |
             |   |        |--> H --|        |  |
             +---|--> G --|--> I --|--> K --|--|
                 |        |--> J --|        |
                 +--------------------------+

    B, F, G, K and N are AppDOs; the rest are plain in-memory DROPs
    """
    dm1, dm2, dm3, dm4 = [self._start_dm() for _ in range(4)]

    sessionId = "s1"
    g1 = [memory("A", expectedSize=1)]
    g2 = [
        sleepAndCopy("B", outputs=["C", "D", "E"], sleepTime=0),
        memory("C"),
        memory("D"),
        memory("E"),
        sleepAndCopy("F", inputs=["C", "D", "E"], sleepTime=0),
    ]
    g3 = [
        sleepAndCopy("G", outputs=["H", "I", "J"], sleepTime=0),
        memory("H"),
        memory("I"),
        memory("J"),
        sleepAndCopy("K", inputs=["H", "I", "J"], sleepTime=0),
    ]
    g4 = [
        memory("L"),
        memory("M"),
        sleepAndCopy("N", inputs=["L", "M"], outputs=["O"], sleepTime=0),
        memory("O"),
    ]

    # One relationship list per pair of DMs that share an edge
    rels_12 = [DROPRel("A", DROPLinkType.INPUT, "B")]
    rels_13 = [DROPRel("A", DROPLinkType.INPUT, "G")]
    rels_24 = [DROPRel("F", DROPLinkType.PRODUCER, "L")]
    rels_34 = [DROPRel("K", DROPLinkType.PRODUCER, "M")]
    quickDeploy(dm1, sessionId, g1, {nm_conninfo(1): rels_12, nm_conninfo(2): rels_13})
    quickDeploy(dm2, sessionId, g2, {nm_conninfo(0): rels_12, nm_conninfo(3): rels_24})
    quickDeploy(dm3, sessionId, g3, {nm_conninfo(0): rels_13, nm_conninfo(3): rels_34})
    quickDeploy(dm4, sessionId, g4, {nm_conninfo(1): rels_24, nm_conninfo(2): rels_34})

    self.assertEqual(1, len(dm1._sessions[sessionId].drops))
    self.assertEqual(5, len(dm2._sessions[sessionId].drops))
    self.assertEqual(5, len(dm3._sessions[sessionId].drops))
    self.assertEqual(4, len(dm4._sessions[sessionId].drops))

    a = dm1._sessions[sessionId].drops["A"]
    o = dm4._sessions[sessionId].drops["O"]
    drops = []
    for dm in (dm1, dm2, dm3, dm4):
        drops += dm._sessions[sessionId].drops.values()

    # Run! This should trigger the full execution of the graph
    with droputils.DROPWaiterCtx(self, o, 5):
        a.write(b"a")

    for drop in drops:
        self.assertEqual(
            DROPStates.COMPLETED,
            drop.status,
            "Status of '%s' is not COMPLETED: %d" % (drop.uid, drop.status),
        )

    for dm in [dm1, dm2, dm3, dm4]:
        dm.destroySession(sessionId)
def test_runWithFourDMs(self):
    """
    A test that creates several DROPs in two different DMs and  runs the graph.
    The graph looks like this

                     DM #2
                 +--------------------------+
                 |        |--> C --|        |
             +---|--> B --|--> D --|--> F --|--|
             |   |        |--> E --|        |  |
     DM #1   |   +--------------------------+  |   DM #4
    +-----+  |                                 |  +---------------------+
    |     |  |                                 |--|--> L --|            |
    | A --|--+                                    |        |--> N --> O |
    |     |  |                                 |--|--> M --|            |
    +-----+  |   DM #3                         |  +---------------------+
             |   +--------------------------+  |
             |   |        |--> H --|        |  |
             +---|--> G --|--> I --|--> K --|--|
                 |        |--> J --|        |
                 +--------------------------+

    B, F, G, K and N are AppDOs; the rest are plain in-memory DROPs
    """
    dm1, dm2, dm3, dm4 = [self._start_dm() for _ in range(4)]

    sessionId = 's1'
    g1 = [memory('A', expectedSize=1)]
    g2 = [
        sleepAndCopy('B', outputs=['C', 'D', 'E'], sleepTime=0),
        memory('C'),
        memory('D'),
        memory('E'),
        sleepAndCopy('F', inputs=['C', 'D', 'E'], sleepTime=0),
    ]
    g3 = [
        sleepAndCopy('G', outputs=['H', 'I', 'J'], sleepTime=0),
        memory('H'),
        memory('I'),
        memory('J'),
        sleepAndCopy('K', inputs=['H', 'I', 'J'], sleepTime=0),
    ]
    g4 = [
        memory('L'),
        memory('M'),
        sleepAndCopy('N', inputs=['L', 'M'], outputs=['O'], sleepTime=0),
        memory('O'),
    ]

    # One relationship list per pair of DMs that share an edge
    rels_12 = [DROPRel('A', DROPLinkType.INPUT, 'B')]
    rels_13 = [DROPRel('A', DROPLinkType.INPUT, 'G')]
    rels_24 = [DROPRel('F', DROPLinkType.PRODUCER, 'L')]
    rels_34 = [DROPRel('K', DROPLinkType.PRODUCER, 'M')]
    quickDeploy(dm1, sessionId, g1, {nm_conninfo(1): rels_12, nm_conninfo(2): rels_13})
    quickDeploy(dm2, sessionId, g2, {nm_conninfo(0): rels_12, nm_conninfo(3): rels_24})
    quickDeploy(dm3, sessionId, g3, {nm_conninfo(0): rels_13, nm_conninfo(3): rels_34})
    quickDeploy(dm4, sessionId, g4, {nm_conninfo(1): rels_24, nm_conninfo(2): rels_34})

    self.assertEqual(1, len(dm1._sessions[sessionId].drops))
    self.assertEqual(5, len(dm2._sessions[sessionId].drops))
    self.assertEqual(5, len(dm3._sessions[sessionId].drops))
    self.assertEqual(4, len(dm4._sessions[sessionId].drops))

    a = dm1._sessions[sessionId].drops['A']
    o = dm4._sessions[sessionId].drops['O']
    drops = []
    for dm in (dm1, dm2, dm3, dm4):
        drops += dm._sessions[sessionId].drops.values()

    # Run! This should trigger the full execution of the graph
    with droputils.DROPWaiterCtx(self, o, 5):
        a.write(b'a')

    for drop in drops:
        self.assertEqual(
            DROPStates.COMPLETED,
            drop.status,
            "Status of '%s' is not COMPLETED: %d" % (drop.uid, drop.status),
        )

    for dm in [dm1, dm2, dm3, dm4]:
        dm.destroySession(sessionId)