def testInitialAssignsWithInputs(self):
    import numpy as np
    from mars.tensor.random import TensorRandint
    from mars.tensor.arithmetic import TensorTreeAdd

    n1 = TensorRandint(state=np.random.RandomState(0),
                       dtype=np.float32()).new_chunk(None, shape=(10, 10))
    n2 = TensorRandint(state=np.random.RandomState(1),
                       dtype=np.float32()).new_chunk(None, shape=(10, 10))
    n3 = TensorTreeAdd(dtype=np.float32()).new_chunk(None, shape=(10, 10))
    n3.op._inputs = [n1, n2]
    n4 = TensorTreeAdd(dtype=np.float32()).new_chunk(None, shape=(10, 10))
    n4.op._inputs = [n3]

    # n2 feeds n3 but is deliberately left out of the graph,
    # so it must be collected as an external input chunk of n3's operand
    graph = DAG()
    graph.add_node(n1)
    graph.add_node(n3)
    graph.add_node(n4)
    graph.add_edge(n1, n3)
    graph.add_edge(n3, n4)

    analyzer = GraphAnalyzer(graph, {})
    ext_chunks = analyzer.collect_external_input_chunks(initial=False)
    self.assertListEqual(ext_chunks[n3.op.key], [n2.key])
    self.assertEqual(
        len(analyzer.collect_external_input_chunks(initial=True)), 0)
@staticmethod
def _build_chunk_dag(node_str, edge_str):
    import numpy as np
    from mars.tensor.random import TensorRandint
    from mars.tensor.arithmetic import TensorTreeAdd

    # build a lightweight DAG of single-character node names from the spec strings
    char_dag = DAG()
    for s in node_str.split(','):
        char_dag.add_node(s.strip())
    for s in edge_str.split(','):
        l, r = s.split('->')
        char_dag.add_edge(l.strip(), r.strip())

    # translate every character node into a chunk: nodes with predecessors
    # become TreeAdd chunks, source nodes become Randint chunks
    chunk_dag = DAG()
    str_to_chunk = dict()
    for s in char_dag.topological_iter():
        if char_dag.count_predecessors(s):
            c = TensorTreeAdd(args=[], _key=s, dtype=np.dtype(np.float32())).new_chunk(
                None, shape=(10, 10)).data
            inputs = c.op._inputs = [str_to_chunk[ps] for ps in char_dag.predecessors(s)]
        else:
            c = TensorRandint(_key=s, dtype=np.dtype(np.float32())).new_chunk(
                None, shape=(10, 10)).data
            inputs = []
        str_to_chunk[s] = c
        chunk_dag.add_node(c)
        for inp in inputs:
            chunk_dag.add_edge(inp, c)
    return chunk_dag, str_to_chunk
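# Illustrative usage sketch (not part of the original suite): the hypothetical test
# below shows how the _build_chunk_dag helper above turns a textual node/edge spec
# into a chunk DAG that GraphAnalyzer can assign. The method name, the 'a'..'d'
# topology and the worker sizes are made up for illustration; the assertions assume
# that only initial (source) operands receive assignments, as the other tests in
# this class suggest.
def testBuildChunkDagUsageSketch(self):
    # diamond-ish DAG: 'a' and 'b' feed 'c', 'c' feeds 'd'
    chunk_dag, str_to_chunk = self._build_chunk_dag(
        'a, b, c, d', 'a -> c, b -> c, c -> d')
    self.assertEqual(len(str_to_chunk), 4)

    analyzer = GraphAnalyzer(chunk_dag, dict(w1=24, w2=24))
    assignments = analyzer.calc_operand_assignments(
        analyzer.get_initial_operand_keys())
    # only the source (TensorRandint) operands 'a' and 'b' are initial operands,
    # so only they should show up in the assignment result
    self.assertSetEqual(set(assignments), {'a', 'b'})
    self.assertTrue(all(w in ('w1', 'w2') for w in assignments.values()))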
def testSameKeyAssign(self):
    import numpy as np
    from mars.tensor.random import TensorRandint
    from mars.tensor.arithmetic import TensorTreeAdd

    graph = DAG()
    r"""
    Proper initial allocation should divide the graph like

    U   U  |  U   U  |  U   U
    |   |  |  |   |  |  |   |
    |   |  |  |   |  |  |   |
    U   U  |  U   U  |  U   U
    """
    inputs = [
        tuple(TensorRandint(_key=str(i), dtype=np.float32()).new_chunk(None, shape=(10, 10))
              for _ in range(2))
        for i in range(6)
    ]
    results = [
        TensorTreeAdd(dtype=np.float32()).new_chunk(None, shape=(10, 10))
        for _ in range(6)
    ]
    for inp, r in zip(inputs, results):
        r.op._inputs = list(inp)

        graph.add_node(r)
        for n in inp:
            graph.add_node(n)
            graph.add_edge(n, r)

    analyzer = GraphAnalyzer(graph, dict(w1=24, w2=24, w3=24))
    assignments = analyzer.calc_operand_assignments(
        analyzer.get_initial_operand_keys())
    self.assertEqual(len(assignments), 6)
def testAssignOnWorkerLost(self):
    import numpy as np
    from mars.scheduler import OperandState
    from mars.tensor.random import TensorRandint
    from mars.tensor.arithmetic import TensorTreeAdd

    graph = DAG()
    r"""
    Proper initial allocation should divide the graph like

    FL  FL  F   F   R   R  |  FL  FL  F   F   R   R
    | x |   | x |   | x |  |  | x |   | x |   | x |
    R   R   R   R   U   U  |  R   R   R   R   U   U

    U: UNSCHEDULED  F: FINISHED  R: READY  L: LOST
    """
    op_states = dict()
    inputs = [
        tuple(TensorRandint(dtype=np.float32()).new_chunk(None, shape=(10, 10))
              for _ in range(2))
        for _ in range(6)
    ]
    results = [
        tuple(TensorTreeAdd(_key=f'{i}_{j}', dtype=np.float32()).new_chunk(None, shape=(10, 10))
              for j in range(2))
        for i in range(6)
    ]
    for inp, outp in zip(inputs, results):
        for o in outp:
            o.op._inputs = list(inp)
            op_states[o.op.key] = OperandState.UNSCHEDULED
            graph.add_node(o)

        for n in inp:
            op_states[n.op.key] = OperandState.UNSCHEDULED
            graph.add_node(n)
            for o in outp:
                graph.add_edge(n, o)

    fixed_assigns = dict()
    for idx in range(4):
        for i in range(2):
            fixed_assigns[inputs[idx][i].op.key] = f'w{idx % 2 + 1}'
            op_states[inputs[idx][i].op.key] = OperandState.FINISHED
            fixed_assigns[results[idx][i].op.key] = f'w{idx % 2 + 1}'
            op_states[results[idx][i].op.key] = OperandState.READY

    for inp in inputs:
        for n in inp:
            if n.op.key in fixed_assigns:
                continue
            op_states[n.op.key] = OperandState.READY

    lost_chunks = [c.key for inp in (inputs[0], inputs[2]) for c in inp]

    worker_metrics = dict(w2=24, w3=24)
    analyzer = GraphAnalyzer(graph, worker_metrics, fixed_assigns, op_states, lost_chunks)
    changed_states = analyzer.analyze_state_changes()

    self.assertEqual(len(changed_states), 8)
    self.assertTrue(all(changed_states[c.op.key] == OperandState.READY
                        for inp in (inputs[0], inputs[2]) for c in inp))
    self.assertTrue(all(changed_states[c.op.key] == OperandState.UNSCHEDULED
                        for res in (results[0], results[2]) for c in res))

    assignments = analyzer.calc_operand_assignments(
        analyzer.get_initial_operand_keys())
    for inp in inputs:
        if any(n.op.key in fixed_assigns for n in inp):
            continue
        self.assertEqual(1, len(set(assignments[n.op.key] for n in inp)))

    worker_assigns = dict((k, 0) for k in worker_metrics)
    for w in assignments.values():
        worker_assigns[w] += 1
    self.assertEqual(2, worker_assigns['w2'])
    self.assertEqual(6, worker_assigns['w3'])
def testAssignWithPreviousData(self):
    import numpy as np
    from mars.scheduler.chunkmeta import WorkerMeta
    from mars.tensor.random import TensorRandint
    from mars.tensor.arithmetic import TensorTreeAdd

    graph = DAG()
    r"""
    Proper initial allocation should divide the graph like

    U   U  |  U   U  |  U   U
     \ /   |   \ /   |   \ /
      U    |    U    |    U
    """
    inputs = [
        tuple(TensorRandint(_key=str(i * 2 + j), dtype=np.float32()).new_chunk(None, shape=(10, 10))
              for j in range(2))
        for i in range(3)
    ]
    results = [
        TensorTreeAdd(dtype=np.float32()).new_chunk(None, shape=(10, 10))
        for _ in range(3)
    ]
    for inp, r in zip(inputs, results):
        r.op._inputs = list(inp)

        graph.add_node(r)
        for n in inp:
            graph.add_node(n)
            graph.add_edge(n, r)

    # assign with partial mismatch
    data_dist = {
        '0': dict(c00=WorkerMeta(chunk_size=5, workers=('w1',)),
                  c01=WorkerMeta(chunk_size=5, workers=('w2',))),
        '1': dict(c10=WorkerMeta(chunk_size=10, workers=('w1',))),
        '2': dict(c20=WorkerMeta(chunk_size=10, workers=('w3',))),
        '3': dict(c30=WorkerMeta(chunk_size=10, workers=('w3',))),
        '4': dict(c40=WorkerMeta(chunk_size=7, workers=('w3',))),
    }
    analyzer = GraphAnalyzer(graph, dict(w1=24, w2=24, w3=24))
    assignments = analyzer.calc_operand_assignments(
        analyzer.get_initial_operand_keys(), input_chunk_metas=data_dist)
    self.assertEqual(len(assignments), 6)
    # explanation of the result:
    #  '1' is assigned to w1 because all of its data is on w1
    #  '0' is assigned to w1 according to connectivity
    #  '2' and '3' are assigned to w3 according to connectivity
    #  '4' is assigned to w2 because it holds less data and the slots of w3 are used up
    self.assertEqual(assignments['0'], 'w1')
    self.assertEqual(assignments['1'], 'w1')
    self.assertEqual(assignments['2'], 'w3')
    self.assertEqual(assignments['3'], 'w3')
    self.assertEqual(assignments['4'], 'w2')
    self.assertEqual(assignments['5'], 'w2')

    # assign with full mismatch
    data_dist = {
        '0': dict(c00=WorkerMeta(chunk_size=5, workers=('w1',)),
                  c01=WorkerMeta(chunk_size=5, workers=('w1', 'w2'))),
        '1': dict(c10=WorkerMeta(chunk_size=10, workers=('w1',))),
        '2': dict(c20=WorkerMeta(chunk_size=10, workers=('w3',))),
        '3': dict(c30=WorkerMeta(chunk_size=10, workers=('w3',))),
        '4': dict(c40=WorkerMeta(chunk_size=7, workers=('w2',))),
        '5': dict(c50=WorkerMeta(chunk_size=7, workers=('w2',))),
    }
    analyzer = GraphAnalyzer(graph, dict(w1=24, w2=24, w3=24))
    assignments = analyzer.calc_operand_assignments(
        analyzer.get_initial_operand_keys(), input_chunk_metas=data_dist)
    self.assertEqual(len(assignments), 6)
    self.assertEqual(assignments['0'], 'w1')
    self.assertEqual(assignments['1'], 'w1')
    self.assertEqual(assignments['2'], 'w3')
    self.assertEqual(assignments['3'], 'w3')
    self.assertEqual(assignments['4'], 'w2')
    self.assertEqual(assignments['5'], 'w2')
def testAssignOnWorkerAdd(self):
    import numpy as np
    from mars.scheduler import OperandState
    from mars.tensor.random import TensorRandint
    from mars.tensor.arithmetic import TensorTreeAdd

    graph = DAG()
    r"""
    Proper initial allocation should divide the graph like

    F   F   R   R  |  F   F   R   R  |  R   R   R   R
    | x |   | x |  |  | x |   | x |  |  | x |   | x |
    R   R   U   U  |  R   R   U   U  |  U   U   U   U

    U: UNSCHEDULED  F: FINISHED  R: READY
    """
    inputs = [
        tuple(TensorRandint(dtype=np.float32()).new_chunk(None, shape=(10, 10))
              for _ in range(2))
        for _ in range(6)
    ]
    results = [
        tuple(TensorTreeAdd(_key='%d_%d' % (i, j), dtype=np.float32()).new_chunk(None, shape=(10, 10))
              for j in range(2))
        for i in range(6)
    ]
    for inp, outp in zip(inputs, results):
        for o in outp:
            o.op._inputs = list(inp)
            graph.add_node(o)

        for n in inp:
            graph.add_node(n)
            for o in outp:
                graph.add_edge(n, o)

    # mark initial assigns
    fixed_assigns = dict()
    op_states = dict()
    for idx in range(2):
        for i in range(2):
            fixed_assigns[inputs[idx][i].op.key] = 'w%d' % (idx + 1)
            op_states[results[idx][i].op.key] = OperandState.READY
            fixed_assigns[results[idx][i].op.key] = 'w%d' % (idx + 1)

    for inp in inputs:
        for n in inp:
            if n.op.key in fixed_assigns:
                continue
            op_states[n.op.key] = OperandState.READY

    worker_metrics = dict(w1=24, w2=24, w3=24)
    analyzer = GraphAnalyzer(graph, worker_metrics, fixed_assigns, op_states)
    assignments = analyzer.calc_operand_assignments(
        analyzer.get_initial_operand_keys())
    for inp in inputs:
        if any(n.op.key in fixed_assigns for n in inp):
            continue
        self.assertEqual(1, len(set(assignments[n.op.key] for n in inp)))

    worker_assigns = dict((k, 0) for k in worker_metrics)
    for w in assignments.values():
        worker_assigns[w] += 1
    self.assertEqual(2, worker_assigns['w1'])
    self.assertEqual(2, worker_assigns['w2'])
    self.assertEqual(4, worker_assigns['w3'])