def test_fake_results(self):
    then_graph_nodes = {**valued_const_with_data('fake_const', int64_array(0)),
                        **regular_op_with_empty_data('shapeof', {'kind': 'op', 'type': 'ShapeOf', 'op': 'ShapeOf',
                                                                 'infer': Shape.infer, 'output_type': np.int64}),
                        **regular_op_with_empty_data('res_1', {'kind': 'op', 'type': 'Result', 'op': 'Result',
                                                               'infer': lambda x: 0, 'output_id': 0})}
    then_graph_edges = [*connect('fake_const', 'shapeof'),
                        *connect('shapeof', 'res_1')]

    else_graph_nodes = {**regular_op_with_empty_data('param_1', {'type': 'Parameter', 'kind': 'op', 'input_id': 1,
                                                                 'shape': None, 'infer': Parameter.infer}),
                        **regular_op_with_empty_data('res_1', {'kind': 'op', 'type': 'Result', 'op': 'Result',
                                                               'infer': lambda x: 0, 'output_id': 0})}
    else_graph_edges = [*connect('param_1', 'res_1')]
    then_graph = build_graph_with_edge_attrs(then_graph_nodes, then_graph_edges)
    else_graph = build_graph_with_edge_attrs(else_graph_nodes, else_graph_edges)

    external_graph_nodes = {
        **valued_const_with_data('cond', np.array([True], dtype=bool)),
        **valued_const_with_data('input_1', int64_array([[1, 2, 3], [3, 2, 3]])),
        **regular_op_with_empty_data('if', {'kind': 'op', 'op': 'If', 'then_graph': then_graph,
                                            'else_graph': else_graph, 'infer': If.infer}),
        **result('res_1')}
    external_graph_edges = [*connect('cond', '0:if'),
                            *connect('input_1', '1:if'),
                            *connect('if', 'res_1')]

    graph = build_graph(external_graph_nodes, external_graph_edges)
    graph.stage = 'middle'
    partial_infer(graph)

    res_1 = Node(graph, 'res_1')
    npt.assert_array_equal(res_1.in_port(0).data.get_shape(), int64_array([2, 3]))
def test_concat_edges_reshaffle(self):
    graph = build_graph_with_edge_attrs(
        {'axis': {},
         'input_1': {},
         'input_2': {},
         'input_3': {},
         'concat': {'op': 'Concat', 'simple_concat': True, 'axis': 1},
         },
        [('axis', 'concat', {'in': 0}),
         ('input_1', 'concat', {'in': 1}),
         ('input_2', 'concat', {'in': 2}),
         ('input_3', 'concat', {'in': 3})],
    )
    Concat().find_and_replace_pattern(graph=graph)

    for u, v, attrs in graph.in_edges('concat', data=True):
        if attrs['in'] == 0:
            self.assertEqual(u, 'input_1')
        if attrs['in'] == 1:
            self.assertEqual(u, 'input_2')
        if attrs['in'] == 2:
            self.assertEqual(u, 'input_3')
        if attrs['in'] == 3:
            self.assertEqual(u, 'axis')
    self.assertTrue('axis' not in graph.node['concat'])
def test_assert_cf_false(self):
    me_mock = Mock()
    nodes = {'input_data': {'name': 'input', 'kind': 'data', 'executable': True},
             'assert': {'name': 'assert', 'type': 'Assert', 'value': None, 'kind': 'op', 'op': 'Assert'},
             'assert_data': {'name': 'output', 'value': False, 'kind': 'data', 'executable': True}}
    edges = [('input_data', 'assert', {'in': 0}),
             ('assert', 'assert_data', {'out': 0, 'control_flow_edge': False})]
    graph = build_graph_with_edge_attrs(nodes, edges)
    tested_class = Assert(graph=graph, attrs={})

    node = Node(graph, 'assert')
    tested_class.assert_control_flow_infer(node=node, is_executable=True, mark_executability=me_mock)
    me_mock.assert_called_once_with('assert_data', False)
def test_hsigmoid_with_relu_mul_different_tensors(self):
    graph = build_graph_with_edge_attrs(
        {**regular_op('input', {'type': 'Parameter'}),
         **regular_op('input_2', {'type': 'Parameter'}),
         **regular_op('add', {'op': 'Add'}),
         **regular_op('max', {'op': 'Maximum'}),
         **regular_op('min', {'op': 'Minimum'}),
         **regular_op('mul', {'op': 'Mul'}),
         **regular_op('mul_2', {'op': 'Mul', 'name': 'final_mul'}),
         **const('const_0', float_array([0.0])),
         **const('const_3', float_array([3.0])),
         **const('const_6', float_array([6.0])),
         **const('const_1_6', float_array([1.0 / 6.0])),
         **result('result'),
         },
        [('input_2', 'mul', {'in': 1, 'out': 0}),
         ('input', 'add', {'in': 0, 'out': 0}),
         ('const_3', 'add', {'in': 1, 'out': 0}),
         ('add', 'max', {'in': 0, 'out': 0}),
         ('const_0', 'max', {'in': 1, 'out': 0}),
         ('max', 'min', {'in': 0, 'out': 0}),
         ('const_6', 'min', {'in': 1, 'out': 0}),
         ('min', 'mul', {'in': 0, 'out': 0}),
         ('mul', 'mul_2', {'in': 0, 'out': 0}),
         ('const_1_6', 'mul_2', {'in': 1, 'out': 0}),
         ('mul_2', 'result', {'in': 0, 'out': 0})])

    graph_ref = graph.copy()
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_backward_bfs_for_op_closest_op_detected(self):
    """
    input -> hsigmoid_1 -> hsigmoid_2 -> result
    The returned op should be the first HSigmoid met, which is hsigmoid_2
    """
    nodes = {**regular_op('input', {'op': 'Parameter'}),
             **regular_op('hsigmoid_1', {'op': 'HSigmoid'}),
             **regular_op('hsigmoid_2', {'op': 'HSigmoid'}),
             **result('result'),
             }
    edges = [('input', 'hsigmoid_1', {'out': 0, 'in': 0}),
             ('hsigmoid_1', 'hsigmoid_2', {'out': 0, 'in': 0}),
             ('hsigmoid_2', 'result', {'out': 0, 'in': 0}),
             ]

    graph = build_graph_with_edge_attrs(nodes, edges)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['HSigmoid'])
    self.assertEqual(len(found_nodes), 1)
    self.assertEqual(found_nodes[0].id, 'hsigmoid_2')
def test_backward_bfs_for_op_parallel_branch_op_detected(self):
    r"""
    input_1 -> hsigmoid_1 -> hsigmoid_2 ->
                                           \
                                             Concat -> result
                                           /
    input_2 -> hsigmoid_3 -> hsigmoid_4 ->

    The returned ops should be the first HSigmoids met, which are hsigmoid_2 and hsigmoid_4
    """
    nodes = {**regular_op('input_1', {'op': 'Parameter'}),
             **regular_op('hsigmoid_1', {'op': 'HSigmoid'}),
             **regular_op('hsigmoid_2', {'op': 'HSigmoid'}),
             **regular_op('input_2', {'op': 'Parameter'}),
             **regular_op('hsigmoid_3', {'op': 'HSigmoid'}),
             **regular_op('hsigmoid_4', {'op': 'HSigmoid'}),
             **regular_op('concat', {'op': 'Concat'}),
             **result('result'),
             }
    edges = [('input_1', 'hsigmoid_1', {'out': 0, 'in': 0}),
             ('hsigmoid_1', 'hsigmoid_2', {'out': 0, 'in': 0}),
             ('hsigmoid_2', 'concat', {'out': 0, 'in': 0}),
             ('input_2', 'hsigmoid_3', {'out': 0, 'in': 0}),
             ('hsigmoid_3', 'hsigmoid_4', {'out': 0, 'in': 0}),
             ('hsigmoid_4', 'concat', {'out': 0, 'in': 1}),
             ('concat', 'result', {'out': 0, 'in': 0}),
             ]

    graph = build_graph_with_edge_attrs(nodes, edges)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['HSigmoid'])
    self.assertEqual(len(found_nodes), 2)
    self.assertSetEqual({found_nodes[0].id, found_nodes[1].id}, {'hsigmoid_2', 'hsigmoid_4'})
def test_backward_bfs_for_op_parallel_branch_stop_op(self):
    r"""
    input_1 -> hsigmoid_1 -> hsigmoid_2 ->
                                           \
                                             Concat -> result
                                           /
    input_2 -> hsigmoid_3 -> ShapeOf    ->

    The returned op should be the first HSigmoid met, which is hsigmoid_2, but not hsigmoid_3,
    because it is located behind a banned operation of type "ShapeOf"
    """
    nodes = {**regular_op('input_1', {'op': 'Parameter'}),
             **regular_op('hsigmoid_1', {'op': 'HSigmoid'}),
             **regular_op('hsigmoid_2', {'op': 'HSigmoid'}),
             **regular_op('input_2', {'op': 'Parameter'}),
             **regular_op('hsigmoid_3', {'op': 'HSigmoid'}),
             **regular_op('shapeof', {'op': 'ShapeOf'}),
             **regular_op('concat', {'op': 'Concat'}),
             **result('result'),
             }
    edges = [('input_1', 'hsigmoid_1', {'out': 0, 'in': 0}),
             ('hsigmoid_1', 'hsigmoid_2', {'out': 0, 'in': 0}),
             ('hsigmoid_2', 'concat', {'out': 0, 'in': 0}),
             ('input_2', 'hsigmoid_3', {'out': 0, 'in': 0}),
             ('hsigmoid_3', 'shapeof', {'out': 0, 'in': 0}),
             ('shapeof', 'concat', {'out': 0, 'in': 1}),
             ('concat', 'result', {'out': 0, 'in': 0}),
             ]

    graph = build_graph_with_edge_attrs(nodes, edges)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['HSigmoid'], ['ShapeOf'])
    self.assertEqual(len(found_nodes), 1)
    self.assertEqual(found_nodes[0].id, 'hsigmoid_2')
def test_positive(self):
    graph = build_graph_with_edge_attrs(
        {'iter_get_next': {'kind': 'op', 'op': 'IteratorGetNext', 'shapes': int64_array([[2, 2], [1, 1]]),
                           'types': [None, None]},
         'sub': {'kind': 'op', 'op': 'Sub'},
         'add': {'kind': 'op', 'op': 'Add'}
         },
        [('iter_get_next', 'sub', {'out': 0, 'in': 0}),
         ('iter_get_next', 'add', {'out': 1, 'in': 0})])

    inputs_desc = {}
    message = InputsAnalysis.iterator_get_next_analysis(graph, inputs_desc)
    ref_message = 'It looks like there is IteratorGetNext as input\n' \
                  'Run the Model Optimizer without --input option \n' \
                  'Otherwise, try to run the Model Optimizer with:\n\t\t--input "iter_get_next:0[2 2],iter_get_next:1[1 1]"\n'
    self.assertEqual(message, ref_message)
def test_swish_with_sigmoid_without_beta_different_tensors(self):
    graph = build_graph_with_edge_attrs(
        {**regular_op('input', {'type': 'Parameter'}),
         **regular_op('input_2', {'type': 'Parameter'}),
         **regular_op('sigmoid', {'op': 'Sigmoid'}),
         **regular_op('mul', {'op': 'Mul', 'name': 'final_mul'}),
         **result('result'),
         },
        [('input_2', 'mul', {'in': 0, 'out': 0}),
         ('input', 'sigmoid', {'in': 0, 'out': 0}),
         ('sigmoid', 'mul', {'in': 1, 'out': 0}),
         ('mul', 'result', {'in': 0, 'out': 0})], {})

    graph_ref = graph.copy()
    graph.stage = 'front'

    SwishWithSigmoidWithoutBeta().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_negative(self):
    graph = build_graph_with_edge_attrs(
        {'placeholder': {'kind': 'op', 'op': 'Parameter'},
         'sub': {'kind': 'op', 'op': 'Sub'},
         'add': {'kind': 'op', 'op': 'Add'}
         },
        [('placeholder', 'sub', {'out': 0, 'in': 0}),
         ('placeholder', 'add', {'out': 0, 'in': 0})])

    inputs_desc = {}
    message = InputsAnalysis.iterator_get_next_analysis(graph, inputs_desc)
    self.assertEqual(message, None)
def test_leaky_relu_mul_multiple_consumers(self):
    # multiple consumers of Mul operation
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    additional_result = Result(graph, {'name': 'result_2'}).create_node()
    Node(graph, 'mul').out_port(0).connect(additional_result.in_port(0))

    ref_nodes = {**regular_op_with_shaped_data('input', shape, {'type': 'Parameter', 'op': 'Parameter'}),
                 **regular_op_with_shaped_data('mul', shape, {'type': 'Multiply', 'name': 'mul'}),
                 **regular_op_with_shaped_data('max', shape, {'type': 'Maximum', 'name': 'final_max'}),
                 **valued_const_with_data('const', float_array([0.5])),
                 **regular_op_with_shaped_data('leaky_relu', shape, {'type': 'LeakyReLU', 'name': 'max_final',
                                                                     'negative_slope': None}),
                 **result('result'),
                 **result('result_2')
                 }
    ref_edges = [*connect('input:0', '0:mul'),
                 *connect('const', '1:mul'),
                 *connect('max:0', 'result'),
                 *connect('mul:0', 'result_2'),
                 *connect_data('input', 'leaky_relu'),
                 *connect('leaky_relu', 'result')]
    graph_ref = build_graph_with_edge_attrs(ref_nodes, ref_edges)

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result_2')
    self.assertTrue(flag, resp)
def test_axpy(self):
    nodes = {
        'node_1': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
        'node_2': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
        'node_3': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'},
        'axpy': {'type': 'Axpy', 'kind': 'op', 'op': 'Axpy'},
        'node_4': {'kind': 'op', 'type': 'Identity', 'op': 'Parameter'}
    }
    edges = [('node_1', 'axpy', {'in': 0, 'out': 0}),
             ('node_2', 'axpy', {'in': 1, 'out': 0}),
             ('node_3', 'axpy', {'in': 2, 'out': 0}),
             ('axpy', 'node_4', {'in': 0, 'out': 0})]
    graph = build_graph_with_edge_attrs(nodes, edges)
    node = Node(graph, 'axpy')
    replacer = AxpyToSSandAdd()
    replacer.replace_op(graph, node)

    scale_node = [node for node, attrs in list(graph.nodes(data=True)) if attrs['type'] == 'ScaleShift']
    self.assertEqual(len(scale_node), 1)
    add_node = [node for node, attrs in list(graph.nodes(data=True)) if attrs['type'] == 'Add']
    self.assertEqual(len(add_node), 1)
def test_one_output_v2(self):
    graph = build_graph_with_edge_attrs(
        {'queue_dequeue': {'kind': 'op', 'op': 'QueueDequeueV2', 'shapes': shape_array([[2, 2]]),
                           'types': [np.int32]},
         'sub': {'kind': 'op', 'op': 'Sub'},
         },
        [('queue_dequeue', 'sub', {'out': 0, 'in': 0})])

    graph_ref = build_graph_with_edge_attrs(
        {'parameter_1': {'kind': 'op', 'op': 'Parameter', 'shape': shape_array([2, 2]), 'type': np.int32},
         'sub': {'kind': 'op', 'op': 'Sub'},
         },
        [('parameter_1', 'sub', {'out': 0, 'in': 0})])

    FIFOQueueDequeueCut().find_and_replace_pattern(graph)

    flag, msg = compare_graphs(graph, graph_ref, last_node='sub')
    self.assertTrue(flag, msg)
def test_softplus_fusion_test_wrong_const(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'const_1': {'value': float_array([0.9999])}})

    graph_ref = graph.copy()
    graph.stage = 'front'

    SoftplusFusion().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_hswish_with_clamp_wrong_constant(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'const_0': {'value': float_array([0.00001])}})

    graph_ref = graph.copy()
    graph.stage = 'front'

    HSwishWithClamp().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_one_output(self):
    graph = build_graph_with_edge_attrs(
        {'iter_get_next': {'kind': 'op', 'op': 'IteratorGetNext', 'shapes': shape_array([[2, 2]]),
                           'types': [np.int32]},
         'sub': {'kind': 'op', 'op': 'Sub'},
         },
        [('iter_get_next', 'sub', {'out': 0, 'in': 0})])

    graph_ref = build_graph_with_edge_attrs(
        {'parameter_1': {'kind': 'op', 'op': 'Parameter', 'shape': shape_array([2, 2]), 'type': np.int32},
         'sub': {'kind': 'op', 'op': 'Sub'},
         },
        [('parameter_1', 'sub', {'out': 0, 'in': 0})])

    IteratorGetNextCut().find_and_replace_pattern(graph)

    flag, msg = compare_graphs(graph, graph_ref, last_node='sub')
    self.assertTrue(flag, msg)
def test_hswish_with_clamp(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    HSwishWithClamp().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'HSwish')
def test_leaky_relu_not_applicable_non_scalar_const(self):
    # const value is not a scalar or 1D tensor with 1 element so the transformation is not applicable
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    Node(graph, 'const')['value'] = float_array([0.5, 0.7])
    Node(graph, 'const_d')['value'] = float_array([0.5, 0.7])
    graph_ref = graph.copy()

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_softplus_fusion_test(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    SoftplusFusion().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_log')) == 1 and
                    graph.get_op_nodes(name='final_log')[0].op == 'SoftPlus')
def test_switch_cf_false_no_exec(self):
    me_mock = Mock()
    nodes = {
        'tensor': {'value': True, 'kind': 'data', 'executable': True},
        'pred_id': {'value': np.array(False), 'kind': 'data', 'executable': True},
        'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch', 'control_flow_infer': Switch.control_flow_infer},
        'switch_data_1': {'value': None, 'kind': 'data', 'executable': True},
        'result_1': {'value': None, 'kind': 'op', 'executable': True, 'type': 'Result', 'op': 'Result'},
    }
    edges = [
        ('tensor', 'switch', {'in': 0}),
        ('pred_id', 'switch', {'in': 1}),
        ('switch', 'switch_data_1', {'out': 1}),
        ('switch_data_1', 'result_1', {'in': 0}),
    ]
    graph = build_graph_with_edge_attrs(nodes, edges)
    node = Node(graph, 'switch')
    node.control_flow_infer(node, True, me_mock)
    me_mock.assert_has_calls([call('switch_data_1', False)], any_order=True)
def test_fifo_with_out_label_batch(self):
    nodes_no_label = {
        'placeholder': {'op': 'Parameter', 'data_type': np.int32, 'kind': 'op', 'shape': np.array(0)},
        'batch_join/fifo_queue': {'op': 'FIFOQueueV2', 'name': 'batch_join/fifo_queue',
                                  'shapes': np.array([[1, 2, 3]]), 'types': np.array([np.float32]), 'kind': 'op'},
        'batch_join': {'op': 'QueueDequeueUpToV2', 'kind': 'op'},
        'image_batch': {'op': 'Identity', 'data_type': np.float32, 'kind': 'op'},
    }
    edges_no_label = [('placeholder', 'batch_join', {'out': 0, 'in': 0}),
                      ('batch_join/fifo_queue', 'batch_join', {'out': 0, 'in': 1}),
                      ('batch_join', 'image_batch', {'out': 0, 'in': 0})]

    graph = build_graph_with_edge_attrs(nodes_no_label, edges_no_label)
    tested_class = FIFOQueue()
    tested_class.find_and_replace_pattern(graph=graph)

    after_pattern = graph.nodes()
    self.assertEqual(2, len(after_pattern))
    try:
        new_ph_dict = graph.node[[u for u, v in graph.in_edges('image_batch')][0]]
    except Exception as e:
        self.fail("Can't get new placeholder. Broken edge. Additional information: {}".format(e))
    self.assertEqual(new_ph_dict['name'], 'batch_join/fifo_queue')
    self.assertTrue(np.array_equal(new_ph_dict['shape'], np.array([1, 2, 3])))
def test_leaky_relu_data_port_0(self):
    graph = build_graph_with_edge_attrs(nodes, edges, {})
    graph_ref = build_graph(ref_nodes, ref_edges)
    Node(graph_ref, 'leaky_relu')['negative_slope'] = 0.5

    LeakyReLUFusion().find_and_replace_pattern(graph)
    graph.clean_up()

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_max')) == 1 and
                    graph.get_op_nodes(name='final_max')[0].op == 'LeakyReLU')
def create_fifo_queue_graph(batch_size_shape: np.ndarray):
    nodes = {
        'placeholder': {'op': 'Parameter', 'data_type': np.int32, 'kind': 'op', 'shape': batch_size_shape},
        'batch_join/fifo_queue': {'op': 'FIFOQueueV2', 'name': 'batch_join/fifo_queue',
                                  'shapes': np.array([[1, 2, 3]]), 'types': np.array([np.float32]), 'kind': 'op'},
        'batch_join': {'op': 'QueueDequeueUpToV2', 'kind': 'op'},
        'image_batch': {'op': 'Identity', 'data_type': np.float32, 'kind': 'op'},
        'label_batch': {'op': 'Identity', 'kind': 'op'},
        'label_batch_op_output': {'op': 'Result', 'kind': 'op'},
    }
    edges = [('placeholder', 'batch_join', {'out': 0, 'in': 0}),
             ('batch_join/fifo_queue', 'batch_join', {'out': 0, 'in': 1}),
             ('batch_join', 'image_batch', {'out': 0, 'in': 0}),
             ('batch_join', 'label_batch', {'out': 1, 'in': 0}),
             ('label_batch', 'label_batch_op_output', {'out': 0, 'in': 0})]
    graph = build_graph_with_edge_attrs(nodes, edges)
    return graph
def test_backward_bfs_for_op_no_ops_detected(self):
    nodes = {**regular_op('input', {'op': 'Parameter'}),
             **regular_op('hsigmoid', {'op': 'HSigmoid'}),
             **result('result'),
             }
    edges = [('input', 'hsigmoid', {'out': 0, 'in': 0}),
             ('hsigmoid', 'result', {'out': 0, 'in': 0}),
             ]

    graph = build_graph_with_edge_attrs(nodes, edges)
    graph.stage = 'front'

    found_nodes = backward_bfs_for_operation(Node(graph, 'result'), ['NonExistingOp'])
    self.assertEqual(len(found_nodes), 0)
def test_hsigmoid_with_relu_mul_wrong_constant(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {'add_const': {'value': float_array([0.00001])}})

    graph_ref = graph.copy()
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
def test_switch_cf_infer_no_condition(self):
    me_mock = Mock()
    nodes = {
        'tensor': {'value': True, 'kind': 'data', 'executable': True},
        'pred_id': {'value': None, 'kind': 'data', 'executable': True},
        'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
        'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
        'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
    }
    edges = [('tensor', 'switch', {'in': 0}),
             ('pred_id', 'switch', {'in': 1}),
             ('switch', 'switch_data_0', {'out': 0}),
             ('switch', 'switch_data_1', {'out': 1})]
    graph = build_graph_with_edge_attrs(nodes, edges)

    tested_class = Switch(graph=graph, attrs={})
    node = Node(graph, 'switch')
    tested_class.control_flow_infer(node, True, me_mock)

    # In this case we should mark all ports as executable
    me_mock.assert_has_calls([call('switch_data_0', True), call('switch_data_1', True)], any_order=True)
def test_hsigmoid_with_relu_mul(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})
    graph_ref = build_graph(ref_nodes, ref_edges)
    graph.stage = 'front'

    HSigmoidWithReluMul().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'HSigmoid')
    self.assertTrue(graph.get_op_nodes(name='final_mul')[0].out_nodes()[0].node == 'result')
def test_swish_with_sigmoid_with_beta_test(self):
    graph = build_graph_with_edge_attrs(self.nodes, self.edges, {})

    new_ref_nodes = ref_nodes.copy()
    new_ref_nodes.update(**regular_op('beta', {'type': 'Parameter'}))

    graph_ref = build_graph(new_ref_nodes, ref_edges + [('beta', 'swish')])
    graph.stage = 'front'

    SwishWithSigmoidWithBeta().find_and_replace_pattern(graph)

    (flag, resp) = compare_graphs(graph, graph_ref, 'result')
    self.assertTrue(flag, resp)
    self.assertTrue(len(graph.get_op_nodes(name='final_mul')) == 1 and
                    graph.get_op_nodes(name='final_mul')[0].op == 'Swish')
def test_switch_cf_false_both_ports(self):
    me_mock = Mock()
    nodes = {
        'tensor': {'value': True, 'kind': 'data', 'executable': True},
        'pred_id': {'value': np.array(False), 'kind': 'data', 'executable': True},
        'switch': {'type': 'Switch', 'kind': 'op', 'op': 'Switch'},
        'switch_data_0': {'value': None, 'kind': 'data', 'executable': True},
        'switch_data_1': {'value': None, 'kind': 'data', 'executable': True}
    }
    edges = [('tensor', 'switch', {'in': 0}),
             ('pred_id', 'switch', {'in': 1}),
             ('switch', 'switch_data_0', {'out': 0}),
             ('switch', 'switch_data_1', {'out': 1})]
    graph = build_graph_with_edge_attrs(nodes, edges)

    tested_class = Switch(graph=graph, attrs={})
    node = Node(graph, 'switch')
    tested_class.control_flow_infer(node, True, me_mock)
    me_mock.assert_has_calls([call('switch_data_0', True), call('switch_data_1', False)], any_order=True)
def test_output_port_cut(self, output):
    nodes = {'A': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
             'B': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
             'C': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
             'D': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
             'E': {'type': 'Identity', 'kind': 'op', 'op': 'Identity'},
             }
    edges = [('A', 'C', {'in': 0, 'out': 0}),
             ('B', 'C', {'in': 1, 'out': 0}),
             ('C', 'D', {'in': 0, 'out': 0}),
             ('C', 'E', {'in': 0, 'out': 1})]
    graph = build_graph_with_edge_attrs(nodes, edges)
    sinks = add_output_ops(graph, output)
    graph.clean_up()
    self.assertEqual(len(Node(graph, 'C').out_nodes()), 1)
    self.assertEqual(len(Node(graph, 'C').in_nodes()), 2)