Exemple #1
0
 def pattern_ugraph(self):
     """Construct the pattern uGraph: a quantized conv2d feeding a quantized max-pool.

     All inputs of the quantized conv op are nulled out so the matcher
     treats them as wildcards.
     """
     tf_graph = tf.Graph()
     with tf_graph.as_default():
         dummy_input = tf.placeholder(
             dtype=tf.float32, shape=[None, 128, 128, 3], name='dummy_input')
         dummy_weight = tf.zeros(
             [32, 32, 3, 10], dtype=tf.float32, name='dummy_weight')
         conv = tf.nn.conv2d(
             dummy_input, dummy_weight, strides=[1, 2, 2, 1],
             padding='VALID', name='conv')
         maxpool = tf.nn.max_pool(
             conv, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
             padding='VALID', name='maxpool')
     ugraph = GraphDefParser(config={}).parse(
         tf_graph.as_graph_def(), output_nodes=[maxpool.op.name])
     patrn_ugraph = deepcopy(QuantizeTransformer().transform(ugraph))
     quant_conv_op = patrn_ugraph['conv/eightbit']
     # null out every input so the matcher treats them as wildcards
     for idx in range(len(quant_conv_op.input_tensors)):
         quant_conv_op.replace_with_null_input_tensor(idx)
     patrn_ugraph.output_nodes = ['maxpool/eightbit']
     patrn_ugraph = prune_graph(patrn_ugraph)
     topologic_order_graph(patrn_ugraph)
     return patrn_ugraph
Exemple #2
0
 def __call__(self, match):
     """Return (replacement ugraph, input_map, output_map) for a matched max_pool+relu."""
     # recover the pooling hyper-parameters from the matched subject op
     pool_op = match.subject_ugraph[match.patrn2subj_op_map['max_pool'].name]
     ksize = pool_op.op_attr['ksize'].value.ints_value[:]
     strides = pool_op.op_attr['strides'].value.ints_value[:]
     padding = pool_op.op_attr['padding'].value
     # rebuild an equivalent max_pool -> relu graph with the same parameters
     repl_graph = tf.Graph()
     with repl_graph.as_default():
         dummy_input = tf.placeholder(dtype=tf.float32,
                                      shape=[None, 128, 128, 3])
         pooled = tf.nn.max_pool(
             dummy_input, ksize=ksize, strides=strides,
             padding=padding, name='max_pool')
         tf.nn.relu(pooled, name='relu')
     ugraph = GraphDefParser(config={}).parse(
         repl_graph.as_graph_def(), output_nodes=['relu'])
     ugraph['max_pool'].replace_with_null_input_tensor(0)
     ugraph = prune_graph(ugraph)
     topologic_order_graph(ugraph)
     input_map = {
         match.pattern_ugraph['relu'].input_tensors[0]:
         ugraph['max_pool'].input_tensors[0]
     }
     output_map = {
         match.pattern_ugraph['max_pool'].output_tensors[0]:
         ugraph['relu'].output_tensors[0]
     }
     return ugraph, input_map, output_map
Exemple #3
0
    def _handle_fully_connected(cls, ugraph):
        """Legalize every FullyConnected op in `ugraph`, in place.

        1. Transpose the constant filter so the op is a right multiplication:
           fc = x @ filter + bias
        2. If the input is not flattened (rank > 2), inject a Reshape op
           that flattens all but the first (batch) dimension.
        """
        reshape_cnt = 0
        for op_info in ugraph.get_ops_by_type('FullyConnected'):
            # the filter is the second input; transpose its constant value
            # and keep the tensor shape metadata in sync
            filter_tensor = op_info.input_tensors[1]
            filter_op = filter_tensor.op
            np_arr = filter_op.op_attr['value'].value.np_array
            filter_op.op_attr['value'].value.np_array = np_arr.T
            filter_tensor.shape = list(np_arr.T.shape)
            filter_op.output_tensors[0].shape = list(np_arr.T.shape)

            tensor_x = op_info.input_tensors[0]
            if len(tensor_x.shape) > 2:
                # flatten everything except the batch dimension
                new_shape = [
                    tensor_x.shape[0],
                    reduce(lambda a, b: a * b, tensor_x.shape[1:], 1)
                ]
                reshape_op_name = tensor_x.name.replace(
                    ":", "_") + '_Reshape' + str(reshape_cnt)
                # clone the input tensor into this ugraph as the reshape output
                out_tensor = deepcopy(tensor_x, {'ugraph': ugraph})
                out_tensor.name = reshape_op_name + ":0"
                out_tensor.op_name = reshape_op_name
                out_tensor.shape = new_shape
                # NOTE(review): constructing OperationInfo with ugraph=...
                # appears to register the op on the graph (other call sites
                # also discard the return value) — confirm
                OperationInfo(name=reshape_op_name,
                              op_type="Reshape",
                              lib_name='tflite',
                              ugraph=ugraph,
                              input_tensors=[tensor_x],
                              output_tensors=[out_tensor],
                              op_attr={'new_shape': new_shape})
                reshape_cnt += 1
                op_info.input_tensors[0] = out_tensor
        topologic_order_graph(ugraph)
Exemple #4
0
 def _visit_all(self, ugraph):
     """Dispatch each op to its `_visit_<op_type>` handler (no-op when
     absent), then re-establish the graph's topological order."""
     identity_visitor = lambda op_info: op_info
     # iterate over a snapshot since visitors may mutate ops_info
     for op_info in list(ugraph.ops_info.values()):
         handler_name = '_visit_{}'.format(op_info.op_type.lower())
         getattr(self, handler_name, identity_visitor)(op_info)
     topologic_order_graph(ugraph)
Exemple #5
0
    def parse(self, pb_file, output_nodes=None, model_name=None):
        """Parse a frozen tensorflow GraphDef (pb file) into a uTensorGraph.

        :param pb_file: path to the protobuf file to load
        :param output_nodes: names of the graph's output nodes; when None,
            every node is used as an output (a warning is logged)
        :param model_name: optional override of the graph name derived
            from the file
        :raises ValueError: if the given graph is not frozen
        :returns: the parsed and legalized uTensorGraph
        """
        graph_def, graph_name = self._load_graph_def(pb_file)
        if model_name:
            graph_name = model_name
        if not self._is_freeze_graph(graph_def):
            raise ValueError("Given graph_def is not freezed")
        if output_nodes is None:
            output_nodes = [node.name for node in graph_def.node]
            logger.warning(
                'output_nodes is not given, use all nodes instead (may cause unexpected behaviour)'
            )

        # re-import the graph_def so op/tensor metadata can be queried
        graph = tf.Graph()
        with graph.as_default():
            tf.import_graph_def(graph_def, name="")
        ugraph = uTensorGraph(
            name=graph_name,
            output_nodes=output_nodes,
            lib_name="tensorflow",
        )
        # mirror every tf node into an OperationInfo with its in/out tensors
        for node in graph_def.node:
            op = graph.get_operation_by_name(node.name)
            in_tensors = [
                TensorInfo(
                    name=tensor.name,
                    ugraph=ugraph,
                    op_name=tensor.op.name,
                    dtype=np.dtype(tensor.dtype.as_numpy_dtype),
                    shape=self._tf_parse_tshape(tensor.shape),
                ) for tensor in op.inputs
            ]
            out_tensors = [
                TensorInfo(
                    name=tensor.name,
                    ugraph=ugraph,
                    op_name=op.name,
                    dtype=np.dtype(tensor.dtype.as_numpy_dtype),
                    shape=self._tf_parse_tshape(tensor.shape),
                ) for tensor in op.outputs
            ]
            op_type = node.op
            op_attr = node.attr
            op_info = OperationInfo(
                name=node.name,
                input_tensors=in_tensors,
                n_inputs=len(in_tensors),
                output_tensors=out_tensors,
                n_outputs=len(out_tensors),
                op_type=op_type,
                lib_name="tensorflow",
                op_attr=op_attr,
                ugraph=ugraph,
            )
            # keep the original device placement for downstream transforms
            op_info.op_attr["tensorflow__device"] = node.device
            ugraph.ops_info[node.name] = op_info
        topologic_order_graph(ugraph)
        ugraph = Legalizer.legalize(ugraph, {})
        return ugraph
Exemple #6
0
  def ops(self):
    """
    Ops of the graph, in topological sorting order.

    The ordering is computed lazily the first time it is needed.

    :rtype: List[:class:`.OperationInfo`]
    """
    if not self.topo_order:
      topologic_order_graph(self)
    ordered_names = self.topo_order
    return list(map(self.ops_info.__getitem__, ordered_names))
Exemple #7
0
 def __call__(self, match):
     """Build the replacement for a matched quantized conv + maxpool pair.

     Returns a single fused QuantizedFusedConv2DMaxpool op graph, plus the
     input/output tensor maps wiring it back into the subject graph.
     """
     op_name = 'quant_conv_pool'
     repl_ugraph = uTensorGraph(
         name='{}_repl_graph'.format(op_name),
         output_nodes=[op_name],
         lib_name=match.subject_ugraph.lib_name)
     subj_conv_op = match.patrn2subj_op_map['conv/eightbit']
     subj_pool_op = match.patrn2subj_op_map['maxpool/eightbit']
     # mirror the pool op's outputs as the fused op's outputs
     output_tensors = []
     for idx, subj_tensor in enumerate(subj_pool_op.output_tensors):
         output_tensors.append(
             TensorInfo(name='{}:{}'.format(op_name, idx),
                        op_name=op_name,
                        dtype=subj_tensor.dtype,
                        shape=subj_tensor.shape,
                        ugraph=repl_ugraph))
     # one wildcard (null) input per conv input
     input_tensors = [
         TensorInfo.make_null_tensor(ugraph=repl_ugraph)
         for _ in subj_conv_op.input_tensors
     ]
     quant_conv2d_pool_op = OperationInfo(
         name=op_name,
         input_tensors=input_tensors,
         n_inputs=len(input_tensors),
         output_tensors=output_tensors,
         n_outputs=len(output_tensors),
         op_type='QuantizedFusedConv2DMaxpool',
         lib_name=subj_conv_op.lib_name,
         op_attr={
             '_utensor_conv': subj_conv_op.op_attr,
             '_utensor_pool': subj_pool_op.op_attr,
         },
         ugraph=repl_ugraph)
     topologic_order_graph(repl_ugraph)
     # map the six pattern conv inputs onto the fused op's null inputs
     patrn_conv_op = match.pattern_ugraph['conv/eightbit']
     input_map = {
         patrn_conv_op.input_tensors[idx]:
         quant_conv2d_pool_op.input_tensors[idx]
         for idx in range(6)
     }
     # map the three pattern pool outputs onto the fused op's outputs
     patrn_pool_op = match.pattern_ugraph['maxpool/eightbit']
     output_map = {
         patrn_pool_op.output_tensors[idx]: output_tensors[idx]
         for idx in range(3)
     }
     return repl_ugraph, input_map, output_map
Exemple #8
0
    def _build_graph(self, fb_model, ugraph):
        """Populate `ugraph` from a flatbuffer model.

        The passes run in a fixed order: tensor map, parameters, inputs,
        intermediate ops, output marking, quantization params, and finally
        topological ordering.
        """
        # tensors are addressed by index in the flatbuffer model
        tensor_names_map = self._build_tensor_map(fb_model, ugraph)

        self._build_param_ops(fb_model, ugraph, tensor_names_map)
        # find and set input nodes
        self._build_input_ops(fb_model, ugraph, tensor_names_map)
        self._build_intermediate_ops(fb_model, ugraph, tensor_names_map)
        self._set_output_ops(fb_model, ugraph, tensor_names_map)
        self._prepare_quant_params(ugraph)
        topologic_order_graph(ugraph)
Exemple #9
0
 def transform(ugraph):
     """Apply the wrapped transform when this transformer supports the
     graph's backend library; otherwise return the graph untouched.

     NOTE(review): `self`, `ori_transform`, `GENERIC_SENTINEL`,
     `_prune_graph` and `logger` are not parameters here — this looks
     like a closure defined inside a decorator/wrapper that captures
     them from the enclosing scope; confirm against the surrounding code.
     """
     if self.APPLICABLE_LIBS is not GENERIC_SENTINEL and ugraph.lib_name not in self.APPLICABLE_LIBS:
         logger.info(
             "%s is not applicable to ugraph with lib name %s, skipping",
             self,
             ugraph.lib_name,
         )
         return ugraph
     new_ugraph = ori_transform(ugraph)
     # transforms may invalidate the ordering; recompute it
     topologic_order_graph(new_ugraph)
     if self.prune_graph:
         return _prune_graph(new_ugraph)
     return new_ugraph
Exemple #10
0
def fully_connect_pattern1():
    """Pattern graph: matmul(z', w') followed by relu, with both matmul
    inputs nulled out so they act as wildcards during matching."""
    patrn_graph = tf.Graph()
    with patrn_graph.as_default():
        z_prime = tf.placeholder(name='z_prime', dtype=tf.float32)
        w_prime = tf.constant(np.random.rand(3, 3), name='w_prime', dtype=tf.float32)
        a_prime = tf.matmul(z_prime, w_prime, name='a_prime')
        r_prime = tf.nn.relu(a_prime, name='r_prime')
    patrn_ugraph = GraphDefParser(config={}).parse(
        patrn_graph.as_graph_def(), output_nodes=[r_prime.op.name])
    matmul_op = patrn_ugraph.ops_info['a_prime']
    for idx in (0, 1):
        matmul_op.replace_with_null_input_tensor(idx)
    patrn_ugraph = prune_graph(patrn_ugraph)
    topologic_order_graph(patrn_ugraph)
    return patrn_ugraph
Exemple #11
0
 def parse(cls, pkl_file, output_nodes=None, model_name=None):
     """Load a pickled uTensorGraph from `pkl_file`.

     SECURITY: `pickle.load` executes arbitrary code from the file;
     only parse trusted pickle files.

     :param pkl_file: path to the pickle file
     :param output_nodes: optional new output nodes; setting them forces
         a re-computation of the topological order
     :param model_name: optional override of the graph name
     :raises ValueError: if the unpickled object is not a uTensorGraph
     :returns: the loaded, finalized uTensorGraph
     """
     with open(pkl_file, 'rb') as fid:
         ugraph = pickle.load(fid)
         if not isinstance(ugraph, uTensorGraph):
             raise ValueError('expecting uTensorGraph object, get %s' %
                              type(ugraph))
     if model_name:
         ugraph.name = model_name
     if output_nodes is not None:
         ugraph.output_nodes = output_nodes
         # the output set changed, so the ordering must be rebuilt
         topologic_order_graph(ugraph)
     ugraph.finalize()
     return ugraph
Exemple #12
0
 def __deepcopy__(self, memo):
   """Deep-copy the graph, rebinding every copied op to the new graph.

   `memo['ugraph']` is set before ops are copied so nested deepcopies of
   OperationInfo/TensorInfo attach to `new_graph` instead of `self`.
   """
   new_graph = uTensorGraph(
     name=self.name,
     output_nodes=self.output_nodes,
     lib_name=self._lib_name
   )
   memo['ugraph'] = new_graph
   new_graph.ops_info = {
     k: deepcopy(v, memo)
     for k, v in self.ops_info.items()
   }
   new_graph._lib_name = self._lib_name
   # NOTE(review): `attributes` is copied without passing `memo`; if
   # attributes can reference ops/tensors this breaks object sharing
   # with the rest of the copy — confirm
   new_graph.attributes = deepcopy(self.attributes)
   topologic_order_graph(new_graph)
   return new_graph
Exemple #13
0
 def _handle_match_tf(self, match):
     """Remove a matched tf dropout subgraph by bypassing it.

     Rewires every consumer of the dropout output to read the dropout
     input directly, updates the graph outputs if needed, then prunes
     the now-dead dropout ops.
     """
     subj_ugraph = match.subject_ugraph
     # the tensor feeding dropout: first input's producing op, output 0
     subj_in_tensor = (match.patrn2subj_op_map['dropout/truediv'].
                       input_tensors[0].op.output_tensors[0])
     subj_out_op = match.patrn2subj_op_map['dropout/mul']
     subj_out_tensor = subj_out_op.output_tensors[0]
     # point every consumer of the dropout output at the dropout input
     for op in subj_out_op.output_nodes:
         for idx, tensor in enumerate(op.input_tensors):
             if tensor.name == subj_out_tensor.name:
                 op.input_tensors[idx] = subj_in_tensor
     # if dropout produced a graph output, its input's producer does now
     for idx, op_name in enumerate(subj_ugraph.output_nodes):
         if op_name == subj_out_op.name:
             subj_ugraph.output_nodes[idx] = subj_in_tensor.op_name
     match.subject_ugraph = prune_graph(subj_ugraph)
     topologic_order_graph(match.subject_ugraph)
     return match.subject_ugraph
Exemple #14
0
 def _handle_conv_2d(cls, ugraph):
     """Legalize TFLite fused activations on Conv2d ops.

     For each Conv2d whose 'FusedActivationFunction' attribute names a
     real activation, insert an explicit activation op after the conv
     and rewire the conv's consumers to read the activation's output.

     :raises ValueError: if the attribute is malformed or names an
         unsupported activation
     """
     activation_pattern = re.compile(r'^(\d+) \(\w+\)$')
     activation_map = {
         '0': 'None',
         '1': 'ReLUOperator',
         # '2': 'TFLM::TfLiteFusedActivation::kTfLiteActRelu1',
         '3': 'ReLU6Operator',
         # '4': 'TFLM::TfLiteFusedActivation::kTfLiteActTanh',
         # '5': 'TFLM::TfLiteFusedActivation::kTfLiteActSignBit',
         # '6': 'TFLM::TfLiteFusedActivation::kTfLiteActSigmoid',
     }
     for op_info in ugraph.get_ops_by_type('Conv2d'):
         # guard the regex match explicitly: a malformed attribute should
         # raise ValueError, not AttributeError on `.group`
         attr_match = activation_pattern.match(
             op_info.op_attr['FusedActivationFunction'])
         if attr_match is None:
             raise ValueError(
                 'legalization fail, unknown activation: {}'.format(
                     op_info.op_attr['FusedActivationFunction']))
         act_idx = attr_match.group(1)
         act_op_type = activation_map.get(act_idx)
         if act_op_type is None:
             raise ValueError(
                 'legalization fail, unknown activation: {}'.format(
                     op_info.op_attr['FusedActivationFunction']))
         elif act_op_type == 'None':  # was `is 'None'`: identity check on a str literal is fragile
             # no activation is set, ignore
             continue
         else:
             ori_out_tensor = op_info.output_tensors[0]
             act_op_name = '{}/{}'.format(
                 op_info.name, act_op_type.replace('Operator', ''))
             act_tensor = TensorInfo(
                 name='{}:0'.format(act_op_name),
                 op_name=act_op_name,
                 dtype=ori_out_tensor.dtype,
                 shape=ori_out_tensor.shape[:],
                 ugraph=ugraph,
                 attributes=dict(ori_out_tensor.attributes),
             )
             OperationInfo(name=act_op_name,
                           input_tensors=[ori_out_tensor],
                           output_tensors=[act_tensor],
                           op_type=act_op_type,
                           lib_name=ugraph.lib_name,
                           ugraph=ugraph,
                           op_attr={})
             # rewire consumers of the conv output to the activation output;
             # renamed loop index (was `i`, shadowing an unused outer `i`)
             for consumer_op in ori_out_tensor.op.output_nodes:
                 for idx, input_tensor in enumerate(
                         consumer_op.input_tensors):
                     if input_tensor.name == ori_out_tensor.name:
                         consumer_op.input_tensors[idx] = act_tensor
     topologic_order_graph(ugraph)
Exemple #15
0
 def pattern_ugraph(self):
     """Pattern graph: relu feeding a 2x2 SAME max_pool, with the relu
     input nulled out so it acts as a wildcard."""
     tf_graph = tf.Graph()
     with tf_graph.as_default():
         dummy_input = tf.placeholder(dtype=tf.float32,
                                      shape=[None, 128, 128, 3])
         relu_out = tf.nn.relu(dummy_input, name='relu')
         tf.nn.max_pool(
             relu_out, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
             padding='SAME', name='max_pool')
     pattern_ugraph = GraphDefParser(config={}).parse(
         tf_graph.as_graph_def(), output_nodes=['max_pool'])
     pattern_ugraph['relu'].replace_with_null_input_tensor(0)
     pattern_ugraph = prune_graph(pattern_ugraph)
     topologic_order_graph(pattern_ugraph)
     return pattern_ugraph
Exemple #16
0
 def parse(self, txt_file, output_nodes=None):
     """Parse a plain-text model file into a uTensorGraph of Const ops.

     Each line has the form ``<op_name> <python-literal>``; the literal
     is evaluated into a numpy array and wrapped in a Const op.

     :param txt_file: path to the text file; the graph is named after it
     :param output_nodes: output op names; when empty/None, every parsed
         op is added as an output
     :raises ValueError: on a line that does not split into name + value
     :returns: the populated, topologically ordered uTensorGraph
     """
     graph_name, _ = os.path.splitext(
         os.path.basename(txt_file)
     )
     if output_nodes is None:
         output_nodes = []
     # when no explicit outputs are given, every parsed op becomes one
     add_all_nodes = not output_nodes
     ugraph = uTensorGraph(name=graph_name, output_nodes=output_nodes, lib_name='txtlib')
     with open(txt_file, 'r') as fid:
         for line in fid:
             try:
                 op_name, value = line.split(' ', maxsplit=1)
             except ValueError:
                 # only the unpack can fail here; keep the error targeted
                 raise ValueError('invalid line: {}'.format(line))
             # SECURITY: eval executes arbitrary code from the file; only
             # parse trusted files (ast.literal_eval would be safer but
             # rejects expressions this format may rely on)
             value = np.array(eval(value))
             out_tensor = TensorInfo(
                 '{}:0'.format(op_name),
                 op_name,
                 dtype=value.dtype,
                 shape=list(value.shape),
                 ugraph=ugraph
             )
             # NOTE(review): the OperationInfo return value was unused —
             # construction with ugraph=... appears to register the op on
             # the graph; confirm
             OperationInfo(
                 name=op_name,
                 lib_name='txtlib',
                 ugraph=ugraph,
                 input_tensors=[],
                 output_tensors=[out_tensor],
                 op_type='Const',
                 op_attr={
                     "value": AttrValueConverter.GenericType(
                         value_name="tensor",
                         value=GenericTensorConverterMixin.GenericType(
                             np_array=value
                         ),
                     )
                 }
             )
             if add_all_nodes:
                 ugraph.output_nodes.append(op_name)
     topologic_order_graph(ugraph)
     return ugraph
Exemple #17
0
 def callback(match):
     """Build the replacement graph (a single add op named 'fused_node')
     plus the input/output tensor maps for a matched pattern."""
     repl_graph = tf.Graph()
     with repl_graph.as_default():
         lhs = tf.placeholder(dtype=tf.float32, name='a')
         rhs = tf.placeholder(dtype=tf.float32, name='b')
         out = tf.add(lhs, rhs, name='fused_node')
     ugraph = GraphDefParser(config={}).parse(
         repl_graph.as_graph_def(), output_nodes=[out.op.name])
     # null out both inputs so they can be rewired to the subject graph
     fused_op = ugraph.ops_info['fused_node']
     for idx in (0, 1):
         fused_op.replace_with_null_input_tensor(idx)
     topologic_order_graph(ugraph)
     ugraph = prune_graph(ugraph)
     patrn_ugraph = match.pattern_ugraph

     input_map = {
         patrn_ugraph.ops_info['a_prime'].input_tensors[idx]:
         ugraph.ops_info['fused_node'].input_tensors[idx]
         for idx in (0, 1)
     }
     output_map = {
         patrn_ugraph.ops_info['r_prime'].output_tensors[0]:
         ugraph.ops_info['fused_node'].output_tensors[0]
     }
     return ugraph, input_map, output_map
Exemple #18
0
 def pattern_ugraph(self):
     """Build the pattern graph matching tf's internal dropout subgraph.

     NOTE(review): relies on the exact internal node names tf.nn.dropout
     produces ('dropout/truediv', 'dropout/sub',
     'dropout/random_uniform/RandomUniform'); these are tied to the tf
     version in use — confirm.
     """
     graph = tf.Graph()
     with graph.as_default():
         dummy_x = tf.constant(np.random.rand(10, 10),
                               dtype=tf.float32,
                               name='dummy_x')
         dummy_rate = tf.placeholder(dtype=tf.float32, name='dummy_rate')
         dropout = tf.nn.dropout(dummy_x, rate=dummy_rate, name='dropout')
     patrn_ugraph = GraphDefParser(config={}).parse(
         graph.as_graph_def(), output_nodes=[dropout.op.name])
     # replace dummy_x with a wildcard (null) input
     patrn_ugraph['dropout/truediv'].replace_with_null_input_tensor(0)
     # replace dummy_rate
     patrn_ugraph['dropout/sub'].replace_with_null_input_tensor(1)
     # replace the Shape op feeding RandomUniform
     patrn_ugraph[
         'dropout/random_uniform/RandomUniform'].replace_with_null_input_tensor(
             0)
     patrn_ugraph = prune_graph(patrn_ugraph)
     topologic_order_graph(patrn_ugraph)
     return patrn_ugraph
Exemple #19
0
 def _build_graph(self, onnx_graph, ugraph):
     """Populate `ugraph` from an onnx graph: parameters, inputs, then
     intermediate ops; finally derive the output nodes from the onnx
     graph's declared outputs and post-process."""
     op_types_cnt = Counter()  # op_type (str) -> count (int)
     tensor_names_map = {}  # tensor_name (str) -> tensor_info (TensorInfo)
     # these methods will update inputs **inplace**
     self._build_param_ops(onnx_graph, ugraph, op_types_cnt,
                           tensor_names_map)
     self._build_input_ops(onnx_graph, ugraph, op_types_cnt,
                           tensor_names_map)
     self._build_intermediate_ops(
         onnx_graph,
         ugraph,
         op_types_cnt,
         tensor_names_map,
     )
     # find output nodes: the ops producing tensors declared as outputs
     distinct_out_ops = set()
     graph_output = set([v.name for v in onnx_graph.output])
     for name, tensor_info in tensor_names_map.items():
         if name in graph_output:
             distinct_out_ops.add(tensor_info.op_name)
     ugraph.output_nodes = list(distinct_out_ops)
     topologic_order_graph(ugraph)
     _PostProcessing.post_process(ugraph)
Exemple #20
0
 def finalize(self):
     """Mark the graph finalized and establish its topological order."""
     self._is_finalized = True
     topologic_order_graph(self)
Exemple #21
0
    def replace_with(self, callback, suffix=None):
        """Replace the matched subgraph with the graph returned by ``callback``, **not** in-place.

        :param callback: a callable taking a :py:class:`.uTensorGraphMatch` and
          returning three values -- a :py:class:`.uTensorGraph` to replace the
          matched subgraph with (the ``replacing graph``), an ``input_map``
          (dict) mapping input tensors in the pattern graph to input tensors in
          the replacing graph, and an ``output_map`` (dict) mapping the output
          tensors likewise
        :type callback: callable

        :param suffix: (optional) the suffix to add to the name of ops/tensors in
          the replacing graph returned by ``callback``. If not given, it will be
          a random string
        :type suffix: str

        :rtype: :py:class:`.uTensorGraph`, a **new** graph with the matched
          subgraph replaced
        """
        # build a matched subgraph and pass it to callback
        # input/output_map (dict):
        #  {
        #     tensor in pattern graph : tensor in replacing graph
        #  }
        replace_ugraph, input_map, output_map = callback(self)
        replaceible, reasons = self._is_replacible(replace_ugraph, input_map,
                                                   output_map)
        if not replaceible:
            raise ValueError(
                'matched subgraph can not be replaced with the ugraph given: {}'
                .format(reasons))
        # uniquify names in the replacing graph to avoid collisions
        replace_ugraph, input_map, output_map = self.new_ugraph_with_suffix(
            replace_ugraph, input_map, output_map, suffix)
        new_ugraph = deepcopy(self.subject_ugraph)
        # make replace_ugraph be a subgraph in the new_ugraph
        replace_ugraph.unsafe_merge_into(new_ugraph)
        for tensor in input_map.values():
            tensor.move_into(new_ugraph)
        for tensor in output_map.values():
            tensor.move_into(new_ugraph)
        subj_graph_view = self.subject_graph_view
        # replacing output tensors: rewire consumers of each matched output
        for out_tensor in subj_graph_view.output_tensors:
            repl_out_tensor = output_map[self.subj2patrn_tensor_map[
                out_tensor.name]]
            out_ops = [
                new_ugraph[op.name] for op in out_tensor.op.output_nodes
            ]
            for op in out_ops:
                for i, tensor in enumerate(op.input_tensors):
                    if tensor.name == out_tensor.name:
                        op.input_tensors[i] = repl_out_tensor
            # also update graph-level outputs that pointed at the matched op
            for i, node_name in enumerate(new_ugraph.output_nodes):
                if node_name == out_tensor.op.name:
                    new_ugraph.output_nodes[i] = repl_out_tensor.op.name
        # replacing input tensors: wire subject inputs into the replacing graph
        inv_input_map = dict([(v, k) for k, v in input_map.items()])
        for op in replace_ugraph.input_ops:
            for i, repl_in_tensor in enumerate(op.input_tensors):
                patrn_in_tensor = inv_input_map[repl_in_tensor]
                subj_in_tensor = self.patrn2subj_tensor_map[
                    patrn_in_tensor.name]
                op.input_tensors[i] = subj_in_tensor
        new_ugraph.ops_info.update(replace_ugraph.ops_info)
        topologic_order_graph(new_ugraph)
        # drop ops that became unreachable after the rewiring
        new_ugraph = prune_graph(new_ugraph)
        return new_ugraph