Example #1
 def extract(cls, node):
     data_type = tf_dtype_extractor(node.pb.attr["T"].type)
     AttributedPower.update_node_stat(node, {
         'power': data_type(2),
         'data_type': data_type
     })
     return cls.enabled
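For reference, a minimal sketch of the behavior these examples assume from tf_dtype_extractor: it maps a TensorFlow DataType enum taken from a protobuf attribute to the corresponding numpy type, falling back to an optional default. The table and the name tf_dtype_extractor_sketch below are illustrative stand-ins, not the actual Model Optimizer implementation.

import numpy as np
from tensorflow.core.framework import types_pb2

# Illustrative subset only; the real mapping covers many more TensorFlow types.
_TF_TO_NP = {
    types_pb2.DT_FLOAT: np.float32,
    types_pb2.DT_INT32: np.int32,
    types_pb2.DT_INT64: np.int64,
    types_pb2.DT_BOOL: np.bool_,
}

def tf_dtype_extractor_sketch(pb_type, default=None):
    # Return the numpy type for a TF DataType enum value, or the default when the type is not known.
    return _TF_TO_NP.get(pb_type, default)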
Example #2
    def extract(cls, node):
        shape = shape_array([])
        # Extract output shape from `shape` attribute
        extracted_shape = tf_tensor_shape(node.pb.attr["shape"].shape)
        if len(extracted_shape) != 0:
            shape = extracted_shape
        else:
            # Extract output shape from `_output_shapes` attribute if it is possible
            extracted_output_shapes = node.pb.attr["_output_shapes"].list.shape
            if len(extracted_output_shapes) == 1:   # check if attribute not empty
                extracted_output_shapes = tf_tensor_shape(extracted_output_shapes[0])

                # Check equality of the extracted shapes. We know of cases when the Placeholder operation has an empty
                # `shape` attribute value and a non-empty `_output_shapes` attribute value, and we need to handle and
                # support them.
                if len(extracted_output_shapes) > len(extracted_shape):
                    log.warning('Extracted shapes for Placeholder operation {} have different lengths: `shape` {} and '
                                '`_output_shapes` {}. Please, check if model is consistent'.format(
                        node.pb.name, extracted_shape, extracted_output_shapes))
                    if len(extracted_output_shapes) != 0:
                        shape = extracted_output_shapes

        attrs = {
            'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
            'shape': shape,
            'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
        }
        if node.pb.attr["shape"].shape.unknown_rank:
            attrs['shape'] = None
        Parameter.update_node_stat(node, attrs)
        return cls.enabled
Example #3
 def extract(cls, node):
     BiasAdd.update_node_stat(
         node, {
             'data_type': tf_dtype_extractor(node.pb.attr["T"].type),
             'data_format': node.pb.attr["data_format"].s.decode()
         })
     return cls.enabled
Example #4
def generate_feed_dict(graph: tf_v1.Graph, node: Node):
    """
    The first value in the return tuple is True if all inputs for the node have constant values.
    The second returned value is a mapping of placeholder tensors to the numpy arrays with the values for these
    placeholders.
    :param graph: the TensorFlow Graph to generate the feed dictionary for.
    :param node: the node which represents a TensorFlow sub-graph of operations.
    :return: a pair where the first element is a flag that specifies whether all node inputs are constants and a
    dictionary where the key is the input Tensor object and the value is the tensor value.
    """
    all_constants = True
    feed_dict = dict()
    for in_data_node_name, edge_attrs in node.get_inputs():
        if 'control_flow_edge' in edge_attrs and edge_attrs['control_flow_edge']:
            continue
        value = node.in_node(edge_attrs['in']).value
        if value is None:
            all_constants = False
            placeholder_pb = node['pbs'][edge_attrs['placeholder_name']]
            value = np.ones(
                shape=tf_tensor_shape(placeholder_pb.attr['shape'].shape),
                dtype=tf_dtype_extractor(placeholder_pb.attr['dtype'].type))
        feed_dict[graph.get_tensor_by_name(edge_attrs['placeholder_name'] +
                                           ":0")] = value
    return all_constants, feed_dict
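A minimal usage sketch for generate_feed_dict, assuming a TensorFlow graph and a matching Model Optimizer node are already available; tf_graph, mo_node and fetch_tensor below are hypothetical placeholders.

all_constants, feed_dict = generate_feed_dict(tf_graph, mo_node)
with tf_v1.Session(graph=tf_graph) as sess:
    # Known constant values are fed directly; np.ones dummies are fed for the remaining placeholders.
    result = sess.run(fetch_tensor, feed_dict=feed_dict)
# `all_constants` tells the caller whether `result` was computed purely from constant inputs.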
Example #5
 def extract(cls, node):
     Size.update_node_stat(
         node, {
             'output_type':
             tf_dtype_extractor(node.pb.attr['out_type'].type, np.int32)
         })
     return cls.enabled
Example #6
 def extract(cls, node: Node):
     dtypes = [tf_dtype_extractor(t) for t in node.pb.attr["T"].list.type]
     IdentityN.update_node_stat(node, {
         'data_types': dtypes,
         'in_ports_count': len(dtypes),
         'out_ports_count': len(dtypes),
     })
     return cls.enabled
Example #7
 def extract(cls, node):
     attrs = {
         'output_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
         'global_seed': node.pb.attr['seed'].i,
         'op_seed': node.pb.attr['seed2'].i
     }
     AttributedRandomUniform.update_node_stat(node, attrs)
     return cls.enabled
Example #8
 def extract(cls, node):
     OneHot.update_node_stat(
         node, {
             'axis': node.pb.attr['axis'].i,
             'data_type': tf_dtype_extractor(node.pb.attr["T"].type,
                                             np.float32)
         })
     return cls.enabled
Example #9
 def extract(cls, node):
     attrs = {
         'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
         'shape': tf_tensor_shape(node.pb.attr["shape"].shape),
         'identity': True,
         'infer': lambda node: copy_shape_infer(node, value_infer=copy_value),
     }
     Op.update_node_stat(node, attrs)
     return cls.enabled
Example #10
 def extract(cls, node):
     pb_tensor = node.pb.attr["value"].tensor
     shape = tf_tensor_shape(pb_tensor.tensor_shape)
     attrs = {
         'shape': shape,
         'value': tf_tensor_content(pb_tensor.dtype, shape, pb_tensor),
         'data_type': tf_dtype_extractor(pb_tensor.dtype),
     }
     Const.update_node_stat(node, attrs)
     return cls.enabled
Example #11
 def extract(cls, node):
     attrs = {
         'data_type': tf_dtype_extractor(node.pb.attr["dtype"].type),
         'shape': tf_tensor_shape(node.pb.attr["shape"].shape),
         'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
     }
     if node.pb.attr["shape"].shape.unknown_rank:
         attrs['shape'] = None
     Parameter.update_node_stat(node, attrs)
     return cls.enabled
Example #12
def tf_fused_bn_extractor(pb):
    is_training = pb.attr['is_training'].b
    if is_training:
        log.warning('FusedBatchNorm doesn\'t support is_training=True')

    return {
        'data_format': pb.attr["data_format"].s,
        'data_type': tf_dtype_extractor(pb.attr["T"].type),
        'eps': pb.attr['epsilon'].f,
        'infer': tf_fused_bn_infer,
        'is_training': is_training
    }
Example #13
def tf_fused_bn_extractor(pb):
    is_training = pb.attr['is_training'].b
    if is_training:
        log.warning('FusedBatchNorm doesn\'t support is_training=True')

    return {
        'data_format': pb.attr["data_format"].s.decode(),
        'data_type': tf_dtype_extractor(pb.attr["T"].type),
        'eps': pb.attr['epsilon'].f,
        'infer': tf_fused_bn_infer,
        'reverse_infer': lambda node: reverse_bypass_infer(node, in_ports=[0]),
        'is_training': is_training
    }
Example #14
def determine_data_type(node: Node):
    """
    Tries to determine the data type of the node. The input node could be either a data or an op node. If we don't know
    the data type of the node then we recursively check the first parent of the node.
    :param node: the node to determine the data type for.
    :return: data type of the node output in the numpy format.
    """
    if node.has_and_set('data_type'):
        return node.data_type
    if node.has_and_set('kind') and node.kind == 'op':
        if node.has_and_set('pb'):
            if 'dtype' in node.pb.attr:
                return tf_dtype_extractor(node.pb.attr['dtype'].type)
            if 'T' in node.pb.attr:
                return tf_dtype_extractor(node.pb.attr['T'].type)
    if node.has_and_set('kind') and node.kind == 'data':
        if 'value' in node and node.value is not None:
            return node.value.dtype
    if len(node.in_nodes()) != 0:  # try to guess data type from the first parent
        return determine_data_type(node.in_node(0))
    log.error('Failed to determine data type for node "{}"'.format(node.name))
    return None
Example #15
 def extract(cls, node):
     attrs = {
         'top_k': 1,
         'axis': None,
         'keepdims': 0,
         'remove_values_output': True,
         'output_type': tf_dtype_extractor(node.pb.attr['output_type'].type, np.int64)
     }
     ArgMinOp.update_node_stat(node, attrs)
     return cls.enabled
Example #16
 def extract(cls, node):
     shapes = node.pb.attr['shapes'].list.shape
     tf_types = node.pb.attr['component_types'].list.type
     extracted_types = []
     for t in tf_types:
         extracted_types.append(tf_dtype_extractor(t))
     result_shapes = []
     for shape_pb in shapes:
         shape = shape_pb.dim
         if len(shape) == 3:
             result_shapes.append(int64_array([1, shape[0].size, shape[1].size, shape[2].size]))
         else:
             result_shapes.append(int64_array([dim.size for dim in shape]))
     Op.update_node_stat(node, {'shapes': result_shapes, 'types': extracted_types})
     return cls.enabled
Example #17
 def replace_sub_graph(self, graph: Graph, match: dict):
     node = match['op']
     if not node.has_valid('value'):
         log.debug("No value in FakeConst node {}".format(node.id))
         return
     node_value = node.value
     extracted_attrs = {
         'data_type': tf_dtype_extractor(node.pb.attr['dtype'].type),
         'shape': int64_array(node_value.shape),
         'value': node_value
     }
     Const.update_node_stat(node, extracted_attrs)
     log.debug(
         'FakeConst op was translated to Const op with shape = {} and value.shape = {}'
         ''.format(extracted_attrs['shape'],
                   extracted_attrs['value'].shape))
Example #18
 def extract(cls, node):
     shapes = node.pb.attr['output_shapes'].list.shape
     tf_types = node.pb.attr['output_types'].list.type
     extracted_types = []
     for t in tf_types:
         extracted_types.append(tf_dtype_extractor(t))
     result_shapes = []
     for shape_pb in shapes:
         result_shapes.append(tf_tensor_shape(shape_pb))
     Op.update_node_stat(
         node, {
             'shapes': result_shapes,
             'types': extracted_types,
             'out_ports_count': 1
         })
     return cls.enabled
Example #19
def get_attrs(node: Node):
    shapes = node.pb.attr["_output_shapes"].list.shape
    tf_types = node.pb.attr["component_types"].list.type
    extracted_types = []
    for t in tf_types:
        extracted_types.append(tf_dtype_extractor(t))
    result_shapes = []
    for shape_pb in shapes:
        result_shapes.append(tf_tensor_shape(shape_pb))
    assert len(result_shapes) == len(extracted_types), "Output shapes do not match output " \
                                                       "types in the node {}".format(node.soft_get('name', node.id))
    attrs = {
        "shapes": result_shapes,
        "types": extracted_types,
        'out_ports_count': len(result_shapes)
    }
    return attrs
Example #20
 def extract(cls, node):
     ArgMaxOp.update_node_stat(
         node, {
             'out_max_val': 0,
             'top_k': 1,
             'axis': None,
             'dim_attrs': ['axis'],
             'keepdims': 0,
             'remove_values_output': True,
             'output_type': tf_dtype_extractor(node.pb.attr['output_type'].type, np.int64),
         })
     return cls.enabled
Example #21
def convert_graph_inputs_to_parameters(internal_graph, internal_graph_proto):
    # create Parameter nodes for the body graph
    body_parameters = []
    body_parameter_names = []
    for idx, pb_node in enumerate(internal_graph_proto['input_arg']):
        param_id = internal_graph.unique_id(pb_node.name)
        internal_graph.add_node(param_id,
                                name=param_id,
                                kind='op',
                                op='Parameter',
                                pb=None,
                                shape=None)
        parameter_node = Node(internal_graph, pb_node.name)
        Parameter.update_node_stat(
            parameter_node, {
                'data_type': tf_dtype_extractor(pb_node.type),
                'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')])
            })
        body_parameters.append(parameter_node)
        body_parameter_names.append(param_id)
    return body_parameters, body_parameter_names
Example #22
 def extract(cls, node):
     SquaredDifference.update_node_stat(node, {'data_type': tf_dtype_extractor(node.pb.attr["T"].type)})
     return cls.enabled
Example #23
 def extract(cls, node):
     Minimum.update_node_stat(node, {'data_type': tf_dtype_extractor(node.pb.attr["T"].type)})
     return cls.enabled
Example #24
 def extract(cls, node: Node):
     Identity.update_node_stat(
         node, {
             'data_type': tf_dtype_extractor(node.pb.attr["T"].type),
         })
     return cls.enabled
Example #25
 def extract(cls, node: Node):
     Range.update_node_stat(node, {'output_type': tf_dtype_extractor(node.pb.attr['Tidx'].type)})
     return cls.enabled