def input_ext(proto_layer, model_layer):
    """Extract a Caffe Input layer into Placeholder node attributes.

    The output shape is taken from the first entry of the layer's
    ``input_param.shape``; inference simply propagates the node's own shape.
    """
    input_shape = dim_to_shape(proto_layer.input_param.shape[0].dim)
    return {
        'op': 'Placeholder',
        'type': 'Input',
        'shape': input_shape,
        'infer': lambda node: single_output_infer(node, lambda n: n.shape),
    }
def extract(node): node['infer'] = lambda node: single_output_infer( node, lambda node: node.in_node(0).shape, lambda node: node.in_node(0).value ) return __class__.enabled
def tf_placeholder_ext(pb):
    """Extract a TF Placeholder proto into Input node attributes.

    Data type and shape come from the proto's ``dtype``/``shape`` attrs;
    inference propagates the node's own shape, and the ``shape`` attribute is
    registered for layout permutation against output port 0.
    """
    dtype = tf_dtype_extractor(pb.attr["dtype"].type)
    shape = tf_tensor_shape(pb.attr["shape"].shape)
    return {
        'data_type': dtype,
        'shape': shape,
        'type': 'Input',
        'infer': lambda node: single_output_infer(node, lambda n: n.shape),
        'permute_attrs': PermuteAttrs().update_attrs(attrs=[('shape', 'output:0')]),
    }
def __init__(self, graph: Graph, attrs: dict):
    """Create the op node with Placeholder-style defaults.

    Marks the node as a graph input with one output port; inference
    propagates the node's own shape. Caller-supplied *attrs* may override
    any of the defaults via the base-class constructor.
    """
    defaults = {
        'kind': 'op',
        'op': __class__.op,
        'type': __class__.op,
        'infer': lambda node: single_output_infer(node, lambda n: n.shape),
        'is_input': True,
        'out_ports_count': 1,
    }
    super().__init__(graph, defaults, attrs)
def tf_reshape_ext(pb):
    """Extract a TF Reshape proto into Reshape node attributes.

    Shape inference is delegated to ``tf_reshape_shape_infer``; when the
    input carries a constant value, that value is reshaped to the inferred
    output shape, otherwise no value is produced.
    """
    def reshape_value(node):
        src = node.in_node()
        if src.value is None:
            return None
        return np.reshape(src.value, node.out_node().shape)

    return {
        'type': 'Reshape',
        'infer': lambda node: single_output_infer(node, tf_reshape_shape_infer, reshape_value),
    }
def __init__(self, graph: Graph, attrs: dict):
    """Create a Placeholder op node.

    The node is flagged as a graph input with a single output port, and its
    inference propagates the node's own shape. Caller-supplied *attrs* may
    override the defaults via the base-class constructor.
    """
    mandatory = {
        'kind': 'op',
        'type': __class__.op,
        'op': 'Placeholder',
        'infer': lambda node: single_output_infer(node, lambda n: n.shape),
        'out_ports_count': 1,
        'is_input': True,
    }
    super().__init__(graph, mandatory, attrs)
def onnx_placeholder_ext(node):
    """Extract an ONNX graph input into Placeholder (Input) node attributes.

    The static shape is read from the tensor type proto's dimensions;
    inference propagates the node's own shape.
    """
    # BUGFIX: `np.float` was removed in NumPy 1.24. It had always been an
    # alias for the builtin `float`, so using `float` preserves behavior.
    # TODO WARNING Use the real element type from the ONNX tensor proto here
    # instead of hard-coding float.
    shape = np.array(
        [d.dim_value for d in node.pb.type.tensor_type.shape.dim],
        dtype=np.int64,
    )
    return {
        'data_type': float,
        'shape': shape,
        'type': 'Input',
        'infer': lambda node: single_output_infer(node, lambda n: n.shape),
    }
def onnx_reshape_ext(node):
    '''Extract ONNX Reshape op of different versions.

    Supports both the latest Reshape (two inputs) and Reshape-1, where the
    target shape is carried in the ``shape`` attribute.
    '''
    def reshape_value(n):
        src = n.in_node()
        if src.value is None:
            return None
        return np.reshape(src.value, n.out_node().shape)

    dim = onnx_attr(node, 'shape', 'ints', None)
    if dim is not None:
        dim = np.array(dim, dtype=np.int64)

    return {
        'type': 'Reshape',
        'dim': dim,
        'infer': lambda node: single_output_infer(node, tf_reshape_shape_infer, reshape_value),
    }
def __init__(self, graph: nx.MultiDiGraph, attrs: dict):
    """Create a Reshape op node.

    Shape inference is delegated to ``tf_reshape_shape_infer``; a constant
    input value is reshaped to the inferred output shape. Caller-supplied
    *attrs* may override the defaults via the base-class constructor.
    """
    mandatory = {
        'kind': 'op',
        'type': __class__.op,
        'op': __class__.op,
        'infer': lambda node: single_output_infer(
            node,
            tf_reshape_shape_infer,
            lambda n: np.reshape(n.in_node().value, n.out_node().shape),
        ),
    }
    super().__init__(graph, mandatory, attrs)
def null_ext(attr_dict):
    """Extract a framework 'null' node.

    With an embedded ``value`` it becomes a Const node carrying that value;
    otherwise it is treated as a data Placeholder with unknown shape whose
    inference propagates the node's own shape.
    """
    if 'value' not in attr_dict:
        return {
            'op': 'Placeholder',
            'type': 'Input',
            'shape': None,
            'infer': lambda node: single_output_infer(node, lambda n: n.shape),
        }
    value = attr_dict['value']
    return {
        'op': 'Const',
        'value': value,
        'shape': np.array(value.shape, dtype=np.int64),
        'infer': tf_const_infer,
    }
def reshape_ext(pl, ml):
    """Extract a Caffe Reshape layer into Reshape node attributes.

    Only ``axis == 0`` and ``num_axes == -1`` are supported; for any other
    value an error is logged and None is returned to reject the layer.
    """
    param = pl.reshape_param
    result = {
        'op': 'Reshape',
        'type': 'Reshape',
        'axis': param.axis,
        'num_axes': param.num_axes,
        'dim': list(param.shape.dim),
        'infer': lambda node: single_output_infer(node, tf_reshape_shape_infer),
    }
    # Reject unsupported axis / num_axes values explicitly.
    if result['axis'] != 0:
        log.error('The operation "Reshape" has attribute "axis" with unsupported value "{}"'.format(result['axis']))
        return None
    if result['num_axes'] != -1:
        log.error('The operation "Reshape" has attribute "num_axes" with unsupported value "{}"'.format(
            result['num_axes']))
        return None
    return result
def global_input_ext(proto_layer, model_layer):
    """Extract a global input layer as a shapeless Placeholder.

    Unlike :func:`input_ext`, no static shape is recorded; inference simply
    propagates whatever shape the node ends up with.
    """
    return {
        'op': 'Placeholder',
        'type': 'Input',
        'infer': lambda node: single_output_infer(node, lambda n: n.shape),
    }
def infer(node: Node):
    """Run Reshape inference on *node*.

    Delegates shape inference to ``tf_reshape_shape_infer`` and, for the
    value, reshapes input port 0's value to the inferred output shape.
    """
    def reshaped_value(n):
        return np.reshape(n.in_node(0).value, n.out_node().shape)

    single_output_infer(node, tf_reshape_shape_infer, reshaped_value)