def convert_concatenation(self, op): """Convert TFLite concatenation""" try: from tflite.Operator import Operator from tflite.ConcatenationOptions import ConcatenationOptions from tflite.BuiltinOptions import BuiltinOptions from tflite.ActivationFunctionType import ActivationFunctionType except ImportError: raise ImportError("The tflite package must be installed") assert isinstance(op, Operator) input_tensors = self.get_input_tensors(op) assert len(input_tensors) >= 1, "input tensors should greater than 1" in_exprs = [self.get_expr(input_tensor.tensor_idx) for input_tensor in input_tensors] output_tensors = self.get_output_tensors(op) assert len(output_tensors) == 1, "output tensors should be 1" assert op.BuiltinOptionsType() == BuiltinOptions.ConcatenationOptions op_options = op.BuiltinOptions() concatenation_options = ConcatenationOptions() concatenation_options.Init(op_options.Bytes, op_options.Pos) concatenation_axis = concatenation_options.Axis() fused_activation_fn = concatenation_options.FusedActivationFunction() # with axis in N H W C out = _op.concatenate(in_exprs, axis=concatenation_axis) # if we have activation fn if fused_activation_fn != ActivationFunctionType.NONE: out = self.convert_fused_activation_function(out, fused_activation_fn) return out
def convert_concatenation(self, op): """ convert TFLite concatenation""" try: from tflite.Operator import Operator from tflite.ConcatenationOptions import ConcatenationOptions from tflite.BuiltinOptions import BuiltinOptions from tflite.ActivationFunctionType import ActivationFunctionType except ImportError: raise ImportError("The tflite package must be installed") assert isinstance(op, Operator) input_tensors = self.get_input_tensors(op) assert len(input_tensors) >= 1, "input tensors should greater than 1" in_exprs = [ self.get_expr(input_tensor.tensor_idx) for input_tensor in input_tensors ] output_tensors = self.get_output_tensors(op) assert len(output_tensors) == 1, "output tensors should be 1" assert op.BuiltinOptionsType() == BuiltinOptions.ConcatenationOptions op_options = op.BuiltinOptions() concatenation_options = ConcatenationOptions() concatenation_options.Init(op_options.Bytes, op_options.Pos) concatenation_axis = concatenation_options.Axis() fused_activation_fn = concatenation_options.FusedActivationFunction() input_shape_length = len(input_tensors[0].tensor.ShapeAsNumpy()) # TFLite is N H W C, our layout is N C H W if input_shape_length <= 4: axis_convert_map = [0] + list(range(2, input_shape_length)) + [1] concatenation_axis = axis_convert_map[concatenation_axis] else: raise NotImplementedError( "Not support input shape length {} of concatenatio : ".format( str(input_shape_length))) # with axis in N H W C out = _op.concatenate(in_exprs, axis=concatenation_axis) # if we have activation fn if fused_activation_fn != ActivationFunctionType.NONE: out = self.convert_fused_activation_function( out, fused_activation_fn) return out
def __init__(self, op, op_type, tflite_interpreter):
    Layer.__init__(self, op, op_type, tflite_interpreter)

    # Parse the flatbuffer-encoded ConcatenationOptions for this operator.
    self.tflite_concat_parser = ConcatenationOptions()
    self.tflite_concat_parser.Init(self.op.BuiltinOptions().Bytes,
                                   self.op.BuiltinOptions().Pos)
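
A hedged sketch of how the parsed options would typically be queried later in the layer; the method name below is hypothetical, while Axis() and FusedActivationFunction() are the accessors generated by the TFLite flatbuffer schema, as used in convert_concatenation above.

def read_concat_options(self):  # hypothetical helper, for illustration only
    axis = self.tflite_concat_parser.Axis()
    fused_activation = self.tflite_concat_parser.FusedActivationFunction()
    return axis, fused_activation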