Example No. 1
    def convert_lambda(layer):
        global count_lambda
        if isinstance(layer, KL.Lambda) and layer.name:
            print('-------LAMBDA--------')

            params = NeuralNetwork_pb2.CustomLayerParams()

            # The name of the Swift or Obj-C class that implements this layer.
            params.className = str(layer.name)
            # The description is shown in Xcode's mlmodel viewer.
            params.description = "This is a Lambda layer transformed by CoreML"

            count_lambda += 1
            return params
        elif isinstance(layer, KL.Lambda) and not layer.name:
            print('-------LAMBDA--------')

            params = NeuralNetwork_pb2.CustomLayerParams()

            # The name of the Swift or Obj-C class that implements this layer.
            params.className = 'Lambda'
            # The description is shown in Xcode's mlmodel viewer.
            params.description = "This is a Lambda layer transformed by CoreML"

            count_lambda += 1
            return params
        else:
            return None
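For context, a minimal sketch (not part of the example above) of how a conversion function such as convert_lambda is usually handed to the coremltools 3.x Keras converter; the toy model, layer names and output file name are assumptions.

import coremltools
from keras.models import Sequential
from keras.layers import Dense, Lambda

# Arbitrary placeholder model containing a Lambda layer for the converter to visit.
model = Sequential([
    Dense(8, input_shape=(4,)),
    Lambda(lambda x: x * 2.0, name='double'),
])

# add_custom_layers=True turns unsupported layers into CustomLayerParams, and the
# 'Lambda' entry routes them through convert_lambda (assumed usage, hedged).
mlmodel = coremltools.converters.keras.convert(
    model,
    add_custom_layers=True,
    custom_conversion_functions={'Lambda': convert_lambda},
)
mlmodel.save('ModelWithCustomLambda.mlmodel')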
Example No. 2
def convert_concat(node):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = node.op_type
    params.description = "Custom layer that corresponds to the ONNX op {}".format(
        node.op_type, )
    params.parameters["axis"].intValue = node.attrs['axis']
    return params
Example No. 3
def _convert_custom(
        builder, node, graph, err
):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None

    if node.op_type in err.custom_conversion_functions:
        func = err.custom_conversion_functions[node.op_type]
        params = func(node)
    else:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = node.op_type
        params.description = "Custom layer that corresponds to the ONNX op {}".format(
            node.op_type, )

    inputs_ = []
    # skip the inputs that are initializers
    for inp in node.inputs:
        if inp not in node.input_tensors:
            inputs_.append(inp)

    builder.add_custom(name=node.name,
                       input_names=inputs_,
                       output_names=node.outputs,
                       custom_proto_spec=params)

    err.custom_layer_nodes.append(node)
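A hedged sketch of how op-specific converters like these reach _convert_custom through onnx-coreml; the model path is a placeholder and the exact keyword names may differ between onnx-coreml versions.

import onnx
from onnx_coreml import convert

onnx_model = onnx.load('model.onnx')  # placeholder path to an ONNX model
mlmodel = convert(
    onnx_model,
    add_custom_layers=True,  # unsupported ops fall back to custom layers
    custom_conversion_functions={'Concat': convert_concat},  # e.g. Example No. 2's converter
)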
Example No. 4
def convert_pyramid(layer):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = "PyramidROIAlignLayer"
    params.parameters["poolSize"].intValue = layer.pool_shape[0]
    params.parameters["imageWidth"].intValue = layer.image_shape[0]
    params.parameters["imageHeight"].intValue = layer.image_shape[1]
    params.description = "Extracts feature maps based on the regions of interest."
    return params
Example No. 5
def convert_lambda(layer):
    if layer.function == modellib.lambda_a:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'lambda_a'
    elif layer.function == modellib.lambda_b:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'lambda_b'
    elif layer.function == modellib.lambda_c:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'lambda_c'
    elif layer.function == modellib.lambda_d:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'lambda_d'
    else:
        params = None

    return params
Example No. 6
def convert_clip(node):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = node.op_type
    params.description = "Custom layer that corresponds to the ONNX op {}".format(
        node.op_type, )
    params.parameters["min"].doubleValue = node.attrs['min']
    params.parameters["max"].doubleValue = node.attrs['max']
    return params
Example No. 7
def convert_lambda(layer):
    if layer.function == scaling:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = "scaling"
        params.parameters["scale"].doubleValue = layer.arguments['scale']
        return params
    else:
        return None
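The check layer.function == scaling together with layer.arguments['scale'] implies a Keras-side Lambda roughly like the sketch below; the input shape and scale value are arbitrary placeholders.

from keras.layers import Input, Lambda

def scaling(x, scale=1.0):
    # Elementwise scaling wrapped in a Lambda layer so the converter can match it.
    return x * scale

inputs = Input(shape=(64,))
scaled = Lambda(scaling, arguments={'scale': 0.17}, name='scaling')(inputs)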
Example No. 8
def serialize(self, name, input, builder):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = 'identity'
    params.description = 'An identity layer'
    builder.add_custom(name,
                       input_names=[input],
                       output_names=[name],
                       custom_proto_spec=params)
    return name
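The serialize(self, name, input, builder) methods in this listing assume a coremltools NeuralNetworkBuilder to call add_custom on; a minimal sketch of that plumbing, with placeholder feature names and shapes:

from coremltools.models import datatypes
from coremltools.models.neural_network import NeuralNetworkBuilder

# Builder with one input and one output feature; the serialize() methods above and
# below add their custom layers onto a builder like this via builder.add_custom(...).
input_features = [('input', datatypes.Array(16))]
output_features = [('identity', datatypes.Array(16))]
builder = NeuralNetworkBuilder(input_features, output_features)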
Example No. 9
        def convert_acos(builder, node, graph, err):
            params = NeuralNetwork_pb2.CustomLayerParams()
            params.className = node.op_type
            params.description = "Custom layer that corresponds to the ONNX op {}".format(
                node.op_type, )

            builder.add_custom(name=node.name,
                               input_names=node.inputs,
                               output_names=node.outputs,
                               custom_proto_spec=params)
Example No. 10
    def convert_coreml(layer):
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'ResizeLayer'
        params.description = 'Perform nearest neighbour / bilinear resizing of input tensors'

        layer_config = layer.get_config()
        params.parameters['bilinear'].intValue = int(layer_config['bilinear'])
        params.parameters['new_height'].intValue = layer_config['new_height']
        params.parameters['new_width'].intValue = layer_config['new_width']

        return params
Example No. 11
def convert_FullSizePReLU(keras_layer):
    coreml_layer = NeuralNetwork_pb2.CustomLayerParams()
    coreml_layer.className = className_FullSizePReLU
    coreml_layer.description = 'Custom activation layer: ' + className_FullSizePReLU

    weightList = keras_layer.get_weights()
    p_alpha = weightList[0]  # numpy array
    alpha = coreml_layer.weights.add()
    alpha.floatValue.extend(map(float, p_alpha.flatten()))

    return coreml_layer
Example No. 12
def serialize(self, name, input, builder):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = 'dropout'
    params.description = 'An n-dimensional dropout layer'
    params.parameters['dim'].intValue = self.dim
    params.parameters['p'].doubleValue = self.p
    builder.add_custom(name,
                       input_names=[input],
                       output_names=[name],
                       custom_proto_spec=params)
    return name
Example No. 13
def convert_swish(layer):
    params = NeuralNetwork_pb2.CustomLayerParams()

    # The name of the Swift or Obj-C class that implements this layer.
    params.className = "Swish"

    # The description is shown in Xcode's mlmodel viewer.
    params.description = "A fancy new activation function"

    # Set configuration parameters
    params.parameters["beta"].doubleValue = layer.beta
    return params
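convert_swish reads a fixed layer.beta attribute, which suggests a custom Keras layer along these lines (a hypothetical sketch, not the original model code):

import keras.backend as K
from keras.layers import Layer

class Swish(Layer):
    """Hypothetical swish activation with a fixed, non-trainable beta."""

    def __init__(self, beta=1.0, **kwargs):
        super(Swish, self).__init__(**kwargs)
        self.beta = beta  # read as layer.beta by convert_swish above

    def call(self, inputs):
        return inputs * K.sigmoid(self.beta * inputs)

    def get_config(self):
        config = super(Swish, self).get_config()
        config['beta'] = self.beta
        return config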
Example No. 14
    def convert_topk(builder, node, graph, err):
      params = NeuralNetwork_pb2.CustomLayerParams()
      params.className = node.op_type
      params.description = "Custom layer that corresponds to the ONNX op {}".format(node.op_type, )
      params.parameters["axis"].intValue = node.attrs.get('axis', -1)

      builder.add_custom(
        name=node.name,
        input_names=node.inputs,
        output_names=node.outputs,
        custom_proto_spec=params
      )
Example No. 15
    def serialize(self, name, input, builder):
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'addition'
        params.description = 'An addition layer'
        params.parameters['dim'].intValue = self.dim
        params.parameters['chunk_size'].intValue = self.chunk_size

        builder.add_custom(name,
                           input_names=[input],
                           output_names=[name],
                           custom_proto_spec=params)
        return name
Example No. 16
def convert_proposal(layer):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = "ProposalLayer"
    params.description = "Proposes regions of interest and performs NMS."
    params.parameters["bboxStdDev_count"].intValue = len(
        layer.bounding_box_std_dev)
    for idx, value in enumerate(layer.bounding_box_std_dev):
        params.parameters["bboxStdDev_" + str(idx)].doubleValue = value
    params.parameters[
        "preNMSMaxProposals"].intValue = layer.pre_nms_max_proposals
    params.parameters["maxProposals"].intValue = layer.max_proposals
    params.parameters["nmsIOUThreshold"].doubleValue = layer.nms_threshold
    return params
Example No. 17
def convert_learnable_swish(layer):
    params = NeuralNetwork_pb2.CustomLayerParams()

    # The name of the Swift or Obj-C class that implements this layer.
    params.className = "Swish"

    # The description is shown in Xcode's mlmodel viewer.
    params.description = "A fancy new activation function"

    # Add the weights
    beta_weights = params.weights.add()
    beta_weights.floatValue.extend(layer.get_weights()[0].astype(float))
    return params
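Here beta comes from layer.get_weights()[0] rather than an attribute, which points to a trainable-beta variant; a hypothetical sketch:

import keras.backend as K
from keras.layers import Layer

class LearnableSwish(Layer):
    """Hypothetical swish whose beta is a trained weight, so get_weights()[0]
    returns it as a numpy array for convert_learnable_swish to copy."""

    def build(self, input_shape):
        self.beta = self.add_weight(name='beta', shape=(1,),
                                    initializer='ones', trainable=True)
        super(LearnableSwish, self).build(input_shape)

    def call(self, inputs):
        return inputs * K.sigmoid(self.beta * inputs)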
Example No. 18
def convert_SynapticTransmissionRegulator(keras_layer):
    coreml_layer = NeuralNetwork_pb2.CustomLayerParams()
    coreml_layer.className = className_SynapticTransmissionRegulator
    coreml_layer.description = 'Custom Synaptic Transmission Regulator (STR) layer: ' + className_SynapticTransmissionRegulator

    weightList = keras_layer.get_weights()
    p_weight = weightList[0]  # numpy array
    p_bias = weightList[1]  # numpy array
    weight = coreml_layer.weights.add()
    weight.floatValue.extend(map(float, p_weight.flatten()))
    bias = coreml_layer.weights.add()
    bias.floatValue.extend(map(float, p_bias.flatten()))

    return coreml_layer
Example No. 19
def convert_detection(layer):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = "DetectionLayer"
    params.parameters["bboxStdDev_count"].intValue = len(
        layer.bounding_box_std_dev)
    for idx, value in enumerate(layer.bounding_box_std_dev):
        params.parameters["bboxStdDev_" + str(idx)].doubleValue = value
    params.parameters["maxDetections"].intValue = layer.max_detections
    params.parameters[
        "scoreThreshold"].doubleValue = layer.detection_min_confidence
    params.parameters[
        "nmsIOUThreshold"].doubleValue = layer.detection_nms_threshold
    params.description = "Outputs detections based on confidence and performs NMS."
    return params
Example No. 20
    def serialize(self, name: str, input: str, builder) -> str:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'reshape'
        params.description = 'A generalized reshape layer'
        params.parameters['src_dim'].intValue = self.src_dim
        params.parameters['part_a'].intValue = self.part_a
        params.parameters['part_b'].intValue = self.part_b
        params.parameters['high'].intValue = self.high
        params.parameters['low'].intValue = self.low

        builder.add_custom(name,
                           input_names=[input],
                           output_names=[name],
                           custom_proto_spec=params)
        return name
Example No. 21
    def _convert_topk(**kwargs):
      tf_op = kwargs["op"]
      coreml_nn_builder = kwargs["nn_builder"]
      constant_inputs = kwargs["constant_inputs"]

      params = NeuralNetwork_pb2.CustomLayerParams()
      params.className = 'Top_K'
      params.description = "Custom layer that corresponds to the top_k TF op"
      params.parameters["sorted"].boolValue = tf_op.get_attr('sorted')
      # get the value of k
      k = constant_inputs.get(tf_op.inputs[1].name, 3)
      params.parameters["k"].intValue = k
      coreml_nn_builder.add_custom(name=tf_op.name,
                                   input_names=[tf_op.inputs[0].name],
                                   output_names=[tf_op.outputs[0].name],
                                   custom_proto_spec=params)
Example No. 22
    def _convert_topk(ssa_converter, node):
      coreml_nn_builder = ssa_converter._get_builder()
      constant_inputs = node.attr

      params = NeuralNetwork_pb2.CustomLayerParams()
      params.className = 'Top_K'
      params.description = "Custom layer that corresponds to the top_k TF op"
      params.parameters["sorted"].boolValue = node.attr.get('sorted')
      # get the value of k
      k = constant_inputs.get(node.inputs[1], 3)
      params.parameters["k"].intValue = k
      layer = coreml_nn_builder.add_custom(name=node.name,
                                  input_names=[node.inputs[0]],
                                  output_names=['output'],
                                  custom_proto_spec=params)
      custom_shape_update.propagate_single_layer(layer, ssa_converter.tensor_shapes, custom_shape_function=_shape_topk)
Example No. 23
    def convert_BU(layer):
        global count_BU
        if isinstance(layer, ResizeBilinear) and layer.name:

            # params = NeuralNetwork_pb2.ReorganizeDataLayerParams()
            params = NeuralNetwork_pb2.CustomLayerParams()

            # The name of the Swift or Obj-C class that implements this layer.
            params.className = str(layer.name)

            # The description is shown in Xcode's mlmodel viewer.
            params.description = "This is a BilinearUpsampling layer transformed by CoreML"

            count_BU += 1
            return params

        else:
            return None
Example No. 24
def custom_layer(op, context):

    print("Adding custom layer")

    if op.name in context.custom_conversion_functions or \
       op.type in context.custom_conversion_functions:

        if op.type in context.custom_conversion_functions:
            func = context.custom_conversion_functions[op.type]
        else:
            func = context.custom_conversion_functions[op.name]

        # Fill up values of any constant inputs that this op receives
        constant_inputs = {}
        for inp_ in op.inputs:
            if inp_.name in context.consts:
                constant_inputs[inp_.name] = context.consts[inp_.name]
            elif inp_.op.type == 'Identity' and inp_.op.inputs[
                    0].name in context.consts:
                constant_inputs[inp_.op.inputs[0].name] = context.consts[
                    inp_.op.inputs[0].name]

        kwargs = {
            "op": op,
            "nn_builder": context.builder,
            "context": context,
            "constant_inputs": constant_inputs
        }
        func(**kwargs)

    else:
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = op.type
        params.description = "Custom layer that corresponds to the TensorFlow op {}".format(
            op.type, )
        inputs = [inp.name for inp in op.inputs]
        outputs = [out.name for out in op.outputs]
        context.builder.add_custom(name=op.name,
                                   input_names=inputs,
                                   output_names=outputs,
                                   custom_proto_spec=params)
    for out in op.outputs:
        context.translated[out.name] = True
    context.ops_converted_to_custom_layers.append(op)
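custom_layer is the converter-side fallback; the context.custom_conversion_functions it consults is normally filled from a tfcoreml call roughly like this sketch (paths, tensor names and shapes are placeholders, and keyword names may vary by tfcoreml version).

import tfcoreml

mlmodel = tfcoreml.convert(
    tf_model_path='frozen_graph.pb',  # placeholder frozen TF graph
    mlmodel_path='Model.mlmodel',
    output_feature_names=['logits:0'],
    input_name_shape_dict={'input:0': [1, 224, 224, 3]},
    add_custom_layers=True,  # unsupported ops become custom layers
    custom_conversion_functions={'TopKV2': _convert_topk},  # e.g. the top_k converter above
)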
Example No. 25
    def serialize(self, name: str, input: str, builder) -> str:
        """
        Serializes the module using a NeuralNetworkBuilder.
        """
        params = NeuralNetwork_pb2.CustomLayerParams()
        params.className = 'groupnorm'
        params.description = 'A Group Normalization layer'
        params.parameters['in_channels'].intValue = self.in_channels
        params.parameters['num_groups'].intValue = self.num_groups

        weight = params.weights.add()
        weight.floatValue.extend(self.layer.weight.data.numpy())
        bias = params.weights.add()
        bias.floatValue.extend(self.layer.bias.data.numpy())

        builder.add_custom(name,
                           input_names=[input],
                           output_names=[name],
                           custom_proto_spec=params)
        return name
Example No. 26
    def _convert_slice(**kwargs):
      tf_op = kwargs["op"]
      coreml_nn_builder = kwargs["nn_builder"]
      constant_inputs = kwargs["constant_inputs"]

      params = NeuralNetwork_pb2.CustomLayerParams()
      params.className = 'Slice'
      params.description = "Custom layer that corresponds to the slice TF op"
      # get the value of begin
      begin = constant_inputs.get(tf_op.inputs[1].name, [0, 0, 0, 0])
      size = constant_inputs.get(tf_op.inputs[2].name, [0, 0, 0, 0])
      # add begin and size as two repeated weight fields
      begin_as_weights = params.weights.add()
      begin_as_weights.floatValue.extend(map(float, begin))
      size_as_weights = params.weights.add()
      size_as_weights.floatValue.extend(map(float, size))
      coreml_nn_builder.add_custom(name=tf_op.name,
                                   input_names=[tf_op.inputs[0].name],
                                   output_names=[tf_op.outputs[0].name],
                                   custom_proto_spec=params)
Example No. 27
def convert_lambda(layer):
    # Only convert this Lambda layer if it is the SSR module (named 'pred_a').

    if layer.name == 'pred_a':
        params = NeuralNetwork_pb2.CustomLayerParams()

        # The name of the Swift or Obj-C class that implements this layer.
        params.className = "SSR_module"

        # The description is shown in Xcode's mlmodel viewer.
        params.description = "Soft Stagewise Regression"
        params.parameters["s1"].doubleValue = layer.arguments['s1']
        params.parameters["s2"].doubleValue = layer.arguments['s2']
        params.parameters["s3"].doubleValue = layer.arguments['s3']
        params.parameters["lambda_d"].doubleValue = layer.arguments['lambda_d']
        params.parameters["lambda_local"].doubleValue = layer.arguments[
            'lambda_local']

        return params
    else:
        return None
Example No. 28
def convert_lambda(layer):
    # Only convert this Lambda layer if it is for our swish function.
    if layer.function == swish:
        params = NeuralNetwork_pb2.CustomLayerParams()

        # The name of the Swift or Obj-C class that implements this layer.
        params.className = "Swish"

    # The description is shown in Xcode's mlmodel viewer.
        params.description = "A fancy new activation function"

        # Set configuration parameters
        # params.parameters["someNumber"].intValue = 100
        # params.parameters["someString"].stringValue = "Hello, world!"

        # Add some random weights
        # my_weights = params.weights.add()
        # my_weights.floatValue.extend(np.random.randn(10).astype(float))

        return params
    else:
        return None
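The swish function this converter compares against is typically a plain Keras backend expression used through a Lambda layer; a sketch with an arbitrary placeholder model:

import keras.backend as K
from keras.layers import Input, Dense, Lambda

def swish(x):
    return K.sigmoid(x) * x

inputs = Input(shape=(32,))
hidden = Dense(64)(inputs)
activated = Lambda(swish)(hidden)  # layer.function == swish inside convert_lambda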
Example No. 29
def convert_custom_lambda_layer(layer):

    params = NeuralNetwork_pb2.CustomLayerParams()

    if layer.function.__name__ == custom_relu_activation.__name__:
        # The class assigned to this custom layer
        params.className = "MyCustomReluActivation"
        # The description shown in Xcode
        params.description = "RELU"

        # Other parameters
        params.parameters["test"].stringValue = "hoge"
        params.parameters["param"].intValue = 10

        # Set the weights (irrelevant for ReLU)
        my_weights = params.weights.add()
        my_weights.floatValue.extend(np.zeros(10).astype(float))
        my_weights.floatValue[0] = 3
        my_weights.floatValue[1] = 1
        my_weights.floatValue[2] = 4
        my_weights.floatValue[3] = 1
        my_weights.floatValue[4] = 5
        my_weights.floatValue[5] = 9
        my_weights.floatValue[6] = 2
        my_weights.floatValue[7] = 6
        my_weights.floatValue[8] = 5
        my_weights.floatValue[9] = 3

        return params
    elif layer.function.__name__ == custom_sigmoid_activation.__name__:

        # When several Lambda layers are used, branch on the function name or similar

        params.className = "MyCustomSigmoidActivation"
        params.description = "sigmoid"
        return params
    else:
        return None
Example No. 30
def convert_acos(node):
    params = NeuralNetwork_pb2.CustomLayerParams()
    params.className = node.op_type
    params.description = "Custom layer that corresponds to the ONNX op {}".format(
        node.op_type, )
    return params