def get_custom_model_spec():
    """Build a Core ML classifier spec from the custom MXNet classifier head.

    NOTE(review): this reads like a closure — `self`, `prob_name`,
    `_mxnet_utils` and `_np` must come from the enclosing scope; confirm
    against the caller (not visible in this chunk).
    """
    from coremltools.models.neural_network import NeuralNetworkBuilder
    from coremltools.models.datatypes import Array, Dictionary, String

    # The classifier consumes the feature extractor's output blob ('output1').
    input_name = 'output1'
    input_length = self._feature_extractor.output_length
    builder = NeuralNetworkBuilder(
        [(input_name, Array(input_length, ))],
        [(prob_name, Dictionary(String))], 'classifier')

    # Pull layer weights out of MXNet on the first available context.
    ctx = _mxnet_utils.get_mxnet_context()[0]
    input_name, output_name = input_name, 0
    import mxnet as _mx
    for i, cur_layer in enumerate(self._custom_classifier):
        output_name = str(i)
        if type(cur_layer) == _mx.gluon.nn.basic_layers.Dense:
            W = cur_layer.weight.data(ctx).asnumpy()
            nC, nB = W.shape  # (output_channels, input_channels)
            Wb = cur_layer.bias.data(ctx).asnumpy()

            builder.add_inner_product(name='inner_product_' + str(i),
                                      W=W,
                                      b=Wb,
                                      input_channels=nB,
                                      output_channels=nC,
                                      has_bias=True,
                                      input_name=input_name,
                                      output_name='inner_product_' + output_name)
            # NOTE(review): when a Dense layer has no activation, its actual
            # output blob stays 'inner_product_<i>' while the next layer is
            # wired to '<i>' (via `input_name = output_name` below) — this
            # only works if every non-final Dense carries an activation;
            # confirm.
            if cur_layer.act:
                builder.add_activation("activation" + str(i), 'RELU',
                                       'inner_product_' + output_name,
                                       output_name)
        elif type(cur_layer) == _mx.gluon.nn.basic_layers.BatchNorm:
            # Emits an identity batch-norm (gamma=1, beta=0, mean=0, var=1).
            # NOTE(review): `nC` is only assigned in the Dense branch, so a
            # BatchNorm appearing before any Dense layer raises NameError —
            # presumably the classifier always starts with a Dense layer;
            # verify.
            zeros = _np.zeros(nC)
            ones = _np.ones(nC)
            builder.add_batchnorm(name='bn_layer_' + str(i),
                                  channels=nC,
                                  gamma=ones,
                                  beta=zeros,
                                  mean=zeros,
                                  variance=ones,
                                  input_name=input_name,
                                  output_name=output_name)
        elif type(cur_layer) == _mx.gluon.nn.basic_layers.Dropout:
            # Dropout is a no-op at inference time: skip it and keep the
            # previous blob name as the next layer's input.
            continue
        input_name = output_name

    # Softmax over the last emitted layer produces the class probabilities.
    last_output = builder.spec.neuralNetworkClassifier.layers[-1].output[0]
    builder.add_softmax('softmax', last_output, self.target)

    builder.set_class_labels(self.classes)
    builder.set_input([input_name], [(input_length, )])
    builder.set_output([self.target], [(self.num_classes, )])

    return builder.spec
def get_custom_model_spec():
    """Assemble a Core ML 'classifier' spec from the exported classifier weights.

    The network input is the feature extractor's output blob ('output1');
    each exported dense layer becomes an inner-product layer (plus an
    optional ReLU), and a final softmax yields the class probabilities.
    """
    from coremltools.models.neural_network import NeuralNetworkBuilder
    from coremltools.models.datatypes import Array, Dictionary, String

    feature_blob = 'output1'
    feature_dim = self._feature_extractor.output_length
    builder = NeuralNetworkBuilder(
        [(feature_blob, Array(feature_dim, ))],
        [(prob_name, Dictionary(String))], 'classifier')

    # Chaining state: the first layer reads the feature blob, subsequent
    # layers read the previous layer's numeric blob name.
    cur_input, cur_output = feature_blob, 0
    for idx, layer in enumerate(self._custom_classifier.export_weights()):
        weights = layer['weight']
        out_channels, in_channels = weights.shape
        builder.add_inner_product(name="inner_product_" + str(idx),
                                  W=weights,
                                  b=layer['bias'],
                                  input_channels=in_channels,
                                  output_channels=out_channels,
                                  has_bias=True,
                                  input_name=str(cur_input),
                                  output_name='inner_product_' + str(cur_output))
        if layer['act']:
            # Map the activated result back onto the plain numeric blob name.
            builder.add_activation("activation" + str(idx), 'RELU',
                                   'inner_product_' + str(cur_output),
                                   str(cur_output))
        cur_input, cur_output = idx, idx + 1

    # Softmax over the last emitted layer's output.
    last_output = builder.spec.neuralNetworkClassifier.layers[-1].output[0]
    builder.add_softmax('softmax', last_output, self.target)

    builder.set_class_labels(self.classes, predicted_feature_name=self.target)
    builder.set_input([cur_input], [(feature_dim, )])
    builder.set_output([self.target], [(self.num_classes, )])
    return builder.spec
class CodegenCoreML(ExprVisitor):
    """
    A visitor to traverse subgraphs and build Core ML models.
    """

    def __init__(self, model_name, function):
        import coremltools
        from coremltools.models.neural_network import NeuralNetworkBuilder

        ExprVisitor.__init__(self)
        self.model_name = model_name
        self.function = function
        self.out_map = {}        # expr -> list of its output blob names
        self.model_inputs_ = []  # (name, shape, dtype) per Var, filled by visit_var
        self.buf_idx_ = 0        # running counter for unique buffer/layer names

        # The real inputs and outputs are only known once the whole graph
        # has been visited, so register dummy placeholders for now.
        # TODO: support multiple outputs
        inputs = [('', coremltools.models.datatypes.Array(1, ))
                  for _ in self.function.params]
        outputs = [('', coremltools.models.datatypes.Array(1, ))]
        self.builder = NeuralNetworkBuilder(inputs, outputs,
                                            disable_rank5_shape_mapping=True)

    def visit_constant(self, const):
        """Emit a constant as a load_constant_nd layer with a fresh blob name."""
        blob = "buf_" + str(self.buf_idx_)
        self.builder.add_load_constant_nd(name=blob,
                                          output_name=blob,
                                          constant_value=const.data.asnumpy(),
                                          shape=const.data.shape)
        self.buf_idx_ += 1
        self.out_map[const] = [blob]

    def visit_var(self, var):
        """Record a graph input; its blob keeps the variable's name hint."""
        annotation = var.type_annotation
        self.model_inputs_.append((var.name_hint,
                                   [int(n) for n in annotation.shape],
                                   annotation.dtype))
        self.out_map[var] = [var.name_hint]

    def visit_call(self, call):
        """Translate one call node into a Core ML layer via _convert_map."""
        # Visit every argument first so all of their output blobs exist.
        for arg in call.args:
            super().visit(arg)
        inputs = [blob for arg in call.args for blob in self.out_map[arg]]

        outputs = ["buf_" + str(self.buf_idx_)]
        op_name = call.op.name
        assert op_name in _convert_map, "{} is not supported".format(op_name)

        layer_name = op_name + "_" + str(self.buf_idx_)
        _convert_map[op_name](self.builder, layer_name, inputs, outputs,
                              call.args, call.attrs)

        self.buf_idx_ += 1
        self.out_map[call] = outputs

    def compile(self, out_dir):
        """
        Build a Core ML model and compile it with Xcode toolchain.
        """
        import coremltools
        from coremltools.proto.Model_pb2 import ArrayFeatureType

        dtype_map = {
            "float32": ArrayFeatureType.FLOAT32,
            "float64": ArrayFeatureType.DOUBLE,
            "int32": ArrayFeatureType.INT32,
        }

        # Swap the placeholder inputs for the ones gathered while visiting,
        # then stamp each input's element type onto the spec.
        names, dims, dtypes = zip(*self.model_inputs_)
        self.builder.set_input(names, dims)
        for i, dtype in enumerate(dtypes):
            assert dtype in dtype_map
            desc = self.builder.spec.description.input
            desc[i].type.multiArrayType.dataType = dtype_map[dtype]

        # Likewise for the (single) output.
        ret_type = self.function.ret_type
        self.builder.set_output(self.out_map[self.function.body],
                                [[int(n) for n in ret_type.shape]])
        for i, dtype in enumerate([ret_type.dtype]):
            assert dtype in dtype_map
            desc = self.builder.spec.description.output
            desc[i].type.multiArrayType.dataType = dtype_map[dtype]

        model = coremltools.models.MLModel(self.builder.spec)
        xcode.compile_coreml(model, self.model_name, out_dir)