def test_nn_set_training_input(self):
    """Explicit set_training_input must surface both training features, with
    the declared types, in the saved spec's trainingInput description."""
    builder = self.create_base_builder()
    builder.set_mean_squared_error_loss(name='mse', input='output', target='target')
    builder.set_adam_optimizer(
        AdamParams(lr=1e-2, batch=10, beta1=0.9, beta2=0.999, eps=1e-8))
    builder.set_epochs(20, allowed_set=[10, 20, 30])
    builder.set_training_input([('input', datatypes.Array(3)), ('target', 'Double')])

    # Round-trip the spec through disk before inspecting it.
    model_path = os.path.join(self.model_dir, 'updatable_creation.mlmodel')
    print(model_path)
    save_spec(builder.spec, model_path)
    mlmodel = MLModel(model_path)
    self.assertTrue(mlmodel is not None)

    spec = mlmodel.get_spec()
    expected_training_inputs = [('input', 'multiArrayType'),
                                ('target', 'doubleType')]
    for idx, (feature_name, type_field) in enumerate(expected_training_inputs):
        self.assertEqual(spec.description.trainingInput[idx].name, feature_name)
        self.assertEqual(
            spec.description.trainingInput[idx].type.WhichOneof('Type'),
            type_field)
def test_nn_set_training_input(self):
    """With the input_feature loss API and no explicit training input, the
    saved spec must auto-derive trainingInput as ('input', 'output_true')."""
    builder = self.create_base_builder()
    builder.set_mean_squared_error_loss(name="mse", input_feature=("output", datatypes.Array(3)))
    builder.set_adam_optimizer(
        AdamParams(lr=1e-2, batch=10, beta1=0.9, beta2=0.999, eps=1e-8))
    builder.set_epochs(20, allowed_set=[10, 20, 30])

    # Persist and reload so we inspect exactly what a consumer would see.
    model_path = os.path.join(self.model_dir, "updatable_creation.mlmodel")
    print(model_path)
    save_spec(builder.spec, model_path)
    mlmodel = MLModel(model_path)
    self.assertTrue(mlmodel is not None)

    spec = mlmodel.get_spec()
    expected_training_inputs = [("input", "multiArrayType"),
                                ("output_true", "multiArrayType")]
    for idx, (feature_name, type_field) in enumerate(expected_training_inputs):
        self.assertEqual(spec.description.trainingInput[idx].name, feature_name)
        self.assertEqual(
            spec.description.trainingInput[idx].type.WhichOneof("Type"),
            type_field)
def _keras_2_mlmodel_image():
    """
    Converts a Keras h5 model into ML Model for image data and saves it
    on disk.

    NOTE: Image configuration must be specified from Explora.
    NOTE: Currently, only categorical cross entropy loss is supported.
    """
    model = get_keras_model()
    ios_config = state.state["ios_config"]
    class_labels = ios_config["class_labels"]
    mlmodel = keras_converter.convert(
        model,
        input_names=['image'],
        output_names=['output'],
        class_labels=class_labels,
        predicted_feature_name='label',
    )
    mlmodel.save(state.state["mlmodel_path"])

    # Patch the image input's dimensions and color space from the Explora
    # image config; dims is (width, height) — TODO confirm against callers.
    image_config = ios_config["image_config"]
    spec = coremltools.utils.load_spec(state.state["mlmodel_path"])
    builder = coremltools.models.neural_network.NeuralNetworkBuilder(spec=spec)
    dims = image_config["dims"]
    spec.description.input[0].type.imageType.width = dims[0]
    spec.description.input[0].type.imageType.height = dims[1]
    cs = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value(image_config["color_space"])
    spec.description.input[0].type.imageType.colorSpace = cs

    # Only layers that actually carry weights can be marked updatable.
    trainable_layer_names = [layer.name for layer in model.layers
                             if layer.get_weights()]
    builder.make_updatable(trainable_layer_names)
    builder.set_categorical_cross_entropy_loss(name='loss', input='output')

    batch_size = state.state["hyperparams"]["batch_size"]
    if isinstance(model.optimizer, SGD):
        params = SgdParams(
            lr=K.eval(model.optimizer.lr),
            batch=batch_size,
        )
        builder.set_sgd_optimizer(params)
    elif isinstance(model.optimizer, Adam):
        # BUG FIX: AdamParams takes `batch` (matching SgdParams and the other
        # AdamParams call sites in this codebase), not `batch_size`; and the
        # Keras Adam optimizer exposes `beta_1`/`beta_2`/`epsilon`, not
        # `beta1`/`beta2`/`eps`. NOTE(review): `epsilon` is a plain float on
        # Keras optimizers (not a tensor), so it is read without K.eval —
        # confirm against the pinned Keras version.
        params = AdamParams(
            lr=K.eval(model.optimizer.lr),
            batch=batch_size,
            beta1=K.eval(model.optimizer.beta_1),
            beta2=K.eval(model.optimizer.beta_2),
            eps=model.optimizer.epsilon,
        )
        builder.set_adam_optimizer(params)
    else:
        raise Exception("iOS optimizer must be SGD or Adam!")

    builder.set_epochs(UNLIMITED_EPOCHS)
    builder.set_shuffle(state.state["hyperparams"]["shuffle"])

    mlmodel_updatable = MLModel(spec)
    mlmodel_updatable.save(state.state["mlmodel_path"])
    K.clear_session()
def test_nn_builder_with_training_features(self):
    """Passing training_features to the builder constructor must produce a
    spec whose trainingInput matches those declared features."""
    input_features = [('input', datatypes.Array(3))]
    output_features = [('output', None)]
    training_features = [('input', datatypes.Array(3)),
                         ('target', datatypes.Double)]
    builder = NeuralNetworkBuilder(input_features,
                                   output_features,
                                   disable_rank5_shape_mapping=True,
                                   training_features=training_features)

    # Two stacked bias-free inner-product layers: input -> hidden -> output.
    W1 = _np.random.uniform(-0.5, 0.5, (3, 3))
    W2 = _np.random.uniform(-0.5, 0.5, (3, 3))
    layer_specs = [('ip1', W1, 'input', 'hidden'),
                   ('ip2', W2, 'hidden', 'output')]
    for layer_name, weight, in_name, out_name in layer_specs:
        builder.add_inner_product(name=layer_name,
                                  W=weight,
                                  b=None,
                                  input_channels=3,
                                  output_channels=3,
                                  has_bias=False,
                                  input_name=in_name,
                                  output_name=out_name)
    builder.make_updatable(['ip1', 'ip2'])  # or a dict for weightParams

    builder.set_mean_squared_error_loss(name='mse', input='output', target='target')
    builder.set_adam_optimizer(
        AdamParams(lr=1e-2, batch=10, beta1=0.9, beta2=0.999, eps=1e-8))
    builder.set_epochs(20, allowed_set=[10, 20, 30])
    builder.set_training_input([('input', datatypes.Array(3)), ('target', 'Double')])

    model_path = os.path.join(self.model_dir, 'updatable_creation.mlmodel')
    print(model_path)
    save_spec(builder.spec, model_path)
    mlmodel = MLModel(model_path)
    self.assertTrue(mlmodel is not None)

    spec = mlmodel.get_spec()
    expected_training_inputs = [('input', 'multiArrayType'),
                                ('target', 'doubleType')]
    for idx, (feature_name, type_field) in enumerate(expected_training_inputs):
        self.assertEqual(spec.description.trainingInput[idx].name, feature_name)
        self.assertEqual(
            spec.description.trainingInput[idx].type.WhichOneof('Type'),
            type_field)
def test_nn_builder_with_training_features(self):
    """Without explicit training input, the input_feature MSE loss must yield
    trainingInput of ('input', 'output_true'), both multi-arrays."""
    input_features = [("input", datatypes.Array(3))]
    output_features = [("output", datatypes.Array(3))]
    builder = NeuralNetworkBuilder(input_features, output_features)

    # Two stacked bias-free inner-product layers: input -> hidden -> output.
    W1 = _np.random.uniform(-0.5, 0.5, (3, 3))
    W2 = _np.random.uniform(-0.5, 0.5, (3, 3))
    layer_specs = [("ip1", W1, "input", "hidden"),
                   ("ip2", W2, "hidden", "output")]
    for layer_name, weight, in_name, out_name in layer_specs:
        builder.add_inner_product(
            name=layer_name,
            W=weight,
            b=None,
            input_channels=3,
            output_channels=3,
            has_bias=False,
            input_name=in_name,
            output_name=out_name,
        )
    builder.make_updatable(["ip1", "ip2"])  # or a dict for weightParams

    builder.set_mean_squared_error_loss(name="mse", input_feature=("output", datatypes.Array(3)))
    builder.set_adam_optimizer(
        AdamParams(lr=1e-2, batch=10, beta1=0.9, beta2=0.999, eps=1e-8))
    builder.set_epochs(20, allowed_set=[10, 20, 30])

    model_path = os.path.join(self.model_dir, "updatable_creation.mlmodel")
    print(model_path)
    save_spec(builder.spec, model_path)
    mlmodel = MLModel(model_path)
    self.assertTrue(mlmodel is not None)

    spec = mlmodel.get_spec()
    expected_training_inputs = [("input", "multiArrayType"),
                                ("output_true", "multiArrayType")]
    for idx, (feature_name, type_field) in enumerate(expected_training_inputs):
        self.assertEqual(spec.description.trainingInput[idx].name, feature_name)
        self.assertEqual(
            spec.description.trainingInput[idx].type.WhichOneof("Type"),
            type_field)
def test_updatable_model_creation_mse_adam(self):
    """An updatable model with MSE loss and an Adam optimizer must round-trip
    through disk with the loss layer, optimizer defaults, and allowed
    parameter ranges/sets intact."""
    builder = self.create_base_builder()
    builder.set_mean_squared_error_loss(name="mse", input_feature=("output", datatypes.Array(3)))
    builder.set_adam_optimizer(
        AdamParams(lr=1e-2, batch=10, beta1=0.9, beta2=0.999, eps=1e-8))
    builder.set_epochs(20, allowed_set=[10, 20, 30])

    model_path = os.path.join(self.model_dir, "updatable_creation.mlmodel")
    print(model_path)
    save_spec(builder.spec, model_path)

    mlmodel = MLModel(model_path)
    self.assertTrue(mlmodel is not None)
    spec = mlmodel.get_spec()
    self.assertTrue(spec.isUpdatable)
    # Both inner-product layers (and their weights) must be marked updatable.
    for layer in spec.neuralNetwork.layers[:2]:
        self.assertTrue(layer.isUpdatable)
        self.assertTrue(layer.innerProduct.weights.isUpdatable)

    update_params = spec.neuralNetwork.updateParams
    # BUG FIX: the original asserted
    # `lossLayers[0].categoricalCrossEntropyLossLayer is not None`, which
    # (a) names the wrong loss for an MSE test and (b) is vacuous in proto3 —
    # accessing an unset oneof submessage returns a default instance, never
    # None. WhichOneof actually verifies which loss/optimizer was set.
    self.assertEqual(
        update_params.lossLayers[0].WhichOneof("LossLayerType"),
        "meanSquaredErrorLossLayer")
    self.assertEqual(
        update_params.optimizer.WhichOneof("OptimizerType"), "adamOptimizer")

    # Default values must match what was passed to AdamParams above.
    adam = update_params.optimizer.adamOptimizer
    self.assertTrue(_np.isclose(adam.learningRate.defaultValue, 1e-2, atol=1e-4))
    self.assertTrue(_np.isclose(adam.miniBatchSize.defaultValue, 10, atol=1e-4))
    self.assertTrue(_np.isclose(adam.beta1.defaultValue, 0.9, atol=1e-4))
    self.assertTrue(_np.isclose(adam.beta2.defaultValue, 0.999, atol=1e-4))
    self.assertTrue(_np.isclose(adam.eps.defaultValue, 1e-8, atol=1e-8))
    self.assertTrue(
        _np.isclose(update_params.epochs.defaultValue, 20, atol=1e-4))

    # Allowed ranges/sets written by the builder.
    self.assertTrue(adam.learningRate.range.minValue == 0)
    self.assertTrue(adam.learningRate.range.maxValue == 1)
    self.assertTrue(adam.miniBatchSize.set.values == [10])
    self.assertTrue(adam.beta1.range.minValue == 0)
    self.assertTrue(adam.beta1.range.maxValue == 1)
    self.assertTrue(adam.beta2.range.minValue == 0)
    self.assertTrue(adam.beta2.range.maxValue == 1)
    self.assertTrue(adam.eps.range.minValue == 0)
    self.assertTrue(adam.eps.range.maxValue == 1)
    self.assertTrue(update_params.epochs.set.values == [10, 20, 30])
def test_updatable_model_creation_ce_adam(self):
    """An updatable model with categorical cross-entropy loss (fed by a
    softmax) and default Adam parameters must round-trip through disk with
    the loss layer, optimizer defaults, and batch-size set intact."""
    builder = self.create_base_builder()
    builder.add_softmax(name="softmax", input_name="output", output_name="softmax_output")
    builder.set_categorical_cross_entropy_loss(name="cross_entropy", input="softmax_output")
    adam_params = AdamParams()
    adam_params.set_batch(value=10, allowed_set=[10, 20])
    builder.set_adam_optimizer(adam_params)
    builder.set_epochs(20)

    model_path = os.path.join(self.model_dir, "updatable_creation.mlmodel")
    print(model_path)
    save_spec(builder.spec, model_path)

    mlmodel = MLModel(model_path)
    self.assertTrue(mlmodel is not None)
    spec = mlmodel.get_spec()
    self.assertTrue(spec.isUpdatable)
    # Both inner-product layers (and their weights) must be marked updatable.
    for layer in spec.neuralNetwork.layers[:2]:
        self.assertTrue(layer.isUpdatable)
        self.assertTrue(layer.innerProduct.weights.isUpdatable)

    update_params = spec.neuralNetwork.updateParams
    # BUG FIX: the original `... is not None` checks were vacuous in proto3 —
    # accessing an unset oneof submessage returns a default instance, never
    # None — so they could not fail. WhichOneof actually verifies which
    # loss/optimizer was set.
    self.assertEqual(
        update_params.lossLayers[0].WhichOneof("LossLayerType"),
        "categoricalCrossEntropyLossLayer")
    self.assertEqual(
        update_params.optimizer.WhichOneof("OptimizerType"), "adamOptimizer")

    # AdamParams() defaults, except the explicitly-set batch size.
    adam = update_params.optimizer.adamOptimizer
    self.assertTrue(_np.isclose(adam.learningRate.defaultValue, 1e-2, atol=1e-4))
    self.assertTrue(_np.isclose(adam.miniBatchSize.defaultValue, 10, atol=1e-4))
    self.assertTrue(_np.isclose(adam.beta1.defaultValue, 0.9, atol=1e-4))
    self.assertTrue(_np.isclose(adam.beta2.defaultValue, 0.999, atol=1e-4))
    self.assertTrue(_np.isclose(adam.eps.defaultValue, 1e-8, atol=1e-8))
    self.assertTrue(
        _np.isclose(update_params.epochs.defaultValue, 20, atol=1e-4))

    # Allowed ranges/sets written by the builder.
    self.assertTrue(adam.learningRate.range.minValue == 0)
    self.assertTrue(adam.learningRate.range.maxValue == 1)
    self.assertTrue(adam.miniBatchSize.set.values == [10, 20])
    self.assertTrue(adam.beta1.range.minValue == 0)
    self.assertTrue(adam.beta1.range.maxValue == 1)
    self.assertTrue(adam.beta2.range.minValue == 0)
    self.assertTrue(adam.beta2.range.maxValue == 1)
    self.assertTrue(adam.eps.range.minValue == 0)
    self.assertTrue(adam.eps.range.maxValue == 1)
    self.assertTrue(update_params.epochs.set.values == [20])
# NOTE(review): script fragment — `layer`, `weights`, `biases`, `labels`, and
# `model` are defined earlier in the script (not visible here); confirm their
# provenance before editing.

# Copy the learned parameters into the classifier's inner-product layer and
# register the class labels on the spec.
layer.innerProduct.weights.floatValue.extend(weights)
layer.innerProduct.bias.floatValue.extend(biases)
spec.neuralNetworkClassifier.stringClassLabels.vector.extend(labels)

# Make this model trainable.
builder = coremltools.models.neural_network.NeuralNetworkBuilder(
    spec=model._spec)
builder.make_updatable(["fullyconnected0"])
builder.set_categorical_cross_entropy_loss(name="lossLayer",
                                           input="labelProbability")
builder.set_adam_optimizer(
    AdamParams(lr=0.01, batch=16, beta1=0.9, beta2=0.999, eps=1e-8))
builder.set_epochs(10)

# Human-readable descriptions for the auto-generated training inputs
# (image + true label).
builder.spec.description.trainingInput[0].shortDescription = "Example image"
builder.spec.description.trainingInput[1].shortDescription = "True label"

coremltools.utils.save_spec(builder.spec, "../Models/HandsTuri.mlmodel")

# Replace the weights of the last layer with random weights.
model = coremltools.models.MLModel("../Models/HandsTuri.mlmodel")
model.short_description = ""

# The very last layer is softmax, we need the one before that
spec = model._spec
layer = spec.neuralNetworkClassifier.layers[-2]