Example 1
0
def _keras_2_mlmodel_image():
    """
    Convert the current Keras h5 model into an updatable Core ML model for
    image data and save it to disk.

    The converted model gets an image input (dims and color space taken from
    the iOS config), a categorical cross entropy loss on the 'output' layer,
    and an SGD or Adam optimizer mirroring the Keras optimizer's settings.

    NOTE: Image configuration must be specified from Explora.

    NOTE: Currently, only categorical cross entropy loss is supported.

    Raises:
        Exception: if the Keras model's optimizer is neither SGD nor Adam.
    """
    model = get_keras_model()
    ios_config = state.state["ios_config"]
    class_labels = ios_config["class_labels"]
    mlmodel = keras_converter.convert(model, input_names=['image'],
                                output_names=['output'],
                                class_labels=class_labels,
                                predicted_feature_name='label')
    mlmodel.save(state.state["mlmodel_path"])

    # Reload the saved spec so the input type and training config can be edited.
    image_config = ios_config["image_config"]
    spec = coremltools.utils.load_spec(state.state["mlmodel_path"])
    builder = coremltools.models.neural_network.NeuralNetworkBuilder(spec=spec)

    # Assumes dims is (width, height) — TODO confirm ordering in Explora config.
    dims = image_config["dims"]
    spec.description.input[0].type.imageType.width = dims[0]
    spec.description.input[0].type.imageType.height = dims[1]

    cs = _FeatureTypes_pb2.ImageFeatureType.ColorSpace.Value(image_config["color_space"])
    spec.description.input[0].type.imageType.colorSpace = cs

    # Only layers that actually carry weights can be marked updatable.
    trainable_layer_names = [layer.name for layer in model.layers if layer.get_weights()]
    builder.make_updatable(trainable_layer_names)

    builder.set_categorical_cross_entropy_loss(name='loss', input='output')

    if isinstance(model.optimizer, SGD):
        params = SgdParams(
            lr=K.eval(model.optimizer.lr),
            batch=state.state["hyperparams"]["batch_size"],
        )
        builder.set_sgd_optimizer(params)
    elif isinstance(model.optimizer, Adam):
        # BUG FIX: Keras exposes these as beta_1 / beta_2 / epsilon (the
        # previous beta1/beta2/eps attributes do not exist and raised
        # AttributeError), and AdamParams takes `batch`, not `batch_size`
        # (see the SgdParams branch above and AdamParams usage elsewhere).
        params = AdamParams(
            lr=K.eval(model.optimizer.lr),
            batch=state.state["hyperparams"]["batch_size"],
            beta1=K.eval(model.optimizer.beta_1),
            beta2=K.eval(model.optimizer.beta_2),
            eps=model.optimizer.epsilon,
        )
        builder.set_adam_optimizer(params)
    else:
        raise Exception("iOS optimizer must be SGD or Adam!")

    builder.set_epochs(UNLIMITED_EPOCHS)
    builder.set_shuffle(state.state["hyperparams"]["shuffle"])

    mlmodel_updatable = MLModel(spec)
    mlmodel_updatable.save(state.state["mlmodel_path"])

    # Release the TF graph/session tied to the loaded Keras model.
    K.clear_session()
Example 2
0
def make_updatable(builder, mlmodel_url, mlmodel_updatable_path):
    """Make an existing non-updatable mlmodel updatable and save it.

    builder - NeuralNetworkBuilder wrapping the loaded model spec.
    mlmodel_url - the path where the source Core ML model is stored.
        NOTE(review): currently unused; the spec is taken from `builder`.
    mlmodel_updatable_path - the path the updatable Core ML model will be saved.
    """
    model_spec = builder.spec

    # make_updatable requires a list of layer names. dense_1 and dense_2 are
    # the two innerProduct layers made trainable in this example.
    builder.make_updatable(['dense_1', 'dense_2'])

    # Categorical Cross Entropy or Mean Squared Error can be chosen for the
    # loss layer; CCE is used here. CCE requires 'name' (the loss layer's
    # name, a string) and 'input', which must be the output of a softmax
    # layer. The loss's target is provided automatically as part of the
    # model's training inputs.
    builder.set_categorical_cross_entropy_loss(name='lossLayer',
                                               input='digitProbabilities')

    # In addition to the loss layer, an optimizer must be defined; SGD and
    # Adam are supported. SGD needs lr (learningRate) and batch
    # (miniBatchSize); momentum is optional.
    from coremltools.models.neural_network import SgdParams
    builder.set_sgd_optimizer(SgdParams(lr=0.01, batch=32))

    # Finally, set the number of epochs.
    builder.set_epochs(10)

    # Describe the training inputs (example image + associated true label).
    model_spec.description.trainingInput[
        0].shortDescription = 'Example image of handwritten digit'
    model_spec.description.trainingInput[
        1].shortDescription = 'Associated true label (digit) of example image'

    # Save the updated spec. (The previously unused `import coremltools`
    # was removed — nothing in this function referenced it.)
    from coremltools.models import MLModel
    mlmodel_updatable = MLModel(model_spec)
    mlmodel_updatable.save(mlmodel_updatable_path)
    def test_updatable_model_creation_ce_sgd(self):
        """CCE loss + SGD optimizer: saved spec round-trips with the expected
        updatable layers, loss, optimizer defaults, and parameter ranges."""
        builder = self.create_base_builder()

        builder.add_softmax(name="softmax",
                            input_name="output",
                            output_name="softmax_output")
        builder.set_categorical_cross_entropy_loss(name="cross_entropy",
                                                   input="softmax_output")
        builder.set_sgd_optimizer(SgdParams(lr=1e-2, batch=10, momentum=0.0))
        builder.set_epochs(20, allowed_set=[10, 20, 30, 40])

        model_path = os.path.join(self.model_dir, "updatable_creation.mlmodel")
        print(model_path)
        save_spec(builder.spec, model_path)

        # Round-trip through disk and inspect the resulting spec.
        mlmodel = MLModel(model_path)
        self.assertTrue(mlmodel is not None)
        spec = mlmodel.get_spec()
        self.assertTrue(spec.isUpdatable)

        # Both inner-product layers (and their weights) must be updatable.
        for idx in (0, 1):
            nn_layer = spec.neuralNetwork.layers[idx]
            self.assertTrue(nn_layer.isUpdatable)
            self.assertTrue(nn_layer.innerProduct.weights.isUpdatable)

        update_params = spec.neuralNetwork.updateParams
        sgd = update_params.optimizer.sgdOptimizer

        self.assertTrue(
            update_params.lossLayers[0].categoricalCrossEntropyLossLayer
            is not None)
        self.assertTrue(sgd is not None)

        # Optimizer defaults match what was configured above.
        self.assertTrue(
            _np.isclose(sgd.learningRate.defaultValue, 1e-2, atol=1e-4))
        self.assertTrue(
            _np.isclose(sgd.miniBatchSize.defaultValue, 10, atol=1e-4))
        self.assertTrue(_np.isclose(sgd.momentum.defaultValue, 0, atol=1e-8))
        self.assertTrue(
            _np.isclose(update_params.epochs.defaultValue, 20, atol=1e-4))

        # Parameter ranges / allowed sets.
        self.assertTrue(sgd.learningRate.range.minValue == 0)
        self.assertTrue(sgd.learningRate.range.maxValue == 1)
        self.assertTrue(sgd.miniBatchSize.set.values == [10])
        self.assertTrue(sgd.momentum.range.minValue == 0)
        self.assertTrue(sgd.momentum.range.maxValue == 1)
Example 4
0
# Class labels are generated user IDs.
# NOTE(review): the `- 3` offset on num_classes is unexplained here — confirm
# against the code that defines num_classes.
labels = ["user{}".format(i) for i in range(num_classes - 3)]

layer.innerProduct.weights.floatValue.extend(weights)
layer.innerProduct.bias.floatValue.extend(biases)
spec.neuralNetworkClassifier.stringClassLabels.vector.extend(labels)

# Make this model trainable on-device.
builder = NeuralNetworkBuilder(spec=model._spec)
builder.make_updatable(["fullyconnected0"])
builder.set_categorical_cross_entropy_loss(name="lossLayer", input="labelProbability")
builder.set_epochs(10, [1, 10, 50])

# SGD optimizer, with a selectable mini-batch size:
opt_params = SgdParams(lr=0.001, batch=8, momentum=0)
opt_params.set_batch(8, [1, 2, 8, 16])
builder.set_sgd_optimizer(opt_params)

# The Adam optimizer would work as well:
# adam_params = AdamParams(lr=0.001, batch=8, beta1=0.9, beta2=0.999, eps=1e-8)
# adam_params.set_batch(8, [1, 2, 8, 16])
# builder.set_adam_optimizer(adam_params)

builder.spec.description.trainingInput[0].shortDescription = "Example image"
builder.spec.description.trainingInput[1].shortDescription = "True label"

coremltools.utils.save_spec(builder.spec, "../Models/HandsTuri.mlmodel")

# Replace the weights of the last layer with random weights.
Example 5
0
    def test_updatable_model_creation_mse_sgd(self):
        """MSE loss + SGD optimizer: saved spec round-trips with the expected
        updatable layers, loss, optimizer defaults, and parameter ranges."""
        builder = self.create_base_builder()

        builder.set_mean_squared_error_loss(name='mse',
                                            input='output',
                                            target='target')

        builder.set_sgd_optimizer(SgdParams(lr=1e-2, batch=10, momentum=0.0))

        builder.set_epochs(20)

        model_path = os.path.join(self.model_dir, 'updatable_creation.mlmodel')
        print(model_path)
        save_spec(builder.spec, model_path)

        mlmodel = MLModel(model_path)
        self.assertTrue(mlmodel is not None)
        spec = mlmodel.get_spec()
        self.assertTrue(spec.isUpdatable)
        self.assertTrue(spec.neuralNetwork.layers[0].isUpdatable)
        self.assertTrue(
            spec.neuralNetwork.layers[0].innerProduct.weights.isUpdatable)
        self.assertTrue(spec.neuralNetwork.layers[1].isUpdatable)
        self.assertTrue(
            spec.neuralNetwork.layers[1].innerProduct.weights.isUpdatable)

        # BUG FIX: this test configures a mean-squared-error loss, but the
        # original assertion (copy-pasted from the CCE test) inspected
        # categoricalCrossEntropyLossLayer. Check the MSE loss layer instead.
        # NOTE(review): protobuf submessage access never returns None, so
        # `is not None` is a weak assertion either way — consider HasField.
        self.assertTrue(spec.neuralNetwork.updateParams.lossLayers[0].
                        meanSquaredErrorLossLayer is not None)
        self.assertTrue(
            spec.neuralNetwork.updateParams.optimizer.sgdOptimizer is not None)

        # Optimizer defaults match what was configured above.
        self.assertTrue(
            _np.isclose(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        learningRate.defaultValue,
                        1e-2,
                        atol=1e-4))
        self.assertTrue(
            _np.isclose(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        miniBatchSize.defaultValue,
                        10,
                        atol=1e-4))
        self.assertTrue(
            _np.isclose(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        momentum.defaultValue,
                        0,
                        atol=1e-8))
        self.assertTrue(
            _np.isclose(spec.neuralNetwork.updateParams.epochs.defaultValue,
                        20,
                        atol=1e-4))

        # Parameter ranges / allowed sets.
        self.assertTrue(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        learningRate.range.minValue == 0)
        self.assertTrue(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        learningRate.range.maxValue == 1)

        self.assertTrue(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        miniBatchSize.set.values == [10])

        self.assertTrue(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        momentum.range.minValue == 0)
        self.assertTrue(spec.neuralNetwork.updateParams.optimizer.sgdOptimizer.
                        momentum.range.maxValue == 1)
Example 6
0
# Attach human-readable metadata to the converted model.
neuralnetwork_spec.description.metadata.shortDescription = (
    'Cat Dog Classifier converted from a Keras model')

# NOTE(review): `builder` and `neuralnetwork_spec` are defined earlier in the
# file — presumably `builder` wraps this same spec; confirm before editing.
model_spec = builder.spec

# make_updatable method is used to make a layer updatable. It requires a list of layer names.
# dense_5 and dense_6 are two innerProduct layers in this example, and we make them updatable.
builder.make_updatable(['dense_5', 'dense_6'])

# Categorical Cross Entropy or Mean Squared Error can be chosen for the loss layer.
builder.set_categorical_cross_entropy_loss(name='lossLayer', input='output')

# In addition to the loss layer, an optimizer must also be defined. SGD and Adam optimizers are supported.
# SGD has been used for this example. To use SGD, one must set lr (learningRate) and batch (miniBatchSize); momentum is an optional parameter.
from coremltools.models.neural_network import SgdParams
builder.set_sgd_optimizer(SgdParams(lr=0.01, batch=5))

# The number of epochs must be set as follows.
builder.set_epochs(1)

# Mark the spec updatable and pin the minimum spec version that supports
# on-device updates.
model_spec.isUpdatable = True
model_spec.specificationVersion = coremltools._MINIMUM_UPDATABLE_SPEC_VERSION

# Set training inputs descriptions (image + true label).
model_spec.description.trainingInput[
    0].shortDescription = 'Image for training and updating the model'
model_spec.description.trainingInput[
    1].shortDescription = 'Set the value as Cat or Dog and update the model'

# Save the updated spec.
coremltools.utils.save_spec(model_spec, "CatDogUpdatable.mlmodel")
# A single inner-product layer mapping a flat input vector to one output.
input_features = [('data', datatypes.Array(*input_dim))]
output_features = [('result', datatypes.Array(*output_dim))]

# Random initial parameters; on-device training will overwrite them.
weights = np.random.rand(1, input_max_size)
bias = np.random.rand(1)

builder = NeuralNetworkBuilder(input_features, output_features)
builder.add_inner_product(
    name='ip_layer',
    W=weights,
    b=bias,
    input_channels=input_max_size,
    output_channels=1,
    has_bias=True,
    input_name='data',
    output_name='result',
)
builder.make_updatable(['ip_layer'])

# Mean squared error computed against the model's output feature.
builder.set_mean_squared_error_loss(name='lossLayer',
                                    input_feature=output_features[0])

# SGD with a default mini-batch of 1; larger batch sizes stay selectable.
sgd_params = SgdParams(lr=0.01, batch=1)
sgd_params.set_batch(1, allowed_set=[1, 2, 4, 8, 16, 32])
builder.set_sgd_optimizer(sgd_params)

builder.set_epochs(16, allowed_set=[2, 4, 8, 16, 32, 64, 128, 256])

# Optional size reduction:
# builder.spec = convert_neural_network_spec_weights_to_fp16(builder.spec)

save_spec(builder.spec, '../core/LinearRegressionModel.mlmodel')