    def test_tiny_synset_random_input(self):
        np.random.seed(1989)
        input_shape = (1, 10)
        net = mx.sym.Variable('data')
        net = mx.sym.FullyConnected(data=net, name='fc1', num_hidden=5)
        net = mx.sym.SoftmaxOutput(net, name='softmax')
        mod = _get_mxnet_module(net,
                                data_shapes=[('data', input_shape)],
                                mode='random',
                                label_names=['softmax_label'])

        # Generate some dummy data
        input_data = np.random.uniform(-0.1, 0.1, input_shape)

        Batch = namedtuple('Batch', ['data'])
        mod.forward(Batch([mx.nd.array(input_data)]))

        kwargs = {'input_shape': {'data': input_shape}}
        # Get predictions from coreml
        coreml_model = mxnet_converter.convert(
            model=mod,
            class_labels=['Category1', 'Category2', 'Category3',
                          'Category4', 'Category5'],
            mode='classifier',
            **kwargs
        )

        prediction = coreml_model.predict(_mxnet_remove_batch({'data': input_data}))
        self.assertEqual(prediction['classLabel'], 'Category3')
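
# These test methods rely on a module-level helper `_get_mxnet_module` that is
# not part of this excerpt. A minimal sketch of such a helper is given below,
# assuming it only has to bind the symbol into an mx.mod.Module and initialise
# the parameters according to `mode`; the initializers chosen here are an
# assumption, not necessarily what the original test suite uses.
def _get_mxnet_module(net, data_shapes, mode, label_names):
    """Build an mx.mod.Module for `net` and initialise its parameters (sketch)."""
    mod = mx.mod.Module(symbol=net,
                        data_names=[name for name, _ in data_shapes],
                        label_names=label_names)
    # Labels are not needed for inference, so label_shapes is left as None.
    mod.bind(for_training=False, data_shapes=data_shapes)
    if mode == 'random':
        mod.init_params(initializer=mx.init.Uniform(scale=0.1))
    elif mode == 'zeros':
        mod.init_params(initializer=mx.init.Zero())
    elif mode == 'ones':
        mod.init_params(initializer=mx.init.One())
    else:
        raise ValueError("Unexpected mode: %s" % mode)
    return mod
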
    def _test_mxnet_model(self,
                          net,
                          input_shape,
                          mode,
                          class_labels=None,
                          coreml_mode=None,
                          label_names=None,
                          delta=1e-3,
                          pre_processing_args=None,
                          input_name='data'):
        """ Helper method that convert the CoreML model into CoreML and compares the predictions over random data.

        Parameters
        ----------
        net: MXNet Symbol Graph
            The graph that we'll be converting into CoreML.

        input_shape: tuple of ints
            The shape of input data. Generally of the format (batch-size, channels, height, width)

        mode: (random|zeros|ones)
            The mode to use in order to set the parameters (weights and biases).

        class_labels: list of strings
            Class labels to attach to the converted CoreML model (classifier mode). Default: None

        coreml_mode: str
            Mode passed to the converter, e.g. 'classifier'. Default: None

        label_names: list of strings
            The names of the label variables of the symbolic graph. Default: None

        delta: float
            The maximum difference between MXNet and CoreML predictions that is tolerable.

        pre_processing_args: dict
            Pre-processing arguments forwarded to the converter. Default: None

        input_name: str
            The name of the input variable to the symbolic graph.
        """

        data_shapes = [(input_name, input_shape)]

        mod = _get_mxnet_module(net, data_shapes, mode, label_names)

        # Generate some dummy data
        input_data = {input_name: np.random.uniform(-10., 10., input_shape)}
        Batch = namedtuple('Batch', ['data'])
        mod.forward(Batch([mx.nd.array(input_data[input_name])]))
        mxnet_preds = mod.get_outputs()[0].asnumpy().flatten()

        # Get predictions from coreml
        coreml_model = mxnet_converter.convert(
            model=mod,
            class_labels=class_labels,
            mode=coreml_mode,
            input_shape={input_name: input_shape},
            preprocessor_args=pre_processing_args)
        coreml_preds = list(coreml_model.predict(
            _mxnet_remove_batch(input_data)).values())[0].flatten()

        # Check prediction accuracy
        self.assertEqual(len(mxnet_preds), len(coreml_preds))
        for i in range(len(mxnet_preds)):
            self.assertAlmostEqual(mxnet_preds[i],
                                   coreml_preds[i],
                                   delta=delta)
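
# `_mxnet_remove_batch` is another helper that is not included in this excerpt.
# Judging from its call sites it strips the leading batch axis (of size 1) from
# every input array before the dict is passed to coreml_model.predict(). A
# minimal sketch under that assumption:
def _mxnet_remove_batch(input_data):
    """Drop the leading batch dimension from each input array (sketch)."""
    return {name: np.reshape(value, value.shape[1:])
            for name, value in input_data.items()}
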
    def _test_image_prediction(self, model_name, epoch, label_name):
        try:
            data = read_image(VAL_DATA, label_name=label_name)
        except Exception:
            # Validation data is not yet available locally; fetch it first.
            download_data()
            data = read_image(VAL_DATA, label_name=label_name)

        mod = load_model(model_name=model_name,
                         epoch_num=epoch,
                         data_shapes=data.provide_data,
                         label_shapes=data.provide_label,
                         label_names=[
                             label_name,
                         ])

        input_shape = (1, 3, 224, 224)
        coreml_model = mxnet_converter.convert(
            mod, input_shape={'data': input_shape})

        mxnet_acc = []
        mxnet_top_5_acc = []
        coreml_acc = []
        coreml_top_5_acc = []

        num_batch = 0

        for batch in data:
            mod.forward(batch, is_train=False)
            mxnet_preds = mod.get_outputs()[0].asnumpy()
            data_numpy = batch.data[0].asnumpy()
            label_numpy = batch.label[0].asnumpy()
            for i in range(32):  # assumes the data iterator's batch size is 32
                input_data = {'data': data_numpy[i]}
                coreml_predict = list(coreml_model.predict(
                    input_data).values())[0].flatten()
                mxnet_predict = mxnet_preds[i]
                label = label_numpy[i]
                mxnet_acc.append(is_correct_top_one(mxnet_predict, label))
                mxnet_top_5_acc.append(
                    is_correct_top_five(mxnet_predict, label))
                coreml_acc.append(is_correct_top_one(coreml_predict, label))
                coreml_top_5_acc.append(
                    is_correct_top_five(coreml_predict, label))
            num_batch += 1
            if num_batch == 5:
                break  # we only use a subset of the batches.

        print("MXNet acc %s" % np.mean(mxnet_acc))
        print("Coreml acc %s" % np.mean(coreml_acc))
        print("MXNet top 5 acc %s" % np.mean(mxnet_top_5_acc))
        print("Coreml top 5 acc %s" % np.mean(coreml_top_5_acc))
        self.assertAlmostEqual(np.mean(mxnet_acc),
                               np.mean(coreml_acc),
                               delta=1e-4)
        self.assertAlmostEqual(np.mean(mxnet_top_5_acc),
                               np.mean(coreml_top_5_acc),
                               delta=1e-4)
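
# `is_correct_top_one` and `is_correct_top_five` are accuracy helpers assumed
# by `_test_image_prediction` but not shown in this excerpt (as are
# `read_image`, `download_data` and `load_model`). Minimal sketches of the two
# accuracy checks, assuming `predict` is a 1-D vector of class scores and
# `label` is the true class index:
def is_correct_top_one(predict, label):
    """Return True if the highest-scoring class matches the label (sketch)."""
    assert predict.ndim == 1
    return int(np.argmax(predict)) == int(label)


def is_correct_top_five(predict, label):
    """Return True if the label is among the five highest-scoring classes (sketch)."""
    assert predict.ndim == 1
    top_five = np.argsort(predict)[-5:]
    return int(label) in top_five
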
    def _test_model(self,
                    model_name,
                    epoch_num,
                    input_shape=(1, 3, 224, 224),
                    files=None):
        """ Tests whether the converted CoreML model's preds are equal to MXNet preds for a given model or not.

        Parameters
        ----------
        model_name: str
            Prefix of the MXNet model name as stored on the local directory.

        epoch_num : int
            Epoch number of model we would like to load.

        input_shape: tuple
            The shape of the input data in the form of (batch_size, channels, height, width)

        files: list of strings
            List of URLs pertaining to files that need to be downloaded in order to use the model.
        """

        if files is not None:
            print("Downloading files from URLs: %s" % files)
            for url in files:
                mx.test_utils.download(url)
                print("Downloaded %s" % url)

        module = self._load_model(model_name=model_name,
                                  epoch_num=epoch_num,
                                  input_shape=input_shape)

        coreml_model = mxnet_converter.convert(
            module, input_shape={'data': input_shape})

        # Get predictions from MXNet and coreml
        div = []  # For storing KL divergence for each input.
        for _ in range(1):
            np.random.seed(1993)
            input_data = {
                'data': np.random.uniform(0, 1, input_shape).astype(np.float32)
            }
            Batch = namedtuple('Batch', ['data'])
            module.forward(Batch([mx.nd.array(input_data['data'])]),
                           is_train=False)
            mxnet_pred = module.get_outputs()[0].asnumpy().flatten()
            coreml_pred = list(coreml_model.predict(
                _mxnet_remove_batch(input_data)).values())[0].flatten()
            self.assertEqual(len(mxnet_pred), len(coreml_pred))
            div.append(_kl_divergence(mxnet_pred, coreml_pred))

        print "Average KL divergence is % s" % np.mean(div)
        self.assertTrue(np.mean(div) < 1e-4)
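
# `_kl_divergence` compares the two output vectors as probability
# distributions; it is not part of this excerpt either. A minimal sketch,
# normalising both predictions and computing sum(p * log(p / q)):
def _kl_divergence(distribution1, distribution2):
    """KL divergence between two prediction vectors (sketch)."""
    assert len(distribution1) == len(distribution2)
    p = np.asarray(distribution1, dtype=np.float64)
    q = np.asarray(distribution2, dtype=np.float64)
    # Normalise; softmax outputs are strictly positive, so log() is safe here.
    p = p / p.sum()
    q = q / q.sum()
    return np.sum(p * np.log(p / q))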