Example No. 1
    def test_saved_model(self):
        ssd_resnet50_ckpt_url = 'http://download.tensorflow.org/models/object_detection/ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03.tar.gz'
        dst_path = 'saved_model.tar.gz'
        if not os.path.exists(dst_path):
            os.system("wget {} -O {}".format(ssd_resnet50_ckpt_url, dst_path))

        os.system("tar -xvf {}".format(dst_path))
        model = TensorflowModel(
            'ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03/saved_model'
        )
        from tensorflow.python.framework import graph_util
        graph_def = graph_util.convert_variables_to_constants(
            sess=model.sess,
            input_graph_def=model.sess.graph_def,
            output_node_names=model.output_node_names)

        model.graph_def = graph_def
        tmp_saved_model_path = './tmp_saved_model'
        if os.path.exists(tmp_saved_model_path):
            os.system('rm -rf {}'.format(tmp_saved_model_path))
        os.system('mkdir -p {}'.format(tmp_saved_model_path))

        self.assertTrue(isinstance(model.graph_def, tf.compat.v1.GraphDef))
        self.assertTrue(isinstance(model.graph, tf.compat.v1.Graph))
        model.save(tmp_saved_model_path)
        # load again to make sure model can be loaded
        model = TensorflowModel(tmp_saved_model_path)
        os.system(
            'rm -rf ssd_resnet50_v1_fpn_shared_box_predictor_640x640_coco14_sync_2018_07_03'
        )
        os.system('rm -rf temp_saved_model')
        os.system('rm -rf {}'.format(tmp_saved_model_path))
        os.system('rm saved_model.tar.gz')
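For reference, the freezing step above uses the routine that is also exposed publicly as tf.compat.v1.graph_util.convert_variables_to_constants. A minimal sketch of that step in isolation, assuming a TF1-style session `sess` and a list of output node names as in the test:

import tensorflow as tf

def freeze_to_graph_def(sess, output_node_names):
    # Convert all variables reachable from the output nodes into constants,
    # yielding a self-contained GraphDef that can be saved and reloaded.
    return tf.compat.v1.graph_util.convert_variables_to_constants(
        sess=sess,
        input_graph_def=sess.graph_def,
        output_node_names=output_node_names)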
Example No. 2
    def test_graph(self):
        graph = build_graph()
        self.assertRaises(ValueError, TensorflowModel, 'test')
        fwk_info = {
            'input_tensor_names': ['x'],
            'output_tensor_names': ['op_to_store']
        }
        model = TensorflowModel(graph.as_graph_def(), fwk_info)
        self.assertTrue(isinstance(model.graph_def, tf.compat.v1.GraphDef))
        self.assertEqual(model.input_node_names[0], 'x')
        self.assertEqual(model.output_node_names[0], 'op_to_store')
        model.save('model_test.pb')

        model.graph_def = 'model_test.pb'
        self.assertEqual(model.input_tensor_names[0], 'x')
        self.assertEqual(model.output_tensor_names[0], 'op_to_store')
        self.assertEqual(model.input_tensor[0].name, 'x:0')
        self.assertEqual(model.output_tensor[0].name, 'op_to_store:0')

        with self.assertRaises(ValueError):
            model.save('fake_path/fake_path')
        with self.assertRaises(AssertionError):
            model.input_tensor_names = []
        with self.assertRaises(AssertionError):
            model.input_tensor_names = ['test']
        model.input_tensor_names = ['x_1']

        with self.assertRaises(AssertionError):
            model.output_tensor_names = []
        with self.assertRaises(AssertionError):
            model.output_tensor_names = ['test']
        model.output_tensor_names = ['op_to_store_1']
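The build_graph() helper is not shown in this example. A hypothetical stand-in that would satisfy the assertions above (an input placeholder named 'x' feeding an op named 'op_to_store'; the helper in the actual test suite may differ):

import tensorflow as tf

def build_graph():
    # Hypothetical helper, not part of the original example: a tiny TF1-style
    # graph with one input placeholder 'x' and one output op 'op_to_store'.
    graph = tf.Graph()
    with graph.as_default():
        x = tf.compat.v1.placeholder(tf.float32, shape=(1, 3), name='x')
        w = tf.constant([[1.0], [2.0], [3.0]], dtype=tf.float32)
        tf.matmul(x, w, name='op_to_store')
    return graph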
Example No. 3
    def test_ckpt(self):
        mobilenet_ckpt_url = \
            'http://download.tensorflow.org/models/mobilenet_v1_2018_02_22/mobilenet_v1_1.0_224.tgz'
        dst_path = '/tmp/.lpot/mobilenet_v1_1.0_224.tgz'
        if not os.path.exists(dst_path):
            os.system("mkdir -p /tmp/.lpot && wget {} -O {}".format(
                mobilenet_ckpt_url, dst_path))

        os.system("mkdir -p ckpt && tar xvf {} -C ckpt".format(dst_path))
        fwk_info = {
            'output_tensor_names': ['MobilenetV1/Predictions/Reshape_1']
        }
        model = TensorflowModel('./ckpt', fwk_info)
        self.assertGreaterEqual(len(model.input_tensor_names), 1)
        self.assertEqual(len(model.output_tensor_names), 1)
        graph_def = model.graph_def
        self.assertEqual(True, isinstance(graph_def, tf.compat.v1.GraphDef))
        self.assertTrue(isinstance(graph_def, tf.compat.v1.GraphDef))
        model.graph_def = graph_def
        os.system('rm -rf ckpt')
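Once wrapped, the model object exposes the underlying session and the resolved input/output tensors (see Examples No. 1 and No. 2), so a quick sanity inference might look like the sketch below. The 1x224x224x3 dummy input is an assumption based on the MobileNet V1 224 checkpoint, and preprocessing is omitted.

import numpy as np

# Hedged sketch: run a single dummy batch through the wrapped checkpoint model,
# assuming `model` is the TensorflowModel instance created above.
dummy_input = np.random.rand(1, 224, 224, 3).astype(np.float32)
predictions = model.sess.run(
    model.output_tensor,                        # list of resolved output tensors
    feed_dict={model.input_tensor[0]: dummy_input})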
Example No. 4
    def get_optimized_model(self):
        """Executed the non-precision dependant graph optimization.
        The input graph will be optimized with following passes:
        1. Remove the training nodes like Identity Op.
        2. Split the shared nodes like weights node for multi-Conv2d.
        3. Fold Constant Nodes as less as possible.
        4. Fuse the Mul node into the previous Conv2D/MatMul if possible.
        5. Strip the useless nodes.
        6. Do the Common sequence elimation optimization on the graph.
        7. Fold the BN node into the previous Conv2D if possible.

        Returns:
            [graphdef]: the optimized graphdef object.
        """
        self.logger.debug("Start to pre optimize input model...")

        origin_model = TensorflowModel(self.model._model,
                                       self.model.framework_specific_info,
                                       **self.model.kwargs)

        self._tmp_graph_def = ConvertLayoutOptimizer(
            self.model.graph_def, self.output_node_names).do_transformation()

        self._tmp_graph_def = GrapplerOptimizer(
            self._tmp_graph_def, self.output_node_names, self.optimization).do_transformation()

        self._tmp_graph_def = RemoveTrainingNodesOptimizer(
            self._tmp_graph_def, protected_nodes=self.output_node_names).do_transformation()

        self._tmp_graph_def = SplitSharedInputOptimizer(self._tmp_graph_def).do_transformation()

        self._tmp_graph_def = GraphFoldConstantOptimizer(self._tmp_graph_def).do_transformation()

        self._tmp_graph_def = FuseColumnWiseMulOptimizer(self._tmp_graph_def).do_transformation()

        self._tmp_graph_def = StripUnusedNodesOptimizer(self._tmp_graph_def,
            self.input_node_names, self.output_node_names).do_transformation()

        self._tmp_graph_def = FuseGeluOptimizer(self._tmp_graph_def).do_transformation()

        self._tmp_graph_def = GraphCseOptimizer(self._tmp_graph_def).do_transformation()

        self._tmp_graph_def = FoldBatchNormNodesOptimizer(
            self._tmp_graph_def).do_transformation()

        # TODO: handle all control ops elegantly instead of bypassing them.
        self._tmp_graph_def, excluded_node_names = UpdateEnterOptimizer(
            self._tmp_graph_def).do_transformation()

        # TODO: remove the optimizer below once TF enables quantization of a
        # single MatMul op.
        self._tmp_graph_def = InjectDummyBiasAddOptimizer(
            self._tmp_graph_def).do_transformation()

        self._tmp_graph_def = FuseTransposeReshapeOptimizer(
            self._tmp_graph_def).do_transformation()

        self._excluded_node_names.extend(excluded_node_names)
        self._tmp_graph_def.library.CopyFrom(self.model.graph_def.library)

        origin_model.graph_def = self._tmp_graph_def

        return origin_model
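Each pass above follows the same small contract: it is constructed with a GraphDef plus any pass-specific arguments and exposes a do_transformation() method that returns the rewritten graph (UpdateEnterOptimizer additionally returns excluded node names). A hypothetical no-op pass honoring that contract, just to illustrate how an extra step could be slotted into the chain (this class is not part of the library):

class NoOpOptimizer:
    def __init__(self, graph_def):
        self.graph_def = graph_def

    def do_transformation(self):
        # A real pass would rewrite nodes here; this sketch returns the
        # GraphDef unchanged so it can sit anywhere in the pipeline.
        return self.graph_def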