Code Example #1
    def create_graph_with_dvpp(self, resize_cfg):
        '''
        Create graph with dvpp

        Args:
            resize_cfg: resize parameter, (resize_w, resize_h)
                resize_w: width of the destination resolution
                resize_h: height of the destination resolution
            
        Returns:
            graph: a graph configured with dvpp

        Raises:
            Exception("[create_graph_with_dvpp]: create graph failed, ret ", ret)
        '''
        nntensorlist_object = hiai.NNTensorList()
        graph = hiai.Graph(hiai.GraphConfig(graph_id=65530))
        with graph.as_default():
            engine = hiai.Engine()
            resize_config = hiai.ResizeConfig(resize_width=resize_cfg[0], resize_height=resize_cfg[1])
            nntensorlist_object = engine.resize(input_tensor_list=nntensorlist_object, config=resize_config)
            ai_model_desc = hiai.AIModelDescription(name=os.path.basename(self.model_path), path=self.model_path)
            ai_config = hiai.AIConfig(hiai.AIConfigItem("Inference", "item_value_2"))
            final_result = engine.inference(input_tensor_list=nntensorlist_object,
                                            ai_model=ai_model_desc,
                                            ai_config=ai_config)
        ret = copy.deepcopy(graph.create_graph())
        if ret != hiai.HiaiPythonStatust.HIAI_PYTHON_OK:
            graph.destroy()
            raise Exception("[create_graph_with_dvpp]: create graph failed, ret ", ret) 
        print("[create_graph_with_dvpp]: create graph successful")
        return graph
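Usage note: the examples in this listing only build the graph; running it is not shown until Code Examples #6 and #7, which call graph.proc(). Below is a minimal sketch (not part of the original samples) of how the graph returned above could be driven, assuming `detector` is an instance of the class that defines create_graph_with_dvpp and that the input is a decoded YUV420SP frame held in a NumPy array:

import hiai
import numpy as np

# Hypothetical placeholder for a decoded 1280x720 YUV420SP frame.
yuv_frame = np.zeros((720 * 3 // 2, 1280), dtype=np.uint8)

graph = detector.create_graph_with_dvpp((300, 300))          # resize to 300x300 before inference
tensor_list = hiai.NNTensorList(hiai.NNTensor(yuv_frame))    # wrap the frame for the graph
result_tensors = graph.proc(input_nntensorlist=tensor_list)  # run DVPP resize + inference
if result_tensors is not None:
    print("output tensor shape:", result_tensors[0].shape)
graph.destroy()                                              # release device resources when done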
Code Example #2
    def CreateGraph(self):
        '''
        Create graph

        Returns:
            graph
        '''
        path, filename = os.path.split(self.model_path)
        nntensorlist_object = hiai.NNTensorList()
        graph_id = random.randint(1, 2**32 - 1)
        graph = hiai.Graph(hiai.GraphConfig(graph_id=graph_id))
        with graph.as_default():
            engine = hiai.Engine()
            #resize_config = hiai.ResizeConfig(resize_width=300, resize_height = 300)
            #nntensorlist_object = engine.resize(input_tensor_list=nntensorlist_object, config=resize_config)

            ai_model_desc = hiai.AIModelDescription(name=filename,
                                                    path=self.model_path)
            ai_config = hiai.AIConfig(
                hiai.AIConfigItem("Inference", "item_value_2"))
            final_result = engine.inference(
                input_tensor_list=nntensorlist_object,
                ai_model=ai_model_desc,
                ai_config=ai_config)
        ret = copy.deepcopy(graph.create_graph())
        if ret != hiai.HiaiPythonStatust.HIAI_PYTHON_OK:
            graph.destroy()
            raise Exception("create graph failed, ret ", ret)
        print("create graph successful")
        return graph
Code Example #3
    def CreateGraph(self, model, graph_id, model_engine_id):
        # Get a Graph instance
        myGraph = hiai.Graph(hiai.GraphConfig(graph_id=graph_id))
        if myGraph is None:
            print('get graph failed')
            return None
        with myGraph.as_default():
            model_engine = hiai.Engine(
                hiai.EngineConfig(
                    engine_name='ModelInferenceEngine',
                    side=hiai.HiaiPythonSide.Device,
                    internal_so_name='/lib64/libhiai_python_device2.7.so',
                    engine_id=model_engine_id))
            if model_engine is None:
                print('get model_engine failed')
                return None
            else:
                print('get model_engine ok!')
            with model_engine.as_default():
                if model_engine.inference(
                        input_tensor_list=hiai.NNTensorList(),
                        ai_model=model) is None:
                    print('Init model_engine failed')
                    return None
                else:
                    print('Init model_engine ok!')
        # Create the graph
        if hiai.HiaiPythonStatust.HIAI_PYTHON_OK == myGraph.create_graph():
            print('create graph ok')
            return myGraph
        else:
            print('create graph failed')
            return None
Code Example #4
    def CreateGraph(self, model, graph_id, model_engine_id):
        myGraph = hiai.Graph(hiai.GraphConfig(graph_id=graph_id))
        if myGraph is None:
            print('get graph failed')
            return None
        with myGraph.as_default():
            model_engine = hiai.Engine(
                hiai.EngineConfig(engine_name='ModelInferenceEngine',
                                  side=hiai.HiaiPythonSide.Device,
                                  engine_id=model_engine_id))
            if model_engine is None:
                print('get model_engine failed')
                return None
            else:
                print('get model_engine ok!')
            with model_engine.as_default():
                if model_engine.inference(input_tensor_list=hiai.NNTensorList(),
                                          ai_model=model) is None:
                    print('Init model_engine failed')
                    return None
                else:
                    print('Init model_engine ok!')
        # Create Graph
        if hiai.HiaiPythonStatust.HIAI_PYTHON_OK == myGraph.create_graph():
            print('create graph ok')
            return myGraph
        else:
            print('create graph failed')
            return None
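Unlike the other examples, Code Examples #3 and #4 take the model description and the graph/engine IDs as parameters. A hedged calling sketch follows (the file name, path, and IDs are illustrative assumptions, and `app` stands for whatever object defines CreateGraph):

import hiai

# Illustrative model description; a real application passes its own .om model file.
model_desc = hiai.AIModelDescription(name='face_detection.om',
                                     path='/path/to/face_detection.om')
graph = app.CreateGraph(model=model_desc, graph_id=1001, model_engine_id=100)
if graph is None:
    raise RuntimeError('CreateGraph failed')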
Code Example #5
    def CreateGraph(self):
        '''
        Create graph

        Returns:
            graph
        '''
        nntensorlist_object = hiai.NNTensorList()
        graph = hiai.Graph(hiai.GraphConfig(graph_id=65530))
        with graph.as_default():
            engine = hiai.Engine()
            print(self.model_path, os.path.basename(self.model_path))
            ai_model_desc = hiai.AIModelDescription(
                name=os.path.basename(self.model_path),
                path=self.model_path)
            ai_config = hiai.AIConfig(
                hiai.AIConfigItem("Inference", "item_value_2"))
            final_result = engine.inference(
                input_tensor_list=nntensorlist_object,
                ai_model=ai_model_desc,
                ai_config=ai_config)
        ret = copy.deepcopy(graph.create_graph())
        if ret != hiai.HiaiPythonStatust.HIAI_PYTHON_OK:
            graph.destroy()
            raise Exception("create graph failed, ret ", ret)
        print("create graph successful")
        return graph
Code Example #6
    def ExcuteInference(self, images):
        result = []
        for i in range(0, len(images)):
            nArray = Yuv2Array(images[i])
            ssd = {"name": "face_detection", "path": self.modelPath}
            nntensor = hiai.NNTensor(nArray)
            tensorList = hiai.NNTensorList(nntensor)

            if self.first:
                self.graph = hiai.Graph(hiai.GraphConfig(graph_id=2001))
                with self.graph.as_default():
                    self.engine_config = hiai.EngineConfig(
                        engine_name="HIAIDvppInferenceEngine",
                        side=hiai.HiaiPythonSide.Device,
                        internal_so_name='/lib/libhiai_python_device2.7.so',
                        engine_id=2001)
                    self.engine = hiai.Engine(self.engine_config)
                    self.ai_model_desc = hiai.AIModelDescription(
                        name=ssd['name'], path=ssd['path'])
                    self.ai_config = hiai.AIConfig(
                        hiai.AIConfigItem("Inference", "item_value_2"))
                    final_result = self.engine.inference(
                        input_tensor_list=tensorList,
                        ai_model=self.ai_model_desc,
                        ai_config=self.ai_config)
                ret = copy.deepcopy(self.graph.create_graph())
                if ret != hiai.HiaiPythonStatust.HIAI_PYTHON_OK:
                    print("create graph failed, ret", ret)
                    d_ret = self.graph.destroy()
                    SetExitFlag(True)
                    return HIAI_APP_ERROR, None
                self.first = False
            else:
                with self.graph.as_default():
                    final_result = self.engine.inference(
                        input_tensor_list=tensorList,
                        ai_model=self.ai_model_desc,
                        ai_config=self.ai_config)
            resTensorList = self.graph.proc(input_nntensorlist=tensorList)
            print("Inference result: ", resTensorList[0].shape)
            result.append(resTensorList)
        return HIAI_APP_OK, result
Code Example #7
    def ExcuteInference(self, images):
        result = []
        for i in range(0, len(images)):
            nArray = Yuv2Array(images[i])
            ssd = {"name": "object_detection", "path": self.modelPath}
            nntensor = hiai.NNTensor(nArray)
            # Disabled OpenCV contour-detection experiment, kept for reference:
            '''
            gray = cv2.cvtColor(nArray, cv2.COLOR_YUV2GRAY_420)
            blurred = cv2.GaussianBlur(gray, (5, 5), 0)
            thresh = cv2.threshold(blurred, 60, 255, cv2.THRESH_BINARY)[1]
            cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,
                                    cv2.CHAIN_APPROX_SIMPLE)
            cnts = cnts[0]
            for c in cnts:
                M = cv2.moments(c)
                c = c.astype("float")
                c = c.astype("int")
                shape = detect(c)
                # cv2.drawContours(cv_image, [c], -1, (0, 255, 0), 2)
                # image = cv2.cvtColor(cv_image, cv2.COLOR_YUV2RGB_I420)
                # picpath = '~/sample-objectdetection-python/picpath' + str(i) + '.jpg'
                # cv2.imwrite(picpath, image)
            # BGR = cv2.cvtColor(nArray, cv2.COLOR_YUV2RGB)
            # return BGR
            # img_np = YUVtoRGB(nArray)
            # cv2.imwrite("text4.jpg", img_np)
            '''

            tensorList = hiai.NNTensorList(nntensor)

            if self.first:
                self.graph = hiai.Graph(hiai.GraphConfig(graph_id=2001))
                with self.graph.as_default():
                    self.engine_config = hiai.EngineConfig(
                        engine_name="HIAIDvppInferenceEngine",
                        side=hiai.HiaiPythonSide.Device,
                        internal_so_name='/lib/libhiai_python_device2.7.so',
                        engine_id=2001)
                    self.engine = hiai.Engine(self.engine_config)
                    self.ai_model_desc = hiai.AIModelDescription(
                        name=ssd['name'], path=ssd['path'])
                    self.ai_config = hiai.AIConfig(
                        hiai.AIConfigItem("Inference", "item_value_2"))
                    final_result = self.engine.inference(
                        input_tensor_list=tensorList,
                        ai_model=self.ai_model_desc,
                        ai_config=self.ai_config)
                ret = copy.deepcopy(self.graph.create_graph())
                if ret != hiai.HiaiPythonStatust.HIAI_PYTHON_OK:
                    print("create graph failed, ret", ret)
                    d_ret = self.graph.destroy()
                    SetExitFlag(True)
                    return HIAI_APP_ERROR, None
                self.first = False
            else:
                with self.graph.as_default():
                    final_result = self.engine.inference(
                        input_tensor_list=tensorList,
                        ai_model=self.ai_model_desc,
                        ai_config=self.ai_config)
            resTensorList = self.graph.proc(input_nntensorlist=tensorList)
            # print("Inference result: ", resTensorList[0].shape)
            result.append(resTensorList)

        return HIAI_APP_OK, result
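A brief sketch (not from the original samples) of how a caller might consume ExcuteInference's return value; `detector`, `yuv_images`, Yuv2Array, and HIAI_APP_OK are assumed to be provided by the surrounding application code:

# 'detector' is an instance of the class defining ExcuteInference above;
# 'yuv_images' is a list of frames in the format expected by Yuv2Array.
status, results = detector.ExcuteInference(yuv_images)
if status == HIAI_APP_OK:
    for tensor_list in results:
        # each entry is the NNTensorList returned by graph.proc() for one frame
        print("output tensor shape:", tensor_list[0].shape)
else:
    print("inference failed")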