def submit(self, input_data: Any, future: Future = None) -> Future:
    """Submits a job and returns a Future that will be fulfilled later."""
    future = future or Future()

    # Add backwards compatibility for 0.2
    future.get = future.result
    future.get = deprecated(
        message="Use future.result() in place of future.get()",
        remove_in="0.3.0")(future.get)

    self._request_queue.put(_Request(
        future=future,
        input_data=input_data))
    return future
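
# Usage sketch (illustrative, not part of this module). Assumes `executor`
# is an instance of the class above and that a worker thread drains
# self._request_queue and fulfils each _Request's future:
#
#     futures = [executor.submit(frame) for frame in frames]
#     results = [f.result(timeout=5.0) for f in futures]
#
# Because the returned object is a standard concurrent.futures.Future,
# callers can also use concurrent.futures.as_completed(futures) to consume
# results as they finish rather than in submission order.
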
def send_to_batch(self, input_data: OV_INPUT_TYPE) -> Future:
    """Efficiently send the input to be run through the network for inference.

    :param input_data: Input to the network
    :returns: A Future that will be filled with the output from the network
    """
    future = Future()

    with self._get_free_request_lock:
        # Try to get at least one idle request
        request_id = self.exec_net.get_idle_request_id()
        if request_id < 0:
            # Since there was no free request, wait for one
            status = self.exec_net.wait(num_requests=1)
            if status != self._StatusCode.OK:
                raise RuntimeError(
                    f"Wait for idle request failed with code {status}")
            request_id = self.exec_net.get_idle_request_id()
            if request_id < 0:
                raise RuntimeError(f"Invalid request_id: {request_id}")

        request = self.exec_net.requests[request_id]
        request_free = self._request_free_events[request_id]

        def on_result(*args):
            future.set_result(request.outputs)
            request_free.set()
            with self._num_ongoing_requests_lock:
                self._num_ongoing_requests -= 1

        # Make sure any previously registered callback for this request has
        # finished before reassigning it, by waiting on request_free.
        request_free.wait()
        request_free.clear()
        request.set_completion_callback(on_result)

        with self._num_ongoing_requests_lock:
            self._num_ongoing_requests += 1
        request.async_infer(input_data)

    # Add backwards compatibility for 0.2
    future.get = future.result
    future.get = deprecated(
        message="Use future.result() in place of future.get()",
        remove_in="0.3.0")(future.get)
    return future
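
# Usage sketch (illustrative, not part of this module). Assumes `backend`
# is an instance of the class above with exec_net already loaded, and that
# `ov_input` is a dict mapping input blob names to numpy arrays, as the
# OpenVINO InferRequest.async_infer() API expects:
#
#     future = backend.send_to_batch(ov_input)
#     outputs = future.result(timeout=10.0)  # dict of output blob -> ndarray
#
# future.get() remains available for 0.2-era callers, but it is a deprecated
# alias for future.result() slated for removal in 0.3.0 (per the `deprecated`
# wrapper above).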