def update_model():
    """Flask handler: refresh the ONNX model.

    Accepts either a ``model_uri`` query arg (a model zip to download and
    unpack into ``MODEL_DIR``) or a ``model_dir`` query arg (a scenario
    model already on disk).

    Returns:
        'ok' on success, or the error string
        'missing model_uri or model_dir' when neither arg is supplied.
    """
    model_uri = request.args.get('model_uri')
    model_dir = request.args.get('model_dir')
    if not model_uri and not model_dir:
        return ('missing model_uri or model_dir')
    print('[INFO] Update Model ...', flush=True)
    if model_uri:
        print('[INFO] Got Model URI', model_uri, flush=True)
        # FIXME: the web module sometimes fails to set detection_mode to
        # Part Detection; force it here as a workaround.
        onnx.set_detection_mode('PD')
        onnx.set_is_scenario(False)
        if model_uri == onnx.model_uri:
            # Same URI as the currently loaded model -- nothing to do.
            print('[INFO] Model Uri unchanged', flush=True)
        else:
            get_file_zip(model_uri, MODEL_DIR)
            onnx.model_uri = model_uri
            onnx.update_model('model')
            print('[INFO] Update Finished ...', flush=True)
        return 'ok'
    elif model_dir:
        # Fixed: flush=True added to match the model_uri branch so these
        # messages appear promptly in container logs.
        print('[INFO] Got Model DIR', model_dir, flush=True)
        onnx.set_is_scenario(True)
        onnx.update_model(model_dir)
        print('[INFO] Update Finished ...', flush=True)
        return 'ok'
def update_model():
    """Flask handler: refresh the ONNX model from a ``model_uri`` (zip to
    download) or a ``model_dir`` (directory name) query argument.

    Returns 'ok' on success, or 'missing model_uri or model_dir' when
    neither argument is supplied.
    """
    uri = request.args.get('model_uri')
    directory = request.args.get('model_dir')
    if not (uri or directory):
        return 'missing model_uri or model_dir'
    print('[INFO] Update Model ...')
    if uri:
        print('[INFO] Got Model URI', uri)
        if uri != onnx.model_uri:
            # New URI: fetch the zip, record it, and reload the model.
            get_file_zip(uri, MODEL_DIR)
            onnx.model_uri = uri
            onnx.update_model('model')
            print('[INFO] Update Finished ...')
        else:
            print('[INFO] Model Uri unchanged')
        return 'ok'
    if directory:
        print('[INFO] Got Model DIR', directory)
        onnx.update_model(directory)
        print('[INFO] Update Finished ...')
        return 'ok'
def update_model():
    """Flask handler: refresh the ONNX model from a ``model_uri`` (zip to
    download) or a ``model_dir`` (directory name) query argument.

    Returns:
        "ok" on success, or "missing model_uri or model_dir" when
        neither argument is supplied.
    """
    model_uri = request.args.get("model_uri")
    model_dir = request.args.get("model_dir")
    if not model_uri and not model_dir:
        return "missing model_uri or model_dir"
    logger.info("Update Model ...")
    if model_uri:
        logger.info("Got Model URI %s", model_uri)
        if model_uri == onnx.model_uri:
            # Same URI as the currently loaded model -- nothing to do.
            logger.info("Model URI unchanged")
        else:
            get_file_zip(model_uri, MODEL_DIR)
            onnx.model_uri = model_uri
            onnx.update_model("model")
            logger.info("Update Finished ...")
        return "ok"
    elif model_dir:
        # Fixed: this branch previously used raw print() while the rest of
        # the handler logs through `logger` -- now consistent.
        logger.info("Got Model DIR %s", model_dir)
        onnx.update_model(model_dir)
        logger.info("Update Finished ...")
        return "ok"
def update_model():
    """Flask handler: refresh the ONNX model from the ``model_uri`` query
    argument.

    Returns 'ok' on success (including the no-op case where the URI is
    unchanged), or 'missing model_uri' when the argument is absent.
    """
    uri = request.args.get('model_uri')
    if not uri:
        return 'missing model_uri'
    print('[INFO] Update Model ...')
    # Skip the download entirely when the requested URI is already loaded.
    unchanged = uri == onnx.model_uri
    if unchanged:
        print('[INFO] Model Uri unchanged')
        return 'ok'
    get_file_zip(uri, MODEL_DIR)
    onnx.model_uri = uri
    onnx.update_model('model')
    print('[INFO] Update Finished ...')
    return 'ok'
def run(self, model_uri, MODEL_DIR):
    """Download the model zip from *model_uri* into *MODEL_DIR*, then
    install it via ``self.update_model("model")``.

    The download runs under ``self.lock``; ``self.model_downloading`` is
    cleared on both the success and failure paths.  Failures are logged
    with a traceback rather than propagated to the caller.
    """
    try:
        # Use the lock as a context manager so it is released exactly
        # once.  The original released manually and then released AGAIN
        # in the except handler when update_model() raised -- calling
        # release() on an unlocked Lock raises RuntimeError.
        with self.lock:
            print("Downloading URL.", flush=True)
            get_file_zip(model_uri, MODEL_DIR)
            print("Downloading URL..., Complete!!!", flush=True)
        self.model_downloading = False
        print("Updating Model...", flush=True)
        self.update_model("model")
        print("Updating Model..., Complete!!!", flush=True)
    except Exception:
        self.model_downloading = False
        print("Download URL failed. Model_URI: %s, MODEL_DIR: %s" %
              (model_uri, MODEL_DIR))
        traceback.print_exc()
def twin_update_listener(client):
    """Long-running listener: apply model updates pushed via IoT Hub
    module-twin desired-property patches.

    Blocks on ``client.receive_twin_desired_properties_patch()`` forever;
    each patch carrying a new ``model_uri`` triggers a download and model
    reload.  Never returns.
    """
    while True:
        patch = client.receive_twin_desired_properties_patch()  # blocking call
        print("[INFO] Twin desired properties patch received:", flush=True)
        print("[INFO]", patch, flush=True)
        if "model_uri" not in patch:
            print("[WARNING] missing model_uri", flush=True)
            continue
        model_uri = patch["model_uri"]
        print("[INFO] Got Model URI", model_uri, flush=True)
        if model_uri == onnx.model_uri:
            print("[INFO] Model Uri unchanged", flush=True)
            continue
        try:
            get_file_zip(model_uri, MODEL_DIR)
            onnx.model_uri = model_uri
            onnx.update_model("model")
            print("[INFO] Update Finished ...", flush=True)
        except Exception as exc:
            # Fixed: a failed download/reload previously propagated out of
            # the loop and silently killed the listener thread; log and
            # keep waiting for the next patch instead.
            print("[ERROR] model update failed:", exc, flush=True)
def module_twin_callback(self, update_state, payload, user_context):
    """Handle a module-twin update from IoT Hub.

    Parses *payload* (a JSON string) and applies any recognised settings:
    inference model zip URL, camera type/source, object of interest and
    message rate.  Sets ``self.setRestartCamera`` when the inference
    pipeline should be restarted.

    Both full-twin payloads (settings nested under ``"desired"``) and
    patch payloads (settings at the top level) are handled, which is why
    every setting is probed twice below.  Statement order matters: the
    cam_source/cam_type branches must run before the video_file download
    step that consumes them.
    """
    global inference_files_zip_url
    global msg_per_minute
    global object_of_interest
    print("")
    print("Twin callback called with:")
    print(" updateStatus: %s" % update_state)
    print(" payload: %s" % payload)
    data = json.loads(payload)
    self.setRestartCamera = False
    # --- inference model zip URL (full twin: nested under "desired") ---
    if "desired" in data and "inference_files_zip_url" in data["desired"]:
        self.model_dst_folder = "default_model"
        inference_files_zip_url = data["desired"][
            "inference_files_zip_url"]
        if inference_files_zip_url:
            print(
                "\n Setting value to %s from :: data[\"desired\"][\"all_inference_files_zip\"]"
                % inference_files_zip_url)
            # NOTE(review): get_file_zip's return value is used directly as
            # the restart flag -- presumably truthy on a successful
            # download; confirm against its definition.
            self.setRestartCamera = get_file_zip(inference_files_zip_url,
                                                 self.model_dst_folder)
            self.model_url = inference_files_zip_url
        else:
            print(inference_files_zip_url)
            self.model_url = None
    # --- inference model zip URL (patch payload: top level) ---
    if "inference_files_zip_url" in data:
        self.model_dst_folder = "default_model"
        inference_files_zip_url = data["inference_files_zip_url"]
        if inference_files_zip_url:
            print(
                "\n Setting value to %s from :: data[\"all_inference_files_zip\"]"
                % inference_files_zip_url)
            self.ret_flag = get_file_zip(inference_files_zip_url,
                                         self.model_dst_folder)
            #self.setRestartCamera = True
            self.model_url = inference_files_zip_url
        else:
            print(inference_files_zip_url)
            self.model_url = None
    # --- camera type (desired wrapper wins over top-level) ---
    if "desired" in data and "cam_type" in data["desired"]:
        cam_type = data["desired"]["cam_type"]
        self.cam_type = str(cam_type)
        print("Setting value to %s from :: data[desired][cam_type]" % cam_type)
        self.ret_flag = True
    elif "cam_type" in data:
        cam_type = data["cam_type"]
        self.cam_type = str(cam_type)
        print("Setting value to %s from :: data[cam_type]" % cam_type)
        self.ret_flag = True
    # --- camera source (desired wrapper wins over top-level) ---
    if "desired" in data and "cam_source" in data["desired"]:
        cam_source = data["desired"]["cam_source"]
        self.cam_source = str(cam_source)
        print("Setting value to %s from :: data[desired][cam_source]" % cam_source)
        self.ret_flag = True
    elif "cam_source" in data:
        cam_source = data["cam_source"]
        self.cam_source = str(cam_source)
        print("Setting value to %s from :: data[cam_source]" % cam_source)
        self.ret_flag = True
    # Video-file sources arrive as a zip: download/unpack it, then point
    # the camera at the well-known extracted path.
    if self.cam_source:
        if self.cam_type == "video_file":
            print("self.cam_source %s file" % self.cam_source)
            dst_folder = "sample_video"
            print(
                "\n Download and unzip video file to sample dir from %s"
                % self.cam_source)
            self.ret_flag = get_file_zip(self.cam_source, dst_folder)
            #ToDo readfilename and add to dst_folder
            self.cam_source = "/sample_video/video.mp4"
            #self.setRestartCamera = True
        else:
            print(self.cam_source)
    # --- object of interest (stored in a module-level global) ---
    if "desired" in data and "object_of_interest" in data["desired"]:
        object_of_interest = data["desired"]["object_of_interest"]
        print("Setting value to %s from :: data[\"object_of_interest\"]"
              % object_of_interest)
    if "object_of_interest" in data:
        object_of_interest = data["object_of_interest"]
        print("Setting value to %s from :: data[\"object_of_interest\"]"
              % object_of_interest)
    # --- message rate (stored in a module-level global) ---
    if "desired" in data and "msg_per_minute" in data["desired"]:
        msg_per_minute = data["desired"]["msg_per_minute"]
        print("Setting value to %s from :: data[\"msg_per_minute\"]"
              % msg_per_minute)
    if "msg_per_minute" in data:
        msg_per_minute = data["msg_per_minute"]
        print("Setting value to %s from :: data[\"msg_per_minute\"]"
              % msg_per_minute)
    # Any applied setting requests an inferencing restart via
    # setRestartCamera (the caller presumably polls this flag -- confirm).
    if self.ret_flag:
        try:
            print("setting restart inferense to True")
            self.setRestartCamera = True
            logger.info("Restarting inferencing")
        except Exception as e:
            logger.info("Got an issue during cam ON off after twin update")
            logger.exception(e)
            raise
def module_twin_callback(self, update_state, payload, user_context):
    """Handle a module-twin update from IoT Hub.

    Parses *payload* (a JSON string) and applies recognised settings: a
    downloadable inference model zip, the telemetry rate
    (``msg_per_minute`` -> derived ``wait_for_minutes`` delay) and the
    object of interest.  Restarts the VAM when a new model zip was
    downloaded.

    Both full-twin payloads (settings nested under ``"desired"``) and
    patch payloads (settings at the top level) are handled, which is why
    every setting is probed twice below.
    """
    global inference_files_zip_url
    global model_url
    global label_url
    global config_url
    global msg_per_minute
    global wait_for_minutes
    global object_of_interest
    print("")
    print("Twin callback called with:")
    print(" updateStatus: %s" % update_state)
    print(" payload: %s" % payload)
    data = json.loads(payload)
    setRestartCamera = False
    # --- inference model zip URL (full twin: nested under "desired") ---
    if "desired" in data and "inference_files_zip_url" in data["desired"]:
        dst_folder = "twin_provided_model"
        inference_files_zip_url = data["desired"][
            "inference_files_zip_url"]
        if inference_files_zip_url:
            print(
                "Setting value to %s from :: data[\"desired\"][\"all_inference_files_zip\"]"
                % inference_files_zip_url)
            setRestartCamera = get_file_zip(inference_files_zip_url,
                                            dst_folder)
        else:
            print(inference_files_zip_url)
    # --- inference model zip URL (patch payload: top level) ---
    if "inference_files_zip_url" in data:
        dst_folder = "twin_provided_model"
        inference_files_zip_url = data["inference_files_zip_url"]
        if inference_files_zip_url:
            print(
                "Setting value to %s from :: data[\"all_inference_files_zip\"]"
                % inference_files_zip_url)
            setRestartCamera = get_file_zip(inference_files_zip_url,
                                            dst_folder)
        else:
            print(inference_files_zip_url)
    # --- message rate -> inter-message delay ---
    if "desired" in data and "msg_per_minute" in data["desired"]:
        msg_per_minute = data["desired"]["msg_per_minute"]
        # Fixed: this branch previously overwrote msg_per_minute with the
        # derived delay and never updated wait_for_minutes; it now mirrors
        # the top-level branch below.
        wait_for_minutes = int(60 / int(msg_per_minute))
        print("Setting value to %s from :: data[\"msg_per_minute\"]"
              % msg_per_minute)
    if "msg_per_minute" in data:
        msg_per_minute = data["msg_per_minute"]
        wait_for_minutes = int(60 / int(msg_per_minute))
        print("Setting value to %s from :: data[\"msg_per_minute\"]"
              % msg_per_minute)
    # --- object of interest ---
    if "desired" in data and "object_of_interest" in data["desired"]:
        object_of_interest = data["desired"]["object_of_interest"]
        print("Setting value to %s from :: data[\"object_of_interest\"]"
              % object_of_interest)
    if "object_of_interest" in data:
        # Fixed: this previously assigned to msg_per_minute by mistake,
        # leaving object_of_interest stale (and printing the old value).
        object_of_interest = data["object_of_interest"]
        print("Setting value to %s from :: data[\"object_of_interest\"]"
              % object_of_interest)
    if setRestartCamera:
        # Fixed: the `try:` line had been commented out while the
        # `except` remained, which is a SyntaxError.
        try:
            logger.info("Restarting VAM to apply new model config")
            self.restartInference(self.iot_camera_handle)
        except Exception as e:
            logger.info("Got an issue during vam ON off after twin update")
            logger.exception(e)
            raise