def convert_classes(classes, start=1):
    """Build a StringIntLabelMap pbtxt string from an iterable of class names.

    Args:
        classes: Iterable of class-name strings.
        start: Integer id assigned to the first class; subsequent classes
            get consecutive ids.

    Returns:
        The label map rendered as a pbtxt-formatted str.
    """
    msg = StringIntLabelMap()
    # `class_id` instead of `id` to avoid shadowing the builtin.
    for class_id, name in enumerate(classes, start=start):
        msg.item.append(StringIntLabelMapItem(id=class_id, name=name))
    # MessageToBytes(as_utf8=True) yields UTF-8 bytes; decode back to str.
    text = str(text_format.MessageToBytes(msg, as_utf8=True), 'utf-8')
    return text
def _save_label_dict_to_file(label_dict: dict, label_map_path: str):
    """Serialize a {label: id} mapping to a StringIntLabelMap pbtxt file.

    Args:
        label_dict: Mapping from label name to integer id.
        label_map_path: Destination path for the pbtxt file.
    """
    label_map = StringIntLabelMap()
    # `label_id` instead of `id` to avoid shadowing the builtin.
    for label, label_id in label_dict.items():
        label_map.item.append(StringIntLabelMapItem(id=label_id, name=label))
    # The message is rendered as UTF-8 bytes; write it back out as UTF-8 so
    # non-ASCII labels survive regardless of the platform default encoding.
    with open(label_map_path, "w", encoding="utf-8") as f:
        f.write(
            str(text_format.MessageToBytes(label_map, as_utf8=True), "utf-8"))
def download_reaction():
    """Returns a pbtxt file parsed from POST data as an attachment."""
    reaction = reaction_pb2.Reaction()
    reaction.ParseFromString(flask.request.get_data())
    # Render the message as pbtxt bytes and ship it back as a download.
    pbtxt_bytes = text_format.MessageToBytes(reaction)
    return flask.send_file(io.BytesIO(pbtxt_bytes),
                           mimetype='application/protobuf',
                           as_attachment=True,
                           attachment_filename='reaction.pbtxt')
def create_labelmap_pbtxt(self, path):
    """Write the enabled categories to `path` as a StringIntLabelMap pbtxt."""
    msg = StringIntLabelMap()
    # Only categories flagged as enabled make it into the label map.
    for cat in (c for c in self.categories if c['enabled']):
        msg.item.append(StringIntLabelMapItem(id=cat['id'], name=cat['name']))
    pbtxt = str(text_format.MessageToBytes(msg, as_utf8=True), 'utf-8')
    with open(path, 'w') as f:
        f.write(pbtxt)
def make_label_map(classes, output_path):
    """Write a pascal_label_map.pbtxt for `classes` under `output_path`.

    Args:
        classes: Iterable of class-name strings.
        output_path: Directory to create (if needed) and write into.
    """
    Path(output_path).mkdir(exist_ok=True, parents=True)
    msg = StringIntLabelMap()
    # `class_id` instead of `id` to avoid shadowing the builtin;
    # start=1 keeps the ids 1-based as in the original.
    for class_id, name in enumerate(classes, start=1):
        msg.item.append(StringIntLabelMapItem(id=class_id, name=name))
    text = str(text_format.MessageToBytes(msg, as_utf8=True), 'utf-8')
    with open(os.path.join(output_path, 'pascal_label_map.pbtxt'), 'w') as f:
        f.write(text)
def download_dataset(name, kind='pb'):
    """Returns a pb or pbtxt from the datasets table as an attachment."""
    dataset = get_dataset(name)
    if kind == 'pb':
        payload = dataset.SerializeToString(deterministic=True)
    elif kind == 'pbtxt':
        payload = text_format.MessageToBytes(dataset)
    else:
        # flask.abort raises, so execution never continues past this branch.
        flask.abort(flask.make_response(f'unsupported format: {kind}', 406))
    return flask.send_file(io.BytesIO(payload),
                           mimetype='application/protobuf',
                           as_attachment=True,
                           attachment_filename=f'{name}.{kind}')
def label_map_to_file(label_map: Dict[str, int], filepath: Union[str, Path]):
    """Write `label_map` to `filepath` as a StringIntLabelMap pbtxt, sorted by id.

    Args:
        label_map: Mapping from label name to integer id.
        filepath: Destination path for the pbtxt file.
    """
    msg = StringIntLabelMap()
    # Iterate the entries in ascending-id order directly; the original's
    # intermediate rebuilt dict added nothing.
    for label, label_id in sorted(label_map.items(), key=lambda item: item[1]):
        # pylint: disable=no-member
        msg.item.append(StringIntLabelMapItem(id=label_id, name=label))
    text = str(text_format.MessageToBytes(msg, as_utf8=True), 'utf-8')
    with open(filepath, 'w') as out:
        out.write(text)
    logger.info(f'label_map saved to {filepath}')
def write_config_to_file(self, model_path, src_model_path,
                         first_variant_model_path):
    """
    Writes a protobuf config file.

    Parameters
    ----------
    model_path : str
        Path to write the model config.
    src_model_path : str
        Path to the source model in the Triton Model Repository
    first_variant_model_path : str
        Indicates the path to the first model variant.

    Raises
    ------
    TritonModelAnalyzerException
        If the path doesn't exist or the path is a file
    """
    # The destination must already exist and be a directory.
    if not os.path.exists(model_path):
        raise TritonModelAnalyzerException(
            'Output path specified does not exist.')
    if os.path.isfile(model_path):
        raise TritonModelAnalyzerException(
            'Model output path must be a directory.')

    # Render the in-memory config to pbtxt bytes before touching the disk.
    model_config_bytes = text_format.MessageToBytes(self._model_config)
    # Create current variant model as symlinks to first variant model
    if first_variant_model_path is not None:
        for file in os.listdir(first_variant_model_path):
            # Do not copy the config.pbtxt file — this variant gets its own,
            # written fresh at the end of this method.
            if file == 'config.pbtxt':
                continue
            else:
                # Relative symlink target so the repository stays relocatable.
                os.symlink(
                    os.path.join(
                        os.path.relpath(first_variant_model_path, model_path),
                        file), os.path.join(model_path, file))
    else:
        # Create first variant model as copy of source model
        copy_tree(src_model_path, model_path)

    # Written in binary mode since MessageToBytes already produced bytes.
    with open(os.path.join(model_path, "config.pbtxt"), 'wb') as f:
        f.write(model_config_bytes)
def _run():
    """Rewrite the checkpoint state so model_checkpoint_path is relative."""
    from google.protobuf import text_format
    from tensorflow.python.training.checkpoint_state_pb2 import CheckpointState
    # Load the existing checkpoint state (stored as pbtxt).
    # NOTE(review): the file is read as bytes and handed straight to
    # text_format.Merge — confirm the installed protobuf version decodes
    # bytes input itself.
    with open(ckpt_path, "rb") as fp:
        raw = fp.read()
    old_state = CheckpointState()
    text_format.Merge(raw, old_state)
    # Build a fresh state whose checkpoint path is relative to its own
    # directory, then overwrite the file in place.
    rewritten = CheckpointState()
    rewritten.model_checkpoint_path = "./{}".format(
        os.path.basename(old_state.model_checkpoint_path))
    with open(ckpt_path, "wb") as fp:
        fp.write(text_format.MessageToBytes(rewritten))
def create_labes(label_basepath, categories):
    """Write label_map.pbtxt and label_map.txt for the enabled categories.

    Args:
        label_basepath: Directory in which both label files are written.
        categories: Iterable of dicts with 'enabled', 'id' and 'name' keys.
    """
    path_pbtxt = os.path.join(label_basepath, 'label_map.pbtxt')
    msg = StringIntLabelMap()
    for category in categories:
        if category['enabled']:
            msg.item.append(
                StringIntLabelMapItem(id=category['id'],
                                      name=category['name']))
    txt = str(text_format.MessageToBytes(msg, as_utf8=True), 'utf-8')
    print(txt)
    with open(path_pbtxt, 'w') as f:
        f.write(txt)

    # Plain-text companion file: one enabled category name per line.
    path_txt = os.path.join(label_basepath, 'label_map.txt')
    # Context manager guarantees the handle is closed even if a write
    # raises; the original's bare open()/close() leaked it on error.
    with open(path_txt, 'w') as f:
        for category in categories:
            if category['enabled']:
                f.write(category['name'] + '\n')
def write_config_to_file(self,
                         model_path,
                         copy_original_model=False,
                         src_model_path=None):
    """
    Writes a protobuf config file.

    Parameters
    ----------
    model_path : str
        Path to write the model config.
    copy_original_model : bool
        Whether to copy the original model too or not.
    src_model_path : str or None
        Path of the original model to copy; only read when
        copy_original_model is True.

    Raises
    ------
    TritonModelAnalyzerException
        If the path doesn't exist or the path is a file
    """
    # The destination must already exist and be a directory.
    if not os.path.exists(model_path):
        raise TritonModelAnalyzerException(
            'Output path specified does not exist.')
    if os.path.isfile(model_path):
        raise TritonModelAnalyzerException(
            'Model output path must be a directory.')

    # Render the in-memory config to pbtxt bytes.
    model_config_bytes = text_format.MessageToBytes(self._model_config)
    if copy_original_model:
        # NOTE(review): src_model_path defaults to None and is not checked
        # here — copy_tree would fail if the caller omits it.
        copy_tree(src_model_path, model_path)

    # Binary mode because MessageToBytes already produced bytes.
    with open(os.path.join(model_path, "config.pbtxt"), 'wb') as f:
        f.write(model_config_bytes)
def generate_ma_result(json_file_path, result_pbtxt_path, ma_config_path):
    """Merge the best OLive tuning result into a Model Analyzer config.

    Reads the OLive tuning summary from json_file_path, translates the
    winning test's settings into Triton `optimization` / `parameters`
    sections, merges them into the ModelConfig pbtxt at ma_config_path,
    and writes the combined config to result_pbtxt_path.
    """
    from google.protobuf import text_format, json_format
    from tritonclient.grpc import model_config_pb2
    with open(json_file_path) as json_file:
        olive_result = json.load(json_file)
    results = olive_result.get("all_tuning_results")
    best_test_name = olive_result.get("best_test_name")
    # Pick the settings of the winning tuning run.
    # NOTE(review): execution_provider / env_vars / session_options stay
    # unbound if no result matches best_test_name — the caller presumably
    # guarantees a match; confirm.
    for result in results:
        if result.get("test_name") == best_test_name:
            execution_provider = result.get("execution_provider")
            env_vars = result.get("env_vars")
            session_options = result.get("session_options")
            break
    optimization_config = None
    sess_opt_parameters = None
    if best_test_name == "pretuning":
        # Pretuning baseline: fixed graph optimization level, no session
        # option parameters.
        optimization_config = {"graph": {"level": 1}}
    else:
        # Session options arrive as strings from the OLive JSON.
        intra_op_thread_count = session_options.get("intra_op_num_threads")
        inter_op_thread_count = session_options.get("inter_op_num_threads")
        execution_mode = session_options.get("execution_mode")
        graph_optimization_level = session_options.get(
            "graph_optimization_level")
        if graph_optimization_level in ["0", "1"]:
            opt_level = -1
        else:
            opt_level = 1
        if execution_provider == "TensorrtExecutionProvider":
            tensorrt_accelerator = {"name": "tensorrt"}
            # FP16 precision only when the tuning run enabled it via env var.
            if env_vars.get("ORT_TENSORRT_FP16_ENABLE") == "1":
                tensorrt_accelerator["parameters"] = {
                    "precision_mode": "FP16"}
            optimization_config = {
                "executionAccelerators": {
                    "gpuExecutionAccelerator": [tensorrt_accelerator]},
                "graph": {"level": opt_level}
            }
        elif execution_provider == "OpenVINOExecutionProvider":
            optimization_config = {
                "executionAccelerators": {
                    "cpuExecutionAccelerator": [{"name": "openvino"}]},
                "graph": {"level": opt_level}}
        else:
            optimization_config = {"graph": {"level": opt_level}}
        sess_opt_parameters = {}
        # The JSON serializes missing values as the string "None", not null.
        if intra_op_thread_count != "None":
            sess_opt_parameters["intra_op_thread_count"] = {
                "stringValue": intra_op_thread_count}
        if inter_op_thread_count != "None":
            sess_opt_parameters["inter_op_thread_count"] = {
                "stringValue": inter_op_thread_count}
        if execution_mode:
            # "0" = sequential, "1" = parallel.
            execution_mode_flag = "0" if execution_mode == \
                "ExecutionMode.ORT_SEQUENTIAL" else "1"
            sess_opt_parameters["execution_mode"] = {
                "stringValue": execution_mode_flag}
    # Round-trip the existing config through a dict so the new sections can
    # be spliced in, then re-parse to validate against the ModelConfig schema.
    with open(ma_config_path, 'r+') as f:
        config_str = f.read()
    protobuf_message = text_format.Parse(config_str,
                                         model_config_pb2.ModelConfig())
    model_dict = json_format.MessageToDict(protobuf_message)
    model_dict.update({"optimization": optimization_config})
    if sess_opt_parameters:
        model_dict.update({"parameters": sess_opt_parameters})
    protobuf_message = json_format.ParseDict(model_dict,
                                             model_config_pb2.ModelConfig())
    model_config_bytes = text_format.MessageToBytes(protobuf_message)
    with open(result_pbtxt_path, "wb") as f:
        f.write(model_config_bytes)