import sys
import traceback
from typing import List, Optional

# The PyText symbols used below (PyTextConfig, Task_Deprecated, Channel,
# save, reload_model_for_multi_export) are assumed to be imported from the
# surrounding PyText modules, as in the original source.


def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    # Variant that supports a list of export configs (config.export_list),
    # reloads a fresh model per export, and tolerates TorchScript failures.
    print("\n=== Saving model to: " + config.save_snapshot_path)
    # Older tasks carry a data_handler with metadata to persist; newer tasks
    # expose tensorizers instead. Save whichever the task provides.
    meta = None
    tensorizers = None
    if hasattr(task, "data_handler"):
        meta = task.data_handler.metadata_to_save()
    else:
        tensorizers = task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    # Collect export configs: prefer the multi-export list, fall back to the
    # single export config, otherwise export nothing.
    if config.export_list:
        export_configs = config.export_list
    elif config.export:
        export_configs = [config.export]
    else:
        export_configs = []
        print("No export options.")

    for export_config in export_configs:
        # Reload a clean copy of the model for each export so one export
        # cannot mutate the model seen by the next; fall back to task.model.
        model = reload_model_for_multi_export(config)
        if model is None:
            model = task.model
        if export_config.export_caffe2_path:
            task.export(
                model,
                export_config.export_caffe2_path,
                metric_channels,
                export_config.export_onnx_path,
            )
        elif export_config.export_torchscript_path:
            try:
                task.torchscript_export(
                    model=model,
                    export_path=export_config.export_torchscript_path,
                    export_config=export_config,
                )
            except (RuntimeError, TypeError) as e:
                # e.args may contain non-strings; stringify before joining.
                print(
                    "Ran into error: {}".format(", ".join(str(a) for a in e.args))
                )
                traceback.print_exception(*sys.exc_info())
                print(
                    f"The torchscript model at "
                    f"{export_config.export_torchscript_path} could not be "
                    f"saved, skipping for now."
                )
        elif export_config.export_lite_path:
            task.lite_export(
                model=model,
                export_path=export_config.export_lite_path,
                export_config=export_config,
            )
        else:
            print("No model to export.")
def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    # Variant that supports config.export_list but always exports task.model
    # directly, with no per-export reload and no TorchScript error handling.
    print("\n=== Saving model to: " + config.save_snapshot_path)
    meta = None
    tensorizers = None
    if hasattr(task, "data_handler"):
        meta = task.data_handler.metadata_to_save()
    else:
        tensorizers = task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    if config.export_list:
        export_configs = config.export_list
    elif config.export:
        export_configs = [config.export]
    else:
        export_configs = []
        print("No export options.")

    for export_config in export_configs:
        if export_config.export_caffe2_path:
            task.export(
                task.model,
                export_config.export_caffe2_path,
                metric_channels,
                export_config.export_onnx_path,
            )
        elif export_config.export_torchscript_path:
            task.torchscript_export(
                model=task.model,
                export_path=export_config.export_torchscript_path,
                export_config=export_config,
            )
        elif export_config.export_lite_path:
            task.lite_export(
                model=task.model,
                export_path=export_config.export_lite_path,
                export_config=export_config,
            )
        else:
            print("No model to export.")
def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    # Variant that handles only the single config.export entry and passes the
    # export options as explicit keyword arguments. The independent `if`
    # blocks (rather than elif) let one config produce several export formats.
    print("\n=== Saving model to: " + config.save_snapshot_path)
    meta = None
    tensorizers = None
    if hasattr(task, "data_handler"):
        meta = task.data_handler.metadata_to_save()
    else:
        tensorizers = task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    export_config = config.export
    if export_config.export_caffe2_path:
        task.export(
            task.model,
            export_config.export_caffe2_path,
            metric_channels,
            export_config.export_onnx_path,
        )
    if export_config.export_torchscript_path:
        task.torchscript_export(
            model=task.model,
            export_path=export_config.export_torchscript_path,
            quantize=export_config.torchscript_quantize,
            inference_interface=export_config.inference_interface,
            accelerate=export_config.accelerate,
            seq_padding_control=export_config.seq_padding_control,
            batch_padding_control=export_config.batch_padding_control,
        )
    if export_config.export_lite_path:
        task.lite_export(
            model=task.model,
            export_path=export_config.export_lite_path,
            quantize=export_config.torchscript_quantize,
            inference_interface=export_config.inference_interface,
            accelerate=export_config.accelerate,
            seq_padding_control=export_config.seq_padding_control,
            batch_padding_control=export_config.batch_padding_control,
        )
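
# Usage sketch (illustrative only): how a caller might drive save_and_export
# once training has produced a task/config pair. `load_trained_task` is a
# hypothetical helper standing in for the surrounding PyText training flow,
# not an API of this module; only the save_and_export call itself is real.
#
#     config, task = load_trained_task("/path/to/config.json")  # hypothetical
#     save_and_export(config, task, metric_channels=None)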