Example #1
0
def save_and_export(
    config: PyTextConfig,
    task: Task,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    """Persist the trained model snapshot, then export it to Caffe2/ONNX.

    Args:
        config: run configuration; supplies the snapshot and export paths.
        task: trained task whose model and metadata are saved and exported.
        metric_channels: optional channels forwarded to the export step.
    """
    snapshot_path = config.save_snapshot_path
    print("\n=== Saving model to: " + snapshot_path)
    metadata = task.data_handler.metadata_to_save()
    save(config, task.model, metadata)
    task.export(
        task.model,
        config.export_caffe2_path,
        metric_channels,
        config.export_onnx_path,
    )
Example #2
0
def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    """Save the model snapshot, then run whichever exports are configured.

    Args:
        config: run configuration with snapshot and optional export paths.
        task: trained task (legacy or current API) to save and export.
        metric_channels: optional channels forwarded to the Caffe2 export.
    """
    print("\n=== Saving model to: " + config.save_snapshot_path)
    # Legacy tasks carry a data_handler with serializable metadata; newer
    # tasks expose tensorizers instead — exactly one of the two is saved.
    if hasattr(task, "data_handler"):
        meta, tensorizers = task.data_handler.metadata_to_save(), None
    else:
        meta, tensorizers = None, task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    if config.export_caffe2_path:
        task.export(
            task.model,
            config.export_caffe2_path,
            metric_channels,
            config.export_onnx_path,
        )
    if config.export_torchscript_path:
        task.torchscript_export(
            model=task.model,
            export_path=config.export_torchscript_path,
            quantize=config.torchscript_quantize,
        )
Example #3
0
def save_and_export(
    config: PyTextConfig,
    task: Task,
    summary_writer: Optional[SummaryWriter] = None,
) -> None:
    """Persist the model snapshot, then export it to Caffe2/ONNX.

    Args:
        config: run configuration; supplies the snapshot and export paths.
        task: trained task whose model and metadata are saved and exported.
        summary_writer: optional writer forwarded to the export step.
    """
    snapshot_path = config.save_snapshot_path
    print("\n=== Saving model to: " + snapshot_path)
    metadata = task.data_handler.metadata_to_save()
    save(config, task.model, metadata)
    task.export(
        task.model,
        config.export_caffe2_path,
        summary_writer,
        config.export_onnx_path,
    )
Example #4
0
def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    """Save the trained model snapshot, then run every configured export.

    Args:
        config: run configuration holding the snapshot path and either a
            single ``export`` config or an ``export_list``.
        task: trained task (legacy or current API) to save and export.
        metric_channels: optional channels forwarded to the Caffe2 export.
    """
    latest_snapshot_path = config.save_snapshot_path
    print("\n=== Saving model to: " + latest_snapshot_path)
    # Legacy tasks expose a data_handler with serializable metadata; newer
    # tasks expose tensorizers instead — exactly one of the two is saved.
    meta = None
    tensorizers = None
    if hasattr(task, "data_handler"):
        meta = task.data_handler.metadata_to_save()
    else:
        tensorizers = task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    # Prefer the list of export configs; fall back to the single
    # config.export entry; otherwise there is nothing to export.
    if config.export_list:
        export_configs = config.export_list
    elif config.export:
        export_configs = [config.export]
    else:
        export_configs = []
        print("No export options provided.")

    for export_config in export_configs:
        if export_config is not None:
            # NOTE(review): with multiple export targets a fresh task is
            # reloaded from the just-saved snapshot for each config —
            # presumably so one export cannot affect the model state seen by
            # the next; confirm against reload_task_for_multi_export.
            if len(export_configs) > 1:
                local_task = reload_task_for_multi_export(
                    config, latest_snapshot_path)
            else:
                local_task = task
            if export_config.export_caffe2_path:
                local_task.export(
                    model=local_task.model,
                    export_path=export_config.export_caffe2_path,
                    metric_channels=metric_channels,
                    export_onnx_path=export_config.export_onnx_path,
                )
            if export_config.export_torchscript_path:
                # TorchScript export is best-effort: known failure modes are
                # reported and skipped so remaining exports still run.
                try:
                    local_task.torchscript_export(
                        model=local_task.model,
                        export_path=export_config.export_torchscript_path,
                        export_config=export_config,
                    )
                except (RuntimeError, TypeError) as e:
                    print("Ran into error: {}".format(", ".join(e.args)))
                    traceback.print_exception(*sys.exc_info())
                    print(
                        f"The torchscript model at {export_config.export_torchscript_path} could not be saved, skipping for now."
                    )
            if export_config.export_lite_path:
                local_task.lite_export(
                    model=local_task.model,
                    export_path=export_config.export_lite_path,
                    export_config=export_config,
                )
Example #5
0
File: workflow.py  Project: yaogjim/pytext
def save_and_export(
    config: PyTextConfig,
    task: Task,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    """Save the snapshot, then run whichever exports are configured.

    Args:
        config: run configuration with snapshot and optional export paths.
        task: trained task to save and export.
        metric_channels: optional channels forwarded to the Caffe2 export.
    """
    print("\n=== Saving model to: " + config.save_snapshot_path)
    # Only legacy tasks have a data_handler whose metadata must be saved.
    has_handler = hasattr(task, "data_handler")
    meta = task.data_handler.metadata_to_save() if has_handler else None
    save(config, task.model, meta)

    caffe2_path = config.export_caffe2_path
    if caffe2_path:
        task.export(
            task.model,
            caffe2_path,
            metric_channels,
            config.export_onnx_path,
        )
    torchscript_path = config.export_torchscript_path
    if torchscript_path:
        task.torchscript_export(task.model, torchscript_path)
Example #6
0
File: workflow.py  Project: parmeet/pytext
def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    """Save the trained model snapshot, then run every configured export.

    Fix: the original used an ``elif`` chain over the three export paths, so
    an export config that set both a Caffe2 path and a TorchScript (or lite)
    path silently exported only the first one. The sibling implementations
    of this function treat the three paths independently; this version does
    the same, and prints "No model to export." only when no path is set.

    Args:
        config: run configuration holding the snapshot path and either a
            single ``export`` config or an ``export_list``.
        task: trained task (legacy or current API) to save and export.
        metric_channels: optional channels forwarded to the Caffe2 export.
    """
    print("\n=== Saving model to: " + config.save_snapshot_path)
    # Legacy tasks expose a data_handler with serializable metadata; newer
    # tasks expose tensorizers instead — exactly one of the two is saved.
    meta = None
    tensorizers = None
    if hasattr(task, "data_handler"):
        meta = task.data_handler.metadata_to_save()
    else:
        tensorizers = task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    # Prefer the list of export configs; fall back to the single
    # config.export entry; otherwise there is nothing to export.
    if config.export_list:
        export_configs = config.export_list
    elif config.export:
        export_configs = [config.export]
    else:
        export_configs = []
        print("No export options.")

    for export_config in export_configs:
        exported = False
        if export_config.export_caffe2_path:
            task.export(
                task.model,
                export_config.export_caffe2_path,
                metric_channels,
                export_config.export_onnx_path,
            )
            exported = True
        if export_config.export_torchscript_path:
            task.torchscript_export(
                model=task.model,
                export_path=export_config.export_torchscript_path,
                export_config=export_config,
            )
            exported = True
        if export_config.export_lite_path:
            task.lite_export(
                model=task.model,
                export_path=export_config.export_lite_path,
                export_config=export_config,
            )
            exported = True
        if not exported:
            print("No model to export.")
Example #7
0
def save_and_export(
    config: PyTextConfig,
    task: Task_Deprecated,
    metric_channels: Optional[List[Channel]] = None,
) -> None:
    """Save the model snapshot and run the exports named in ``config.export``.

    Args:
        config: run configuration with snapshot path and an export config.
        task: trained task (legacy or current API) to save and export.
        metric_channels: optional channels forwarded to the Caffe2 export.
    """
    print("\n=== Saving model to: " + config.save_snapshot_path)
    # Legacy tasks expose a data_handler with serializable metadata; newer
    # tasks expose tensorizers instead — exactly one of the two is saved.
    if hasattr(task, "data_handler"):
        meta = task.data_handler.metadata_to_save()
        tensorizers = None
    else:
        meta = None
        tensorizers = task.data.tensorizers
    save(config, task.model, meta, tensorizers=tensorizers)

    ec = config.export
    if ec.export_caffe2_path:
        task.export(
            task.model,
            ec.export_caffe2_path,
            metric_channels,
            ec.export_onnx_path,
        )
    if ec.export_torchscript_path:
        # TorchScript and lite export share the same tuning knobs below.
        task.torchscript_export(
            model=task.model,
            export_path=ec.export_torchscript_path,
            quantize=ec.torchscript_quantize,
            inference_interface=ec.inference_interface,
            accelerate=ec.accelerate,
            seq_padding_control=ec.seq_padding_control,
            batch_padding_control=ec.batch_padding_control,
        )
    if ec.export_lite_path:
        task.lite_export(
            model=task.model,
            export_path=ec.export_lite_path,
            quantize=ec.torchscript_quantize,
            inference_interface=ec.inference_interface,
            accelerate=ec.accelerate,
            seq_padding_control=ec.seq_padding_control,
            batch_padding_control=ec.batch_padding_control,
        )