Example #1
def print_job_set(self):
    # Only valid while the session is open or running.
    self._check_status(self.SessionStatus.OPEN, self.SessionStatus.RUNNING)
    job_set = c_api_util.GetJobSet()
    # Print every compiled job and the ops in its net.
    for job in job_set.job:
        print("job_name:", job.job_conf.job_name)
        for op_conf in job.net.op:
            print("\top_name:", op_conf.name)
Example #2
def Export(
    job_func: Callable,
    model_save_dir: Text,
    onnx_filename: Text,
    continue_on_error: bool = False,
    opset: Optional[int] = None,
    extra_opset: Optional[int] = None,
    shape_override: Optional[Dict[Text, List[int]]] = None,
    external_data: bool = False,
):
    r"""Export a oneflow model into ONNX format.

    Args:
        job_func: The job function
        model_save_dir: The directory containing oneflow model weights. Users are expected to call check_point.save(dir), wait for the model saving to finish, and pass the argument 'dir' as model_save_dir.
        onnx_filename: a string for the output filename
        continue_on_error: if an op can't be processed (aka there is no mapping), continue
        opset: the opset to be used (int, default is oneflow.python.onnx.constants.PREFERRED_OPSET)
        extra_opset: list of extra opsets, for example the opsets used by custom ops
        shape_override: dict with inputs that override the shapes given by oneflow
        external_data: Save weights as ONNX external data, usually to bypass the 2GB file size limit of protobuf.
    """
    assert os.getenv("ENABLE_USER_OP") != "False"
    assert os.path.isdir(model_save_dir)
    job_set = c_api_util.GetJobSet()
    job_name = job_func.__name__
    for job in job_set.job:
        # TODO(OYY) Modify the interface before modifying it
        if job.job_conf.job_name == job_name:
            onnx_graph = ProcessFlowGraph(
                job,
                model_save_dir,
                continue_on_error=continue_on_error,
                opset=opset,
                extra_opset=extra_opset,
                shape_override=shape_override,
            )
            onnx_graph = optimizer.OptimizeGraph(onnx_graph)
            model_proto = onnx_graph.MakeModel(
                job_name, onnx_filename, external_data=external_data
            )
            with open(onnx_filename, "wb") as f:
                try:
                    f.write(model_proto.SerializeToString())
                except ValueError as e:
                    raise ValueError(
                        "Error occured when running model_proto.SerializeToString(). If the model is larger than 2GB, please specify external_data=True when calling flow.onnx.export. Original error message:\n{}".format(
                            e
                        )
                    )
            return
    raise ValueError('Cannot find job "{}" in jobset'.format(job_name))
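A hedged usage sketch for Export: the error message above refers to it as flow.onnx.export, and the docstring expects the checkpoint to have been saved before exporting. Assuming the old lazy-mode API (flow.global_function, flow.train.CheckPoint) is available, and with build_lenet, lenet_job, and the paths as placeholders:

import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def lenet_job(images: tp.Numpy.Placeholder((1, 1, 28, 28))) -> tp.Numpy:
    return build_lenet(images)  # build_lenet is a hypothetical model-building helper

check_point = flow.train.CheckPoint()
check_point.init()
check_point.save("./lenet_weights")  # per the docstring, wait for saving to finish
flow.onnx.export(lenet_job, "./lenet_weights", "lenet.onnx", opset=11)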
Example #3
def UpdateInfo4InterfaceOp(self):
    # Cache the op attribute of every interface op by op name.
    for op_attr in c_api_util.GetInterfaceOpAttributes().op_attribute:
        self.interface_op_name2op_attr_[op_attr.op_conf.name] = op_attr
    for job in c_api_util.GetJobSet().job:
        # Map every op name in this job to the parallel conf of its placement group.
        op_name2parallel_conf = {}
        for placement_group in job.placement.placement_group:
            for op_name in placement_group.op_set.op_name:
                op_name2parallel_conf[op_name] = placement_group.parallel_conf
        # For each interface op, record its owning job and its parallel conf.
        for op_conf in job.net.op:
            if c_api_util.IsInterfaceOpConf(op_conf):
                self.interface_op_name2job_name_[op_conf.name] = job.job_conf.job_name
                self.lazy_interface_op_name2parallel_conf_[
                    op_conf.name
                ] = op_name2parallel_conf[op_conf.name]
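Once UpdateInfo4InterfaceOp has run, the three caches it fills can be queried by interface op name. A small illustrative lookup, where info stands for the object owning these attributes and the op name is a placeholder:

# Sketch: read back the metadata cached by UpdateInfo4InterfaceOp.
op_name = "Input_0"  # placeholder interface op name
op_attr = info.interface_op_name2op_attr_[op_name]
job_name = info.interface_op_name2job_name_[op_name]
parallel_conf = info.lazy_interface_op_name2parallel_conf_[op_name]
print(op_name, "belongs to job", job_name)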