Code Example #1
def onnxifi_caffe2_net(pred_net,
                       input_shapes,
                       infer_shapes=False,
                       debug=False,
                       use_onnx=True):
    """
    Transform the caffe2_net by collapsing ONNXIFI-runnable nodes into Onnxifi c2 ops
    """
    # Inject a fake input tensor to help populate the shape if we
    # do not do shape inference
    shape_hints = {}
    external_inputs = []
    if not infer_shapes:
        for k, v in input_shapes.items():
            need_input_tensor = True
            if workspace.HasBlob(k):
                itensor = workspace.FetchBlob(k)
                if itensor.shape == v:
                    need_input_tensor = False
            if need_input_tensor:
                workspace.FeedBlob(k, np.random.randn(*v).astype(np.float32))
                external_inputs.append(k)

    for k, v in input_shapes.items():
        shape_hints[k] = v
    pred_net_str = C.onnxifi(pred_net.SerializeToString(), external_inputs,
                             shape_hints, infer_shapes, debug, use_onnx)
    pred_net_cut = caffe2_pb2.NetDef()
    pred_net_cut.ParseFromString(pred_net_str)
    return pred_net_cut
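A minimal, hedged usage sketch for this variant follows; the import path (guessed from the file name onnxifi.py in the pytorch project), the model file path, and the input blob name are placeholders, not taken from the example itself:

# Minimal usage sketch; import path, file path, and blob name are assumptions.
from caffe2.proto import caffe2_pb2
from caffe2.python import workspace
from caffe2.python.onnx.onnxifi import onnxifi_caffe2_net  # assumed module path

pred_net = caffe2_pb2.NetDef()
with open("pred_net.pb", "rb") as f:  # hypothetical serialized predict net
    pred_net.ParseFromString(f.read())

# With infer_shapes=False (the default), any external input whose shape is not
# already present in the workspace is fed a random fp32 tensor of the given
# shape before the transform runs.
onnxified = onnxifi_caffe2_net(
    pred_net,
    input_shapes={"data": (1, 3, 224, 224)},  # hypothetical input blob
    infer_shapes=False,
)
workspace.CreateNet(onnxified)  # assumes the net's weight blobs are already loaded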
Code Example #2
def onnxifi_caffe2_net(pred_net,
                       input_shapes,
                       max_batch_size=1,
                       max_seq_size=1,
                       debug=False,
                       use_onnx=True,
                       merge_fp32_inputs_into_fp16=False,
                       adjust_batch=True,
                       black_list=None,
                       weight_names=None,
                       timeout=0):
    """
    Transform the caffe2_net by collapsing ONNXIFI-runnable nodes into Onnxifi c2 ops
    """
    shape_hints = {}
    for k, v in input_shapes.items():
        shape_hints[k] = v
    pred_net_str = C.onnxifi(pred_net.SerializeToString(), shape_hints,
                             black_list if black_list else [],
                             weight_names if weight_names is not None else [],
                             max_batch_size, max_seq_size, timeout,
                             adjust_batch, debug, merge_fp32_inputs_into_fp16,
                             use_onnx)
    pred_net_cut = caffe2_pb2.NetDef()
    pred_net_cut.ParseFromString(pred_net_str)
    return pred_net_cut
Code Example #3
def onnxifi_caffe2_net(pred_net,
                       input_shapes,
                       max_batch_size=1,
                       max_seq_size=1,
                       debug=False,
                       use_onnx=True):
    """
    Transform the caffe2_net by collapsing ONNXIFI-runnable nodes into Onnxifi c2 ops
    """
    shape_hints = {}
    for k, v in input_shapes.items():
        shape_hints[k] = v
    pred_net_str = C.onnxifi(pred_net.SerializeToString(), shape_hints,
                             max_batch_size, max_seq_size, debug, use_onnx)
    pred_net_cut = caffe2_pb2.NetDef()
    pred_net_cut.ParseFromString(pred_net_str)
    return pred_net_cut
Code Example #4
File: onnxifi.py  Project: zhangtao7750/pytorch
def onnxifi_caffe2_net(pred_net,
                       input_shapes,
                       max_batch_size=1,
                       max_seq_size=1,
                       debug=False,
                       use_onnx=True,
                       merge_fp32_inputs_into_fp16=False,
                       adjust_batch=True,
                       block_list=None,
                       weight_names=None,
                       net_ssa_rewritten=False,
                       timeout=0):
    """
    Transform the caffe2_net by collapsing ONNXIFI-runnable nodes into Onnxifi c2 ops
    """
    shape_hints = caffe2_pb2.TensorBoundShapes()
    if type(input_shapes) is caffe2_pb2.TensorBoundShapes:
        shape_hints = input_shapes
    elif type(input_shapes) is dict:
        for k, v in input_shapes.items():
            tbs = caffe2_pb2.TensorBoundShape()
            tbs.name = k
            tbs.shape.dims.extend(v)
            tbs.dim_type.extend([caffe2_pb2.TensorBoundShape.CONSTANT] *
                                len(tbs.shape.dims))
            tbs.dim_type[0] = caffe2_pb2.TensorBoundShape.BATCH
            shape_hints.shapes.extend([tbs])
        shape_hints.max_batch_size = max_batch_size
        shape_hints.max_feature_len = max_seq_size
    pred_net_str = C.onnxifi(pred_net.SerializeToString(),
                             shape_hints.SerializeToString(),
                             block_list if block_list else [],
                             weight_names if weight_names is not None else [],
                             max_batch_size, max_seq_size, timeout,
                             adjust_batch, debug, merge_fp32_inputs_into_fp16,
                             net_ssa_rewritten, use_onnx)
    pred_net_cut = caffe2_pb2.NetDef()
    pred_net_cut.ParseFromString(pred_net_str)
    return pred_net_cut
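This variant also accepts a pre-built caffe2_pb2.TensorBoundShapes proto as input_shapes, in which case it is used as-is instead of being converted from a dict. A short sketch of constructing that proto directly, using only the fields the example above touches (the tensor name and dims are placeholders):

# Sketch: build the bound-shape hints proto directly (name/dims are placeholders).
from caffe2.proto import caffe2_pb2

shape_hints = caffe2_pb2.TensorBoundShapes()
tbs = caffe2_pb2.TensorBoundShape()
tbs.name = "data"                                    # hypothetical input blob
tbs.shape.dims.extend([32, 3, 224, 224])
tbs.dim_type.extend([caffe2_pb2.TensorBoundShape.CONSTANT] * 4)
tbs.dim_type[0] = caffe2_pb2.TensorBoundShape.BATCH  # first dim is the batch dim
shape_hints.shapes.extend([tbs])
shape_hints.max_batch_size = 32
shape_hints.max_feature_len = 1

# onnxifi_caffe2_net(pred_net, shape_hints, max_batch_size=32, ...) would then
# use this proto unchanged.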
Code Example #5
File: onnxifi.py  Project: zsk423200/pytorch
def onnxifi_caffe2_net(pred_net,
                       input_shapes,
                       populate_shapes=False,
                       debug=False):
    """
    Transform the caffe2_net by collapsing ONNXIFI-runnable nodes into Onnxifi c2 ops
    """
    # Hacky way to infer shapes as not all our operators have shape inference function.
    # Normally this is not needed
    shape_hints = {}
    if populate_shapes:
        input_data = {}
        for k, v in input_shapes.items():
            input_data[k] = np.random.randn(*v).astype(np.float32)
        shape_hints = _infer_shapes(pred_net, input_data)

    for k, v in input_shapes.items():
        shape_hints[k] = v
    pred_net_str = C.onnxifi(pred_net.SerializeToString(), shape_hints, debug)
    pred_net_cut = caffe2_pb2.NetDef()
    pred_net_cut.ParseFromString(pred_net_str)
    return pred_net_cut