Code Example #1
def indexed_slices_reduce_sum(
    indices: input_blob_util.ArgBlobDef,
    values: input_blob_util.ArgBlobDef,
    name: Optional[str] = None,
) -> Tuple[remote_blob_util.BlobDef]:
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("IndexedSlicesReduceSum_")
    else:
        op_conf.name = name

    op_conf.indexed_slices_reduce_sum_conf.x_indices = indices.unique_name
    op_conf.indexed_slices_reduce_sum_conf.x_values = values.unique_name
    op_conf.indexed_slices_reduce_sum_conf.y_indices = "y_indices"
    op_conf.indexed_slices_reduce_sum_conf.y_values = "y_values"
    op_conf.indexed_slices_reduce_sum_conf.num_unique = "num_unique"

    interpret_util.Forward(op_conf)
    y_indices_lbi = logical_blob_id_util.LogicalBlobId()
    y_indices_lbi.op_name = op_conf.name
    y_indices_lbi.blob_name = "y_indices"
    y_values_lbi = logical_blob_id_util.LogicalBlobId()
    y_values_lbi.op_name = op_conf.name
    y_values_lbi.blob_name = "y_values"
    num_unique_lbi = logical_blob_id_util.LogicalBlobId()
    num_unique_lbi.op_name = op_conf.name
    num_unique_lbi.blob_name = "num_unique"

    return (
        remote_blob_util.RemoteBlob(y_indices_lbi),
        remote_blob_util.RemoteBlob(y_values_lbi),
        remote_blob_util.RemoteBlob(num_unique_lbi),
    )
Code Example #2
def dynamic_binary_split(
    x: input_blob_util.ArgBlobDef,
    base_shift: int = 2,
    out_num: int = 2,
    name: Optional[str] = None,
) -> List[remote_blob_util.BlobDef]:
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("DynamicBinarySplit_")
    else:
        op_conf.name = name

    obns = []
    out_remote_blobs = []
    for i in range(out_num):
        obns.append("out_" + str(i))

    setattr(op_conf.dynamic_binary_split_conf, "in", x.unique_name)
    # op_conf.dynamic_binary_split_conf.in = x.unique_name
    op_conf.dynamic_binary_split_conf.out[:] = obns
    op_conf.dynamic_binary_split_conf.base_shift = base_shift

    interpret_util.Forward(op_conf)
    for i in range(out_num):
        out_lbi = logical_blob_id_util.LogicalBlobId()
        out_lbi.op_name = op_conf.name
        out_lbi.blob_name = obns[i]
        out_remote_blobs.append(remote_blob_util.RemoteBlob(out_lbi))

    return out_remote_blobs
Code Example #3
def elem_cnt(
    input_blob: remote_blob_util.BlobDef,
    axis: Optional[Sequence[int]] = None,
    dtype: Optional[dtype_util.dtype] = None,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("ShapeElemCnt_"),
    )
    op_conf.shape_elem_cnt_conf.x = input_blob.unique_name
    if axis is None:
        op_conf.shape_elem_cnt_conf.exclude_axis_conf.SetInParent()
    else:
        assert isinstance(axis, (tuple, list))
        op_conf.shape_elem_cnt_conf.include_axis_conf.axis.extend(axis)
    if dtype is not None:
        op_conf.shape_elem_cnt_conf.data_type = dtype.oneflow_proto_dtype
    op_conf.shape_elem_cnt_conf.y = "y"
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    out_lbi.op_name = op_conf.name
    out_lbi.blob_name = "y"
    return remote_blob_util.RemoteBlob(out_lbi)
Code Example #4
def broadcast_to_compatible_with(
    x: remote_blob_util.BlobDef,
    compatible: Sequence[remote_blob_util.BlobDef],
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    r"""Returns a 'Blob' with the shape can be broadcasted by other shapes

    Args:
        x (remote_blob_util.BlobDef): a 'Blob'
        compatible (Sequence[remote_blob_util.BlobDef]): Sequence of different shape
        name (Optional[str], optional): This operator's name. Defaults to None.

    Returns:
        remote_blob_util.BlobDef: A 'Blob' with the biggest shape
    """
    assert isinstance(compatible, (list, tuple))
    if name is None:
        name = id_util.UniqueStr("BroadcastToCompatibleWith_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.broadcast_to_compatible_with_conf, "x", x.unique_name)
    setattr(op_conf.broadcast_to_compatible_with_conf, "y", "y")
    op_conf.broadcast_to_compatible_with_conf.compatible.extend(
        [cp.unique_name for cp in compatible])
    interpret_util.Forward(op_conf)

    ret_lbi = logical_blob_id_util.LogicalBlobId()
    ret_lbi.op_name = op_conf.name
    ret_lbi.blob_name = "y"
    return remote_blob_util.RemoteBlob(ret_lbi)
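The docstring above has no usage example, so here is a minimal lazy-job sketch. It calls the broadcast_to_compatible_with helper defined above directly (assumed to be importable in the current scope); the job name, the shapes, and the use of flow.constant to build the compatible blobs are illustrative assumptions, not part of the original snippet.

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def broadcast_compatible_job(
    x: tp.Numpy.Placeholder(shape=(4, 1, 1), dtype=flow.float32)
) -> tp.Numpy:
    # Two constant blobs supply the shapes that x must become compatible with.
    a = flow.constant(value=1.0, shape=(1, 2, 1), dtype=flow.float)
    b = flow.constant(value=1.0, shape=(1, 1, 3), dtype=flow.float)
    # x is broadcast to the common compatible shape (4, 2, 3).
    return broadcast_to_compatible_with(x, [a, b])

out = broadcast_compatible_job(np.ones((4, 1, 1), dtype=np.float32))
# out.shape == (4, 2, 3)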
Code Example #5
File: constant_op.py Project: Sodu-Qinming/Oneflow
def constant_like(
    like: remote_blob_util.BlobDef,
    value: Union[int, float],
    dtype: Optional[dtype_util.dtype] = None,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("ConstantLike_"),
    )
    setattr(op_conf.constant_like_conf, "like", like.unique_name)
    if isinstance(value, int):
        op_conf.constant_like_conf.int_operand = value
    elif isinstance(value, float):
        op_conf.constant_like_conf.float_operand = value
    else:
        raise NotImplementedError
    if dtype is not None:
        setattr(op_conf.constant_like_conf, "data_type",
                dtype.oneflow_proto_dtype)
    setattr(op_conf.constant_like_conf, "out", "out")
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(out_lbi, "op_name", op_conf.name)
    setattr(out_lbi, "blob_name", "out")
    return remote_blob_util.RemoteBlob(out_lbi)
Code Example #6
def tensor_buffer_to_tensor_list(
    input: remote_blob_util.BlobDef,
    shape: Sequence[int],
    dtype: dtype_util.dtype,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    if name is None:
        name = id_util.UniqueStr("TensorBufferToList_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_buffer_to_tensor_list_conf, "in", input.unique_name)
    setattr(op_conf.tensor_buffer_to_tensor_list_conf, "out", "out")
    op_conf.tensor_buffer_to_tensor_list_conf.shape.dim[:] = list(shape)
    setattr(
        op_conf.tensor_buffer_to_tensor_list_conf,
        "data_type",
        dtype.oneflow_proto_dtype,
    )
    interpret_util.Forward(op_conf)

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Code Example #7
def argwhere(
    condition: remote_blob_util.BlobDef,
    dtype: Optional[dtype_util.dtype] = None,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    if name is None:
        name = id_util.UniqueStr("ArgWhere_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.arg_where_conf, "in", condition.unique_name)
    setattr(op_conf.arg_where_conf, "out", "out")
    setattr(op_conf.arg_where_conf, "out_size", "out_size")
    if dtype is not None:
        setattr(op_conf.arg_where_conf, "data_type", dtype.oneflow_proto_dtype)
    interpret_util.Forward(op_conf)

    arg_where_out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(arg_where_out_lbi, "op_name", op_conf.name)
    setattr(arg_where_out_lbi, "blob_name", "out")

    arg_where_out_size_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(arg_where_out_size_lbi, "op_name", op_conf.name)
    setattr(arg_where_out_size_lbi, "blob_name", "out_size")

    arg_where_out = remote_blob_util.RemoteBlob(arg_where_out_lbi)
    arg_where_out_size = remote_blob_util.RemoteBlob(arg_where_out_size_lbi)
    return sync_dynamic_resize(arg_where_out, arg_where_out_size)
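A minimal usage sketch for this helper inside a lazy job. Because the number of nonzero entries is data dependent, the result is fetched as a ListNumpy; the call goes through the argwhere helper defined above (assumed to be in scope), and the job name, shapes, and sample values are illustrative assumptions.

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def argwhere_job(
    x: tp.Numpy.Placeholder(shape=(2, 3), dtype=flow.float32)
) -> tp.ListNumpy:
    # Coordinates of the nonzero entries of x, already trimmed to the true
    # count via sync_dynamic_resize (see the return statement above).
    return argwhere(x)

x = np.array([[0, 1, 0],
              [2, 0, 2]], dtype=np.float32)
out = argwhere_job(x)
# out[0] -> [[0, 1], [1, 0], [1, 2]]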
Code Example #8
File: assign_op.py Project: zjureel/oneflow
def lazy_system_assign(ref, value, validate_shape=None, use_locking=None, name=None):
    op_conf = _SystemAssignOpConf(ref, value, name=name)
    device_tag, machine_device_ids = oneflow_api.GetDeviceTagAndMachineDeviceIds(
        ref.parallel_conf
    )
    with oneflow.scope.placement(device_tag, machine_device_ids):
        interpret_util.Forward(op_conf)
    return ref
Code Example #9
File: tensor_list_ops.py Project: zjureel/oneflow
def tensor_list_split(
        input_tensor_list: oneflow_api.BlobDesc,
        name: Optional[str] = None) -> Tuple[oneflow_api.BlobDesc]:
    """This operator splits the input `TensorList`. 

    Args:
        input_tensor_list (oneflow_api.BlobDesc): The input `TensorList`. 
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Returns:
        Tuple[oneflow_api.BlobDesc]: A Tuple of `ListNumpy`. 

    For example: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp
        from typing import Tuple


        func_config = flow.FunctionConfig()
        func_config.default_data_type(flow.float)
        func_config.default_logical_view(flow.scope.mirrored_view())
        @flow.global_function(function_config=func_config)
        def tensorList_split_Job(x: tp.ListListNumpy.Placeholder(shape=(2, 5, 4), dtype=flow.float32),
        ) -> Tuple[tp.ListNumpy, tp.ListNumpy]:
            return flow.tensor_list_split(x)


        x = np.random.rand(1, 3, 2).astype(np.float32)
        y = np.random.rand(1, 2, 2).astype(np.float32)
        out = tensorList_split_Job([[x, y]])

        # out[0][0].shape (3, 2)
        # out[1][0].shape (2, 2)

    """
    if name is None:
        name = id_util.UniqueStr("TensorListSplit_")

    output_size = input_tensor_list.shape[0]
    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_list_split_conf, "in",
            input_tensor_list.unique_name)
    op_conf.tensor_list_split_conf.out.extend(
        ["out_{}".format(i) for i in range(output_size)])
    interpret_util.Forward(op_conf)
    ret = []
    for i in range(output_size):
        out_lbi = logical_blob_id_util.LogicalBlobId()
        setattr(out_lbi, "op_name", op_conf.name)
        setattr(out_lbi, "blob_name", "out_{}".format(i))
        ret.append(remote_blob_util.RemoteBlob(out_lbi))
    return tuple(ret)
Code Example #10
File: tensor_list_ops.py Project: zjureel/oneflow
def tensor_list_to_tensor_buffer(
        input: oneflow_api.BlobDesc,
        name: Optional[str] = None) -> oneflow_api.BlobDesc:
    """This operator converts `TensorList` to `TensorBuffer`. 

    Refer to `Concept Explanation <https://docs.oneflow.org/basics_topics/concept_explanation.html#3tensorbuffer-tensorlist>`_ 
    for more about TensorList. 

    Args:
        input (oneflow_api.BlobDesc): The input `TensorList`. 
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Returns:
        oneflow_api.BlobDesc: The result Blob. 

    For example: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp

        func_config = flow.FunctionConfig()
        func_config.default_data_type(flow.float)
        func_config.default_logical_view(flow.scope.mirrored_view())
        @flow.global_function(function_config=func_config)
        def tensorList_to_tensorBuffer_Job(x: tp.ListListNumpy.Placeholder(shape=(2, 5, 4), dtype=flow.float32),
        ) -> tp.ListListNumpy:
            x = flow.tensor_list_to_tensor_buffer(input=x)
            return flow.tensor_buffer_to_tensor_list(x, 
                                                    shape=(5, 4), 
                                                    dtype=flow.float32)

        x = np.random.rand(1, 3, 2).astype(np.float32)
        y = np.random.rand(1, 2, 2).astype(np.float32)
        out = tensorList_to_tensorBuffer_Job([[x, y]])

        # out[0][0].shape (1, 3, 2)

    """
    if name is None:
        name = id_util.UniqueStr("TensorListToBuffer_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_list_to_tensor_buffer_conf, "in", input.unique_name)
    setattr(op_conf.tensor_list_to_tensor_buffer_conf, "out", "out")
    interpret_util.Forward(op_conf)

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Code Example #11
File: data_ops.py Project: zhenlin-work/oneflow
def image_decoder_random_crop_resize(
    input_blob: oneflow_api.BlobDesc,
    target_width: int,
    target_height: int,
    num_attempts: Optional[int] = None,
    seed: Optional[int] = None,
    random_area: Optional[Sequence[float]] = None,
    random_aspect_ratio: Optional[Sequence[float]] = None,
    num_workers: Optional[int] = None,
    warmup_size: Optional[int] = None,
    max_num_pixels: Optional[int] = None,
    name: Optional[str] = None,
) -> oneflow_api.BlobDesc:
    if name is None:
        name = id_util.UniqueStr("ImageDecoderRandomCropResize_")

    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    setattr(op_conf.image_decoder_random_crop_resize_conf, "in",
            input_blob.unique_name)
    op_conf.image_decoder_random_crop_resize_conf.out = "out"
    op_conf.image_decoder_random_crop_resize_conf.target_width = target_width
    op_conf.image_decoder_random_crop_resize_conf.target_height = target_height
    if num_attempts is not None:
        op_conf.image_decoder_random_crop_resize_conf.num_attempts = num_attempts
    if seed is not None:
        op_conf.image_decoder_random_crop_resize_conf.seed = seed
    if random_area is not None:
        assert len(random_area) == 2
        op_conf.image_decoder_random_crop_resize_conf.random_area_min = random_area[
            0]
        op_conf.image_decoder_random_crop_resize_conf.random_area_max = random_area[
            1]
    if random_aspect_ratio is not None:
        assert len(random_aspect_ratio) == 2
        op_conf.image_decoder_random_crop_resize_conf.random_aspect_ratio_min = random_aspect_ratio[
            0]
        op_conf.image_decoder_random_crop_resize_conf.random_aspect_ratio_max = random_aspect_ratio[
            1]
    if num_workers is not None:
        op_conf.image_decoder_random_crop_resize_conf.num_workers = num_workers
    if warmup_size is not None:
        op_conf.image_decoder_random_crop_resize_conf.warmup_size = warmup_size
    if max_num_pixels is not None:
        op_conf.image_decoder_random_crop_resize_conf.max_num_pixels = max_num_pixels
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Code Example #12
File: assign_op.py Project: zheddie/oneflow
def lazy_system_assign(ref, value, validate_shape=None, use_locking=None, name=None):
    op_conf = _SystemAssignOpConf(ref, value, name=name)
    (
        device_tag,
        machine_device_ids,
        hierarchy,
    ) = oneflow._oneflow_internal.GetDeviceTagAndMachineDeviceIdsAndHierarchy(
        ref.parallel_conf
    )
    if hierarchy is not None:
        hierarchy = tuple(hierarchy.dim())
    with oneflow.scope.placement(device_tag, machine_device_ids, hierarchy):
        interpret_util.Forward(op_conf)
    return ref
Code Example #13
def tensor_list_to_tensor_buffer(
        input: remote_blob_util.BlobDef,
        name: Optional[str] = None) -> remote_blob_util.BlobDef:
    if name is None:
        name = id_util.UniqueStr("TensorListToBuffer_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_list_to_tensor_buffer_conf, "in", input.unique_name)
    setattr(op_conf.tensor_list_to_tensor_buffer_conf, "out", "out")
    interpret_util.Forward(op_conf)

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Code Example #14
File: square_sum_op.py Project: Sodu-Qinming/Oneflow
def square_sum(
    x: input_blob_util.ArgBlobDef, name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("SquareSum_")
    else:
        op_conf.name = name

    op_conf.square_sum_conf.x = x.unique_name
    op_conf.square_sum_conf.y = "y"

    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "y"
    return remote_blob_util.RemoteBlob(lbi)
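A minimal usage sketch, calling the square_sum helper defined above inside a lazy job (assumed to be in scope); the job name and shape are illustrative assumptions.

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def square_sum_job(
    x: tp.Numpy.Placeholder(shape=(3, 3), dtype=flow.float32)
) -> tp.Numpy:
    # y holds sum(x * x) over all elements of x as a single-element blob.
    return square_sum(x)

out = square_sum_job(np.ones((3, 3), dtype=np.float32))
# out -> [9.]  (nine 1.0**2 terms)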
Code Example #15
def dynamic_binary_concat(
    input_blob_list: Sequence[remote_blob_util.BlobDef],
    source_blob: input_blob_util.ArgBlobDef,
    source_sbp: str = "S:0",
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("DynamicBinaryConcat_")
    else:
        op_conf.name = name

    in_lbns = []
    for in_blob in input_blob_list:
        in_lbns.append(in_blob.unique_name)

    getattr(op_conf.dynamic_binary_concat_conf, "in").extend(in_lbns)
    # op_conf.dynamic_binary_concat_conf.in[:] = in_lbns
    op_conf.dynamic_binary_concat_conf.out = "out"
    op_conf.dynamic_binary_concat_conf.out_data_type = (
        source_blob.dtype.oneflow_proto_dtype)
    op_conf.dynamic_binary_concat_conf.out_shape.dim.extend(
        list(source_blob.shape))
    if source_blob.batch_axis is not None:
        op_conf.dynamic_binary_concat_conf.out_batch_axis.value = source_blob.batch_axis
    else:
        op_conf.dynamic_binary_concat_conf.out_batch_axis.SetInParent()
    if "S" in source_sbp:
        axis = int(source_sbp.split(":")[-1])
        op_conf.dynamic_binary_concat_conf.out_sbp.split_parallel.axis = axis
    elif "B" in source_sbp:
        op_conf.dynamic_binary_concat_conf.out_sbp.broadcast_parallel.SetInParent(
        )
    elif "P" in source_sbp:
        op_conf.dynamic_binary_concat_conf.out_sbp.partial_sum_parallel.SetInParent(
        )
    else:
        print("Error! invalid sbp str:", source_sbp)
        op_conf.dynamic_binary_concat_conf.out_sbp.SetInParent()

    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    out_lbi.op_name = op_conf.name
    out_lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(out_lbi)
Code Example #16
def reshape(
    x: remote_blob_util.BlobDef, shape: Sequence[int], name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    r"""Reshapes a blob.

    Args:
        x: A `Blob`.
        shape: Shape of the output blob.
        name: A name for the operation (optional).
    Returns:
        A `Blob`, has the same type as `x`.
    """
    x = flow.cast_to_current_logical_view(x)
    assert isinstance(shape, tuple) or isinstance(shape, list)
    shape = list(shape)
    assert all(dim == -1 or dim > 0 for dim in shape)
    assert shape.count(-1) <= 1
    if not x.is_dynamic:
        if name is None:
            name = id_util.UniqueStr("Reshape_")
        return (
            flow.user_op_builder(name)
            .Op("reshape")
            .Input("in", [x])
            .Output("out")
            .Attr("shape", infer_shape(x, shape))
            .Build()
            .InferAndTryRun()
            .RemoteBlobList()[0]
        )
    else:
        op_conf = op_conf_util.OperatorConf()
        setattr(
            op_conf,
            "name",
            name if name is not None else id_util.UniqueStr("DynamicReshape_"),
        )
        setattr(op_conf.dynamic_reshape_conf, "in", x.unique_name)
        op_conf.dynamic_reshape_conf.shape.dim.extend(list(shape))
        setattr(op_conf.dynamic_reshape_conf, "out", "out")
        interpret_util.Forward(op_conf)
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = op_conf.name
        lbi.blob_name = "out"
        return remote_blob_util.RemoteBlob(lbi)
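A minimal usage sketch for the static branch above, calling the reshape helper defined in this snippet (assumed to be in scope together with its module-level infer_shape helper); the job name and shapes are illustrative assumptions.

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def reshape_job(
    x: tp.Numpy.Placeholder(shape=(4, 4), dtype=flow.float32)
) -> tp.Numpy:
    # At most one dimension may be -1; it is inferred from the element count.
    return reshape(x, shape=(2, -1))

out = reshape_job(np.arange(16, dtype=np.float32).reshape(4, 4))
# out.shape == (2, 8)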
Code Example #17
File: array_ops.py Project: doombeaker/oneflow
def elem_cnt(
    inputs: remote_blob_util.BlobDef,
    dtype: Optional[dtype_util.dtype] = None,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name",
            name if name is not None else id_util.UniqueStr("ElemCnt_"))
    op_conf.shape_elem_cnt_conf.x = inputs.unique_name

    op_conf.shape_elem_cnt_conf.exclude_axis_conf.SetInParent()
    if dtype is not None:
        op_conf.shape_elem_cnt_conf.data_type = dtype.oneflow_proto_dtype
    op_conf.shape_elem_cnt_conf.y = "y"
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(out_lbi, "op_name", op_conf.name)
    setattr(out_lbi, "blob_name", "y")
    return remote_blob_util.RemoteBlob(out_lbi)
Code Example #18
def dynamic_reshape(
    x: remote_blob_util.BlobDef, shape: Sequence[int], name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    assert isinstance(shape, tuple) or isinstance(shape, list)
    shape = list(shape)
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("DynamicReshape_"),
    )
    setattr(op_conf.dynamic_reshape_conf, "in", x.unique_name)
    op_conf.dynamic_reshape_conf.shape.dim.extend(list(shape))
    setattr(op_conf.dynamic_reshape_conf, "out", "out")
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Code Example #19
def tensor_list_split(
        input_tensor_list: remote_blob_util.BlobDef,
        name: Optional[str] = None) -> Tuple[remote_blob_util.BlobDef]:
    if name is None:
        name = id_util.UniqueStr("TensorListSplit_")

    output_size = input_tensor_list.shape[0]
    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_list_split_conf, "in",
            input_tensor_list.unique_name)
    op_conf.tensor_list_split_conf.out.extend(
        ["out_{}".format(i) for i in range(output_size)])
    interpret_util.Forward(op_conf)
    ret = []
    for i in range(output_size):
        out_lbi = logical_blob_id_util.LogicalBlobId()
        setattr(out_lbi, "op_name", op_conf.name)
        setattr(out_lbi, "blob_name", "out_{}".format(i))
        ret.append(remote_blob_util.RemoteBlob(out_lbi))
    return tuple(ret)
Code Example #20
def stack(
    inputs: Sequence[remote_blob_util.BlobDef], axis: int, name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    if not isinstance(inputs, (list, tuple)):
        inputs = [inputs]

    if axis < 0:
        axis = axis + len(inputs[0].shape)

    assert axis == 0, "Only support dim0 stack now."

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name or id_util.UniqueStr("Stack_"))
    getattr(op_conf.stack_conf, "in").extend([input.unique_name for input in inputs])
    setattr(op_conf.stack_conf, "axis", axis)
    setattr(op_conf.stack_conf, "out", "out")
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Code Example #21
def sync_dynamic_resize(
    inputs: remote_blob_util.BlobDef,
    size: remote_blob_util.BlobDef,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("SyncDynamicResize_"),
    )
    setattr(op_conf.sync_dynamic_resize_conf, "in", inputs.unique_name)
    setattr(op_conf.sync_dynamic_resize_conf, "size", size.unique_name)
    setattr(op_conf.sync_dynamic_resize_conf, "axis", 0)
    setattr(op_conf.sync_dynamic_resize_conf, "out", "out")
    setattr(op_conf.sync_dynamic_resize_conf, "eager", flow.eager_execution_enabled())
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(out_lbi, "op_name", op_conf.name)
    setattr(out_lbi, "blob_name", "out")
    return remote_blob_util.RemoteBlob(out_lbi)
Code Example #22
def unique_with_counts(
    x: input_blob_util.ArgBlobDef,
    out_idx: flow.dtype = flow.int32,
    name: Optional[str] = None,
) -> Tuple[oneflow_api.BlobDesc]:
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("UniqueWithCounts_")
    else:
        op_conf.name = name

    op_conf.unique_with_counts_conf.x = x.unique_name
    op_conf.unique_with_counts_conf.y = "y"
    op_conf.unique_with_counts_conf.idx = "idx"
    op_conf.unique_with_counts_conf.count = "count"
    op_conf.unique_with_counts_conf.num_unique = "num_unique"
    op_conf.unique_with_counts_conf.out_idx = oneflow_api.deprecated.GetProtoDtype4OfDtype(
        out_idx
    )

    interpret_util.Forward(op_conf)
    y_lbi = logical_blob_id_util.LogicalBlobId()
    y_lbi.op_name = op_conf.name
    y_lbi.blob_name = "y"
    idx_lbi = logical_blob_id_util.LogicalBlobId()
    idx_lbi.op_name = op_conf.name
    idx_lbi.blob_name = "idx"
    count_lbi = logical_blob_id_util.LogicalBlobId()
    count_lbi.op_name = op_conf.name
    count_lbi.blob_name = "count"
    num_unique_lbi = logical_blob_id_util.LogicalBlobId()
    num_unique_lbi.op_name = op_conf.name
    num_unique_lbi.blob_name = "num_unique"

    return (
        remote_blob_util.RemoteBlob(y_lbi),
        remote_blob_util.RemoteBlob(idx_lbi),
        remote_blob_util.RemoteBlob(count_lbi),
        remote_blob_util.RemoteBlob(num_unique_lbi),
    )
Code Example #23
File: array_ops.py Project: doombeaker/oneflow
def identity(x: remote_blob_util.BlobDef,
             name: Optional[str] = None) -> remote_blob_util.BlobDef:
    r"""Return a `Blob` has identical content and data type to input `Blob`. Analogous to `tf.identity <https://www.tensorflow.org/api_docs/python/tf/identity>`_

    Args:
        input: a `Blob`
        name: name of this operator. `None` by default
    
    Returns:
        A `Blob`
    """
    if name is None:
        name = id_util.UniqueStr("Identity_")
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    setattr(op_conf.identity_conf, "in", x.unique_name)
    op_conf.identity_conf.out = "out"
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
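A minimal usage sketch, calling the identity helper defined above inside a lazy job (assumed to be in scope); the job name and shape are illustrative assumptions.

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def identity_job(
    x: tp.Numpy.Placeholder(shape=(2, 3), dtype=flow.float32)
) -> tp.Numpy:
    # Pass-through op: the output blob has the same content and dtype as x.
    return identity(x)

out = identity_job(np.random.rand(2, 3).astype(np.float32))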
Code Example #24
def broadcast_to_compatible_with(
    x: remote_blob_util.BlobDef,
    compatible: Sequence[remote_blob_util.BlobDef],
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    assert isinstance(compatible, (list, tuple))
    if name is None:
        name = id_util.UniqueStr("BroadcastToCompatibleWith_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.broadcast_to_compatible_with_conf, "x", x.unique_name)
    setattr(op_conf.broadcast_to_compatible_with_conf, "y", "y")
    op_conf.broadcast_to_compatible_with_conf.compatible.extend(
        [cp.unique_name for cp in compatible]
    )
    interpret_util.Forward(op_conf)

    ret_lbi = logical_blob_id_util.LogicalBlobId()
    ret_lbi.op_name = op_conf.name
    ret_lbi.blob_name = "y"
    return remote_blob_util.RemoteBlob(ret_lbi)
Code Example #25
def identity_n(
    inputs: Iterable[remote_blob_util.BlobDef], name: Optional[str] = None
) -> List[remote_blob_util.BlobDef]:
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf, "name", name if name is not None else id_util.UniqueStr("IdentityN_"),
    )
    assert len(inputs) > 1
    out_bns = []
    for idx, blob in enumerate(inputs):
        getattr(op_conf.tuple_identity_conf, "in").append(blob.unique_name)
        out_bn = "out_" + str(idx)
        getattr(op_conf.tuple_identity_conf, "out").append(out_bn)
        out_bns.append(out_bn)
    interpret_util.Forward(op_conf)

    def bn_to_remote_blob(bn):
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = op_conf.name
        lbi.blob_name = bn
        return remote_blob_util.RemoteBlob(lbi)

    return list(map(bn_to_remote_blob, out_bns))
Code Example #26
def elem_cnt(
    input_blob: remote_blob_util.BlobDef,
    axis: Optional[Sequence[int]] = None,
    dtype: Optional[dtype_util.dtype] = None,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    r"""Reduce shape emements count accross 'axis'

    Args:
        input_blob (remote_blob_util.BlobDef): a 'Blob'
        axis (Optional[Sequence[int]], optional): The axis need to be reduced. Defaults to None.
        dtype (Optional[int], optional): A OneFlow data type (flow.int32). Defaults to None.
        name (Optional[str], optional): This operator's name. Defaults to None.

    Returns:
        remote_blob_util.BlobDef: A 'Blob'
    """
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("ShapeElemCnt_"),
    )
    op_conf.shape_elem_cnt_conf.x = input_blob.unique_name
    if axis is None:
        op_conf.shape_elem_cnt_conf.exclude_axis_conf.SetInParent()
    else:
        assert isinstance(axis, (tuple, list))
        op_conf.shape_elem_cnt_conf.include_axis_conf.axis.extend(axis)
    if dtype is not None:
        op_conf.shape_elem_cnt_conf.data_type = dtype.oneflow_proto_dtype
    op_conf.shape_elem_cnt_conf.y = "y"
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    out_lbi.op_name = op_conf.name
    out_lbi.blob_name = "y"
    return remote_blob_util.RemoteBlob(out_lbi)
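A minimal usage sketch, calling the elem_cnt helper defined above inside a lazy job (assumed to be in scope); the job name, shapes, and axes are illustrative assumptions.

import numpy as np
import oneflow as flow
import oneflow.typing as tp

@flow.global_function()
def elem_cnt_job(
    x: tp.Numpy.Placeholder(shape=(2, 3, 4), dtype=flow.float32)
) -> tp.Numpy:
    # Product of the shape dimensions selected by axis: 3 * 4 = 12.
    return elem_cnt(x, axis=[1, 2], dtype=flow.int32)

out = elem_cnt_job(np.zeros((2, 3, 4), dtype=np.float32))
# out -> [12]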
Code Example #27
File: constant_op.py Project: zheddie/oneflow
def constant_like(
    like: oneflow._oneflow_internal.BlobDesc,
    value: Union[int, float],
    dtype: Optional[flow.dtype] = None,
    name: Optional[str] = None,
) -> oneflow._oneflow_internal.BlobDesc:
    """This operator creates a constant Blob that has the same shape as `like`.

    Args:
        like (oneflow._oneflow_internal.BlobDesc): A Blob.
        value (Union[int, float]): The constant value of Blob.
        dtype (Optional[flow.dtype], optional): The data type of Blob. Defaults to None.
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Raises:
        NotImplementedError: The data type of value should be int or float.

    Returns:
        oneflow._oneflow_internal.BlobDesc: The result Blob.

    For example:

    .. code-block:: python

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def constant_like_Job() -> tp.Numpy:
            constant_blob = flow.constant(value=1.5,
                                        shape=(1, 3, 3),
                                        dtype=flow.float)
            constant_like_blob = flow.constant_like(like=constant_blob,
                                                    value=5.5,
                                                    dtype=flow.float)
            return constant_like_blob


        out = constant_like_Job()

        # out [[[5.5 5.5 5.5]
        #       [5.5 5.5 5.5]
        #       [5.5 5.5 5.5]]]

    """
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("ConstantLike_"),
    )
    setattr(op_conf.constant_like_conf, "like", like.unique_name)
    if isinstance(value, int):
        op_conf.constant_like_conf.int_operand = value
    elif isinstance(value, float):
        op_conf.constant_like_conf.float_operand = value
    else:
        raise NotImplementedError
    if dtype is not None:
        setattr(
            op_conf.constant_like_conf,
            "data_type",
            oneflow._oneflow_internal.deprecated.GetProtoDtype4OfDtype(dtype),
        )
    setattr(op_conf.constant_like_conf, "out", "out")
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(out_lbi, "op_name", op_conf.name)
    setattr(out_lbi, "blob_name", "out")
    return remote_blob_util.RemoteBlob(out_lbi)
Code Example #28
def InferAndTryRun(self):
    interpret_util.Forward(self.op_conf_)
    return self
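For context, this fragment is the step invoked at the end of the user-op builder chain shown in Code Example #16; the chain is reproduced below with the names used there (name, x, shape, and infer_shape all come from that example):

(
    flow.user_op_builder(name)
    .Op("reshape")
    .Input("in", [x])
    .Output("out")
    .Attr("shape", infer_shape(x, shape))
    .Build()
    .InferAndTryRun()
    .RemoteBlobList()[0]
)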
Code Example #29
File: tensor_list_ops.py Project: zjureel/oneflow
def tensor_buffer_to_tensor_list(
    input: oneflow_api.BlobDesc,
    shape: Sequence[int],
    dtype: dtype_util.dtype,
    name: Optional[str] = None,
) -> oneflow_api.BlobDesc:
    """This operator converts `TensorBuffer` to `TensorList`. 

    Refer to `Concept Explanation <https://docs.oneflow.org/basics_topics/concept_explanation.html#3tensorbuffer-tensorlist>`_ 
    for more about TensorList. 

    Args:
        input (oneflow_api.BlobDesc): The input Tensor Buffer. 
        shape (Sequence[int]): The shape of input Tensor Buffer. 
        dtype (dtype_util.dtype): The data type. 
        name (Optional[str], optional): The name for the operation. Defaults to None.

    Returns:
        oneflow_api.BlobDesc: The result Blob. 
    
    For example: 

    .. code-block:: python 

        import oneflow as flow
        import numpy as np
        import oneflow.typing as tp


        @flow.global_function()
        def tensorBuffer_to_tensorList_Job(x: tp.Numpy.Placeholder(shape=(4, 16, 64, 64), dtype=flow.float32),
        ) -> tp.ListListNumpy:
            x = flow.tensor_to_tensor_buffer(x, 
                                            instance_dims=3)
            out = flow.tensor_buffer_to_tensor_list(input=x, 
                                                    shape=(16, 64, 64), 
                                                    dtype=flow.float32)
            return out

        x = np.random.randn(4, 16, 64, 64).astype(np.float32)
        out = tensorBuffer_to_tensorList_Job(x)

        # out[0][0].shape (1, 16, 64, 64)

    """
    if name is None:
        name = id_util.UniqueStr("TensorBufferToList_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_buffer_to_tensor_list_conf, "in", input.unique_name)
    setattr(op_conf.tensor_buffer_to_tensor_list_conf, "out", "out")
    op_conf.tensor_buffer_to_tensor_list_conf.shape.dim[:] = list(shape)
    setattr(
        op_conf.tensor_buffer_to_tensor_list_conf,
        "data_type",
        dtype.oneflow_proto_dtype,
    )
    interpret_util.Forward(op_conf)

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)