Example #1
def unique_with_counts(
    x: input_blob_util.ArgBlobDef,
    out_idx: dtype_util.dtype = dtype_util.int32,
    name: Optional[str] = None,
) -> Tuple[
    remote_blob_util.BlobDef,
    remote_blob_util.BlobDef,
    remote_blob_util.BlobDef,
    remote_blob_util.BlobDef,
]:
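    # Outputs: y (the unique values), idx (index of each element of x in y),
    # count (occurrences of each unique value), and num_unique (their number).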
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("UniqueWithCounts_")
    else:
        op_conf.name = name

    op_conf.unique_with_counts_conf.x = x.unique_name
    op_conf.unique_with_counts_conf.y = "y"
    op_conf.unique_with_counts_conf.idx = "idx"
    op_conf.unique_with_counts_conf.count = "count"
    op_conf.unique_with_counts_conf.num_unique = "num_unique"
    op_conf.unique_with_counts_conf.out_idx = out_idx.oneflow_proto_dtype

    interpret_util.Forward(op_conf)
    y_lbi = logical_blob_id_util.LogicalBlobId()
    y_lbi.op_name = op_conf.name
    y_lbi.blob_name = "y"
    idx_lbi = logical_blob_id_util.LogicalBlobId()
    idx_lbi.op_name = op_conf.name
    idx_lbi.blob_name = "idx"
    count_lbi = logical_blob_id_util.LogicalBlobId()
    count_lbi.op_name = op_conf.name
    count_lbi.blob_name = "count"
    num_unique_lbi = logical_blob_id_util.LogicalBlobId()
    num_unique_lbi.op_name = op_conf.name
    num_unique_lbi.blob_name = "num_unique"

    return (
        remote_blob_util.RemoteBlob(y_lbi),
        remote_blob_util.RemoteBlob(idx_lbi),
        remote_blob_util.RemoteBlob(count_lbi),
        remote_blob_util.RemoteBlob(num_unique_lbi),
    )
Example #2
def distribute_add(xs, name=None):
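    # Expect exactly one consistent input blob per rank of the current placement.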
    assert oneflow.placement.current_scope().parallel_size == len(xs)
    if name is None:
        name = id_util.UniqueStr("DistributeAdd_")
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    getattr(op_conf.distribute_add_conf,
            "in").extend([_SoleConsistentLbn(x) for x in xs])
    op_conf.distribute_add_conf.out = "out"
    interpret_util.ConsistentForward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Example #3
    def __init__(self, shape, dtype, batch_axis, name=None):
        lbi = lbi_util.LogicalBlobId()
        if name is None:
            name = id_util.UniqueStr("Input_")
        lbi.op_name = name
        lbi.blob_name = "out"
        blob_desc.BlobDesc.__init__(self, lbi)
        assert type(shape) is tuple
        for dim in shape:
            assert type(dim) is int
            assert dim > 0
        self.shape_ = shape
        self.dtype_ = dtype
        self.batch_axis_ = batch_axis
Example #4
def image_decoder_random_crop_resize(
    input_blob: oneflow_api.BlobDesc,
    target_width: int,
    target_height: int,
    num_attempts: Optional[int] = None,
    seed: Optional[int] = None,
    random_area: Optional[Sequence[float]] = None,
    random_aspect_ratio: Optional[Sequence[float]] = None,
    num_workers: Optional[int] = None,
    warmup_size: Optional[int] = None,
    max_num_pixels: Optional[int] = None,
    name: Optional[str] = None,
) -> oneflow_api.BlobDesc:
    if name is None:
        name = id_util.UniqueStr("ImageDecoderRandomCropResize_")

    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    setattr(op_conf.image_decoder_random_crop_resize_conf, "in",
            input_blob.unique_name)
    op_conf.image_decoder_random_crop_resize_conf.out = "out"
    op_conf.image_decoder_random_crop_resize_conf.target_width = target_width
    op_conf.image_decoder_random_crop_resize_conf.target_height = target_height
    if num_attempts is not None:
        op_conf.image_decoder_random_crop_resize_conf.num_attempts = num_attempts
    if seed is not None:
        op_conf.image_decoder_random_crop_resize_conf.seed = seed
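    # random_area and random_aspect_ratio, if given, are [min, max] pairs.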
    if random_area is not None:
        assert len(random_area) == 2
        op_conf.image_decoder_random_crop_resize_conf.random_area_min = random_area[0]
        op_conf.image_decoder_random_crop_resize_conf.random_area_max = random_area[1]
    if random_aspect_ratio is not None:
        assert len(random_aspect_ratio) == 2
        op_conf.image_decoder_random_crop_resize_conf.random_aspect_ratio_min = random_aspect_ratio[0]
        op_conf.image_decoder_random_crop_resize_conf.random_aspect_ratio_max = random_aspect_ratio[1]
    if num_workers is not None:
        op_conf.image_decoder_random_crop_resize_conf.num_workers = num_workers
    if warmup_size is not None:
        op_conf.image_decoder_random_crop_resize_conf.warmup_size = warmup_size
    if max_num_pixels is not None:
        op_conf.image_decoder_random_crop_resize_conf.max_num_pixels = max_num_pixels
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Example #5
def unpack(input, unpack_num, name=None):
    assert not oneflow.eager_execution_enabled()
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf, "name", name if name is not None else id_util.UniqueStr("Unpack_"),
    )
    setattr(op_conf.unpack_conf, "in", input.unique_name)
    op_conf.unpack_conf.out = "out"
    op_conf.unpack_conf.unpack_num = unpack_num
    compile_context.CurJobAddOp(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
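A minimal call sketch (hypothetical blob `batch` inside a legacy lazy-mode job; assumes the usual pack/unpack semantics where dim0 must be divisible by `unpack_num`):

piece = unpack(batch, unpack_num=4)  # each step then sees batch.shape[0] // 4 rows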
Example #6
def _GenModelIOPathInputOpConfAndRetLbi():
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = "model_io_path_input"
    op_conf.device_tag = "cpu"
    op_conf.input_conf.out = "out"
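    # The file path is fed in as a dynamic int8 blob of up to 65536 bytes.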
    blob_conf = inter_face_blob_conf_util.InterfaceBlobConf()
    blob_conf.shape.dim.append(65536)
    blob_conf.data_type = oneflow._oneflow_internal.deprecated.GetProtoDtype4OfDtype(
        flow.int8)
    blob_conf.is_dynamic = True
    op_conf.input_conf.blob_conf.CopyFrom(blob_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = op_conf.input_conf.out
    return (op_conf, lbi)
Example #7
def _GenModelLoadOpConfAndRetLbi(var_op_conf, path_lbi):
    variable_op_conf = op_conf_util.VariableOpConf()
    variable_op_conf.CopyFrom(var_op_conf.variable_conf)
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = "model_load"
    op_conf.device_tag = "cpu"
    op_conf.model_load_conf.path = "{}/{}".format(path_lbi.op_name,
                                                  path_lbi.blob_name)
    op_conf.model_load_conf.out.append("out_0")
    op_conf.model_load_conf.variable_op_name.append(var_op_conf.name)
    op_conf.model_load_conf.original_variable_conf.append(variable_op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = op_conf.model_load_conf.out[0]
    return (op_conf, lbi)
Example #8
        def Build(builder, Yield):
            blob_object = _GetInterfaceBlobObject(builder, op_name)
            lbi = logical_blob_id_util.LogicalBlobId()
            lbi.op_name = op_name
            op_attribute = sess.OpAttribute4InterfaceOpName(op_name)
            assert len(op_attribute.output_bns) == 1
            lbi.blob_name = op_attribute.output_bns[0]
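            # Wrap the blob as eager-mirrored or eager-consistent depending on
            # its parallel attribute.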
            if blob_object.op_arg_parallel_attr.is_mirrored():
                remote_blob = remote_blob_util.EagerMirroredBlob(
                    lbi, blob_object, job_name)
            else:
                remote_blob = remote_blob_util.EagerConsistentBlob(
                    lbi, blob_object, job_name)

            Yield(remote_blob)
Example #9
def _CreateEagerVariableBlob(op_attribute):
    bn_in_op2blob_object = {}
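    # Filled in by StatelessCall: maps blob-name-in-op (e.g. "out") to the
    # eager blob object produced by running the instruction.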

    def BuildInstruction(builder):
        parallel_conf = oneflow.placement.current_scope().default_parallel_conf
        builder.StatelessCall(op_attribute,
                              parallel_conf,
                              bn_in_op2blob_object=bn_in_op2blob_object)

    vm_util.LogicalRun(BuildInstruction)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_attribute.op_conf.name
    lbi.blob_name = op_attribute.op_conf.variable_conf.out
    return remote_blob_util.EagerLogicalBlob(
        lbi, blob_object=bn_in_op2blob_object["out"])
Example #10
def distribute_concat(xs, axis=0, name=None):
    assert oneflow.current_scope().device_parallel_desc_symbol.parallel_num == len(xs)
    if name is None:
        name = id_util.UniqueStr("DistributeConcat_")
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    getattr(op_conf.distribute_concat_conf,
            "in").extend([_SoleConsistentLbn(x) for x in xs])
    op_conf.distribute_concat_conf.axis = axis
    op_conf.distribute_concat_conf.out = "out"
    interpret_util.ConsistentForward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Example #11
def _GenModelLoadPathInputOpConfAndRetLbi():
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = "model_load_path_input"
    op_conf.input_conf.out = "out"

    blob_conf = op_conf_util.InterfaceBlobConf()
    blob_conf.shape.dim.append(65536)
    blob_conf.data_type = dtype_util.int8.oneflow_proto_dtype
    blob_conf.batch_axis.value = 0
    blob_conf.is_dynamic = True
    op_conf.input_conf.blob_conf.CopyFrom(blob_conf)

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = op_conf.input_conf.out
    return op_conf, lbi
Example #12
def acc(one, max_acc_num, name=None):
    assert not oneflow.eager_execution_enabled()
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("Acc_"),
    )
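    # Accumulate `one` into `acc` over max_acc_num consecutive micro-batches.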
    op_conf.acc_conf.one = one.unique_name
    op_conf.acc_conf.acc = "acc"
    op_conf.acc_conf.max_acc_num = max_acc_num
    compile_context.CurJobAddOp(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "acc"
    return remote_blob_util.RemoteBlob(lbi)
Example #13
def CreateEagerVariableBlob(op_attribute, job_name=None):
    bn_in_op2blob_object = {}

    def BuildInstruction(builder):
        parallel_conf = (
            oneflow.current_scope().device_parallel_desc_symbol.parallel_conf)
        builder.StatelessCall(op_attribute,
                              parallel_conf,
                              bn_in_op2blob_object=bn_in_op2blob_object)

    vm_util.LogicalRun(BuildInstruction)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_attribute.op_conf.name
    lbi.blob_name = op_attribute.op_conf.variable_conf.out
    return remote_blob_util.EagerConsistentBlob(
        lbi, blob_object=bn_in_op2blob_object["out"], job_name=job_name)
Example #14
def tensor_list_to_tensor_buffer(
        input: remote_blob_util.BlobDef,
        name: Optional[str] = None) -> remote_blob_util.BlobDef:
    if name is None:
        name = id_util.UniqueStr("TensorListToBuffer_")

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_list_to_tensor_buffer_conf, "in", input.unique_name)
    setattr(op_conf.tensor_list_to_tensor_buffer_conf, "out", "out")
    interpret_util.Forward(op_conf)

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Example #15
def square_sum(
    x: input_blob_util.ArgBlobDef, name: Optional[str] = None
) -> remote_blob_util.BlobDef:
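    # y holds the sum of x ** 2 over all elements of x.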
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("SquareSum_")
    else:
        op_conf.name = name

    op_conf.square_sum_conf.x = x.unique_name
    op_conf.square_sum_conf.y = "y"

    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "y"
    return remote_blob_util.RemoteBlob(lbi)
Example #16
def decode_ofrecord(
    ofrecord_dir: str,
    blobs: Sequence[BlobConf],
    batch_size: int = 1,
    data_part_num: int = 1,
    part_name_prefix: str = "part-",
    part_name_suffix_length: int = -1,
    shuffle: bool = False,
    buffer_size: int = 1024,
    name: Optional[str] = None,
) -> Tuple[remote_blob_util.BlobDef, ...]:
    print(
        "WARNING:",
        "oneflow.data.decode_ofrecord is deprecated, and NOT work in eager mode, please use: \n",
        "    1)   ofrecord = oneflow.data.ofrecord_reader(...) to read ofrecord; \n",
        "    2)   image = oneflow.data.ofrecord_image_decoder(...) to decode image; \n",
        "    3)   raw = oneflow.data.ofrecord_raw_decoder(...) to decode raw data like label; \n",
        traceback.format_stack()[-2],
    )
    assert not flow.eager_execution_enabled()

    if name is None:
        name = id_util.UniqueStr("Decode_")

    lbis = []

    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name

    op_conf.decode_ofrecord_conf.data_dir = ofrecord_dir
    op_conf.decode_ofrecord_conf.data_part_num = data_part_num
    op_conf.decode_ofrecord_conf.batch_size = batch_size
    op_conf.decode_ofrecord_conf.part_name_prefix = part_name_prefix
    op_conf.decode_ofrecord_conf.part_name_suffix_length = part_name_suffix_length
    if shuffle:
        op_conf.decode_ofrecord_conf.random_shuffle_conf.buffer_size = buffer_size
    for blob_conf in blobs:
        op_conf.decode_ofrecord_conf.blob.extend([blob_conf.to_proto()])
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = name
        lbi.blob_name = blob_conf.name
        lbis.append(lbi)

    interpret_util.ConsistentForward(op_conf)
    return tuple(remote_blob_util.RemoteBlob(lbi) for lbi in lbis)
Example #17
def dynamic_binary_concat(
    input_blob_list: Sequence[remote_blob_util.BlobDef],
    source_blob: input_blob_util.ArgBlobDef,
    source_sbp: str = "S:0",
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    if name is None:
        op_conf.name = id_util.UniqueStr("DynamicBinaryConcat_")
    else:
        op_conf.name = name

    in_lbns = []
    for in_blob in input_blob_list:
        in_lbns.append(in_blob.unique_name)

    getattr(op_conf.dynamic_binary_concat_conf, "in").extend(in_lbns)
    # op_conf.dynamic_binary_concat_conf.in[:] = in_lbns
    op_conf.dynamic_binary_concat_conf.out = "out"
    op_conf.dynamic_binary_concat_conf.out_data_type = (
        source_blob.dtype.oneflow_proto_dtype)
    op_conf.dynamic_binary_concat_conf.out_shape.dim.extend(
        list(source_blob.shape))
    if source_blob.batch_axis is not None:
        op_conf.dynamic_binary_concat_conf.out_batch_axis.value = source_blob.batch_axis
    else:
        op_conf.dynamic_binary_concat_conf.out_batch_axis.SetInParent()
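    # source_sbp names the output SBP: "S:<axis>" = split, "B" = broadcast,
    # "P" = partial-sum.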
    if "S" in source_sbp:
        axis = int(source_sbp.split(":")[-1])
        op_conf.dynamic_binary_concat_conf.out_sbp.split_parallel.axis = axis
    elif "B" in source_sbp:
        op_conf.dynamic_binary_concat_conf.out_sbp.broadcast_parallel.SetInParent(
        )
    elif "P" in source_sbp:
        op_conf.dynamic_binary_concat_conf.out_sbp.partial_sum_parallel.SetInParent(
        )
    else:
        print("Error! invalid sbp str:", source_sbp)
        op_conf.dynamic_binary_concat_conf.out_sbp.SetInParent()

    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    out_lbi.op_name = op_conf.name
    out_lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(out_lbi)
Example #18
def reshape(
    x: remote_blob_util.BlobDef, shape: Sequence[int], name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    r"""Reshapes a blob.

    Args:
        x: A `Blob`.
        shape: Shape of the output blob.
        name: A name for the operation (optional).
    Returns:
        A `Blob`, has the same type as `x`.
    """
    x = flow.cast_to_current_logical_view(x)
    assert isinstance(shape, (list, tuple))
    shape = list(shape)
    assert all(dim == -1 or dim > 0 for dim in shape)
    assert shape.count(-1) <= 1
    if not x.is_dynamic:
        if name is None:
            name = id_util.UniqueStr("Reshape_")
        return (
            flow.user_op_builder(name)
            .Op("reshape")
            .Input("in", [x])
            .Output("out")
            .Attr("shape", infer_shape(x, shape))
            .Build()
            .InferAndTryRun()
            .RemoteBlobList()[0]
        )
    else:
        op_conf = op_conf_util.OperatorConf()
        setattr(
            op_conf,
            "name",
            name if name is not None else id_util.UniqueStr("DynamicReshape_"),
        )
        setattr(op_conf.dynamic_reshape_conf, "in", x.unique_name)
        op_conf.dynamic_reshape_conf.shape.dim.extend(list(shape))
        setattr(op_conf.dynamic_reshape_conf, "out", "out")
        interpret_util.Forward(op_conf)
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = op_conf.name
        lbi.blob_name = "out"
        return remote_blob_util.RemoteBlob(lbi)
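A minimal usage sketch (hypothetical blob `x` of shape (4, 6) inside a lazy-mode job; at most one dimension may be -1 and is inferred from the element count):

y = reshape(x, shape=(2, -1))  # -1 is inferred as 12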
Example #19
    def build(builder):
        blob_object = GetEagerInterfaceBlob(op_name).blob_object
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = op_name
        op_attribute = sess.OpAttribute4InterfaceOpName(op_name)
        assert len(op_attribute.output_bns) == 1
        lbi.blob_name = op_attribute.output_bns[0]
        if blob_object.op_arg_parallel_attr.is_mirrored():
            remote_blob = remote_blob_util.EagerMirroredBlob(
                lbi, blob_object, job_name)
        else:
            remote_blob = remote_blob_util.EagerConsistentBlob(
                lbi, blob_object, job_name)
        if blob_object.op_arg_blob_attr.is_tensor_list:
            value = remote_blob.numpy_list()
        else:
            value = remote_blob.numpy()
        Yield(value)
Example #20
    def RemoteBlobList(self):
        remote_blob_list = []
        for k in self.op_conf_.user_conf.output:
            if k not in self.output_arg_key_list_:
                raise ValueError(
                    "output_arg_name {} of {} op is not set in python op builder"
                    .format(k, self.op_conf_.name))

        for output_arg_name in self.output_arg_key_list_:
            assert output_arg_name in self.op_conf_.user_conf.output
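            # Each repeated output "name" yields logical blob names "name_0", "name_1", ...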
            for i in range(
                    len(self.op_conf_.user_conf.output[output_arg_name].s)):
                lbi = logical_blob_id_util.LogicalBlobId()
                lbi.op_name = self.op_conf_.name
                lbi.blob_name = "{}_{}".format(output_arg_name, i)
                remote_blob_list.append(self.MakeRemoteBlob(lbi))

        return tuple(remote_blob_list)
Example #21
def distribute_clone(x, name=None):
    if name is None:
        name = id_util.UniqueStr("DistributeClone_")
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    setattr(op_conf.distribute_clone_conf, "in", x.unique_name)
    parallel_size = oneflow.placement.current_scope().parallel_size
    op_conf.distribute_clone_conf.out.extend(
        ["out_%d" % i for i in range(parallel_size)])
    interpret_util.ConsistentForward(op_conf)
    ret = []
    for i in range(parallel_size):
        out = "out_%d" % i
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = op_conf.name
        lbi.blob_name = out
        ret.append(remote_blob_util.RemoteBlob(lbi))
    return tuple(ret)
Example #22
def dynamic_reshape(
    x: remote_blob_util.BlobDef, shape: Sequence[int], name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    assert isinstance(shape, (list, tuple))
    shape = list(shape)
    op_conf = op_conf_util.OperatorConf()
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("DynamicReshape_"),
    )
    setattr(op_conf.dynamic_reshape_conf, "in", x.unique_name)
    op_conf.dynamic_reshape_conf.shape.dim.extend(list(shape))
    setattr(op_conf.dynamic_reshape_conf, "out", "out")
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
Example #23
def elem_cnt(
    inputs: remote_blob_util.BlobDef,
    dtype: Optional[dtype_util.dtype] = None,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name",
            name if name is not None else id_util.UniqueStr("ElemCnt_"))
    op_conf.shape_elem_cnt_conf.x = inputs.unique_name

    op_conf.shape_elem_cnt_conf.exclude_axis_conf.SetInParent()
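    # With an empty exclude_axis, no axis is excluded: y counts all elements.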
    if dtype is not None:
        op_conf.shape_elem_cnt_conf.data_type = dtype.oneflow_proto_dtype
    op_conf.shape_elem_cnt_conf.y = "y"
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(out_lbi, "op_name", op_conf.name)
    setattr(out_lbi, "blob_name", "y")
    return remote_blob_util.RemoteBlob(out_lbi)
Example #24
def _GetReturnOpConfAndOutLbiAndScope(remote_blob, allow_cpu_return_op=True):
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = id_util.UniqueStr("Return_")
    setattr(op_conf.return_conf, "in", remote_blob.unique_name)
    op_conf.return_conf.out = "out"
    if allow_cpu_return_op:
        op_conf.device_tag = "cpu"
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    parallel_conf = placement_cfg.ParallelConf()
    parallel_conf.CopyFrom(remote_blob.parallel_conf)

    def BuildScope(old_scope, builder):
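        # Reuse the blob's parallel conf so the return op shares its placement.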
        return builder.BuildScopeWithNewParallelConf(old_scope, parallel_conf)

    sess = session_ctx.GetDefaultSession()
    scope = scope_util.MakeScope(BuildScope)
    return (op_conf, lbi, scope)
Example #25
def decode_random(
    shape: Sequence[int],
    dtype: flow.dtype,
    batch_size: int = 1,
    initializer: Optional[initializer_conf_util.InitializerConf] = None,
    tick: Optional[oneflow_api.BlobDesc] = None,
    name: Optional[str] = None,
) -> oneflow_api.BlobDesc:
    op_conf = op_conf_util.OperatorConf()

    if name is None:
        name = id_util.UniqueStr("DecodeRandom_")
    assert isinstance(name, str)
    op_conf.name = name

    assert isinstance(shape, (list, tuple))
    op_conf.decode_random_conf.shape.dim.extend(shape)

    assert dtype is not None
    setattr(
        op_conf.decode_random_conf,
        "data_type",
        oneflow_api.deprecated.GetProtoDtype4OfDtype(dtype),
    )

    op_conf.decode_random_conf.batch_size = batch_size

    if initializer is not None:
        op_conf.decode_random_conf.data_initializer.CopyFrom(initializer)
    else:
        op_conf.decode_random_conf.data_initializer.CopyFrom(
            flow.random_uniform_initializer())

    if tick:
        op_conf.decode_random_conf.tick = tick.unique_name
    op_conf.decode_random_conf.out = "out"

    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"

    interpret_util.ConsistentForward(op_conf)
    return remote_blob_util.RemoteBlob(lbi)
Example #26
def distribute_split(x, axis=0, name=None):
    if name is None:
        name = id_util.UniqueStr("DistributeSplit_")
    op_conf = op_conf_util.OperatorConf()
    op_conf.name = name
    setattr(op_conf.distribute_split_conf, "in", x.unique_name)
    op_conf.distribute_split_conf.axis = axis
    parallel_size = oneflow.current_scope().device_parallel_desc_symbol.parallel_num
    op_conf.distribute_split_conf.out.extend(
        ["out_%d" % i for i in range(parallel_size)])
    interpret_util.ConsistentForward(op_conf)
    ret = []
    for i in range(parallel_size):
        out = "out_%d" % i
        lbi = logical_blob_id_util.LogicalBlobId()
        lbi.op_name = op_conf.name
        lbi.blob_name = out
        ret.append(remote_blob_util.RemoteBlob(lbi))
    return tuple(ret)
Example #27
    def RemoteBlobList(self):
        remote_blob_list = []
        for k in self.op_conf_.user_conf.output:
            if k not in self.output_arg_key_list_:
                raise ValueError(
                    "output_arg_name {} of {} op is not set in python op builder"
                    .format(k, self.op_conf_.name))
        for output_arg_name in self.output_arg_key_list_:
            assert output_arg_name in self.op_conf_.user_conf.output
            for i in range(
                    len(self.op_conf_.user_conf.output[output_arg_name].s)):
                lbi = logical_blob_id_util.LogicalBlobId()
                lbi.op_name = self.op_conf_.name
                lbi.blob_name = "{}_{}".format(output_arg_name, i)
                remote_blob_obj = self.MakeRemoteBlob(lbi)
                remote_blob_list.append(remote_blob_obj)
                if flow.eager_execution_enabled():
                    gradient_util.GetDefaultBackwardBlobRegister(
                    ).TrySetObject4BlobName(remote_blob_obj.logical_blob_name,
                                            remote_blob_obj.blob_object)
        return tuple(remote_blob_list)
Example #28
def stack(
    inputs: Sequence[remote_blob_util.BlobDef], axis: int, name: Optional[str] = None
) -> remote_blob_util.BlobDef:
    if not isinstance(inputs, (list, tuple)):
        inputs = [inputs]

    if axis < 0:
        axis = axis + len(inputs[0].shape)

    assert axis == 0, "Only support dim0 stack now."

    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name or id_util.UniqueStr("Stack_"))
    getattr(op_conf.stack_conf, "in").extend([input.unique_name for input in inputs])
    setattr(op_conf.stack_conf, "axis", axis)
    setattr(op_conf.stack_conf, "out", "out")
    interpret_util.Forward(op_conf)
    lbi = logical_blob_id_util.LogicalBlobId()
    lbi.op_name = op_conf.name
    lbi.blob_name = "out"
    return remote_blob_util.RemoteBlob(lbi)
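A quick call sketch (hypothetical blobs `a` and `b` with identical shapes; only dim0 stacking is supported by this op):

s = stack([a, b], axis=0)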
Example #29
def sync_dynamic_resize(
    inputs: remote_blob_util.BlobDef,
    size: remote_blob_util.BlobDef,
    name: Optional[str] = None,
) -> remote_blob_util.BlobDef:
    op_conf = op_conf_util.OperatorConf()
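    # Resize dim0 of `inputs` to the value carried by the `size` blob.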
    setattr(
        op_conf,
        "name",
        name if name is not None else id_util.UniqueStr("SyncDynamicResize_"),
    )
    setattr(op_conf.sync_dynamic_resize_conf, "in", inputs.unique_name)
    setattr(op_conf.sync_dynamic_resize_conf, "size", size.unique_name)
    setattr(op_conf.sync_dynamic_resize_conf, "axis", 0)
    setattr(op_conf.sync_dynamic_resize_conf, "out", "out")
    setattr(op_conf.sync_dynamic_resize_conf, "eager", flow.eager_execution_enabled())
    interpret_util.Forward(op_conf)
    out_lbi = logical_blob_id_util.LogicalBlobId()
    setattr(out_lbi, "op_name", op_conf.name)
    setattr(out_lbi, "blob_name", "out")
    return remote_blob_util.RemoteBlob(out_lbi)
Example #30
def tensor_list_split(
        input_tensor_list: remote_blob_util.BlobDef,
        name: Optional[str] = None) -> Tuple[remote_blob_util.BlobDef, ...]:
    if name is None:
        name = id_util.UniqueStr("TensorListSplit_")

    output_size = input_tensor_list.shape[0]
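    # output_size: number of tensors in the input TensorList (its dim0).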
    op_conf = op_conf_util.OperatorConf()
    setattr(op_conf, "name", name)
    setattr(op_conf.tensor_list_split_conf, "in",
            input_tensor_list.unique_name)
    op_conf.tensor_list_split_conf.out.extend(
        ["out_{}".format(i) for i in range(output_size)])
    interpret_util.Forward(op_conf)
    ret = []
    for i in range(output_size):
        out_lbi = logical_blob_id_util.LogicalBlobId()
        setattr(out_lbi, "op_name", op_conf.name)
        setattr(out_lbi, "blob_name", "out_{}".format(i))
        ret.append(remote_blob_util.RemoteBlob(out_lbi))
    return tuple(ret)